mirror of
https://github.com/didi/KnowStreaming.git
synced 2026-01-08 15:52:15 +08:00
Add km module kafka gateway
This commit is contained in:
86
config/log4j.properties
Normal file
86
config/log4j.properties
Normal file
@@ -0,0 +1,86 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

log4j.rootLogger=INFO, stdout

log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n

log4j.appender.kafkaAppender=org.apache.log4j.RollingFileAppender
log4j.appender.kafkaAppender.File=${kafka.logs.dir}/server.log
log4j.appender.kafkaAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.kafkaAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
log4j.appender.kafkaAppender.MaxFileSize=100MB
log4j.appender.kafkaAppender.MaxBackupIndex=10

log4j.appender.metricsAppender=org.apache.log4j.RollingFileAppender
log4j.appender.metricsAppender.File=${kafka.logs.dir}/metrics.log
log4j.appender.metricsAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.metricsAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
log4j.appender.metricsAppender.MaxFileSize=100MB
log4j.appender.metricsAppender.MaxBackupIndex=10

log4j.appender.requestAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.requestAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.requestAppender.File=${kafka.logs.dir}/kafka-request.log
log4j.appender.requestAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.requestAppender.layout.ConversionPattern=[%d] %p %m (%c)%n

log4j.appender.userErrorAppender=org.apache.log4j.RollingFileAppender
log4j.appender.userErrorAppender.File=${kafka.logs.dir}/kafka-userError.log
log4j.appender.userErrorAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.userErrorAppender.layout.ConversionPattern=[%d][%p][%t](%F:%L): %m%n
log4j.appender.userErrorAppender.MaxFileSize=100MB
log4j.appender.userErrorAppender.MaxBackupIndex=10

log4j.appender.requestStatusAppender=org.apache.log4j.RollingFileAppender
log4j.appender.requestStatusAppender.File=${kafka.logs.dir}/kafka-requestStatus.log
log4j.appender.requestStatusAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.requestStatusAppender.layout.ConversionPattern=[%p][%d{yyyy-MM-dd'T'HH\:mm\:ss.SSS}][%F:%L] %m%n
log4j.appender.requestStatusAppender.MaxFileSize=100MB
log4j.appender.requestStatusAppender.MaxBackupIndex=10

# Turn on all our debugging info
#log4j.logger.kafka.producer.async.DefaultEventHandler=DEBUG, kafkaAppender
#log4j.logger.kafka.client.ClientUtils=DEBUG, kafkaAppender
#log4j.logger.kafka.perf=DEBUG, kafkaAppender
#log4j.logger.kafka.perf.ProducerPerformance$ProducerThread=DEBUG, kafkaAppender
#log4j.logger.org.I0Itec.zkclient.ZkClient=DEBUG
log4j.logger.kafka=INFO, kafkaAppender

log4j.logger.kafka.network.RequestChannel$=INFO, requestAppender
log4j.additivity.kafka.network.RequestChannel$=false

log4j.logger.kafka.network.SocketServer$=INFO, requestAppender
log4j.additivity.kafka.network.SocketServer$=false

log4j.logger.kafka.server.Discoverer=INFO, kafkaAppender
log4j.additivity.kafka.server.Discoverer=false

log4j.logger.userError=ERROR, userErrorAppender
log4j.additivity.userError=false

log4j.logger.requestStatus=INFO, requestStatusAppender
log4j.additivity.requestStatus=false

log4j.logger.com.didichuxing.datachannel.kafka.jmx.MetricsLogReporter=INFO, metricsAppender
log4j.additivity.com.didichuxing.datachannel.kafka.jmx.MetricsLogReporter=false

#log4j.logger.kafka.network.Processor=TRACE, requestAppender
#log4j.logger.kafka.server.KafkaApis=TRACE, requestAppender
#log4j.additivity.kafka.server.KafkaApis=false
log4j.logger.kafka.request.logger=WARN, requestAppender
log4j.additivity.kafka.request.logger=false
Reference in New Issue
Block a user