Use separate module loggers instead of a single 'kafka' logger
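Illustration only, not part of the change set: once each module logs under its own name, an application can use the standard library logging hierarchy to tune verbosity per sub-package instead of toggling a single flat 'kafka' logger. A minimal sketch of such application-side configuration (the chosen levels are arbitrary examples):

import logging

# Default handling applies to everything under the "kafka" namespace.
logging.basicConfig(level=logging.INFO)

# Per-module tuning is now possible because records are emitted under
# hierarchical names such as kafka.conn, kafka.consumer and kafka.producer.
logging.getLogger('kafka.conn').setLevel(logging.WARNING)
logging.getLogger('kafka.consumer').setLevel(logging.DEBUG)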
@@ -16,7 +16,8 @@ from kafka.conn import collect_hosts, KafkaConnection, DEFAULT_SOCKET_TIMEOUT_SE
 from kafka.protocol import KafkaProtocol
 from kafka.util import kafka_bytestring
 
-log = logging.getLogger("kafka")
+
+log = logging.getLogger(__name__)
 
 
 class KafkaClient(object):

@@ -9,7 +9,8 @@ import six
 
 from kafka.common import ConnectionError
 
-log = logging.getLogger("kafka")
+
+log = logging.getLogger(__name__)
 
 DEFAULT_SOCKET_TIMEOUT_SECONDS = 120
 DEFAULT_KAFKA_PORT = 9092

@@ -13,7 +13,8 @@ from kafka.common import (
 
 from kafka.util import kafka_bytestring, ReentrantTimer
 
-log = logging.getLogger("kafka")
+
+log = logging.getLogger('kafka.consumer')
 
 AUTO_COMMIT_MSG_COUNT = 100
 AUTO_COMMIT_INTERVAL = 5000

@@ -18,9 +18,11 @@ from .base import (
 )
 from .simple import Consumer, SimpleConsumer
 
+
+log = logging.getLogger(__name__)
+
 Events = namedtuple("Events", ["start", "pause", "exit"])
 
-log = logging.getLogger("kafka")
 
 def _mp_consume(client, group, topic, queue, size, events, **consumer_options):
     """

@@ -34,7 +34,9 @@ from .base import (
     NO_MESSAGES_WAIT_TIME_SECONDS
 )
 
-log = logging.getLogger("kafka")
+
+log = logging.getLogger(__name__)
+
 
 class FetchContext(object):
     """

@@ -25,7 +25,7 @@ from kafka.common import (
 from kafka.protocol import CODEC_NONE, ALL_CODECS, create_message_set
 from kafka.util import kafka_bytestring
 
-log = logging.getLogger("kafka")
+log = logging.getLogger('kafka.producer')
 
 BATCH_SEND_DEFAULT_INTERVAL = 20
 BATCH_SEND_MSG_COUNT = 20

@@ -11,7 +11,7 @@ from .base import (
     ASYNC_RETRY_LIMIT, ASYNC_RETRY_BACKOFF_MS, ASYNC_RETRY_ON_TIMEOUTS
 )
 
-log = logging.getLogger("kafka")
+log = logging.getLogger(__name__)
 
 
 class KeyedProducer(Producer):

@@ -14,7 +14,7 @@ from .base import (
     ASYNC_RETRY_LIMIT, ASYNC_RETRY_BACKOFF_MS, ASYNC_RETRY_ON_TIMEOUTS
 )
 
-log = logging.getLogger("kafka")
+log = logging.getLogger(__name__)
 
 
 class SimpleProducer(Producer):

@@ -21,7 +21,8 @@ from kafka.util import (
     write_short_string, write_int_string, group_by_topic_and_partition
 )
 
-log = logging.getLogger("kafka")
+
+log = logging.getLogger(__name__)
 
 ATTRIBUTE_CODEC_MASK = 0x03
 CODEC_NONE = 0x00