Use separate module loggers instead of a single 'kafka' logger

Dana Powers
2015-06-05 23:42:07 -07:00
parent 6406747f37
commit 5d9b174cad
9 changed files with 17 additions and 9 deletions
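
Every touched module now calls logging.getLogger(__name__), or pins an explicit package name in the shared consumer and producer base modules, so the library's loggers form a hierarchy under the top-level "kafka" logger. The practical win is per-module verbosity control, which the old flat "kafka" logger could not offer. A minimal sketch of what this enables, assuming the hunks below belong to kafka/conn.py and the consumer package as their imports suggest (the handler setup here is illustrative, not part of this commit):

import logging

# Application-side setup: one root handler; %(name)s now shows which
# kafka module emitted each record instead of a flat "kafka".
logging.basicConfig(format='%(name)s %(levelname)s %(message)s',
                    level=logging.INFO)

# Quiet only the connection module...
logging.getLogger('kafka.conn').setLevel(logging.WARNING)

# ...while turning up the consumer package. Records still propagate
# to the root handler installed by basicConfig().
logging.getLogger('kafka.consumer').setLevel(logging.DEBUG)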


@@ -16,7 +16,8 @@ from kafka.conn import collect_hosts, KafkaConnection, DEFAULT_SOCKET_TIMEOUT_SECONDS
 from kafka.protocol import KafkaProtocol
 from kafka.util import kafka_bytestring

-log = logging.getLogger("kafka")
+
+log = logging.getLogger(__name__)


 class KafkaClient(object):


@@ -9,7 +9,8 @@ import six
 from kafka.common import ConnectionError


-log = logging.getLogger("kafka")
+log = logging.getLogger(__name__)
+

 DEFAULT_SOCKET_TIMEOUT_SECONDS = 120
 DEFAULT_KAFKA_PORT = 9092


@@ -13,7 +13,8 @@ from kafka.common import (
 from kafka.util import kafka_bytestring, ReentrantTimer


-log = logging.getLogger("kafka")
+log = logging.getLogger('kafka.consumer')
+

 AUTO_COMMIT_MSG_COUNT = 100
 AUTO_COMMIT_INTERVAL = 5000
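
Note that this hunk pins the name 'kafka.consumer' rather than using __name__, which (assuming this is the consumer package's base module) would have yielded 'kafka.consumer.base'; records from the shared base class therefore file under the package logger itself. A quick sketch of the stdlib inheritance rule this leans on, with a hypothetical child logger name:

import logging

# A logger with no explicit level takes its effective level from the
# nearest ancestor that has one set.
logging.getLogger('kafka.consumer').setLevel(logging.WARNING)

child = logging.getLogger('kafka.consumer.simple')  # hypothetical child
assert child.getEffectiveLevel() == logging.WARNING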


@@ -18,9 +18,11 @@ from .base import (
 )
 from .simple import Consumer, SimpleConsumer

+
+log = logging.getLogger(__name__)
+
 Events = namedtuple("Events", ["start", "pause", "exit"])

-log = logging.getLogger("kafka")

 def _mp_consume(client, group, topic, queue, size, events, **consumer_options):
     """


@@ -34,7 +34,9 @@ from .base import (
     NO_MESSAGES_WAIT_TIME_SECONDS
 )

-log = logging.getLogger("kafka")
+
+log = logging.getLogger(__name__)
+

 class FetchContext(object):
     """


@@ -25,7 +25,7 @@ from kafka.common import (
 from kafka.protocol import CODEC_NONE, ALL_CODECS, create_message_set
 from kafka.util import kafka_bytestring

-log = logging.getLogger("kafka")
+log = logging.getLogger('kafka.producer')

 BATCH_SEND_DEFAULT_INTERVAL = 20
 BATCH_SEND_MSG_COUNT = 20


@@ -11,7 +11,7 @@ from .base import (
     ASYNC_RETRY_LIMIT, ASYNC_RETRY_BACKOFF_MS, ASYNC_RETRY_ON_TIMEOUTS
 )

-log = logging.getLogger("kafka")
+log = logging.getLogger(__name__)


 class KeyedProducer(Producer):


@@ -14,7 +14,7 @@ from .base import (
     ASYNC_RETRY_LIMIT, ASYNC_RETRY_BACKOFF_MS, ASYNC_RETRY_ON_TIMEOUTS
 )

-log = logging.getLogger("kafka")
+log = logging.getLogger(__name__)


 class SimpleProducer(Producer):


@@ -21,7 +21,8 @@ from kafka.util import (
     write_short_string, write_int_string, group_by_topic_and_partition
 )

-log = logging.getLogger("kafka")
+log = logging.getLogger(__name__)
+

 ATTRIBUTE_CODEC_MASK = 0x03
 CODEC_NONE = 0x00
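
One side effect of logging under real module loggers: on Python 2, an application that never configures logging gets a one-time "No handlers could be found for logger ..." warning from whichever kafka module logs first. The usual companion pattern, not part of this commit, is a do-nothing handler on the package root:

import logging

# Library-side: swallow records when the application configures no
# handlers. Applications that do configure logging still receive
# everything, since records keep propagating to the root logger.
logging.getLogger('kafka').addHandler(logging.NullHandler())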