Updated documentation for Consumers to prefer KafkaConsumer instead of SimpleConsumer

This commit is contained in:
John Anderson
2015-02-09 14:27:42 -08:00
parent f206a4bbf0
commit aa19d71f98
2 changed files with 10 additions and 5 deletions

View File

@@ -6,7 +6,7 @@ High level
.. code:: python
from kafka import KafkaClient, SimpleProducer, SimpleConsumer
from kafka import SimpleProducer, KafkaClient, KafkaConsumer
# To send messages synchronously
kafka = KafkaClient("localhost:9092")
@@ -52,7 +52,8 @@ High level
batch_send_every_t=60)
# To consume messages
consumer = SimpleConsumer(kafka, "my-group", "my-topic")
consumer = KafkaConsumer("my-topic", group_id="my_group",
metadata_broker_list=["localhost:9092"])
for message in consumer:
# message is raw byte string -- decode if necessary!
# e.g., for unicode: `message.decode('utf-8')`
@@ -66,7 +67,8 @@ Keyed messages
.. code:: python
from kafka import KafkaClient, KeyedProducer, HashedPartitioner, RoundRobinPartitioner
from kafka import (KafkaClient, KeyedProducer, HashedPartitioner,
RoundRobinPartitioner)
kafka = KafkaClient("localhost:9092")

View File

@@ -57,7 +57,8 @@ class KafkaConsumer(object):
.. code:: python
# A very basic 'tail' consumer, with no stored offset management
kafka = KafkaConsumer('topic1')
kafka = KafkaConsumer('topic1',
metadata_broker_list=['localhost:9092'])
for m in kafka:
print m
@@ -73,8 +74,10 @@ class KafkaConsumer(object):
.. code:: python
# more advanced consumer -- multiple topics w/ auto commit offset management
# more advanced consumer -- multiple topics w/ auto commit offset
# management
kafka = KafkaConsumer('topic1', 'topic2',
metadata_broker_list=['localhost:9092'],
group_id='my_consumer_group',
auto_commit_enable=True,
auto_commit_interval_ms=30 * 1000,