Skip to content

Commit a087411

Browse files
rnpridgeonRyan P
authored and committed
Update examples/tests to reflect enable.partition.eof default change from true to false
1 parent 5d13d5b commit a087411

File tree

5 files changed

+21
-48
lines changed

5 files changed

+21
-48
lines changed

README.md

Lines changed: 4 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -85,11 +85,8 @@ while True:
8585
if msg is None:
8686
continue
8787
if msg.error():
88-
if msg.error().code() == KafkaError._PARTITION_EOF:
89-
continue
90-
else:
91-
print(msg.error())
92-
break
88+
print("Consumer error: {}".format(msg.error()))
89+
continue
9390

9491
print('Received message: {}'.format(msg.value().decode('utf-8')))
9592

@@ -172,11 +169,8 @@ while True:
172169
continue
173170

174171
if msg.error():
175-
if msg.error().code() == KafkaError._PARTITION_EOF:
176-
continue
177-
else:
178-
print(msg.error())
179-
break
172+
print("AvroConsumer error: {}".format(msg.error()))
173+
continue
180174

181175
print(msg.value())
182176

confluent_kafka/kafkatest/verifiable_consumer.py

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -188,11 +188,7 @@ def do_commit(self, immediate=False, asynchronous=None):
188188
def msg_consume(self, msg):
189189
""" Handle consumed message (or error event) """
190190
if msg.error():
191-
if msg.error().code() == KafkaError._PARTITION_EOF:
192-
# ignore EOF
193-
pass
194-
else:
195-
self.err('Consume failed: %s' % msg.error(), term=False)
191+
self.err('Consume failed: %s' % msg.error(), term=False)
196192
return
197193

198194
if False:

examples/confluent_cloud.py

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -48,7 +48,7 @@
4848

4949
import uuid
5050

51-
from confluent_kafka import Producer, Consumer, KafkaError
51+
from confluent_kafka import Producer, Consumer
5252

5353

5454
p = Producer({
@@ -98,14 +98,14 @@ def acked(err, msg):
9898
if msg is None:
9999
# No message available within timeout.
100100
# Initial message consumption may take up to `session.timeout.ms` for
101-
# the group to rebalance and start consuming
101+
# the group to rebalance and start consuming.
102102
continue
103-
elif not msg.error():
104-
print('consumed: {0}'.format(msg.value()))
105-
elif msg.error().code() == KafkaError._PARTITION_EOF:
106-
print('end of partition: {0} [{1}] @ {2}'.format(msg.topic(), msg.partition(), msg.offset()))
107-
else:
108-
print('error: {0}'.format(msg.error().str()))
103+
if msg.error():
104+
# Most errors are typically temporary; log the error and continue.
105+
print("Consumer error: {}".format(msg.error()))
106+
continue
107+
108+
print('consumed: {0}'.format(msg.value()))
109109

110110
except KeyboardInterrupt:
111111
pass

examples/consumer.py

Lines changed: 2 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@
1818
#
1919
# Example high-level Kafka 0.9 balanced Consumer
2020
#
21-
from confluent_kafka import Consumer, KafkaException, KafkaError
21+
from confluent_kafka import Consumer, KafkaException
2222
import sys
2323
import getopt
2424
import json
@@ -95,14 +95,7 @@ def print_assignment(consumer, partitions):
9595
if msg is None:
9696
continue
9797
if msg.error():
98-
# Error or event
99-
if msg.error().code() == KafkaError._PARTITION_EOF:
100-
# End of partition event
101-
sys.stderr.write('%% %s [%d] reached end at offset %d\n' %
102-
(msg.topic(), msg.partition(), msg.offset()))
103-
else:
104-
# Error
105-
raise KafkaException(msg.error())
98+
raise KafkaException(msg.error())
10699
else:
107100
# Proper message
108101
sys.stderr.write('%% %s [%d] at offset %d with key %s:\n' %

tests/integration/integration_test.py

Lines changed: 6 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -368,7 +368,8 @@ def verify_consumer():
368368
'enable.auto.commit': False,
369369
'on_commit': print_commit_result,
370370
'error_cb': error_cb,
371-
'auto.offset.reset': 'earliest'}
371+
'auto.offset.reset': 'earliest',
372+
'enable.partition.eof': True}
372373

373374
# Create consumer
374375
c = confluent_kafka.Consumer(conf)
@@ -535,11 +536,7 @@ def my_on_revoke(consumer, partitions):
535536
(msgcnt, max_msgcnt))
536537

537538
if msg.error():
538-
if msg.error().code() == confluent_kafka.KafkaError._PARTITION_EOF:
539-
# Reached EOF for a partition, ignore.
540-
continue
541-
else:
542-
raise confluent_kafka.KafkaException(msg.error())
539+
raise confluent_kafka.KafkaException(msg.error())
543540

544541
bytecnt += len(msg)
545542
msgcnt += 1
@@ -714,11 +711,7 @@ def my_on_revoke(consumer, partitions):
714711

715712
for msg in msglist:
716713
if msg.error():
717-
if msg.error().code() == confluent_kafka.KafkaError._PARTITION_EOF:
718-
# Reached EOF for a partition, ignore.
719-
continue
720-
else:
721-
raise confluent_kafka.KafkaException(msg.error())
714+
raise confluent_kafka.KafkaException(msg.error())
722715

723716
bytecnt += len(msg)
724717
msgcnt += 1
@@ -1008,11 +1001,7 @@ def stats_cb(stats_json_str):
10081001
(msgcnt, max_msgcnt))
10091002

10101003
if msg.error():
1011-
if msg.error().code() == confluent_kafka.KafkaError._PARTITION_EOF:
1012-
# Reached EOF for a partition, ignore.
1013-
continue
1014-
else:
1015-
raise confluent_kafka.KafkaException(msg.error())
1004+
raise confluent_kafka.KafkaException(msg.error())
10161005

10171006
bytecnt += len(msg)
10181007
msgcnt += 1
@@ -1227,6 +1216,7 @@ def verify_avro_explicit_read_schema():
12271216
conf = copy(cons_conf)
12281217
if schema_registry_url:
12291218
conf['schema.registry.url'] = schema_registry_url
1219+
conf['enable.partition.eof'] = True
12301220
c = avro.AvroConsumer(
12311221
conf,
12321222
reader_key_schema=reader_key_schema,

0 commit comments

Comments
 (0)
pFad - Phonifier reborn

Pfad - The Proxy pFad of © 2024 Garber Painting. All rights reserved.

Note: This service is not intended for secure transactions such as banking, social media, email, or purchasing. Use at your own risk. We assume no liability whatsoever for broken pages.


Alternative Proxies:

Alternative Proxy

pFad Proxy

pFad v3 Proxy

pFad v4 Proxy