39
39
``` python
40
40
from confluent_kafka import Producer
41
41
42
+
42
43
p = Producer({'bootstrap.servers': 'mybroker,mybroker2'})
44
+
43
45
for data in some_data_source:
44
46
p.produce('mytopic', data.encode('utf-8'))
47
+
45
48
p.flush()
46
49
```
47
50
@@ -51,17 +54,29 @@ p.flush()
51
54
``` python
52
55
from confluent_kafka import Consumer, KafkaError
53
56
54
- c = Consumer({' bootstrap.servers' : ' mybroker' , ' group.id' : ' mygroup' ,
55
- ' default.topic.config' : {' auto.offset.reset' : ' smallest' }})
57
+
58
+ c = Consumer({
59
+ ' bootstrap.servers' : ' mybroker' ,
60
+ ' group.id' : ' mygroup' ,
61
+ ' default.topic.config' : {
62
+ ' auto.offset.reset' : ' smallest'
63
+ }
64
+ })
65
+
56
66
c.subscribe([' mytopic' ])
57
- running = True
58
- while running :
67
+
68
+ while True:
59
69
msg = c.poll()
60
- if not msg.error():
61
- print (' Received message: %s ' % msg.value().decode(' utf-8' ))
62
- elif msg.error().code() != KafkaError._PARTITION_EOF :
63
- print (msg.error())
64
- running = False
70
+
71
+ if msg.error():
72
+ if msg.error().code() == KafkaError._PARTITION_EOF :
73
+ continue
74
+ else :
75
+ print (msg.error())
76
+ break
77
+
78
+ print('Received message: {}'.format(msg.value().decode('utf-8')))
79
+
65
80
c.close()
66
81
```
67
82
@@ -71,12 +86,17 @@ c.close()
71
86
from confluent_kafka import avro
72
87
from confluent_kafka.avro import AvroProducer
73
88
89
+
74
90
value_schema = avro.load(' ValueSchema.avsc' )
75
91
key_schema = avro.load(' KeySchema.avsc' )
76
92
value = {" name" : " Value" }
77
93
key = {" name" : " Key" }
78
94
79
- avroProducer = AvroProducer({' bootstrap.servers' : ' mybroker,mybroker2' , ' schema.registry.url' : ' http://schem_registry_host:port' }, default_key_schema = key_schema, default_value_schema = value_schema)
95
+ avroProducer = AvroProducer({
96
+ ' bootstrap.servers' : ' mybroker,mybroker2' ,
97
+ 'schema.registry.url': 'http://schema_registry_host:port'
98
+ }, default_key_schema = key_schema, default_value_schema = value_schema)
99
+
80
100
avroProducer.produce(topic = ' my_topic' , value = value, key = key)
81
101
avroProducer.flush()
82
102
```
@@ -88,21 +108,33 @@ from confluent_kafka import KafkaError
88
108
from confluent_kafka.avro import AvroConsumer
89
109
from confluent_kafka.avro.serializer import SerializerError
90
110
91
- c = AvroConsumer({' bootstrap.servers' : ' mybroker,mybroker2' , ' group.id' : ' groupid' , ' schema.registry.url' : ' http://127.0.0.1:8081' })
111
+
112
+ c = AvroConsumer({
113
+ ' bootstrap.servers' : ' mybroker,mybroker2' ,
114
+ ' group.id' : ' groupid' ,
115
+ ' schema.registry.url' : ' http://127.0.0.1:8081' })
116
+
92
117
c.subscribe([' my_topic' ])
93
- running = True
94
- while running :
118
+
119
+ while True :
95
120
try :
96
121
msg = c.poll(10 )
97
- if msg:
98
- if not msg.error():
99
- print (msg.value())
100
- elif msg.error().code() != KafkaError._PARTITION_EOF :
101
- print (msg.error())
102
- running = False
122
+
103
123
except SerializerError as e:
104
- print (" Message deserialization failed for %s : %s " % (msg, e))
105
- running = False
124
+ print("Message deserialization failed for {}: {}".format(msg, e))
125
+ break
126
+
127
+ if msg is None :
128
+ continue
129
+
130
+ if msg.error():
131
+ if msg.error().code() == KafkaError._PARTITION_EOF :
132
+ continue
133
+ else :
134
+ print (msg.error())
135
+ break
136
+
137
+ print (msg.value())
106
138
107
139
c.close()
108
140
```
0 commit comments