@@ -20,11 +20,12 @@
""" Test script for confluent_kafka module """

import confluent_kafka
-import re
+import os
import time
import uuid
import sys
import json
+from copy import copy

try:
    from progress.bar import Bar
@@ -35,6 +36,9 @@
# Kafka bootstrap server(s)
bootstrap_servers = None

+# Confluent schema-registry
+schema_registry = None
+
# Topic to use
topic = 'test'

@@ -50,7 +54,7 @@
def error_cb(err):
    print('Error: %s' % err)

-
+
class MyTestDr(object):
    """ Producer: Delivery report callback """

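Note: the `from copy import copy` added above pairs with the per-topic `avro.AvroConsumer(copy(cons_conf))` call in the `verify_avro()` hunk below. A minimal sketch of why the copy matters, assuming (as in this library version) that the constructor consumes entries such as 'schema.registry.url' from the dict it is handed; addresses are placeholders:

    from copy import copy
    from confluent_kafka import avro

    cons_conf = {'bootstrap.servers': 'localhost:9092',           # placeholder
                 'schema.registry.url': 'http://localhost:8081',  # placeholder
                 'group.id': 'copy-demo'}

    # Hand each consumer its own dict: if the constructor pops
    # 'schema.registry.url' out of its argument, cons_conf stays
    # intact and can be reused for the next consumer.
    c1 = avro.AvroConsumer(copy(cons_conf))
    c2 = avro.AvroConsumer(copy(cons_conf))
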
@@ -124,6 +128,83 @@ def verify_producer():
    p.flush()


+def verify_avro():
+    from confluent_kafka import avro
+    avsc_dir = os.path.join(os.path.dirname(__file__), os.pardir, 'tests', 'avro')
+
+    # Producer config
+    conf = {'bootstrap.servers': bootstrap_servers,
+            'schema.registry.url': schema_registry,
+            'error_cb': error_cb,
+            'api.version.request': api_version_request,
+            'default.topic.config': {'produce.offset.report': True}}
+
+    # Create producer
+    p = avro.AvroProducer(conf)
+
+    prim_float = avro.load(os.path.join(avsc_dir, "primitive_float.avsc"))
+    prim_string = avro.load(os.path.join(avsc_dir, "primitive_string.avsc"))
+    basic = avro.load(os.path.join(avsc_dir, "basic_schema.avsc"))
+    str_value = 'abc'
+    float_value = 32.
+
+    combinations = [
+        dict(key=float_value, key_schema=prim_float),
+        dict(value=float_value, value_schema=prim_float),
+        dict(key={'name': 'abc'}, key_schema=basic),
+        dict(value={'name': 'abc'}, value_schema=basic),
+        dict(value={'name': 'abc'}, value_schema=basic, key=float_value, key_schema=prim_float),
+        dict(value={'name': 'abc'}, value_schema=basic, key=str_value, key_schema=prim_string),
+        dict(value=float_value, value_schema=prim_float, key={'name': 'abc'}, key_schema=basic),
+        dict(value=float_value, value_schema=prim_float, key=str_value, key_schema=prim_string),
+        dict(value=str_value, value_schema=prim_string, key={'name': 'abc'}, key_schema=basic),
+        dict(value=str_value, value_schema=prim_string, key=float_value, key_schema=prim_float),
+    ]
+
+    # Consumer config
+    cons_conf = {'bootstrap.servers': bootstrap_servers,
+                 'schema.registry.url': schema_registry,
+                 'group.id': 'test.py',
+                 'session.timeout.ms': 6000,
+                 'enable.auto.commit': False,
+                 'api.version.request': api_version_request,
+                 'on_commit': print_commit_result,
+                 'error_cb': error_cb,
+                 'default.topic.config': {
+                     'auto.offset.reset': 'earliest'
+                 }}
+
+    for i, combo in enumerate(combinations):
+        combo['topic'] = str(uuid.uuid4())
+        p.produce(**combo)
+        p.poll(0)
+        p.flush()
+
+        # Create consumer
+        c = avro.AvroConsumer(copy(cons_conf))
+        c.subscribe([combo['topic']])
+
+        while True:
+            msg = c.poll(0)
+            if msg is None:
+                continue
+
+            if msg.error():
+                if msg.error().code() == confluent_kafka.KafkaError._PARTITION_EOF:
+                    break
+                else:
+                    continue
+
+            tstype, timestamp = msg.timestamp()
+            print('%s[%d]@%d: key=%s, value=%s, tstype=%d, timestamp=%s' %
+                  (msg.topic(), msg.partition(), msg.offset(),
+                   msg.key(), msg.value(), tstype, timestamp))
+
+            c.commit(msg, async=False)
+
+        # Close consumer
+        c.close()
+

def verify_producer_performance(with_dr_cb=True):
    """ Time how long it takes to produce and deliver X messages """
@@ -285,8 +366,6 @@ def verify_consumer():
    c.close()


-
-
def verify_consumer_performance():
    """ Verify Consumer performance """

@@ -450,8 +529,10 @@ def stats_cb(stats_json_str):
        bootstrap_servers = sys.argv[1]
        if len(sys.argv) > 2:
            topic = sys.argv[2]
+        if len(sys.argv) > 3:
+            schema_registry = sys.argv[3]
    else:
-        print('Usage: %s <broker> [<topic>]' % sys.argv[0])
+        print('Usage: %s <broker> [<topic>] [<schema_registry>]' % sys.argv[0])
        sys.exit(1)

    print('Using confluent_kafka module version %s (0x%x)' % confluent_kafka.version())
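
With the widened argument handling above, a typical invocation now passes the schema-registry URL as the optional third argument (script name assumed, addresses are placeholders):

    python integration_test.py localhost:9092 test http://localhost:8081
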
@@ -475,6 +556,8 @@ def stats_cb(stats_json_str):
    print('=' * 30, 'Verifying stats_cb', '=' * 30)
    verify_stats_cb()

-    print('=' * 30, 'Done', '=' * 30)
-
+    if schema_registry:
+        print('=' * 30, 'Verifying AVRO', '=' * 30)
+        verify_avro()

+    print('=' * 30, 'Done', '=' * 30)
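
A note on the synchronous commit in the consume loop above: with async=False the call blocks until the broker acknowledges and, in this module version, returns the committed offsets; a minimal sketch (later releases renamed the flag to asynchronous, as async became a reserved word in Python 3.7):

    # Blocks until the commit completes; in this library version the
    # synchronous form returns the committed offsets (TopicPartition list).
    committed = c.commit(msg, async=False)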