Skip to content

Commit 56798e3

Browse files
rnpridgeonRyan P
authored andcommitted
Add Avro Serialization example
1 parent 47fabbf commit 56798e3

File tree

1 file changed

+180
-0
lines changed

1 file changed

+180
-0
lines changed
Lines changed: 180 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,180 @@
1+
#!/usr/bin/env python
2+
#
3+
# Copyright 2018 Confluent Inc.
4+
#
5+
# Licensed under the Apache License, Version 2.0 (the "License");
6+
# you may not use this file except in compliance with the License.
7+
# You may obtain a copy of the License at
8+
#
9+
# http://www.apache.org/licenses/LICENSE-2.0
10+
#
11+
# Unless required by applicable law or agreed to in writing, software
12+
# distributed under the License is distributed on an "AS IS" BASIS,
13+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14+
# See the License for the specific language governing permissions and
15+
# limitations under the License.
16+
#
17+
18+
import argparse
19+
20+
from six.moves import input
21+
22+
from confluent_kafka import avro as avro
23+
24+
record_schema_str = """
25+
{
26+
"type": "record",
27+
"name": "User",
28+
"fields": [
29+
{"name": "name", "type": "string"},
30+
{"name": "favorite_number", "type": ["int", "null"]},
31+
{"name": "favorite_color", "type": ["string", "null"]}
32+
]
33+
}
34+
"""
35+
36+
37+
class ParserOpts(object):
    """Argparse namespace that renders itself into Kafka client configs.

    Attributes are populated by ``parser.parse_args(namespace=...)`` in
    ``main()``: bootstrap_servers, schema_registry, userinfo, topic,
    group and mode.
    """

    def _build(self):
        """Return the configuration common to producer and consumer."""
        common = {
            'bootstrap.servers': self.bootstrap_servers,
            'schema.registry.url': self.schema_registry,
        }
        # Only wire up HTTP basic auth when credentials were supplied.
        if self.userinfo:
            common.update({
                'schema.registry.basic.auth.credentials.source': 'USER_INFO',
                'schema.registry.basic.auth.user.info': self.userinfo,
            })
        return common

    def producer_conf(self):
        """Configuration dict for an AvroProducer."""
        return self._build()

    def consumer_conf(self):
        """Configuration dict for an AvroConsumer (shared config + group id)."""
        merged = self._build()
        merged['group.id'] = self.group
        return merged
52+
53+
54+
class User(dict):
    """Dict-backed ``User`` record matching ``record_schema_str``."""

    # Parsed Avro schema, shared by every instance.
    _schema = avro.loads(record_schema_str)

    def __init__(self, conf=None):
        """Initialize from *conf*, or with anonymous defaults when absent/empty."""
        defaults = {"name": "anonymous",
                    "favorite_number": 0,
                    "favorite_color": ""}
        super(User, self).__init__(conf or defaults)

    @classmethod
    def schema(cls):
        """Return the Avro schema backing this record type."""
        return cls._schema

    def prompt(self):
        """Interactively populate the record's fields from stdin.

        An empty favorite number falls back to 0; non-numeric input
        raises ValueError (handled by the produce loop).
        """
        self['name'] = input("Enter name:")
        raw = input("Enter favorite number:")
        self['favorite_number'] = int(raw or 0)
        self['favorite_color'] = input("Enter favorite color:")
75+
76+
77+
def on_delivery(err, msg):
    """Delivery report callback for AvroProducer.produce().

    :param err: KafkaError describing the failure, or None on success.
    :param msg: the produced Message (topic/partition are populated even
        on failure; offset/value only on success).
    :returns: 0 if delivery failed, 1 if the message was delivered.
    """
    # The original had a second, unreachable `elif err is not None` branch
    # (same condition as the first) and a stray '%' in the failure format
    # string; both are removed here.
    if err is not None:
        print('Message delivery failed ({} [{}]): {}'.format(
            msg.topic(), str(msg.partition()), err))
        return 0
    print('Message delivered to {} [{}] at offset [{}]: {}'.format(
        msg.topic(), msg.partition(), msg.offset(), msg.value()))
    return 1
88+
89+
90+
def produce(args):
    """Prompt for User records on stdin and publish them to *args.topic*.

    Runs until the user hits ^C, then flushes outstanding deliveries.
    Invalid numeric input discards the current record and re-prompts.
    """
    from confluent_kafka.avro import AvroProducer

    topic = args.topic
    producer = AvroProducer(args.producer_conf(),
                            default_value_schema=User.schema())

    print("producing user records to topic {}. ^c to exit.".format(topic))

    record = User()
    while True:
        try:
            record.prompt()
            producer.produce(topic=topic, partition=0, value=record,
                             callback=on_delivery)
            # Serve delivery callbacks from earlier produce() calls.
            producer.poll(0.1)
        except KeyboardInterrupt:
            break
        except ValueError:
            print("Invalid input, discarding record...")
            continue
        # Start from a fresh record for the next prompt.
        record = User()

    print("\nFlushing records...")
    producer.flush()
115+
116+
117+
def consume(args):
    """Consume User records from *args.topic* and print them.

    Stops at end of partition, on a consumer error, or when a message
    cannot be deserialized; the consumer is closed before returning.
    """
    from confluent_kafka.avro import AvroConsumer
    from confluent_kafka import KafkaError
    from confluent_kafka.avro.serializer import SerializerError

    topic = args.topic
    print("consuming user records from topic {}".format(topic))

    consumer = AvroConsumer(args.consumer_conf())
    consumer.subscribe([topic])

    while True:
        try:
            msg = consumer.poll(1)
        except SerializerError as e:
            print("Message deserialization failed {}".format(e))
            break

        if msg is None:
            # poll() timed out; keep waiting.
            continue

        err = msg.error()
        if err:
            # End of partition ends the example quietly; any other
            # error is reported before stopping.
            if err.code() != KafkaError._PARTITION_EOF:
                print(err)
            break

        user = User(msg.value())
        print("\nusername: {}\n\tfavorite_number:{}\n\tfavorite_color:{}".format(
            user["name"], user["favorite_number"], user["favorite_color"]))

    consumer.close()
152+
153+
154+
def main():
    """Parse the command line and run the selected execution mode."""
    # To use the provided cluster execute <source root>/tests/docker/bin/cluster_up.sh.
    # Defaults assume the use of the provided test cluster.
    parser = argparse.ArgumentParser(
        description="Example client for handling Avro data")
    parser.add_argument('-b', dest="bootstrap_servers",
                        default="localhost:29092",
                        help="Bootstrap broker(s) (host[:port])")
    parser.add_argument('-s', dest="schema_registry",
                        default="http://localhost:8083",
                        help="Schema Registry (http(s)://host[:port]")
    parser.add_argument('-t', dest="topic", default="example_avro",
                        help="Topic name")
    parser.add_argument('-u', dest="userinfo", default="ckp_tester:test_secret",
                        help="Userinfo (username:password); requires Schema Registry with HTTP basic auth enabled")
    parser.add_argument('mode', choices=['produce', 'consume'],
                        help="Execution mode (produce | consume)")
    parser.add_argument('-g', dest="group", default="example_avro",
                        help="Consumer group; required if running 'consumer' mode")

    conf = ParserOpts()
    parser.parse_args(namespace=conf)

    # Dispatch on the positional mode argument.
    action = produce if conf.mode == "produce" else consume
    action(conf)
177+
178+
179+
# Script entry point: only run when executed directly, not on import.
if __name__ == '__main__':
    main()

0 commit comments

Comments
 (0)
pFad - Phonifier reborn

Pfad - The Proxy pFad of © 2024 Garber Painting. All rights reserved.

Note: This service is not intended for secure transactions such as banking, social media, email, or purchasing. Use at your own risk. We assume no liability whatsoever for broken pages.


Alternative Proxies:

Alternative Proxy

pFad Proxy

pFad v3 Proxy

pFad v4 Proxy