
Commit d613894

Include avro in docs, fix docstrings in confluent_kafka, Consumer, avro
1 parent e4eeaa1 commit d613894

File tree

5 files changed: +77 -47 lines

confluent_kafka/avro/__init__.py

Lines changed: 23 additions & 14 deletions
@@ -18,11 +18,12 @@ class AvroProducer(Producer):
     Kafka Producer client which does avro schema encoding to messages.
     Handles schema registration, Message serialization.
 
-    Constructor takes below parameters
+    Constructor takes below parameters.
 
-    @:param: config: dict object with config parameters containing url for schema registry (schema.registry.url).
-    @:param: default_key_schema: Optional avro schema for key
-    @:param: default_value_schema: Optional avro schema for value
+    :param dict config: Config parameters containing url for schema registry (``schema.registry.url``)
+                        and the standard Kafka client configuration (``bootstrap.servers`` et.al).
+    :param str default_key_schema: Optional default avro schema for key
+    :param str default_value_schema: Optional default avro schema for value
     """
 
     def __init__(self, config, default_key_schema=None,

@@ -42,13 +43,19 @@ def __init__(self, config, default_key_schema=None,
 
     def produce(self, **kwargs):
         """
-            Sends message to kafka by encoding with specified avro schema
-            @:param: topic: topic name
-            @:param: value: An object to serialize
-            @:param: value_schema : Avro schema for value
-            @:param: key: An object to serialize
-            @:param: key_schema : Avro schema for key
-            @:exception: SerializerError
+        Asynchronously sends message to Kafka by encoding with specified or default avro schema.
+
+        :param str topic: topic name
+        :param object value: An object to serialize
+        :param str value_schema: Avro schema for value
+        :param object key: An object to serialize
+        :param str key_schema: Avro schema for key
+
+        Plus any other parameters accepted by confluent_kafka.Producer.produce
+
+        :raises SerializerError: On serialization failure
+        :raises BufferError: If producer queue is full.
+        :raises KafkaException: For other produce failures.
        """
        # get schemas from kwargs if defined
        key_schema = kwargs.pop('key_schema', self._key_schema)
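For reference, a minimal sketch of the AvroProducer API these docstrings describe. The broker address, registry URL, topic name, and the User schema are illustrative placeholders, not part of this commit:

    from confluent_kafka import avro
    from confluent_kafka.avro import AvroProducer

    # Any valid avro schema string works here; 'User' is hypothetical.
    value_schema = avro.loads(
        '{"type": "record", "name": "User",'
        ' "fields": [{"name": "name", "type": "string"}]}')

    # schema.registry.url plus the standard Kafka client configuration,
    # as the constructor docstring now documents.
    producer = AvroProducer({'bootstrap.servers': 'mybroker',
                             'schema.registry.url': 'http://schema-registry:8081'},
                            default_value_schema=value_schema)

    # Asynchronous send; the value is avro-encoded with the default schema.
    producer.produce(topic='users', value={'name': 'alice'})
    producer.flush()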
@@ -81,7 +88,8 @@ class AvroConsumer(Consumer):
 
     Constructor takes below parameters
 
-    @:param: config: dict object with config parameters containing url for schema registry (schema.registry.url).
+    :param dict config: Config parameters containing url for schema registry (``schema.registry.url``)
+                        and the standard Kafka client configuration (``bootstrap.servers`` et.al).
     """
     def __init__(self, config, schema_registry=None):
         schema_registry_url = config.pop("schema.registry.url", None)

@@ -100,8 +108,9 @@ def poll(self, timeout=None):
         This is an overriden method from confluent_kafka.Consumer class. This handles message
         deserialization using avro schema
 
-        @:param timeout
-        @:return message object with deserialized key and value as dict objects
+        :param float timeout: Poll timeout in seconds (default: indefinite)
+        :returns: message object with deserialized key and value as dict objects
+        :rtype: Message
         """
         if timeout is None:
             timeout = -1
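And a matching consumption loop for the poll() contract above, again with placeholder connection details and topic. poll() returns None on timeout, otherwise a Message whose key and value are already decoded to dicts, and raises SerializerError on undecodable payloads:

    from confluent_kafka.avro import AvroConsumer
    from confluent_kafka.avro.serializer import SerializerError

    consumer = AvroConsumer({'bootstrap.servers': 'mybroker',
                             'group.id': 'avro-example',
                             'schema.registry.url': 'http://schema-registry:8081'})
    consumer.subscribe(['users'])

    try:
        while True:
            msg = consumer.poll(1.0)           # seconds; None on timeout
            if msg is None:
                continue
            if msg.error():
                print(msg.error())
                continue
            print(msg.key(), msg.value())      # deserialized dict objects
    except SerializerError as e:
        print('Message deserialization failed: {}'.format(e))
    finally:
        consumer.close()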

confluent_kafka/src/Consumer.c

Lines changed: 12 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -890,7 +890,7 @@ static PyObject *Consumer_poll (Handle *self, PyObject *args,
 
         msgobj = Message_new0(self, rkm);
 #ifdef RD_KAFKA_V_HEADERS
-        // Have to deatch headers outside Message_new0 because it declares the
+        // Have to detach headers outside Message_new0 because it declares the
         // rk message as a const
         rd_kafka_message_detach_headers(rkm, &((Message *)msgobj)->c_headers);
 #endif

@@ -956,7 +956,7 @@ static PyObject *Consumer_consume (Handle *self, PyObject *args,
         for (i = 0; i < n; i++) {
                 PyObject *msgobj = Message_new0(self, rkmessages[i]);
 #ifdef RD_KAFKA_V_HEADERS
-                // Have to deatch headers outside Message_new0 because it declares the
+                // Have to detach headers outside Message_new0 because it declares the
                 // rk message as a const
                 rd_kafka_message_detach_headers(rkmessages[i], &((Message *)msgobj)->c_headers);
 #endif

@@ -1077,8 +1077,9 @@ static PyMethodDef Consumer_methods[] = {
          " :param float timeout: Maximum time to block waiting for message, event or callback (default: infinite (-1)). (Seconds)\n"
          " :returns: A list of Message objects (possibly empty on timeout)\n"
          " :rtype: list(Message)\n"
-         " :raises: RuntimeError if called on a closed consumer, KafkaError "
-         "in case of internal error, or ValueError if num_messages > 1M.\n"
+         " :raises RuntimeError: if called on a closed consumer\n"
+         " :raises KafkaError: in case of internal error\n"
+         " :raises ValueError: if num_messages > 1M\n"
          "\n"
        },
        { "assign", (PyCFunction)Consumer_assign, METH_O,
@@ -1088,19 +1089,18 @@ static PyMethodDef Consumer_methods[] = {
          ":py:class:`TopicPartition` and starts consuming.\n"
          "\n"
          " :param list(TopicPartition) partitions: List of topic+partitions and optionally initial offsets to start consuming.\n"
-         " :raises: RuntimeError if called on a closed consumer\n"
+         " :raises: RuntimeError if called on a closed consumer\n"
          "\n"
        },
        { "unassign", (PyCFunction)Consumer_unassign, METH_NOARGS,
          " Removes the current partition assignment and stops consuming.\n"
-         " :raises: KafkaException\n"
-         " :raises: RuntimeError if called on a closed consumer\n"
+         "\n"
+         " :raises KafkaException:\n"
+         " :raises RuntimeError: if called on a closed consumer\n"
          "\n"
        },
        { "assignment", (PyCFunction)Consumer_assignment,
          METH_VARARGS|METH_KEYWORDS,
-         ".. py:function:: assignment()\n"
-         "\n"
          " Returns the current partition assignment.\n"
          "\n"
          " :returns: List of assigned topic+partitions.\n"
@@ -1223,11 +1223,11 @@ static PyMethodDef Consumer_methods[] = {
          "\n"
          " Retrieve low and high offsets for partition.\n"
          "\n"
-         " :param TopicPartition partition: Topic+partition to return offsets for."
+         " :param TopicPartition partition: Topic+partition to return offsets for.\n"
          " :param float timeout: Request timeout (when cached=False). (Seconds)\n"
          " :param bool cached: Instead of querying the broker used cached information. "
          "Cached values: The low offset is updated periodically (if statistics.interval.ms is set) while "
-         "the high offset is updated on each message fetched from the broker for this partition."
+         "the high offset is updated on each message fetched from the broker for this partition.\n"
          " :returns: Tuple of (low,high) on success or None on timeout.\n"
          " :rtype: tuple(int,int)\n"
          " :raises: KafkaException\n"

@@ -1244,7 +1244,7 @@ static PyMethodDef Consumer_methods[] = {
          " timestamp is greater than or equal to the given timestamp in the\n"
          " corresponding partition.\n"
          "\n"
-         " :param list(TopicPartition) partitions: topic+partitions with timestamps in the TopicPartition.offset field."
+         " :param list(TopicPartition) partitions: topic+partitions with timestamps in the TopicPartition.offset field.\n"
          " :param float timeout: Request timeout. (Seconds)\n"
          " :returns: list of topic+partition with offset field set and possibly error set\n"
          " :rtype: list(TopicPartition)\n"

confluent_kafka/src/confluent_kafka.c

Lines changed: 19 additions & 15 deletions
@@ -443,16 +443,16 @@ static PyMethodDef Message_methods[] = {
          "\n"
        },
        { "timestamp", (PyCFunction)Message_timestamp, METH_NOARGS,
-         " Retrieve timestamp type and timestamp from message.\n"
-         " The timestamp type is one of:\n"
-         " * :py:const:`TIMESTAMP_NOT_AVAILABLE`"
+         "Retrieve timestamp type and timestamp from message.\n"
+         "The timestamp type is one of:\n"
+         "  * :py:const:`TIMESTAMP_NOT_AVAILABLE`"
          " - Timestamps not supported by broker\n"
-         " * :py:const:`TIMESTAMP_CREATE_TIME` "
+         "  * :py:const:`TIMESTAMP_CREATE_TIME` "
          " - Message creation time (or source / producer time)\n"
-         " * :py:const:`TIMESTAMP_LOG_APPEND_TIME` "
+         "  * :py:const:`TIMESTAMP_LOG_APPEND_TIME` "
          " - Broker receive time\n"
          "\n"
-         " The returned timestamp should be ignored if the timestamp type is "
+         "The returned timestamp should be ignored if the timestamp type is "
          ":py:const:`TIMESTAMP_NOT_AVAILABLE`.\n"
          "\n"
          " The timestamp is the number of milliseconds since the epoch (UTC).\n"
@@ -474,21 +474,24 @@ static PyMethodDef Message_methods[] = {
        },
        { "set_headers", (PyCFunction)Message_set_headers, METH_O,
          " Set the field 'Message.headers' with new value.\n"
-         " :param: object value: Message.headers.\n"
+         "\n"
+         " :param object value: Message.headers.\n"
          " :returns: None.\n"
          " :rtype: None\n"
          "\n"
        },
        { "set_value", (PyCFunction)Message_set_value, METH_O,
          " Set the field 'Message.value' with new value.\n"
-         " :param: object value: Message.value.\n"
+         "\n"
+         " :param object value: Message.value.\n"
          " :returns: None.\n"
          " :rtype: None\n"
          "\n"
        },
        { "set_key", (PyCFunction)Message_set_key, METH_O,
          " Set the field 'Message.key' with new value.\n"
-         " :param: object value: Message.key.\n"
+         "\n"
+         " :param object value: Message.key.\n"
          " :returns: None.\n"
          " :rtype: None\n"
          "\n"
@@ -728,19 +731,20 @@ static int TopicPartition_traverse (TopicPartition *self,
 
 static PyMemberDef TopicPartition_members[] = {
        { "topic", T_STRING, offsetof(TopicPartition, topic), READONLY,
-         ":py:attribute:topic - Topic name (string)" },
+         ":attribute topic: Topic name (string)" },
        { "partition", T_INT, offsetof(TopicPartition, partition), 0,
-         ":py:attribute: Partition number (int)" },
+         ":attribute partition: Partition number (int)" },
        { "offset", T_LONGLONG, offsetof(TopicPartition, offset), 0,
-         " :py:attribute: Offset (long)\n"
-         "Either an absolute offset (>=0) or a logical offset:"
+         ":attribute offset: Offset (long)\n"
+         "\n"
+         "Either an absolute offset (>=0) or a logical offset: "
          " :py:const:`OFFSET_BEGINNING`,"
          " :py:const:`OFFSET_END`,"
          " :py:const:`OFFSET_STORED`,"
-         " :py:const:`OFFSET_INVALID`"
+         " :py:const:`OFFSET_INVALID`\n"
        },
        { "error", T_OBJECT, offsetof(TopicPartition, error), READONLY,
-         ":py:attribute: Indicates an error (with :py:class:`KafkaError`) unless None." },
+         ":attribute error: Indicates an error (with :py:class:`KafkaError`) unless None." },
        { NULL }
 };
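A quick illustration of the TopicPartition attributes documented above, with a hypothetical topic:

    from confluent_kafka import TopicPartition, OFFSET_BEGINNING, OFFSET_END

    tp = TopicPartition('mytopic', 0, 1000)                    # absolute offset (>=0)
    tp_start = TopicPartition('mytopic', 0, OFFSET_BEGINNING)  # logical offset

    print(tp.topic, tp.partition, tp.offset)
    if tp.error is not None:
        print(tp.error)   # a KafkaError, set on some API responses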

docs/conf.py

Lines changed: 3 additions & 3 deletions
@@ -55,9 +55,9 @@
 # built documents.
 #
 # The short X.Y version.
-version = '0.11.4'
+version = '0.11.5'
 # The full version, including alpha/beta/rc tags.
-release = '0.11.4'
+release = '0.11.5'
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.

@@ -133,7 +133,7 @@
 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
 # so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
+#html_static_path = ['_static']
 
 # Add any extra paths that contain custom files (such as robots.txt or
 # .htaccess) here, relative to this directory. These files are copied

docs/index.rst

Lines changed: 20 additions & 3 deletions
@@ -27,6 +27,24 @@ Producer
 .. autoclass:: confluent_kafka.Producer
    :members:
 
+*****
+Admin
+*****
+
+.. automodule:: confluent_kafka.admin
+   :members:
+
+.. autoclass:: confluent_kafka.admin.NewTopic
+.. autoclass:: confluent_kafka.admin.NewPartitions
+
+****
+Avro
+****
+
+.. automodule:: confluent_kafka.avro
+   :members:
+
+
 *******
 Message
 *******

@@ -86,7 +104,7 @@ https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md
 
 The Python bindings also provide some additional configuration properties:
 
-* ``default.topic.config``: value is a dict of topic-level configuration
+* ``default.topic.config``: value is a dict of client topic-level configuration
   properties that are applied to all used topics for the instance.
 
 * ``error_cb(kafka.KafkaError)``: Callback for generic/global error events. This callback is served upon calling
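As a sketch of the two properties mentioned above (broker, group id, and the chosen topic-level property are placeholders):

    from confluent_kafka import Consumer

    def on_error(err):
        print('client error: {}'.format(err))   # err is a KafkaError

    c = Consumer({'bootstrap.servers': 'mybroker',
                  'group.id': 'example',
                  # topic-level properties applied to all topics used
                  # by this instance:
                  'default.topic.config': {'auto.offset.reset': 'earliest'},
                  'error_cb': on_error})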
@@ -120,5 +138,4 @@ The Python bindings also provide some additional configuration properties:
 
     mylogger = logging.getLogger()
     mylogger.addHandler(logging.StreamHandler())
-    producer = confluent_kafka.Producer({'bootstrap.servers': 'mybroker.com'},
-                                        logger=mylogger)
+    producer = confluent_kafka.Producer({'bootstrap.servers': 'mybroker.com'}, logger=mylogger)

0 commit comments
