Commit b2568b4

Restructured: C bindings are now a submodule (cimpl)
1 parent b520c8d commit b2568b4

8 files changed, +69 −24 lines changed


confluent_kafka/__init__.py

Lines changed: 2 additions & 0 deletions
@@ -0,0 +1,2 @@
+__all__ = ['cimpl','kafkatest']
+from .cimpl import *
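
The new package __init__.py above simply re-exports the compiled cimpl submodule, so the public import path is unchanged for users. A minimal sketch of what that means in practice (the broker address below is a placeholder, not part of this commit):

# Sketch: Producer/Consumer are still imported from the top-level package,
# but they now originate from the compiled confluent_kafka.cimpl extension.
from confluent_kafka import Producer
from confluent_kafka.cimpl import Producer as CProducer

assert Producer is CProducer                              # same re-exported type object
p = Producer({'bootstrap.servers': 'localhost:9092'})     # placeholder broker address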

Consumer.c renamed to confluent_kafka/cimpl/Consumer.c

Lines changed: 1 addition & 1 deletion
@@ -522,7 +522,7 @@ static PyObject *Consumer_new (PyTypeObject *type, PyObject *args,
 
 PyTypeObject ConsumerType = {
         PyVarObject_HEAD_INIT(NULL, 0)
-        "confluent_kafka.Consumer", /*tp_name*/
+        "cimpl.Consumer", /*tp_name*/
         sizeof(Consumer), /*tp_basicsize*/
         0, /*tp_itemsize*/
         (destructor)Consumer_dealloc, /*tp_dealloc*/

Producer.c renamed to confluent_kafka/cimpl/Producer.c

Lines changed: 1 addition & 1 deletion
@@ -430,7 +430,7 @@ static PyObject *Producer_new (PyTypeObject *type, PyObject *args,
 
 PyTypeObject ProducerType = {
         PyVarObject_HEAD_INIT(NULL, 0)
-        "confluent_kafka.Producer", /*tp_name*/
+        "cimpl.Producer", /*tp_name*/
         sizeof(Producer), /*tp_basicsize*/
         0, /*tp_itemsize*/
         (destructor)Producer_dealloc, /*tp_dealloc*/

confluent_kafka.c renamed to confluent_kafka/cimpl/confluent_kafka.c

Lines changed: 18 additions & 18 deletions
@@ -168,7 +168,7 @@ static PyObject* KafkaError_richcompare (KafkaError *self, PyObject *o2,
 
 static PyTypeObject KafkaErrorType = {
         PyVarObject_HEAD_INIT(NULL, 0)
-        "confluent_kafka.KafkaError", /*tp_name*/
+        "cimpl.KafkaError", /*tp_name*/
         sizeof(KafkaError), /*tp_basicsize*/
         0, /*tp_itemsize*/
         (destructor)KafkaError_dealloc, /*tp_dealloc*/
@@ -416,7 +416,7 @@ static PySequenceMethods Message_seq_methods = {
 
 PyTypeObject MessageType = {
         PyVarObject_HEAD_INIT(NULL, 0)
-        "confluent_kafka.Message", /*tp_name*/
+        "cimpl.Message", /*tp_name*/
         sizeof(Message), /*tp_basicsize*/
         0, /*tp_itemsize*/
         (destructor)Message_dealloc, /*tp_dealloc*/
@@ -660,7 +660,7 @@ static long TopicPartition_hash (TopicPartition *self) {
 
 static PyTypeObject TopicPartitionType = {
         PyVarObject_HEAD_INIT(NULL, 0)
-        "confluent_kafka.TopicPartition", /*tp_name*/
+        "cimpl.TopicPartition", /*tp_name*/
         sizeof(TopicPartition), /*tp_basicsize*/
         0, /*tp_itemsize*/
         (destructor)TopicPartition_dealloc, /*tp_dealloc*/
@@ -772,7 +772,7 @@ rd_kafka_topic_partition_list_t *py_to_c_parts (PyObject *plist) {
 
         if (!PyList_Check(plist)) {
                 PyErr_SetString(PyExc_TypeError,
-                                "requires list of confluent_kafka.TopicPartition");
+                                "requires list of TopicPartition");
                 return NULL;
         }
 
@@ -1137,7 +1137,7 @@ static PyObject *version (PyObject *self, PyObject *args) {
         return Py_BuildValue("si", "0.9.1", 0x00090100);
 }
 
-static PyMethodDef confluent_kafka_methods[] = {
+static PyMethodDef cimpl_methods[] = {
         {"libversion", libversion, METH_NOARGS,
          " Retrieve librdkafka version string and integer\n"
          "\n"
@@ -1230,17 +1230,17 @@ static char *KafkaError_add_errs (PyObject *dict, const char *origdoc) {
 
 
 #ifdef PY3
-static struct PyModuleDef confluent_kafka_moduledef = {
+static struct PyModuleDef cimpl_moduledef = {
         PyModuleDef_HEAD_INIT,
-        "confluent_kafka", /* m_name */
-        "Confluent's Apache Kafka Python client", /* m_doc */
+        "cimpl", /* m_name */
+        "Confluent's Apache Kafka Python client (C implementation)", /* m_doc */
         -1, /* m_size */
-        confluent_kafka_methods, /* m_methods */
+        cimpl_methods, /* m_methods */
 };
 #endif
 
 
-static PyObject *_init_confluent_kafka (void) {
+static PyObject *_init_cimpl (void) {
         PyObject *m;
 
         if (PyType_Ready(&KafkaErrorType) < 0)
@@ -1255,10 +1255,10 @@ static PyObject *_init_confluent_kafka (void) {
                 return NULL;
 
 #ifdef PY3
-        m = PyModule_Create(&confluent_kafka_moduledef);
+        m = PyModule_Create(&cimpl_moduledef);
 #else
-        m = Py_InitModule3("confluent_kafka", confluent_kafka_methods,
-                           "Confluent's Apache Kafka Python client");
+        m = Py_InitModule3("cimpl", cimpl_methods,
+                           "Confluent's Apache Kafka Python client (C implementation)");
 #endif
         if (!m)
                 return NULL;
@@ -1283,7 +1283,7 @@ static PyObject *_init_confluent_kafka (void) {
         PyModule_AddObject(m, "Consumer", (PyObject *)&ConsumerType);
 
         KafkaException = PyErr_NewExceptionWithDoc(
-                "confluent_kafka.KafkaException",
+                "cimpl.KafkaException",
                 "Kafka exception that wraps the :py:class:`KafkaError` "
                 "class.\n"
                 "\n"
@@ -1299,11 +1299,11 @@ static PyObject *_init_confluent_kafka (void) {
 
 
 #ifdef PY3
-PyMODINIT_FUNC PyInit_confluent_kafka (void) {
-        return _init_confluent_kafka();
+PyMODINIT_FUNC PyInit_cimpl (void) {
+        return _init_cimpl();
 }
 #else
-PyMODINIT_FUNC initconfluent_kafka (void) {
-        _init_confluent_kafka();
+PyMODINIT_FUNC initcimpl (void) {
+        _init_cimpl();
 }
 #endif
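
Because the extension module is now named cimpl (imported as confluent_kafka.cimpl), the diff above also renames the method table, module definition, and Py2/Py3 init entry points to match (cimpl_methods, cimpl_moduledef, PyInit_cimpl/initcimpl), and the tp_name strings gain the cimpl. prefix. A rough illustration of how that surfaces from Python, assuming a build of this commit:

# Rough illustration (assumes this commit has been built and installed):
# the C-level type names now carry the 'cimpl' module prefix.
from confluent_kafka import Consumer, KafkaError

print(KafkaError.__module__)   # 'cimpl', derived from the tp_name "cimpl.KafkaError"
print(Consumer)                # repr shows the tp_name, e.g. <class 'cimpl.Consumer'>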
File renamed without changes.

docs/index.rst

Lines changed: 43 additions & 0 deletions
@@ -13,6 +13,49 @@ Indices and tables
    :synopsis: Confluent's Apache Kafka Python client.
    :members:
 
+********
+Consumer
+********
+
+.. autoclass:: confluent_kafka.Consumer
+   :members:
+
+********
+Producer
+********
+
+.. autoclass:: confluent_kafka.Producer
+   :members:
+
+*******
+Message
+*******
+
+.. autoclass:: confluent_kafka.Message
+   :members:
+
+**************
+TopicPartition
+**************
+
+.. autoclass:: confluent_kafka.TopicPartition
+   :members:
+
+**********
+KafkaError
+**********
+
+.. autoclass:: confluent_kafka.KafkaError
+   :members:
+
+**************
+KafkaException
+**************
+
+.. autoclass:: confluent_kafka.KafkaException
+   :members:
+
+
 
 Configuration
 =============
File renamed without changes.

setup.py

Lines changed: 4 additions & 4 deletions
@@ -4,16 +4,16 @@
 from distutils.core import Extension
 
 
-module = Extension('confluent_kafka',
+module = Extension('confluent_kafka.cimpl',
                    include_dirs = ['/usr/local/include'],
                    libraries= ['rdkafka'],
-                   sources=['confluent_kafka.c', 'Producer.c', 'Consumer.c'])
+                   sources=['confluent_kafka/cimpl/confluent_kafka.c', 'confluent_kafka/cimpl/Producer.c', 'confluent_kafka/cimpl/Consumer.c'])
 
 setup (name='confluent-kafka',
        version='0.9.1',
        description='Confluent\'s Apache Kafka client for Python',
        author='Confluent Inc',
        author_email='support@confluent.io',
        url='https://github.com/confluentinc/confluent-kafka-python',
-       ext_modules=[module])
-
+       ext_modules=[module],
+       packages=['confluent_kafka', 'confluent_kafka.cimpl', 'confluent_kafka.kafkatest'])
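
Giving the Extension the dotted name 'confluent_kafka.cimpl' makes distutils build and install the shared object inside the confluent_kafka package, and the pure-Python packages now have to be listed explicitly so the re-exporting __init__.py ships alongside it. A quick post-install smoke test, as a sketch (the build/install step itself is assumed, not part of this diff):

# Sketch of a post-install sanity check for the new layout.
import confluent_kafka

print(confluent_kafka.__all__)        # ['cimpl', 'kafkatest'] per the new __init__.py
print(confluent_kafka.libversion())   # re-exported from cimpl (registered in cimpl_methods above)

from confluent_kafka.cimpl import Producer, Consumer   # the compiled submodule is also importable directly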
