
Commit 6c2b44c

Added Consumer.store_offsets() API
1 parent 5205231 commit 6c2b44c

File tree

2 files changed: +115 -0 lines changed

confluent_kafka/src/Consumer.c

Lines changed: 93 additions & 0 deletions
@@ -325,6 +325,83 @@ static PyObject *Consumer_commit (Handle *self, PyObject *args,

+static PyObject *Consumer_store_offsets (Handle *self, PyObject *args,
+                                         PyObject *kwargs) {
+
+        rd_kafka_resp_err_t err;
+        PyObject *msg = NULL, *offsets = NULL;
+        rd_kafka_topic_partition_list_t *c_offsets;
+        static char *kws[] = { "message", "offsets", NULL };
+
+#if RD_KAFKA_VERSION < 0x000b0000
+        PyErr_Format(PyExc_NotImplementedError,
+                     "Consumer store_offsets require "
+                     "confluent-kafka-python built for librdkafka "
+                     "version >=v0.11.0 (librdkafka runtime 0x%x, "
+                     "buildtime 0x%x)",
+                     rd_kafka_version(), RD_KAFKA_VERSION);
+        return NULL;
+#endif
+
+        if (!PyArg_ParseTupleAndKeywords(args, kwargs, "|OO", kws,
+                                         &msg, &offsets))
+                return NULL;
+
+        if (msg && offsets) {
+                PyErr_SetString(PyExc_ValueError,
+                                "message and offsets are mutually exclusive");
+                return NULL;
+        }
+
+        if (!msg && !offsets) {
+                PyErr_SetString(PyExc_ValueError,
+                                "expected either message or offsets");
+                return NULL;
+        }
+
+        if (offsets) {
+                if (!(c_offsets = py_to_c_parts(offsets)))
+                        return NULL;
+        } else {
+                Message *m;
+                PyObject *uo8;
+
+                if (PyObject_Type((PyObject *)msg) !=
+                    (PyObject *)&MessageType) {
+                        PyErr_Format(PyExc_TypeError,
+                                     "expected %s", MessageType.tp_name);
+                        return NULL;
+                }
+
+                m = (Message *)msg;
+
+                c_offsets = rd_kafka_topic_partition_list_new(1);
+                rd_kafka_topic_partition_list_add(
+                        c_offsets, cfl_PyUnistr_AsUTF8(m->topic, &uo8),
+                        m->partition)->offset = m->offset + 1;
+                Py_XDECREF(uo8);
+        }
+
+        err = rd_kafka_offsets_store(self->rk, c_offsets);
+
+        if (c_offsets)
+                rd_kafka_topic_partition_list_destroy(c_offsets);
+
+        if (err) {
+                cfl_PyErr_Format(err,
+                                 "StoreOffsets failed: %s", rd_kafka_err2str(err));
+                return NULL;
+        }
+
+        Py_RETURN_NONE;
+}
+
+
 static PyObject *Consumer_committed (Handle *self, PyObject *args,
                                      PyObject *kwargs) {

@@ -570,6 +647,22 @@ static PyMethodDef Consumer_methods[] = {
          " :raises: KafkaException\n"
          "\n"
        },
+       { "store_offsets", (PyCFunction)Consumer_store_offsets, METH_VARARGS|METH_KEYWORDS,
+         ".. py:function:: store_offsets([message=None], [offsets=None])\n"
+         "\n"
+         " Store offsets for a message or a list of offsets.\n"
+         "\n"
+         " ``message`` and ``offsets`` are mutually exclusive. "
+         "The stored offsets will be committed according to 'auto.commit.interval.ms' or manual "
+         "offset-less :py:meth:`commit`. "
+         "Note that 'enable.auto.offset.store' must be set to False when using this API.\n"
+         "\n"
+         " :param confluent_kafka.Message message: Store message's offset+1.\n"
+         " :param list(TopicPartition) offsets: List of topic+partitions+offsets to store.\n"
+         " :rtype: None\n"
+         " :raises: KafkaException\n"
+         "\n"
+       },
        { "commit", (PyCFunction)Consumer_commit, METH_VARARGS|METH_KEYWORDS,
          ".. py:function:: commit([message=None], [offsets=None], [async=True])\n"
          "\n"

tests/test_Consumer.py

Lines changed: 22 additions & 0 deletions
@@ -81,6 +81,28 @@ def dummy_assign_revoke(consumer, partitions):
     kc.close()


+@pytest.mark.skipif(libversion()[1] < 0x000b0000,
+                    reason="requires librdkafka >=0.11.0")
+def test_store_offsets():
+    """ Basic store_offsets() tests """
+
+    c = Consumer({'group.id': 'test',
+                  'enable.auto.commit': True,
+                  'enable.auto.offset.store': False,
+                  'socket.timeout.ms': 50,
+                  'session.timeout.ms': 100})
+
+    c.subscribe(["test"])
+
+    try:
+        c.store_offsets(offsets=[TopicPartition("test", 0, 42)])
+    except KafkaException as e:
+        assert e.args[0].code() == KafkaError._UNKNOWN_PARTITION
+
+    c.unsubscribe()
+    c.close()
+
+
 # librdkafka <=0.9.2 has a race-issue where it will hang indefinately
 # if a commit is issued when no coordinator is available.
 @pytest.mark.skipif(libversion()[1] <= 0x000902ff,
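The test exercises the offsets= form with a list of TopicPartition objects. Per the docstring, offsets stored this way can also be picked up by a manual, offset-less commit() instead of auto-commit; a brief sketch of that variant follows (again not part of the commit, with placeholder broker and topic names).

from confluent_kafka import Consumer, TopicPartition

c = Consumer({'bootstrap.servers': 'localhost:9092',   # placeholder broker
              'group.id': 'example-manual',
              'enable.auto.commit': False,             # no auto-commit here
              'enable.auto.offset.store': False})      # still required

c.subscribe(['test'])                                  # placeholder topic
# ... consume and process messages, tracking the next offset per partition ...

# Store an explicit position (the next offset to consume) for partition 0,
# then commit whatever has been stored with an offset-less commit().
c.store_offsets(offsets=[TopicPartition('test', 0, 43)])
c.commit()
c.close()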
