Skip to content

Commit 322f930

Browse files
committed
Reapply "Allow passing headers as both list(tuples) and dict()"
Reapplies commit c55ffc1
1 parent 80c0733 commit 322f930

File tree

4 files changed

+145
-49
lines changed

4 files changed

+145
-49
lines changed

confluent_kafka/src/confluent_kafka.c

Lines changed: 111 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -939,49 +939,125 @@ rd_kafka_topic_partition_list_t *py_to_c_parts (PyObject *plist) {
939939
}
940940

941941
#ifdef RD_KAFKA_V_HEADERS
942+
943+
942944
/**
943-
* @brief Convert Python list[(header_key, header_value),...]) to C rd_kafka_topic_partition_list_t.
944-
*
945-
* @returns The new Python list[(header_key, header_value),...] object.
945+
* @brief Convert Python list of tuples to rd_kafka_headers_t
946946
*/
947-
rd_kafka_headers_t *py_headers_to_c (PyObject *headers_plist) {
948-
int i, len;
949-
rd_kafka_headers_t *rd_headers = NULL;
950-
rd_kafka_resp_err_t err;
951-
const char *header_key, *header_value = NULL;
952-
int header_key_len = 0, header_value_len = 0;
953-
954-
if (!PyList_Check(headers_plist)) {
955-
PyErr_SetString(PyExc_TypeError,
956-
"Headers are expected to be a "
957-
"list of (key,value) tuples");
958-
return NULL;
959-
}
947+
static rd_kafka_headers_t *py_headers_list_to_c (PyObject *hdrs) {
948+
int i, len;
949+
rd_kafka_headers_t *rd_headers = NULL;
950+
951+
len = (int)PyList_Size(hdrs);
952+
rd_headers = rd_kafka_headers_new(len);
953+
954+
for (i = 0; i < len; i++) {
955+
rd_kafka_resp_err_t err;
956+
const char *header_key, *header_value = NULL;
957+
int header_key_len = 0, header_value_len = 0;
958+
959+
if(!PyArg_ParseTuple(PyList_GET_ITEM(hdrs, i), "s#z#",
960+
&header_key, &header_key_len,
961+
&header_value, &header_value_len)){
962+
rd_kafka_headers_destroy(rd_headers);
963+
PyErr_SetString(PyExc_TypeError,
964+
"Headers are expected to be a "
965+
"tuple of (key, value)");
966+
return NULL;
967+
}
968+
969+
err = rd_kafka_header_add(rd_headers,
970+
header_key, header_key_len,
971+
header_value, header_value_len);
972+
if (err) {
973+
cfl_PyErr_Format(err,
974+
"Unable to add message header \"%s\": "
975+
"%s",
976+
header_key, rd_kafka_err2str(err));
977+
rd_kafka_headers_destroy(rd_headers);
978+
return NULL;
979+
}
980+
}
981+
return rd_headers;
982+
}
983+
984+
985+
/**
986+
* @brief Convert Python dict to rd_kafka_headers_t
987+
*/
988+
static rd_kafka_headers_t *py_headers_dict_to_c (PyObject *hdrs) {
989+
int len;
990+
Py_ssize_t pos = 0;
991+
rd_kafka_headers_t *rd_headers = NULL;
992+
PyObject *ko, *vo;
993+
994+
len = (int)PyDict_Size(hdrs);
995+
rd_headers = rd_kafka_headers_new(len);
996+
997+
while (PyDict_Next(hdrs, &pos, &ko, &vo)) {
998+
PyObject *ks, *ks8;
999+
const char *k;
1000+
const void *v = NULL;
1001+
Py_ssize_t vsize = 0;
1002+
rd_kafka_resp_err_t err;
1003+
1004+
if (!(ks = cfl_PyObject_Unistr(ko))) {
1005+
PyErr_SetString(PyExc_TypeError,
1006+
"expected header key to be unicode "
1007+
"string");
1008+
rd_kafka_headers_destroy(rd_headers);
1009+
return NULL;
1010+
}
9601011

961-
len = PyList_Size(headers_plist);
962-
rd_headers = rd_kafka_headers_new(len);
1012+
k = cfl_PyUnistr_AsUTF8(ks, &ks8);
9631013

964-
for (i = 0; i < len; i++) {
965-
if(!PyArg_ParseTuple(PyList_GET_ITEM(headers_plist, i), "s#z#", &header_key,
966-
&header_key_len, &header_value, &header_value_len)){
967-
rd_kafka_headers_destroy(rd_headers);
968-
PyErr_SetString(PyExc_TypeError,
969-
"Headers are expected to be a list of (key,value) tuples");
970-
return NULL;
1014+
if (vo != Py_None) {
1015+
if (PyString_AsStringAndSize(vo, (char **)&v,
1016+
&vsize) == -1) {
1017+
Py_DECREF(ks);
1018+
rd_kafka_headers_destroy(rd_headers);
1019+
return NULL;
1020+
}
1021+
}
1022+
1023+
if ((err = rd_kafka_header_add(rd_headers, k, -1, v, vsize))) {
1024+
cfl_PyErr_Format(err,
1025+
"Unable to add message header \"%s\": "
1026+
"%s",
1027+
k, rd_kafka_err2str(err));
1028+
Py_DECREF(ks);
1029+
rd_kafka_headers_destroy(rd_headers);
1030+
return NULL;
1031+
}
1032+
1033+
Py_DECREF(ks);
9711034
}
9721035

973-
err = rd_kafka_header_add(rd_headers, header_key, header_key_len, header_value, header_value_len);
974-
if (err) {
975-
rd_kafka_headers_destroy(rd_headers);
976-
cfl_PyErr_Format(err,
977-
"Unable to create message headers: %s",
978-
rd_kafka_err2str(err));
979-
return NULL;
1036+
return rd_headers;
1037+
}
1038+
1039+
1040+
/**
1041+
* @brief Convert Python list[(header_key, header_value),...] or dict to C rd_kafka_headers_t.
1042+
*
1043+
* @returns The new C rd_kafka_headers_t object, or NULL on error.
1044+
*/
1045+
rd_kafka_headers_t *py_headers_to_c (PyObject *hdrs) {
1046+
1047+
if (PyList_Check(hdrs)) {
1048+
return py_headers_list_to_c(hdrs);
1049+
} else if (PyDict_Check(hdrs)) {
1050+
return py_headers_dict_to_c(hdrs);
1051+
} else {
1052+
PyErr_Format(PyExc_TypeError,
1053+
"expected headers to be "
1054+
"dict or list of (key, value) tuples, not %s",
1055+
((PyTypeObject *)PyObject_Type(hdrs))->tp_name);
1056+
return NULL;
9801057
}
981-
}
982-
return rd_headers;
9831058
}
9841059

1060+
9851061
/**
9861062
* @brief Convert rd_kafka_headers_t to Python list[(header_key, header_value),...])
9871063
*
@@ -995,7 +1071,7 @@ PyObject *c_headers_to_py (rd_kafka_headers_t *headers) {
9951071
size_t header_value_size;
9961072
PyObject *header_list;
9971073

998-
header_size = rd_kafka_header_cnt(headers);
1074+
header_size = rd_kafka_header_cnt(headers);
9991075
header_list = PyList_New(header_size);
10001076

10011077
while (!rd_kafka_header_get_all(headers, idx++,

confluent_kafka/src/confluent_kafka.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -270,7 +270,7 @@ PyObject *c_parts_to_py (const rd_kafka_topic_partition_list_t *c_parts);
270270
rd_kafka_topic_partition_list_t *py_to_c_parts (PyObject *plist);
271271

272272
#ifdef RD_KAFKA_V_HEADERS
273-
rd_kafka_headers_t *py_headers_to_c (PyObject *headers_plist);
273+
rd_kafka_headers_t *py_headers_to_c (PyObject *hdrs);
274274
PyObject *c_headers_to_py (rd_kafka_headers_t *headers);
275275
#endif
276276
/****************************************************************************

examples/integration_test.py

Lines changed: 10 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,7 @@
2626
import sys
2727
import json
2828
import gc
29+
import struct
2930
from copy import copy
3031

3132
try:
@@ -117,7 +118,8 @@ def verify_producer():
117118
p = confluent_kafka.Producer(**conf)
118119
print('producer at %s' % p)
119120

120-
headers = [('foo1', 'bar'), ('foo1', 'bar2'), ('foo2', b'1')]
121+
headers = [('foo1', 'bar'), ('foo1', 'bar2'), ('foo2', b'1'),
122+
('foobin', struct.pack('hhl', 10, 20, 30))]
121123

122124
# Produce some messages
123125
p.produce(topic, 'Hello Python!', headers=headers)
@@ -464,6 +466,8 @@ def print_wmark(consumer, parts):
464466

465467
first_msg = None
466468

469+
example_header = None
470+
467471
while True:
468472
# Consume until EOF or error
469473

@@ -517,12 +521,15 @@ def print_wmark(consumer, parts):
517521
print('Sync committed offset: %s' % offsets)
518522

519523
msgcnt += 1
520-
if msgcnt >= max_msgcnt:
524+
if msgcnt >= max_msgcnt and example_header is not None:
521525
print('max_msgcnt %d reached' % msgcnt)
522526
break
523527

524528
assert example_header, "We should have received at least one header"
525-
assert example_header == [(u'foo1', 'bar'), (u'foo1', 'bar2'), (u'foo2', '1')]
529+
assert example_header == [(u'foo1', 'bar'),
530+
(u'foo1', 'bar2'),
531+
(u'foo2', '1'),
532+
('foobin', struct.pack('hhl', 10, 20, 30))]
526533

527534
# Get current assignment
528535
assignment = c.assignment()

tests/test_Producer.py

Lines changed: 23 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22
import pytest
33

44
from confluent_kafka import Producer, KafkaError, KafkaException, libversion
5+
from struct import pack
56

67

78
def error_cb(err):
@@ -65,19 +66,31 @@ def test_produce_headers():
6566
'error_cb': error_cb,
6667
'default.topic.config': {'message.timeout.ms': 10}})
6768

68-
p.produce('mytopic', value='somedata', key='a key', headers=[('headerkey', 'headervalue')])
69-
p.produce('mytopic', value='somedata', key='a key', headers=[('dupkey', 'dupvalue'), ('dupkey', 'dupvalue')])
70-
p.produce('mytopic', value='somedata', key='a key', headers=[('dupkey', 'dupvalue'), ('dupkey', 'diffvalue')])
71-
p.produce('mytopic', value='somedata', key='a key', headers=[('key_with_null_value', None)])
72-
p.produce('mytopic', value='somedata', key='a key', headers=[])
69+
binval = pack('hhl', 1, 2, 3)
7370

74-
with pytest.raises(TypeError) as ex:
75-
p.produce('mytopic', value='somedata', key='a key', headers={'my': 'dict'})
76-
assert 'Headers are expected to be a list of (key,value) tuples' == str(ex.value)
71+
headers_to_test = [
72+
[('headerkey', 'headervalue')],
73+
[('dupkey', 'dupvalue'), ('empty', ''), ('dupkey', 'dupvalue')],
74+
[('dupkey', 'dupvalue'), ('dupkey', 'diffvalue')],
75+
[('key_with_null_value', None)],
76+
[('binaryval', binval)],
7777

78-
with pytest.raises(TypeError) as ex:
78+
{'headerkey': 'headervalue'},
79+
{'dupkey': 'dupvalue', 'empty': '', 'dupkey': 'dupvalue'}, # noqa: F601
80+
{'dupkey': 'dupvalue', 'dupkey': 'diffvalue'}, # noqa: F601
81+
{'key_with_null_value': None},
82+
{'binaryval': binval}
83+
]
84+
85+
for headers in headers_to_test:
86+
p.produce('mytopic', value='somedata', key='a key', headers=headers)
87+
p.produce('mytopic', value='somedata', headers=headers)
88+
89+
with pytest.raises(TypeError):
90+
p.produce('mytopic', value='somedata', key='a key', headers=('a', 'b'))
91+
92+
with pytest.raises(TypeError):
7993
p.produce('mytopic', value='somedata', key='a key', headers=[('malformed_header')])
80-
assert 'Headers are expected to be a list of (key,value) tuples' == str(ex.value)
8194

8295
p.flush()
8396

0 commit comments

Comments
 (0)
pFad - Phonifier reborn

Pfad - The Proxy pFad of © 2024 Garber Painting. All rights reserved.

Note: This service is not intended for secure transactions such as banking, social media, email, or purchasing. Use at your own risk. We assume no liability whatsoever for broken pages.


Alternative Proxies:

Alternative Proxy

pFad Proxy

pFad v3 Proxy

pFad v4 Proxy