Commit cdc5f3b

Merge pull request confluentinc#1400 from confluentinc/v1.9.2rc

2 parents c2c8b6b + 1021f55
File tree

15 files changed: +317 −135 lines changed

.semaphore/semaphore.yml

Lines changed: 93 additions & 0 deletions

@@ -0,0 +1,93 @@
+version: v1.0
+name: Test on PR or create and upload wheels on tag.
+agent:
+  machine:
+    type: s1-prod-mac-m1
+global_job_config:
+  secrets:
+    - name: vault_sem2_approle
+  env_vars:
+    - name: LIBRDKAFKA_VERSION
+      value: v1.9.2
+blocks:
+  - name: "Wheels: OSX x64"
+    run:
+      when: "tag =~ '.*'"
+    dependencies: []
+    task:
+      agent:
+        machine:
+          type: s1-prod-macos
+      env_vars:
+        - name: OS_NAME
+          value: osx
+        - name: ARCH
+          value: x64
+      jobs:
+        - name: Build
+          commands:
+            - cd $SEM_WORKSPACE
+            - export HOME=$SEM_WORKSPACE
+            - checkout
+            # needed on the self-hosted agent
+            - if [ ! -d ./tools ]; then cd $SEM_WORKSPACE/confluent-kafka-python; fi
+            - PIP_INSTALL_OPTIONS="--user" tools/wheels/build-wheels.sh "${LIBRDKAFKA_VERSION#v}" wheelhouse
+            - tar -czf wheelhouse-macOS-${ARCH}.tgz wheelhouse
+            - artifact push workflow wheelhouse-macOS-${ARCH}.tgz
+  - name: "Wheels: OSX arm64"
+    run:
+      when: "tag =~ '.*'"
+    dependencies: []
+    task:
+      env_vars:
+        - name: OS_NAME
+          value: osx
+        - name: CIBW_ARCHS
+          value: arm64
+        - name: ARCH
+          value: arm64
+      jobs:
+        - name: Build
+          commands:
+            - cd $SEM_WORKSPACE
+            - export HOME=$SEM_WORKSPACE
+            - checkout
+            # needed on the self-hosted agent
+            - if [ ! -d ./tools ]; then cd $SEM_WORKSPACE/confluent-kafka-python; fi
+            - PIP_INSTALL_OPTIONS="--user" tools/wheels/build-wheels.sh "${LIBRDKAFKA_VERSION#v}" wheelhouse
+            - tar -czf wheelhouse-macOS-${ARCH}.tgz wheelhouse
+            - artifact push workflow wheelhouse-macOS-${ARCH}.tgz
+
+  - name: Source package verification with Python 3 (OSX x64) +docs
+    dependencies: []
+    task:
+      agent:
+        machine:
+          type: s1-prod-macos
+      env_vars:
+        - name: OS_NAME
+          value: osx
+        - name: ARCH
+          value: arm64
+      jobs:
+        - name: Build
+          commands:
+            - cd $SEM_WORKSPACE
+            - export HOME=$SEM_WORKSPACE
+            - checkout
+            # needed on the self-hosted agent
+            - if [ ! -d ./tools ]; then cd $SEM_WORKSPACE/confluent-kafka-python; fi
+            # use a virtualenv
+            - python3 -m venv _venv && source _venv/bin/activate
+            - pip install -r docs/requirements.txt
+            - pip install -U protobuf
+            # install librdkafka
+            - lib_dir=dest/runtimes/$OS_NAME-$ARCH/native
+            - tools/wheels/install-librdkafka.sh "${LIBRDKAFKA_VERSION#v}" dest
+            - export CFLAGS="$CFLAGS -I${PWD}/dest/build/native/include"
+            - export LDFLAGS="$LDFLAGS -L${PWD}/${lib_dir}"
+            - export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$PWD/$lib_dir"
+            - export DYLD_LIBRARY_PATH="$DYLD_LIBRARY_PATH:$PWD/$lib_dir"
+            # install confluent-kafka
+            - python setup.py build && python setup.py install
+            - make docs
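Each wheel job passes the librdkafka version to build-wheels.sh as "${LIBRDKAFKA_VERSION#v}", a shell parameter expansion that strips the leading "v" from the tag value. A small Python sketch of the same transformation, just to make the expansion concrete (the tag value is taken from the config above):

    # Equivalent of the shell expansion ${LIBRDKAFKA_VERSION#v}:
    # drop a leading "v" prefix, if present, before handing the
    # bare version to the build script.
    tag = "v1.9.2"
    version = tag.removeprefix("v")  # Python 3.9+; -> "1.9.2"
    print(version)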

.travis.yml

Lines changed: 3 additions & 30 deletions

@@ -1,37 +1,17 @@
 env:
   global:
-    - LIBRDKAFKA_VERSION=v1.9.0
+    - LIBRDKAFKA_VERSION=v1.9.2
 
 jobs:
   include:
-    - name: "Source package verification with Python 2.7 (Linux)"
-      if: false
+    - name: "Source package verification with Python 3.8 (Linux)"
       os: linux
       language: python
       dist: xenial
-      python: "2.7"
-      env: LD_LIBRARY_PATH="$PWD/tmp-build/lib"
-      services: docker
-
-    - name: "Source package verification with Python 3.6 (Linux)"
-      os: linux
-      language: python
-      dist: xenial
-      python: "3.6"
+      python: "3.8"
       env: LD_LIBRARY_PATH="$PWD/tmp-build/lib"
       services: docker
 
-    - name: "Source package verification with Python 2.7 (OSX)"
-      if: false
-      os: osx
-      python: "2.7"
-      env: DYLD_LIBRARY_PATH="$PWD/tmp-build/lib" INTERPRETER_VERSION="2.7.17"
-
-    - name: "Source package verification with Python 3.6 (OSX) +docs"
-      os: osx
-      python: "3.6"
-      env: DYLD_LIBRARY_PATH="$PWD/tmp-build/lib" MK_DOCS="y" INTERPRETER_VERSION="3.6.5"
-
     - name: "Wheels: Windows x64"
       if: tag is present
       os: windows

@@ -77,13 +57,6 @@ jobs:
       env: BUILD_WHEELS=1
       script: tools/wheels/build-wheels.sh ${LIBRDKAFKA_VERSION#v} wheelhouse
 
-
-# Install test dependencies unconditionally
-# Travis OSX envs requires some setup; see tools/prepare-osx.sh
-# Install cibuildwheel if this is a tagged PR
-before_install:
-  - if [[ $TRAVIS_OS_NAME == "osx" && $BUILD_WHEELS != 1 ]]; then tools/prepare-osx.sh ${INTERPRETER_VERSION} /tmp/venv && source /tmp/venv/bin/activate; fi
-
 install:
   # Install interceptors
   - tools/install-interceptors.sh

CHANGELOG.md

Lines changed: 28 additions & 0 deletions

@@ -1,5 +1,33 @@
 # Confluent's Python client for Apache Kafka
 
+## v1.9.2
+
+v1.9.2 is a maintenance release with the following fixes and enhancements:
+
+ - Support for setting principal and SASL extensions in oauth_cb
+   and handling of failures (@Manicben, #1402)
+ - Wheel for macOS M1/arm64
+ - KIP-140 Admin API ACL fix:
+   When requesting multiple create_acls or delete_acls operations,
+   if the provided ACL bindings or ACL binding filters are not
+   unique, an exception will be thrown immediately rather than later
+   when the responses are read. (#1370).
+ - KIP-140 Admin API ACL fix:
+   Better documentation of the describe and delete ACLs behavior
+   when using the MATCH resource pattern type in a filter. (#1373).
+ - Avro serialization examples:
+   added a parameter for using a generic or specific Avro schema. (#1381).
+
+confluent-kafka-python is based on librdkafka v1.9.2, see the
+[librdkafka release notes](https://github.com/edenhill/librdkafka/releases/tag/v1.9.2)
+for a complete list of changes, enhancements, fixes and upgrade considerations.
+
+
+## v1.9.1
+
+There was no 1.9.1 release of the Python Client.
+
+
 ## v1.9.0
 
 This is a feature release:
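The first bullet changes the oauth_cb contract: besides the usual (token, expiry) pair, the callback may now return a 4-tuple that also carries the principal and a dict of SASL extensions, matching the new parsing in confluent_kafka.c below. A minimal sketch of the new shape, assuming a broker configured for SASL/OAUTHBEARER; fetch_token(), the broker address, the principal, and the extension names are placeholders, not part of the API:

    import time
    from confluent_kafka import Producer

    def fetch_token():
        # Placeholder: obtain a bearer token from your identity provider.
        return "eyJhbGciOi..."

    def oauth_cb(oauthbearer_config):
        token = fetch_token()
        expiry = time.time() + 3600                    # absolute expiry, seconds since epoch
        principal = "my-service"                       # optional, new in v1.9.2
        extensions = {"logicalCluster": "lkc-abc123"}  # optional SASL extensions, new in v1.9.2
        return token, expiry, principal, extensions    # returning (token, expiry) alone still works

    producer = Producer({
        "bootstrap.servers": "broker:9092",
        "security.protocol": "SASL_SSL",
        "sasl.mechanisms": "OAUTHBEARER",
        "oauth_cb": oauth_cb,
    })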

docs/conf.py

Lines changed: 1 addition & 1 deletion

@@ -34,7 +34,7 @@
 # built documents.
 #
 # The short X.Y version.
-version = '1.9.0'
+version = '1.9.2'
 # The full version, including alpha/beta/rc tags.
 release = version
 ######################################################################

examples/docker/Dockerfile.alpine

Lines changed: 1 addition & 1 deletion

@@ -30,7 +30,7 @@ FROM alpine:3.12
 
 COPY . /usr/src/confluent-kafka-python
 
-ENV LIBRDKAFKA_VERSION v1.9.0
+ENV LIBRDKAFKA_VERSION v1.9.2
 ENV KAFKACAT_VERSION master
 
 
setup.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -75,7 +75,7 @@ def get_install_requirements(path):
7575
setup(name='confluent-kafka',
7676
# Make sure to bump CFL_VERSION* in confluent_kafka/src/confluent_kafka.h
7777
# and version in docs/conf.py.
78-
version='1.9.0',
78+
version='1.9.2',
7979
description='Confluent\'s Python client for Apache Kafka',
8080
author='Confluent Inc',
8181
author_email='support@confluent.io',

src/confluent_kafka/src/confluent_kafka.c

Lines changed: 98 additions & 7 deletions

@@ -1522,13 +1522,73 @@ static void log_cb (const rd_kafka_t *rk, int level,
         CallState_resume(cs);
 }
 
+/**
+ * @brief Translate Python \p key and \p value to C types and set on
+ *        provided \p extensions char* array at the provided index.
+ *
+ * @returns 1 on success or 0 if an exception was raised.
+ */
+static int py_extensions_to_c (char **extensions, Py_ssize_t idx,
+                               PyObject *key, PyObject *value) {
+        PyObject *ks, *ks8, *vo8 = NULL;
+        const char *k;
+        const char *v;
+        Py_ssize_t ksize = 0;
+        Py_ssize_t vsize = 0;
+
+        if (!(ks = cfl_PyObject_Unistr(key))) {
+                PyErr_SetString(PyExc_TypeError,
+                                "expected extension key to be unicode "
+                                "string");
+                return 0;
+        }
+
+        k = cfl_PyUnistr_AsUTF8(ks, &ks8);
+        ksize = (Py_ssize_t)strlen(k);
+
+        if (cfl_PyUnistr(_Check(value))) {
+                /* Unicode string, translate to utf-8. */
+                v = cfl_PyUnistr_AsUTF8(value, &vo8);
+                if (!v) {
+                        Py_DECREF(ks);
+                        Py_XDECREF(ks8);
+                        return 0;
+                }
+                vsize = (Py_ssize_t)strlen(v);
+        } else {
+                PyErr_Format(PyExc_TypeError,
+                             "expected extension value to be "
+                             "unicode string, not %s",
+                             ((PyTypeObject *)PyObject_Type(value))->
+                             tp_name);
+                Py_DECREF(ks);
+                Py_XDECREF(ks8);
+                return 0;
+        }
+
+        extensions[idx] = (char*)malloc(ksize);
+        strcpy(extensions[idx], k);
+        extensions[idx + 1] = (char*)malloc(vsize);
+        strcpy(extensions[idx + 1], v);
+
+        Py_DECREF(ks);
+        Py_XDECREF(ks8);
+        Py_XDECREF(vo8);
+
+        return 1;
+}
+
 static void oauth_cb (rd_kafka_t *rk, const char *oauthbearer_config,
                       void *opaque) {
         Handle *h = opaque;
         PyObject *eo, *result;
         CallState *cs;
         const char *token;
         double expiry;
+        const char *principal = "";
+        PyObject *extensions = NULL;
+        char **rd_extensions = NULL;
+        Py_ssize_t rd_extensions_size = 0;
         char err_msg[2048];
         rd_kafka_resp_err_t err_code;

@@ -1539,26 +1599,57 @@ static void oauth_cb (rd_kafka_t *rk, const char *oauthbearer_config,
         Py_DECREF(eo);
 
         if (!result) {
-                goto err;
+                goto fail;
         }
-        if (!PyArg_ParseTuple(result, "sd", &token, &expiry)) {
+        if (!PyArg_ParseTuple(result, "sd|sO!", &token, &expiry, &principal, &PyDict_Type, &extensions)) {
                 Py_DECREF(result);
-                PyErr_Format(PyExc_TypeError,
+                PyErr_SetString(PyExc_TypeError,
                              "expect returned value from oauth_cb "
-                             "to be (token_str, expiry_time) tuple");
+                             "to be (token_str, expiry_time[, principal, extensions]) tuple");
                 goto err;
         }
+
+        if (extensions) {
+                int len = (int)PyDict_Size(extensions);
+                rd_extensions = (char **)malloc(2 * len * sizeof(char *));
+                Py_ssize_t pos = 0;
+                PyObject *ko, *vo;
+                while (PyDict_Next(extensions, &pos, &ko, &vo)) {
+                        if (!py_extensions_to_c(rd_extensions, rd_extensions_size, ko, vo)) {
+                                Py_DECREF(result);
+                                free(rd_extensions);
+                                goto err;
+                        }
+                        rd_extensions_size = rd_extensions_size + 2;
+                }
+        }
+
         err_code = rd_kafka_oauthbearer_set_token(h->rk, token,
                                                   (int64_t)(expiry * 1000),
-                                                  "", NULL, 0, err_msg,
+                                                  principal, (const char **)rd_extensions, rd_extensions_size, err_msg,
                                                   sizeof(err_msg));
         Py_DECREF(result);
-        if (err_code) {
+        if (rd_extensions) {
+                for(int i = 0; i < rd_extensions_size; i++) {
+                        free(rd_extensions[i]);
+                }
+                free(rd_extensions);
+        }
+
+        if (err_code != RD_KAFKA_RESP_ERR_NO_ERROR) {
                 PyErr_Format(PyExc_ValueError, "%s", err_msg);
-                goto err;
+                goto fail;
         }
         goto done;
 
+fail:
+        err_code = rd_kafka_oauthbearer_set_token_failure(h->rk, "OAuth callback raised exception");
+        if (err_code != RD_KAFKA_RESP_ERR_NO_ERROR) {
+                PyErr_SetString(PyExc_ValueError, "Failed to set token failure");
+                goto err;
+        }
+        PyErr_Clear();
+        goto done;
 err:
         CallState_crash(cs);
         rd_kafka_yield(h->rk);
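The new py_extensions_to_c() helper copies the extensions dict into a flat char* array of alternating keys and values (key at idx, value at idx + 1), which is the layout rd_kafka_oauthbearer_set_token() expects, with rd_extensions_size growing by 2 per entry. The same flattening, modeled in Python with hypothetical extension names:

    # Model of the C flattening: {"k1": "v1", "k2": "v2"} -> ["k1", "v1", "k2", "v2"].
    extensions = {"logicalCluster": "lkc-abc123", "identityPoolId": "pool-xyz"}
    rd_extensions = [s for kv in extensions.items() for s in kv]
    assert len(rd_extensions) == 2 * len(extensions)

The other behavioral change is the new fail: label: when the callback raises or set_token rejects the token, the error is reported to librdkafka via rd_kafka_oauthbearer_set_token_failure() and the Python exception is cleared, so librdkafka can retry token refresh instead of the client crashing the call state.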

src/confluent_kafka/src/confluent_kafka.h

Lines changed: 2 additions & 2 deletions

@@ -42,8 +42,8 @@
  *   0xMMmmRRPP
  *   MM=major, mm=minor, RR=revision, PP=patchlevel (not used)
  */
-#define CFL_VERSION     0x01090000
-#define CFL_VERSION_STR "1.9.0"
+#define CFL_VERSION     0x01090200
+#define CFL_VERSION_STR "1.9.2"
 
 /**
  * Minimum required librdkafka version. This is checked both during
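The 0xMMmmRRPP packing described in the comment keeps version checks as cheap integer comparisons. A quick sketch decoding the new value, just to verify the bump matches CFL_VERSION_STR:

    # Decode CFL_VERSION 0x01090200 per the 0xMMmmRRPP layout above.
    v = 0x01090200
    major, minor = (v >> 24) & 0xFF, (v >> 16) & 0xFF
    revision, patch = (v >> 8) & 0xFF, v & 0xFF
    print(f"{major}.{minor}.{revision}")  # -> 1.9.2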

tests/integration/integration_test.py

Lines changed: 4 additions & 4 deletions

@@ -1126,10 +1126,10 @@ def verify_avro_explicit_read_schema():
             msgcount += 1
             # Avro schema projection should return the two fields not present in the writer schema
             try:
-                assert(msg.key().get('favorite_number') == 42)
-                assert(msg.key().get('favorite_color') == "purple")
-                assert(msg.value().get('favorite_number') == 42)
-                assert(msg.value().get('favorite_color') == "purple")
+                assert (msg.key().get('favorite_number') == 42)
+                assert (msg.key().get('favorite_color') == "purple")
+                assert (msg.value().get('favorite_number') == 42)
+                assert (msg.value().get('favorite_color') == "purple")
                 print("success: schema projection worked for explicit reader schema")
             except KeyError:
                 raise confluent_kafka.avro.SerializerError("Schema projection failed when setting reader schema.")
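This test exercises Avro schema projection: the consumer decodes with an explicit reader schema whose extra fields ('favorite_number', 'favorite_color') are filled from their defaults when the writer schema lacks them. A minimal sketch of the consumer side, assuming the AvroConsumer reader-schema keyword arguments the test uses; the schema body, broker, and registry addresses here are placeholders:

    from confluent_kafka import avro
    from confluent_kafka.avro import AvroConsumer

    # Hypothetical reader schema adding the two defaulted fields.
    reader_schema = avro.loads("""
    {
      "type": "record", "name": "User", "fields": [
        {"name": "name", "type": "string"},
        {"name": "favorite_number", "type": "int", "default": 42},
        {"name": "favorite_color", "type": "string", "default": "purple"}
      ]
    }
    """)

    consumer = AvroConsumer({
        "bootstrap.servers": "broker:9092",
        "schema.registry.url": "http://schema-registry:8081",
        "group.id": "avro-projection-demo",
    }, reader_key_schema=reader_schema, reader_value_schema=reader_schema)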
