Skip to content

Commit 32ad9f6

Browse files
authored
Merge pull request confluentinc#1020 from confluentinc/tst152
Fix packaging, tests, etc, bump to 1.6.0
2 parents 54890e2 + 2e50f40 commit 32ad9f6

23 files changed

+274
-112
lines changed

.appveyor.yml

Lines changed: 0 additions & 47 deletions
This file was deleted.

.travis.yml

Lines changed: 68 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -1,73 +1,106 @@
11
env:
22
global:
3-
- LIBRDKAFKA_VERSION=v1.5.2
3+
- LIBRDKAFKA_VERSION=v1.6.0
4+
45
jobs:
56
include:
67
- name: "Source package verification with Python 2.7 (Linux)"
78
os: linux
89
language: python
9-
dist: trusty
10+
dist: xenial
1011
python: "2.7"
1112
env: LD_LIBRARY_PATH="$PWD/tmp-build/lib"
13+
services: docker
14+
1215
- name: "Source package verification with Python 3.6 (Linux)"
1316
os: linux
1417
language: python
15-
dist: trusty
18+
dist: xenial
1619
python: "3.6"
1720
env: LD_LIBRARY_PATH="$PWD/tmp-build/lib"
21+
services: docker
22+
1823
- name: "Source package verification with Python 2.7 (OSX)"
1924
os: osx
2025
python: "2.7"
2126
env: DYLD_LIBRARY_PATH="$PWD/tmp-build/lib" INTERPRETER_VERSION="2.7.17"
27+
2228
- name: "Source package verification with Python 3.6 (OSX) +docs"
2329
os: osx
2430
python: "3.6"
2531
env: DYLD_LIBRARY_PATH="$PWD/tmp-build/lib" MK_DOCS="y" INTERPRETER_VERSION="3.6.5"
26-
- name: "cibuildwheel (OSX)"
27-
os: osx
28-
env: CIBW_BEFORE_BUILD="tools/bootstrap-librdkafka.sh --require-ssl ${LIBRDKAFKA_VERSION} tmp" CFLAGS="-Itmp/include" LDFLAGS="-Ltmp/lib" INTERPRETER_VERSION="2.7.17"
29-
- name: "cibuildwheel (manylinux)"
30-
os: linux
31-
dist: trusty
32-
env:
33-
- CIBW_BEFORE_BUILD="tools/prepare-cibuildwheel-linux.sh ${LIBRDKAFKA_VERSION}"
34-
- PYTHON_CONFIGURE_OPTS="--enable-unicode=ucs4 --with-wide-unicode"
35-
- CIBW_MANYLINUX_X86_64_IMAGE="manylinux1"
36-
- CIBW_MANYLINUX_I686_IMAGE="manylinux1"
32+
33+
- name: "Wheels: Windows x64"
34+
if: tag is present
35+
os: windows
36+
language: shell
37+
env: BUILD_WHEELS=1
38+
before_install:
39+
- choco install python --version 3.8.0
40+
- export PATH="/c/Python38:/c/Python38/Scripts:$PATH"
41+
# make sure it's on PATH as 'python3'
42+
- ln -s /c/Python38/python.exe /c/Python38/python3.exe
43+
install:
44+
- bash tools/wheels/install-librdkafka.sh ${LIBRDKAFKA_VERSION#v} dest
45+
script:
46+
- tools/wheels/build-wheels.bat x64 win_amd64 dest wheelhouse
47+
48+
- name: "Wheels: Windows x86"
49+
if: tag is present
50+
os: windows
51+
language: shell
52+
env: BUILD_WHEELS=1
53+
before_install:
54+
- choco install python --version 3.8.0
55+
- export PATH="/c/Python38:/c/Python38/Scripts:$PATH"
56+
# make sure it's on PATH as 'python3'
57+
- ln -s /c/Python38/python.exe /c/Python38/python3.exe
58+
install:
59+
- bash tools/wheels/install-librdkafka.sh ${LIBRDKAFKA_VERSION#v} dest
60+
script:
61+
- tools/wheels/build-wheels.bat x86 win32 dest wheelhouse
62+
63+
- name: "Wheels: Linux x64"
64+
if: tag is present
3765
language: python
38-
python: "2.7"
66+
python: "3.8"
3967
services: docker
68+
env: BUILD_WHEELS=1
69+
script: tools/wheels/build-wheels.sh ${LIBRDKAFKA_VERSION#v} wheelhouse
70+
71+
- name: "Wheels: MacOSX x64"
72+
if: tag is present
73+
os: osx
74+
language: shell
75+
env: BUILD_WHEELS=1
76+
script: tools/wheels/build-wheels.sh ${LIBRDKAFKA_VERSION#v} wheelhouse
4077

41-
# See https://cibuildwheel.readthedocs.io/en/latest/options/ for CIBW* vars
4278

4379
# Install test dependencies unconditionally
4480
# Travis OSX envs requires some setup; see tools/prepare-osx.sh
4581
# Install cibuildwheel if this is a tagged PR
4682
before_install:
47-
- if [[ $TRAVIS_OS_NAME == "osx" ]]; then tools/prepare-osx.sh ${INTERPRETER_VERSION} /tmp/venv && source /tmp/venv/bin/activate; fi
48-
- if [[ -n $TRAVIS_TAG && -n $CIBW_BEFORE_BUILD ]]; then pip install -U -r tools/requirements-manylinux.txt --force-reinstall ; fi
83+
- if [[ $TRAVIS_OS_NAME == "osx" && $BUILD_WHEELS != 1 ]]; then tools/prepare-osx.sh ${INTERPRETER_VERSION} /tmp/venv && source /tmp/venv/bin/activate; fi
4984

50-
# Install interceptors
51-
# Install librdkafka if not CIBW_BEFORE_BUILD
52-
# Install confluent_kafka[avro] if not CIBW_BEFORE_BUILD
5385
install:
86+
# Install interceptors
5487
- tools/install-interceptors.sh
55-
- pip install -r tests/requirements.txt
56-
- pip install tox
57-
- flake8
88+
- if [[ $BUILD_WHEELS != 1 ]]; then pip install -r tests/requirements.txt ; fi
5889
- if [[ $MK_DOCS == y ]]; then pip install -r docs/requirements.txt; fi
59-
- if [[ -z $CIBW_BEFORE_BUILD ]]; then tools/bootstrap-librdkafka.sh --require-ssl ${LIBRDKAFKA_VERSION} tmp-build && pip install --global-option=build_ext --global-option="-Itmp-build/include/" --global-option="-Ltmp-build/lib" . .[avro]; fi
90+
# Install librdkafka and confluent_kafka[avro] if not building wheels
91+
- if [[ $BUILD_WHEELS != 1 ]]; then pip install -U protobuf && tools/bootstrap-librdkafka.sh --require-ssl ${LIBRDKAFKA_VERSION} tmp-build ; fi
92+
6093

61-
# Build wheels
62-
# Make plugins available for tests
63-
# Execute tests if not CIBW_BEFORE_BUILD [osx, linux]
64-
# Execute integration tests if CIBW_BEFORE_BUILD
65-
# Build docs if MK_DOCS
94+
95+
# Note: Will not be run for wheel builds.
6696
script:
67-
- if [[ -n $TRAVIS_TAG && -n $CIBW_BEFORE_BUILD ]]; then cibuildwheel --output-dir wheelhouse1 && tools/fixup-wheels.sh wheelhouse1 wheelhouse; fi
97+
- flake8
98+
# Build package
99+
- pip install --global-option=build_ext --global-option="-Itmp-build/include/" --global-option="-Ltmp-build/lib" . .[avro] .[schema-registry] .[json] .[protobuf]
68100
- ldd staging/libs/* || otool -L staging/libs/* || true
69-
- [[ -n $CIBW_BEFORE_BUILD ]] || LD_LIBRARY_PATH=$LD_LIBRARY_PATH:staging/libs DYLD_LIBRARY_PATH=$DYLD_LIBRARY_PATH:staging/libs tox
70-
- if [[ -n $TRAVIS_TAG && $TRAVIS_OS_NAME == osx && -n $CIBW_BEFORE_BUILD ]]; then tools/test-wheel.sh wheelhouse; fi
101+
# Run tests
102+
- if [[ $TRAVIS_OS_NAME == "linux" ]]; then LD_LIBRARY_PATH=$LD_LIBRARY_PATH:staging/libs DYLD_LIBRARY_PATH=$DYLD_LIBRARY_PATH:staging/libs python -m pytest --timeout 600 --ignore=tmp-build || travis_terminate 1; fi
103+
# Build docs
71104
- if [[ $MK_DOCS == y ]]; then make docs; fi
72105

73106
deploy:
@@ -82,8 +115,8 @@ deploy:
82115
local-dir: wheelhouse
83116
upload_dir: confluent-kafka-python/p-confluent-kafka-python__bld-travis__plat-${TRAVIS_OS_NAME}__tag-${TRAVIS_TAG}__sha-${TRAVIS_COMMIT}__bid-${TRAVIS_BUILD_ID}__
84117
acl: public_read
85-
skip_cleanup: true
118+
cleanup: false
86119
on:
87120
repo: confluentinc/confluent-kafka-python
88121
tags: true
89-
condition: "-n $CIBW_BEFORE_BUILD"
122+
condition: "$BUILD_WHEELS == 1"

docs/conf.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -57,7 +57,7 @@
5757
# built documents.
5858
#
5959
# The short X.Y version.
60-
version = '1.5.2'
60+
version = '1.6.0'
6161
# The full version, including alpha/beta/rc tags.
6262
release = version
6363

examples/docker/Dockerfile.alpine

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,7 @@ FROM alpine:3.12
3030

3131
COPY . /usr/src/confluent-kafka-python
3232

33-
ENV LIBRDKAFKA_VERSION v1.5.2
33+
ENV LIBRDKAFKA_VERSION v1.6.0
3434
ENV KAFKACAT_VERSION master
3535

3636

examples/requirements.txt

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,5 +3,7 @@ fastapi
33
pydantic
44
uvicorn
55
six
6+
pyrsistent==0.16.1;python_version<"3.0"
7+
pyrsistent;python_version>"3.0"
68
jsonschema
79
protobuf

setup.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,9 @@
3131
'avro-python3==1.10.0;python_version>"3.0"'
3232
] + SCHEMA_REGISTRY_REQUIRES
3333

34-
JSON_REQUIRES = ['jsonschema'] + SCHEMA_REGISTRY_REQUIRES
34+
JSON_REQUIRES = ['pyrsistent==0.16.1;python_version<"3.0"',
35+
'pyrsistent;python_version>"3.0"',
36+
'jsonschema'] + SCHEMA_REGISTRY_REQUIRES
3537

3638
PROTO_REQUIRES = ['protobuf'] + SCHEMA_REGISTRY_REQUIRES
3739

@@ -74,7 +76,7 @@ def get_install_requirements(path):
7476
setup(name='confluent-kafka',
7577
# Make sure to bump CFL_VERSION* in confluent_kafka/src/confluent_kafka.h
7678
# and version and release in docs/conf.py.
77-
version='1.5.2',
79+
version='1.6.0',
7880
description='Confluent\'s Python client for Apache Kafka',
7981
author='Confluent Inc',
8082
author_email='support@confluent.io',
Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,6 @@
11
fastavro>=0.23.0
2+
pyrsistent==0.16.1;python_version<"3.0"
3+
pyrsistent;python_version>"3.0"
24
jsonschema
35
protobuf
46
requests

src/confluent_kafka/src/confluent_kafka.h

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -42,28 +42,28 @@
4242
* 0xMMmmRRPP
4343
* MM=major, mm=minor, RR=revision, PP=patchlevel (not used)
4444
*/
45-
#define CFL_VERSION 0x01050200
46-
#define CFL_VERSION_STR "1.5.2"
45+
#define CFL_VERSION 0x01060000
46+
#define CFL_VERSION_STR "1.6.0"
4747

4848
/**
4949
* Minimum required librdkafka version. This is checked both during
5050
* build-time (just below) and runtime (see confluent_kafka.c).
5151
* Make sure to keep the MIN_RD_KAFKA_VERSION, MIN_VER_ERRSTR and #error
5252
* defines and strings in sync.
5353
*/
54-
#define MIN_RD_KAFKA_VERSION 0x01040000
54+
#define MIN_RD_KAFKA_VERSION 0x01060000
5555

5656
#ifdef __APPLE__
57-
#define MIN_VER_ERRSTR "confluent-kafka-python requires librdkafka v1.4.0 or later. Install the latest version of librdkafka from Homebrew by running `brew install librdkafka` or `brew upgrade librdkafka`"
57+
#define MIN_VER_ERRSTR "confluent-kafka-python requires librdkafka v1.6.0 or later. Install the latest version of librdkafka from Homebrew by running `brew install librdkafka` or `brew upgrade librdkafka`"
5858
#else
59-
#define MIN_VER_ERRSTR "confluent-kafka-python requires librdkafka v1.4.0 or later. Install the latest version of librdkafka from the Confluent repositories, see http://docs.confluent.io/current/installation.html"
59+
#define MIN_VER_ERRSTR "confluent-kafka-python requires librdkafka v1.6.0 or later. Install the latest version of librdkafka from the Confluent repositories, see http://docs.confluent.io/current/installation.html"
6060
#endif
6161

6262
#if RD_KAFKA_VERSION < MIN_RD_KAFKA_VERSION
6363
#ifdef __APPLE__
64-
#error "confluent-kafka-python requires librdkafka v1.4.0 or later. Install the latest version of librdkafka from Homebrew by running `brew install librdkafka` or `brew upgrade librdkafka`"
64+
#error "confluent-kafka-python requires librdkafka v1.6.0 or later. Install the latest version of librdkafka from Homebrew by running `brew install librdkafka` or `brew upgrade librdkafka`"
6565
#else
66-
#error "confluent-kafka-python requires librdkafka v1.4.0 or later. Install the latest version of librdkafka from the Confluent repositories, see http://docs.confluent.io/current/installation.html"
66+
#error "confluent-kafka-python requires librdkafka v1.6.0 or later. Install the latest version of librdkafka from the Confluent repositories, see http://docs.confluent.io/current/installation.html"
6767
#endif
6868
#endif
6969

tests/integration/schema_registry/data/Makefile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ TARGET_DIR=$(WORK_DIR)/../gen
55

66
PROTOS := common_proto.proto DependencyTestProto.proto exampleProtoCriteo.proto $\
77
metadata_proto.proto NestedTestProto.proto PublicTestProto.proto $\
8-
Sint32Value.proto Sint64Value.proto TestProto.proto
8+
SInt32Value.proto SInt64Value.proto TestProto.proto
99

1010
compile: $(PROTOS)
1111
for proto in $(PROTOS); do \

tests/integration/schema_registry/test_proto_serializers.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@
1616
#
1717
import pytest
1818

19-
from confluent_kafka import TopicPartition, KafkaException
19+
from confluent_kafka import TopicPartition, KafkaException, KafkaError
2020
from confluent_kafka.error import ConsumeError
2121
from confluent_kafka.schema_registry.protobuf import ProtobufSerializer, ProtobufDeserializer
2222
from .gen import metadata_proto_pb2
@@ -145,6 +145,6 @@ def dr(err, msg):
145145
partition=0)
146146
producer.flush()
147147

148-
with pytest.raises(ConsumeError,
149-
match="Error parsing message"):
148+
with pytest.raises(ConsumeError) as e:
150149
consumer.poll()
150+
assert e.value.code == KafkaError._KEY_DESERIALIZATION

0 commit comments

Comments (0)
pFad - Phonifier reborn

pFad - The Proxy pFad of © 2024 Garber Painting. All rights reserved.

Note: This service is not intended for secure transactions such as banking, social media, email, or purchasing. Use at your own risk. We assume no liability whatsoever for broken pages.


Alternative Proxies:

Alternative Proxy

pFad Proxy

pFad v3 Proxy

pFad v4 Proxy