
Commit cc7ddbb

Support for building Windows wheels
1 parent 9cefdd6 commit cc7ddbb

10 files changed, +210 -49 lines changed


.appveyor-disabled.yml

Lines changed: 0 additions & 25 deletions
This file was deleted.

.appveyor.yml

Lines changed: 30 additions & 0 deletions
@@ -0,0 +1,30 @@
+environment:
+  global:
+    LIBRDKAFKA_NUGET_VERSION: 0.11.6-RC2
+    CIBW_SKIP: cp33-* cp34-*
+    CIBW_TEST_REQUIRES: pytest requests avro
+    # SDK v7.0 MSVC Express 2008's SetEnv.cmd script will fail if the
+    # /E:ON and /V:ON options are not enabled in the batch script interpreter
+    # See: http://stackoverflow.com/a/13751649/163740
+    CMD_IN_ENV: "cmd /E:ON /V:ON /C .\\tools\\appveyor\\run_with_env.cmd"
+
+build_script:
+  - tools/windows-build.bat
+
+artifacts:
+  - path: "wheelhouse\\*.whl"
+    name: Wheels
+
+deploy:
+  - provider: S3
+    access_key_id:
+      secure: t+Xo4x1mYVbqzvUDlnuMgFGp8LjQJNOfsDUAMxBsVH4=
+    secret_access_key:
+      secure: SNziQPPJs4poCHM7dk6OxufUYcGQhMWiNPx6Y1y6DYuWGjPc3K0APGeousLHsbLv
+    region: us-west-1
+    bucket: librdkafka-ci-packages
+    folder: confluent-kafka-python/p-confluent-kafka-python__bld-appveyor__plat-windows__arch-$(platform)__bldtype-$(configuration)__tag-$(APPVEYOR_REPO_TAG_NAME)__sha-$(APPVEYOR_REPO_COMMIT)__bid-$(APPVEYOR_BUILD_ID)
+    artifact: /wheelhouse\/.*\.whl/
+    max_error_retry: 3
+    on:
+      APPVEYOR_REPO_TAG: true

confluent_kafka/src/Admin.c

Lines changed: 20 additions & 8 deletions
@@ -193,7 +193,7 @@ static int Admin_set_replica_assignment (const char *forApi, void *c_obj,
                 return 0;
         }
 
-        c_replicas = alloca(sizeof(*c_replicas) *
+        c_replicas = malloc(sizeof(*c_replicas) *
                             replica_cnt);
 
         for (ri = 0 ; ri < replica_cnt ; ri++) {
@@ -206,6 +206,7 @@ static int Admin_set_replica_assignment (const char *forApi, void *c_obj,
                                      "replica_assignment must be "
                                      "a list of int lists with an "
                                      "outer size of %s", err_count_desc);
+                        free(c_replicas);
                         return 0;
                 }
 
@@ -231,6 +232,8 @@ static int Admin_set_replica_assignment (const char *forApi, void *c_obj,
                          "Unsupported forApi %s", forApi);
         }
 
+        free(c_replicas);
+
         if (err) {
                 PyErr_SetString(
                         PyExc_ValueError, errstr);
@@ -255,6 +258,7 @@ Admin_config_dict_to_c (void *c_obj, PyObject *dict, const char *op_name) {
 
         while (PyDict_Next(dict, &pos, &ko, &vo)) {
                 PyObject *ks, *ks8;
+                PyObject *vs = NULL, *vs8 = NULL;
                 const char *k;
                 const char *v;
                 rd_kafka_resp_err_t err;
@@ -268,8 +272,6 @@ Admin_config_dict_to_c (void *c_obj, PyObject *dict, const char *op_name) {
 
                 k = cfl_PyUnistr_AsUTF8(ks, &ks8);
 
-
-                PyObject *vs = NULL, *vs8 = NULL;
                 if (!(vs = cfl_PyObject_Unistr(vo)) ||
                     !(v = cfl_PyUnistr_AsUTF8(vs, &vs8))) {
                         PyErr_Format(PyExc_ValueError,
@@ -367,7 +369,7 @@ static PyObject *Admin_create_topics (Handle *self, PyObject *args,
         /*
          * Parse the list of NewTopics and convert to corresponding C types.
          */
-        c_objs = alloca(sizeof(*c_objs) * tcnt);
+        c_objs = malloc(sizeof(*c_objs) * tcnt);
 
         for (i = 0 ; i < tcnt ; i++) {
                 NewTopic *newt = (NewTopic *)PyList_GET_ITEM(topics, i);
@@ -443,13 +445,15 @@ static PyObject *Admin_create_topics (Handle *self, PyObject *args,
 
         rd_kafka_NewTopic_destroy_array(c_objs, tcnt);
         rd_kafka_AdminOptions_destroy(c_options);
+        free(c_objs);
         rd_kafka_queue_destroy(rkqu); /* drop reference from get_background */
 
         Py_RETURN_NONE;
 
 err:
         rd_kafka_NewTopic_destroy_array(c_objs, i);
         rd_kafka_AdminOptions_destroy(c_options);
+        free(c_objs);
         Py_DECREF(future); /* from options_to_c() */
 
         return NULL;
@@ -503,7 +507,7 @@ static PyObject *Admin_delete_topics (Handle *self, PyObject *args,
         /*
          * Parse the list of strings and convert to corresponding C types.
          */
-        c_objs = alloca(sizeof(*c_objs) * tcnt);
+        c_objs = malloc(sizeof(*c_objs) * tcnt);
 
         for (i = 0 ; i < tcnt ; i++) {
                 PyObject *topic = PyList_GET_ITEM(topics, i);
@@ -544,13 +548,15 @@ static PyObject *Admin_delete_topics (Handle *self, PyObject *args,
 
         rd_kafka_DeleteTopic_destroy_array(c_objs, i);
         rd_kafka_AdminOptions_destroy(c_options);
+        free(c_objs);
         rd_kafka_queue_destroy(rkqu); /* drop reference from get_background */
 
         Py_RETURN_NONE;
 
 err:
         rd_kafka_DeleteTopic_destroy_array(c_objs, i);
         rd_kafka_AdminOptions_destroy(c_options);
+        free(c_objs);
         Py_DECREF(future); /* from options_to_c() */
 
         return NULL;
@@ -611,7 +617,7 @@ static PyObject *Admin_create_partitions (Handle *self, PyObject *args,
         /*
          * Parse the list of NewPartitions and convert to corresponding C types.
          */
-        c_objs = alloca(sizeof(*c_objs) * tcnt);
+        c_objs = malloc(sizeof(*c_objs) * tcnt);
 
         for (i = 0 ; i < tcnt ; i++) {
                 NewPartitions *newp = (NewPartitions *)PyList_GET_ITEM(topics,
@@ -669,13 +675,15 @@ static PyObject *Admin_create_partitions (Handle *self, PyObject *args,
 
         rd_kafka_NewPartitions_destroy_array(c_objs, tcnt);
         rd_kafka_AdminOptions_destroy(c_options);
+        free(c_objs);
         rd_kafka_queue_destroy(rkqu); /* drop reference from get_background */
 
         Py_RETURN_NONE;
 
 err:
         rd_kafka_NewPartitions_destroy_array(c_objs, i);
         rd_kafka_AdminOptions_destroy(c_options);
+        free(c_objs);
         Py_DECREF(future); /* from options_to_c() */
 
         return NULL;
@@ -742,7 +750,7 @@ static PyObject *Admin_describe_configs (Handle *self, PyObject *args,
          * Parse the list of ConfigResources and convert to
          * corresponding C types.
          */
-        c_objs = alloca(sizeof(*c_objs) * cnt);
+        c_objs = malloc(sizeof(*c_objs) * cnt);
 
         for (i = 0 ; i < cnt ; i++) {
                 PyObject *res = PyList_GET_ITEM(resources, i);
@@ -795,6 +803,7 @@ static PyObject *Admin_describe_configs (Handle *self, PyObject *args,
 
         rd_kafka_ConfigResource_destroy_array(c_objs, cnt);
         rd_kafka_AdminOptions_destroy(c_options);
+        free(c_objs);
         rd_kafka_queue_destroy(rkqu); /* drop reference from get_background */
 
         Py_DECREF(ConfigResource_type); /* from lookup() */
@@ -804,6 +813,7 @@ static PyObject *Admin_describe_configs (Handle *self, PyObject *args,
 err:
         rd_kafka_ConfigResource_destroy_array(c_objs, i);
         rd_kafka_AdminOptions_destroy(c_options);
+        free(c_objs);
         Py_DECREF(ConfigResource_type); /* from lookup() */
         Py_DECREF(future); /* from options_to_c() */
 
@@ -881,7 +891,7 @@ static PyObject *Admin_alter_configs (Handle *self, PyObject *args,
          * Parse the list of ConfigResources and convert to
          * corresponding C types.
          */
-        c_objs = alloca(sizeof(*c_objs) * cnt);
+        c_objs = malloc(sizeof(*c_objs) * cnt);
 
         for (i = 0 ; i < cnt ; i++) {
                 PyObject *res = PyList_GET_ITEM(resources, i);
@@ -950,6 +960,7 @@ static PyObject *Admin_alter_configs (Handle *self, PyObject *args,
 
         rd_kafka_ConfigResource_destroy_array(c_objs, cnt);
         rd_kafka_AdminOptions_destroy(c_options);
+        free(c_objs);
         rd_kafka_queue_destroy(rkqu); /* drop reference from get_background */
 
         Py_DECREF(ConfigResource_type); /* from lookup() */
@@ -959,6 +970,7 @@ static PyObject *Admin_alter_configs (Handle *self, PyObject *args,
 err:
         rd_kafka_ConfigResource_destroy_array(c_objs, i);
         rd_kafka_AdminOptions_destroy(c_options);
+        free(c_objs);
         Py_DECREF(ConfigResource_type); /* from lookup() */
         Py_DECREF(future); /* from options_to_c() */
 
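The same pattern repeats across the Admin_*() entry points above: the stack-based alloca() becomes a heap allocation, presumably because alloca() is not portable to the MSVC toolchain used for the Windows wheels, so every exit path, the success path and the err: label alike, now has to release the array. A minimal self-contained sketch of that shape, not the project's code; convert_one() is a hypothetical stand-in for the per-element rd_kafka_*_new() conversion loops:

#include <stdlib.h>

/* Hypothetical element conversion; stands in for the rd_kafka_*_new()
 * calls in the real functions. Returns 0 on failure, 1 on success. */
static int convert_one (int *out, size_t i) {
        *out = (int)i;
        return 1;
}

static int convert_all (size_t cnt) {
        int *c_objs;
        size_t i;

        c_objs = malloc(sizeof(*c_objs) * cnt);   /* was alloca() */
        if (!c_objs)
                return 0;

        for (i = 0 ; i < cnt ; i++) {
                if (!convert_one(&c_objs[i], i)) {
                        free(c_objs);             /* free on the error path */
                        return 0;
                }
        }

        free(c_objs);                             /* and on the success path */
        return 1;
}

int main (void) {
        return convert_all(4) ? 0 : 1;
}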

confluent_kafka/src/Consumer.c

Lines changed: 3 additions & 3 deletions
@@ -747,7 +747,7 @@ static PyObject *Consumer_seek (Handle *self, PyObject *args, PyObject *kwargs)
 
         if (err) {
                 cfl_PyErr_Format(err,
-                                 "Failed to seek to offset %"PRId64": %s",
+                                 "Failed to seek to offset %"CFL_PRId64": %s",
                                  tp->offset, rd_kafka_err2str(err));
                 return NULL;
         }
@@ -917,7 +917,7 @@ static PyObject *Consumer_consume (Handle *self, PyObject *args,
         PyObject *msglist;
         rd_kafka_queue_t *rkqu = self->u.Consumer.rkqu;
         CallState cs;
-        Py_ssize_t i;
+        Py_ssize_t i, n;
 
         if (!self->rk) {
                 PyErr_SetString(PyExc_RuntimeError,
@@ -939,7 +939,7 @@ static PyObject *Consumer_consume (Handle *self, PyObject *args,
 
         rkmessages = malloc(num_messages * sizeof(rd_kafka_message_t *));
 
-        Py_ssize_t n = (Py_ssize_t)rd_kafka_consume_batch_queue(rkqu,
+        n = (Py_ssize_t)rd_kafka_consume_batch_queue(rkqu,
                         tmout >= 0 ? (int)(tmout * 1000.0f) : -1,
                         rkmessages,
                         num_messages);

confluent_kafka/src/confluent_kafka.c

Lines changed: 12 additions & 12 deletions
@@ -756,15 +756,15 @@ static PyObject *TopicPartition_str0 (TopicPartition *self) {
         PyObject *ret;
         char offset_str[40];
 
-        snprintf(offset_str, sizeof(offset_str), "%"PRId64"", self->offset);
+        snprintf(offset_str, sizeof(offset_str), "%"CFL_PRId64"", self->offset);
 
         if (self->error != Py_None) {
                 errstr = cfl_PyObject_Unistr(self->error);
                 c_errstr = cfl_PyUnistr_AsUTF8(errstr, &errstr8);
         }
 
         ret = cfl_PyUnistr(
-                _FromFormat("TopicPartition{topic=%s,partition=%"PRId32
+                _FromFormat("TopicPartition{topic=%s,partition=%"CFL_PRId32
                             ",offset=%s,error=%s}",
                             self->topic, self->partition,
                             offset_str,
@@ -1282,14 +1282,14 @@ static void log_cb (const rd_kafka_t *rk, int level,
         CallState *cs;
         static const int level_map[8] = {
                 /* Map syslog levels to python logging levels */
-                [0] = 50, /* LOG_EMERG   -> logging.CRITICAL */
-                [1] = 50, /* LOG_ALERT   -> logging.CRITICAL */
-                [2] = 50, /* LOG_CRIT    -> logging.CRITICAL */
-                [3] = 40, /* LOG_ERR     -> logging.ERROR */
-                [4] = 30, /* LOG_WARNING -> logging.WARNING */
-                [5] = 20, /* LOG_NOTICE  -> logging.INFO */
-                [6] = 20, /* LOG_INFO    -> logging.INFO */
-                [7] = 10, /* LOG_DEBUG   -> logging.DEBUG */
+                /* [0] = */ 50, /* LOG_EMERG   -> logging.CRITICAL */
+                /* [1] = */ 50, /* LOG_ALERT   -> logging.CRITICAL */
+                /* [2] = */ 50, /* LOG_CRIT    -> logging.CRITICAL */
+                /* [3] = */ 40, /* LOG_ERR     -> logging.ERROR */
+                /* [4] = */ 30, /* LOG_WARNING -> logging.WARNING */
+                /* [5] = */ 20, /* LOG_NOTICE  -> logging.INFO */
+                /* [6] = */ 20, /* LOG_INFO    -> logging.INFO */
+                /* [7] = */ 10, /* LOG_DEBUG   -> logging.DEBUG */
         };
 
         cs = CallState_get(h);
@@ -1444,7 +1444,7 @@ static int producer_conf_set_special (Handle *self, rd_kafka_conf_t *conf,
                                        rd_kafka_topic_conf_t *tconf,
                                        const char *name, PyObject *valobj) {
 
-        if (!strcasecmp(name, "on_delivery")) {
+        if (!strcmp(name, "on_delivery")) {
                 if (!PyCallable_Check(valobj)) {
                         cfl_PyErr_Format(
                                 RD_KAFKA_RESP_ERR__INVALID_ARG,
@@ -1486,7 +1486,7 @@ static int consumer_conf_set_special (Handle *self, rd_kafka_conf_t *conf,
                                        rd_kafka_topic_conf_t *tconf,
                                        const char *name, PyObject *valobj) {
 
-        if (!strcasecmp(name, "on_commit")) {
+        if (!strcmp(name, "on_commit")) {
                 if (!PyCallable_Check(valobj)) {
                         cfl_PyErr_Format(
                                 RD_KAFKA_RESP_ERR__INVALID_ARG,
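The changes in this file, together with the Consumer.c hunks above, read like C90/MSVC compatibility work rather than behaviour changes: C99 designated initializers and mid-block declarations are removed, and the POSIX strcasecmp() (spelled _stricmp() in the MSVC CRT) gives way to a plain strcmp(). A small self-contained sketch of that older, MSVC-friendly C style, written here only as an illustration under that assumption; the table mirrors level_map above:

#include <stdio.h>
#include <string.h>

int main (void) {
        /* Positional initializers (valid C90) instead of C99 designated
         * initializers such as "[3] = 40"; the table is identical. */
        static const int level_map[8] = {
                50, 50, 50,   /* LOG_EMERG/ALERT/CRIT -> logging.CRITICAL */
                40,           /* LOG_ERR              -> logging.ERROR    */
                30,           /* LOG_WARNING          -> logging.WARNING  */
                20, 20,       /* LOG_NOTICE/INFO      -> logging.INFO     */
                10            /* LOG_DEBUG            -> logging.DEBUG    */
        };
        int i;                /* declared at the top of the block (C90) */

        for (i = 0 ; i < 8 ; i++)
                printf("syslog %d -> logging level %d\n", i, level_map[i]);

        /* Exact-match comparison instead of the POSIX-only strcasecmp(). */
        return strcmp("on_delivery", "on_delivery");
}

Note that strcmp() is case-sensitive, so the on_delivery/on_commit keys are now matched exactly rather than case-insensitively.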

confluent_kafka/src/confluent_kafka.h

Lines changed: 13 additions & 0 deletions
@@ -18,8 +18,21 @@
 #include <structmember.h>
 #include <pythread.h>
 
+#include <stdint.h>
 #include <librdkafka/rdkafka.h>
 
+#ifdef _MSC_VER
+/* Windows */
+#define CFL_PRId64 "I64d"
+#define CFL_PRId32 "I32d"
+
+#else
+/* C99 */
+#include <inttypes.h>
+#define CFL_PRId64 PRId64
+#define CFL_PRId32 PRId32
+#endif
+
 
 /**
  * Minimum required librdkafka version. This is checked both during
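A short usage sketch for the new macros: MSVC's C runtime expects the %I64d/%I32d size prefixes where C99 code would use the inttypes.h PRId64/PRId32 macros, and CFL_PRId64/CFL_PRId32 paper over that difference. The macro selection below is copied from the header above; the offset and partition values are made up:

#include <stdio.h>
#include <stdint.h>

/* Same selection the header above makes. */
#ifdef _MSC_VER
#define CFL_PRId64 "I64d"
#define CFL_PRId32 "I32d"
#else
#include <inttypes.h>
#define CFL_PRId64 PRId64
#define CFL_PRId32 PRId32
#endif

int main (void) {
        int64_t offset = 1234567890123LL;  /* made-up example values */
        int32_t partition = 7;

        /* String-literal concatenation around the macro, as in the
         * Consumer.c and confluent_kafka.c format strings above. */
        printf("partition=%" CFL_PRId32 " offset=%" CFL_PRId64 "\n",
               partition, offset);
        return 0;
}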

setup.py

Lines changed: 9 additions & 1 deletion
@@ -4,6 +4,7 @@
 from setuptools import setup, find_packages
 from distutils.core import Extension
 import sys
+import platform
 
 INSTALL_REQUIRES = list()
 
@@ -13,8 +14,15 @@
 else:
     avro = 'avro-python3'
 
+# On Un*x the library is linked as -lrdkafka,
+# while on windows we need the full librdkafka name.
+if platform.system() == 'Windows':
+    librdkafka_libname = 'librdkafka'
+else:
+    librdkafka_libname = 'rdkafka'
+
 module = Extension('confluent_kafka.cimpl',
-                   libraries=['rdkafka'],
+                   libraries=[librdkafka_libname],
                    sources=['confluent_kafka/src/confluent_kafka.c',
                             'confluent_kafka/src/Producer.c',
                             'confluent_kafka/src/Consumer.c',

0 commit comments
