Update 1.4.0 release candidate #341

Merged · 8 commits · Jul 3, 2025
2 changes: 1 addition & 1 deletion .semaphore/semaphore.yml
@@ -28,7 +28,7 @@ global_job_config:
- git submodule update --init --recursive
- cd deps/librdkafka
- git fetch origin
- git checkout v2.10.1
- git checkout v2.11.0-RC4
- cd ../../
- cache clear

1 change: 1 addition & 0 deletions README.md
@@ -115,6 +115,7 @@ For guidelines on contributing please see [CONTRIBUTING.md](CONTRIBUTING.md)
| 1.3.0 | 2.10.0 |
| 1.3.1 | 2.10.0 |
| 1.3.2 | 2.10.1 |
| 1.4.0 | 2.11.0 |

This mapping is applicable if you're using a pre-built binary. Otherwise, you can check the librdkafka version with the following command:

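The command itself is collapsed in the diff above, so as a hedged illustration only: assuming the binding exposes a `librdkafkaVersion` export at its top level (as node-rdkafka-style bindings typically do), the bundled librdkafka version can be checked like this:

```js
// Illustrative sketch, not part of this change: assumes the package
// exposes `librdkafkaVersion` at its top level.
const Kafka = require('@confluentinc/kafka-javascript');
console.log(Kafka.librdkafkaVersion); // e.g. 2.11.0 once 1.4.0 ships against the final librdkafka release
```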
2 changes: 1 addition & 1 deletion deps/librdkafka
Submodule librdkafka updated 53 files
+59 −1 CHANGELOG.md
+19 −1 CONFIGURATION.md
+38 −3 INTRODUCTION.md
+28 −2 src-cpp/rdkafkacpp.h
+137 −0 src/rd.h
+32 −1 src/rdbase64.c
+2 −0 src/rdbase64.h
+158 −8 src/rdhttp.c
+5 −3 src/rdhttp.h
+62 −35 src/rdkafka.c
+3 −3 src/rdkafka.h
+7 −1 src/rdkafka_buf.c
+39 −23 src/rdkafka_cgrp.c
+326 −12 src/rdkafka_conf.c
+41 −2 src/rdkafka_conf.h
+31 −9 src/rdkafka_feature.c
+28 −28 src/rdkafka_int.h
+15 −0 src/rdkafka_metadata.c
+2 −1 src/rdkafka_mock.c
+45 −17 src/rdkafka_mock_cgrp.c
+1 −1 src/rdkafka_mock_handlers.c
+5 −1 src/rdkafka_mock_int.h
+1 −1 src/rdkafka_proto.h
+116 −37 src/rdkafka_queue.c
+19 −0 src/rdkafka_queue.h
+11 −5 src/rdkafka_request.c
+11 −5 src/rdkafka_sasl_oauthbearer.c
+1,059 −101 src/rdkafka_sasl_oauthbearer_oidc.c
+8 −3 src/rdkafka_sasl_oauthbearer_oidc.h
+73 −44 src/rdkafka_ssl.c
+17 −0 src/rdkafka_ssl.h
+11 −1 src/rdrand.c
+6 −0 src/rdunittest.c
+1 −1 src/rdwin32.h
+69 −0 tests/0004-conf.c
+3 −0 tests/0067-empty_topic.cpp
+26 −4 tests/0080-admin_ut.c
+189 −123 tests/0081-admin.c
+35 −18 tests/0102-static_group_rebalance.c
+212 −7 tests/0126-oauthbearer_oidc.c
+32 −16 tests/0142-reauthentication.c
+952 −0 tests/0147-consumer_group_consumer_mock.c
+128 −0 tests/0153-memberid.c
+2 −0 tests/CMakeLists.txt
+19 −3 tests/LibrdkafkaTestApp.py
+10 −0 tests/fixtures/oauthbearer/jwt_assertion_template.json
+1 −1 tests/requirements.txt
+5 −1 tests/test.c
+6 −0 tests/test.h
+ tests/trivup/trivup-0.12.10.tar.gz
+ tests/trivup/trivup-0.13.0.tar.gz
+1 −1 vcpkg.json
+2 −0 win32/tests/tests.vcxproj
9 changes: 6 additions & 3 deletions deps/windows-install.py
@@ -16,10 +16,13 @@
depsIncludeDir = '../deps/include'
buildReleaseDir = 'Release'

# alternative: 'https://api.nuget.org/v3-flatcontainer/librdkafka.redist/{}/librdkafka.redist.{}.nupkg'.format(librdkafkaVersion, librdkafkaVersion)
# Use publicly documented API to download librdkafka NuGet package.
# https://api.nuget.org/v3-flatcontainer/{package}/{version}/{package}.{version}.nupkg
# See https://learn.microsoft.com/en-us/nuget/api/package-base-address-resource#download-package-content-nupkg
env_dist = os.environ
downloadBaseUrl = env_dist['NODE_RDKAFKA_NUGET_BASE_URL'] if 'NODE_RDKAFKA_NUGET_BASE_URL' in env_dist else 'https://globalcdn.nuget.org/packages/'
librdkafkaNugetUrl = downloadBaseUrl + 'librdkafka.redist.{}.nupkg'.format(librdkafkaVersion)
downloadBaseUrl = env_dist['NODE_RDKAFKA_NUGET_BASE_URL'] if 'NODE_RDKAFKA_NUGET_BASE_URL' in env_dist else 'https://api.nuget.org/v3-flatcontainer/librdkafka.redist/{version}/'
librdkafkaNugetUrl = downloadBaseUrl + 'librdkafka.redist.{version}.nupkg'
librdkafkaNugetUrl = librdkafkaNugetUrl.format(version=librdkafkaVersion.lower())
print('download librdkafka form ' + librdkafkaNugetUrl)
outputDir = 'librdkafka.redist'
outputFile = outputDir + '.zip'
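With this change the Windows prebuilt download goes through NuGet's flat-container endpoint. For librdkafka.redist 2.11.0-RC4 the script composes (with the version lower-cased, as the flat-container API requires) `https://api.nuget.org/v3-flatcontainer/librdkafka.redist/2.11.0-rc4/librdkafka.redist.2.11.0-rc4.nupkg`, unless `NODE_RDKAFKA_NUGET_BASE_URL` overrides the base URL.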
30 changes: 28 additions & 2 deletions lib/error.js
@@ -28,7 +28,7 @@ LibrdKafkaError.wrap = errorWrap;
* @constant
* @memberof RdKafka
*/
// ====== Generated from librdkafka 2.10.1 file src-cpp/rdkafkacpp.h ======
// ====== Generated from librdkafka 2.11.0-RC4 file src-cpp/rdkafkacpp.h ======
LibrdKafkaError.codes = {

/* Internal errors to rdkafka: */
@@ -159,6 +159,11 @@ LibrdKafkaError.codes = {
ERR__AUTO_OFFSET_RESET: -140,
/** Partition log truncation detected */
ERR__LOG_TRUNCATION: -139,
/** A different record in the batch was invalid
* and this message failed persisting. */
ERR__INVALID_DIFFERENT_RECORD: -138,
/** Broker is going away but client isn't terminating */
ERR__DESTROY_BROKER: -137,
/** End internal error codes */
ERR__END: -100,
/* Kafka broker errors: */
@@ -374,7 +379,28 @@ LibrdKafkaError.codes = {
/** Unable to update finalized features due to server error */
ERR_FEATURE_UPDATE_FAILED: 96,
/** Request principal deserialization failed during forwarding */
ERR_PRINCIPAL_DESERIALIZATION_FAILURE: 97
ERR_PRINCIPAL_DESERIALIZATION_FAILURE: 97,
/** Unknown Topic Id */
ERR_UNKNOWN_TOPIC_ID: 100,
/** The member epoch is fenced by the group coordinator */
ERR_FENCED_MEMBER_EPOCH: 110,
/** The instance ID is still used by another member in the
* consumer group */
ERR_UNRELEASED_INSTANCE_ID: 111,
/** The assignor or its version range is not supported by the consumer
* group */
ERR_UNSUPPORTED_ASSIGNOR: 112,
/** The member epoch is stale */
ERR_STALE_MEMBER_EPOCH: 113,
/** Client sent a push telemetry request with an invalid or outdated
* subscription ID. */
ERR_UNKNOWN_SUBSCRIPTION_ID: 117,
/** Client sent a push telemetry request larger than the maximum size
* the broker will accept. */
ERR_TELEMETRY_TOO_LARGE: 118,
/** Client metadata is stale,
* client should rebootstrap to obtain new metadata. */
ERR_REBOOTSTRAP_REQUIRED: 129
};

/**
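The new codes land in the same `LibrdKafkaError.codes` map as the existing ones (mirrored by `CODES.ERRORS` in the typings), so applications can test for them by name. A minimal sketch; the broker and group settings below are placeholders, not part of this change:

```js
const Kafka = require('@confluentinc/kafka-javascript');
const { ERRORS } = Kafka.CODES;

// Placeholder consumer configuration, for illustration only.
const consumer = new Kafka.KafkaConsumer({
  'bootstrap.servers': 'localhost:9092',
  'group.id': 'example-group',
}, {});

consumer.on('event.error', (err) => {
  if (err.code === ERRORS.ERR_REBOOTSTRAP_REQUIRED) {
    // Client metadata is stale; librdkafka will re-bootstrap from bootstrap.servers.
    console.warn('rebootstrap required:', err.message);
  } else if (err.code === ERRORS.ERR__DESTROY_BROKER) {
    // Broker is going away while the client keeps running.
    console.warn('broker going away:', err.message);
  }
});
```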
2 changes: 1 addition & 1 deletion lib/util.js
@@ -52,4 +52,4 @@ util.dictToStringList = function (mapOrObject) {
return list;
};

util.bindingVersion = '1.3.2';
util.bindingVersion = '1.4.0-rc0';
6 changes: 3 additions & 3 deletions package-lock.json

Some generated files are not rendered by default.

8 changes: 4 additions & 4 deletions package.json
@@ -1,9 +1,9 @@
{
"name": "@confluentinc/kafka-javascript",
"version": "1.3.2",
"version": "1.4.0-rc0",
"description": "Node.js bindings for librdkafka",
"librdkafka": "2.10.1",
"librdkafka_win": "2.10.1",
"librdkafka": "2.11.0-RC4",
"librdkafka_win": "2.11.0-RC4",
"main": "lib/index.js",
"types": "types/index.d.ts",
"scripts": {
@@ -65,4 +65,4 @@
"schemaregistry",
"schemaregistry-examples"
]
}
}
2 changes: 1 addition & 1 deletion schemaregistry/package.json
@@ -1,6 +1,6 @@
{
"name": "@confluentinc/schemaregistry",
"version": "1.3.1",
"version": "1.4.0-rc0",
"description": "Node.js client for Confluent Schema Registry",
"main": "dist/index.js",
"types": "dist/index.d.ts",
106 changes: 104 additions & 2 deletions types/config.d.ts
@@ -1,4 +1,4 @@
// ====== Generated from librdkafka 2.10.1 file CONFIGURATION.md ======
// ====== Generated from librdkafka 2.11.0-RC4 file CONFIGURATION.md ======
// Code that generated this is a derivative work of the code from Nam Nguyen
// https://gist.github.com/ntgn81/066c2c8ec5b4238f85d1e9168a04e3fb

@@ -63,12 +63,19 @@ export interface GlobalConfig {
"max.in.flight"?: number;

/**
* Controls how the client recovers when none of the brokers known to it is available. If set to `none`, the client fails with a fatal error. If set to `rebootstrap`, the client repeats the bootstrap process using `bootstrap.servers` and brokers added through `rd_kafka_brokers_add()`. Rebootstrapping is useful when a client communicates with brokers so infrequently that the set of brokers may change entirely before the client refreshes metadata. Metadata recovery is triggered when all last-known brokers appear unavailable simultaneously.
* Controls how the client recovers when none of the brokers known to it is available. If set to `none`, the client doesn't re-bootstrap. If set to `rebootstrap`, the client repeats the bootstrap process using `bootstrap.servers` and brokers added through `rd_kafka_brokers_add()`. Rebootstrapping is useful when a client communicates with brokers so infrequently that the set of brokers may change entirely before the client refreshes metadata. Metadata recovery is triggered when all last-known brokers appear unavailable simultaneously or the client cannot refresh metadata within `metadata.recovery.rebootstrap.trigger.ms` or it's requested in a metadata response.
*
* @default rebootstrap
*/
"metadata.recovery.strategy"?: 'none' | 'rebootstrap';

/**
* If a client configured to rebootstrap using `metadata.recovery.strategy=rebootstrap` is unable to obtain metadata from any of the brokers for this interval, client repeats the bootstrap process using `bootstrap.servers` configuration and brokers added through `rd_kafka_brokers_add()`.
*
* @default 300000
*/
"metadata.recovery.rebootstrap.trigger.ms"?: number;

/**
* Period of time in milliseconds at which topic and broker metadata is refreshed in order to proactively discover any new brokers, topics, partitions or partition leader changes. Use -1 to disable the intervalled refresh (not recommended). If there are no locally referenced topics (no topic objects created, no messages produced, no subscription or no assignment) then only the broker list will be refreshed every interval but no more often than every 10s.
*
@@ -432,6 +439,16 @@ export interface GlobalConfig {
*/
"ssl.ca.location"?: string;

/**
* File or directory path to CA certificate(s) for verifying HTTPS endpoints, like `sasl.oauthbearer.token.endpoint.url` used for OAUTHBEARER/OIDC authentication. Mutually exclusive with `https.ca.pem`. Defaults: On Windows the system's CA certificates are automatically looked up in the Windows Root certificate store. On Mac OSX this configuration defaults to `probe`. It is recommended to install openssl using Homebrew, to provide CA certificates. On Linux install the distribution's ca-certificates package. If OpenSSL is statically linked or `https.ca.location` is set to `probe` a list of standard paths will be probed and the first one found will be used as the default CA certificate location path. If OpenSSL is dynamically linked the OpenSSL library's default path will be used (see `OPENSSLDIR` in `openssl version -a`).
*/
"https.ca.location"?: string;

/**
* CA certificate string (PEM format) for verifying HTTPS endpoints. Mutually exclusive with `https.ca.location`. Optional: see `https.ca.location`.
*/
"https.ca.pem"?: string;

/**
* CA certificate string (PEM format) for verifying the broker's key.
*/
@@ -591,6 +608,16 @@ export interface GlobalConfig {
*/
"sasl.oauthbearer.client.id"?: string;

/**
* Alias for `sasl.oauthbearer.client.id`: Public identifier for the application. Must be unique across all clients that the authorization server handles. Only used when `sasl.oauthbearer.method` is set to "oidc".
*/
"sasl.oauthbearer.client.credentials.client.id"?: string;

/**
* Alias for `sasl.oauthbearer.client.secret`: Client secret only known to the application and the authorization server. This should be a sufficiently random string that is not guessable. Only used when `sasl.oauthbearer.method` is set to "oidc".
*/
"sasl.oauthbearer.client.credentials.client.secret"?: string;

/**
* Client secret only known to the application and the authorization server. This should be a sufficiently random string that is not guessable. Only used when `sasl.oauthbearer.method` is set to "oidc".
*/
@@ -611,6 +638,81 @@
*/
"sasl.oauthbearer.token.endpoint.url"?: string;

/**
* OAuth grant type to use when communicating with the identity provider.
*
* @default client_credentials
*/
"sasl.oauthbearer.grant.type"?: 'client_credentials' | 'urn:ietf:params:oauth:grant-type:jwt-bearer';

/**
* Algorithm the client should use to sign the assertion sent to the identity provider and in the OAuth alg header in the JWT assertion.
*
* @default RS256
*/
"sasl.oauthbearer.assertion.algorithm"?: 'RS256' | 'ES256';

/**
* Path to client's private key (PEM) used for authentication when using the JWT assertion.
*/
"sasl.oauthbearer.assertion.private.key.file"?: string;

/**
* Private key passphrase for `sasl.oauthbearer.assertion.private.key.file` or `sasl.oauthbearer.assertion.private.key.pem`.
*/
"sasl.oauthbearer.assertion.private.key.passphrase"?: string;

/**
* Client's private key (PEM) used for authentication when using the JWT assertion.
*/
"sasl.oauthbearer.assertion.private.key.pem"?: string;

/**
* Path to the assertion file. Only used when `sasl.oauthbearer.method` is set to "oidc" and JWT assertion is needed.
*/
"sasl.oauthbearer.assertion.file"?: string;

/**
* JWT audience claim. Only used when `sasl.oauthbearer.method` is set to "oidc" and JWT assertion is needed.
*/
"sasl.oauthbearer.assertion.claim.aud"?: string;

/**
* Assertion expiration time in seconds. Only used when `sasl.oauthbearer.method` is set to "oidc" and JWT assertion is needed.
*
* @default 300
*/
"sasl.oauthbearer.assertion.claim.exp.seconds"?: number;

/**
* JWT issuer claim. Only used when `sasl.oauthbearer.method` is set to "oidc" and JWT assertion is needed.
*/
"sasl.oauthbearer.assertion.claim.iss"?: string;

/**
* JWT ID claim. When set to `true`, a random UUID is generated. Only used when `sasl.oauthbearer.method` is set to "oidc" and JWT assertion is needed.
*
* @default false
*/
"sasl.oauthbearer.assertion.claim.jti.include"?: boolean;

/**
* Assertion not before time in seconds. Only used when `sasl.oauthbearer.method` is set to "oidc" and JWT assertion is needed.
*
* @default 60
*/
"sasl.oauthbearer.assertion.claim.nbf.seconds"?: number;

/**
* JWT subject claim. Only used when `sasl.oauthbearer.method` is set to "oidc" and JWT assertion is needed.
*/
"sasl.oauthbearer.assertion.claim.sub"?: string;

/**
* Path to the JWT template file. Only used when `sasl.oauthbearer.method` is set to "oidc" and JWT assertion is needed.
*/
"sasl.oauthbearer.assertion.jwt.template.file"?: string;

/**
* List of plugin libraries to load (; separated). The library search path is platform dependent (see dlopen(3) for Unix and LoadLibrary() for Windows). If no filename extension is specified the platform-specific extension (such as .dll or .so) will be appended automatically.
*/
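To show how the new declarations fit together, here is a hedged sketch of a global configuration using the JWT-bearer grant plus the new rebootstrap trigger. The endpoint URL, key path, and claim values are placeholders, and the claims a given identity provider actually requires will vary:

```js
// Illustrative only: option names come from the config.d.ts additions above;
// every value here is a placeholder.
const globalConfig = {
  'bootstrap.servers': 'broker.example.com:9092',
  'security.protocol': 'sasl_ssl',
  'sasl.mechanism': 'OAUTHBEARER',
  'sasl.oauthbearer.method': 'oidc',
  'sasl.oauthbearer.token.endpoint.url': 'https://idp.example.com/oauth2/token',
  'sasl.oauthbearer.grant.type': 'urn:ietf:params:oauth:grant-type:jwt-bearer',
  'sasl.oauthbearer.assertion.algorithm': 'RS256',                  // default
  'sasl.oauthbearer.assertion.private.key.file': '/etc/kafka/assertion-key.pem',
  'sasl.oauthbearer.assertion.claim.iss': 'example-client',
  'sasl.oauthbearer.assertion.claim.sub': 'example-client',
  'sasl.oauthbearer.assertion.claim.aud': 'https://idp.example.com/oauth2/token',
  'https.ca.location': '/etc/ssl/certs',        // CA bundle for the HTTPS token endpoint
  // Metadata recovery options added in the same diff (values shown are the defaults):
  'metadata.recovery.strategy': 'rebootstrap',
  'metadata.recovery.rebootstrap.trigger.ms': 300000,
};
```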
28 changes: 27 additions & 1 deletion types/errors.d.ts
@@ -1,4 +1,4 @@
// ====== Generated from librdkafka 2.10.1 file src-cpp/rdkafkacpp.h ======
// ====== Generated from librdkafka 2.11.0-RC4 file src-cpp/rdkafkacpp.h ======
export const CODES: { ERRORS: {
/* Internal errors to rdkafka: */
/** Begin internal error codes (**-200**) */
@@ -128,6 +128,11 @@ export const CODES: { ERRORS: {
ERR__AUTO_OFFSET_RESET: number,
/** Partition log truncation detected (**-139**) */
ERR__LOG_TRUNCATION: number,
/** A different record in the batch was invalid
* and this message failed persisting (**-138**) */
ERR__INVALID_DIFFERENT_RECORD: number,
/** Broker is going away but client isn't terminating (**-137**) */
ERR__DESTROY_BROKER: number,
/** End internal error codes (**-100**) */
ERR__END: number,
/* Kafka broker errors: */
@@ -344,4 +349,25 @@ export const CODES: { ERRORS: {
ERR_FEATURE_UPDATE_FAILED: number,
/** Request principal deserialization failed during forwarding (**97**) */
ERR_PRINCIPAL_DESERIALIZATION_FAILURE: number,
/** Unknown Topic Id (**100**) */
ERR_UNKNOWN_TOPIC_ID: number,
/** The member epoch is fenced by the group coordinator (**110**) */
ERR_FENCED_MEMBER_EPOCH: number,
/** The instance ID is still used by another member in the
* consumer group (**111**) */
ERR_UNRELEASED_INSTANCE_ID: number,
/** The assignor or its version range is not supported by the consumer
* group (**112**) */
ERR_UNSUPPORTED_ASSIGNOR: number,
/** The member epoch is stale (**113**) */
ERR_STALE_MEMBER_EPOCH: number,
/** Client sent a push telemetry request with an invalid or outdated
* subscription ID (**117**) */
ERR_UNKNOWN_SUBSCRIPTION_ID: number,
/** Client sent a push telemetry request larger than the maximum size
* the broker will accept (**118**) */
ERR_TELEMETRY_TOO_LARGE: number,
/** Client metadata is stale,
* client should rebootstrap to obtain new metadata (**129**) */
ERR_REBOOTSTRAP_REQUIRED: number,
}}