From b844a823b504a80c692376cc189050eed6067e1a Mon Sep 17 00:00:00 2001 From: Jakub Bednar Date: Mon, 24 Jun 2024 11:51:46 +0200 Subject: [PATCH 01/23] chore(release): prepare for next development iteration --- CHANGELOG.md | 2 ++ conda/meta.yaml | 6 +++--- influxdb_client/version.py | 2 +- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index eb9cd952..83a29b5d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,5 @@ +## 1.45.0 [unreleased] + ## 1.44.0 [2024-06-24] ### Features diff --git a/conda/meta.yaml b/conda/meta.yaml index 186640f0..b250ec14 100644 --- a/conda/meta.yaml +++ b/conda/meta.yaml @@ -1,5 +1,5 @@ {% set name = "influxdb_client" %} -{% set version = "1.43.0" %} +{% set version = "1.44.0" %} package: @@ -7,8 +7,8 @@ package: version: {{ version }} source: - url: https://files.pythonhosted.org/packages/3a/1f/d610ac86af1204bb12698a4d9ac4bd743141e01c13dc44d2e5a8bcf9c556/influxdb_client-1.43.0.tar.gz - sha256: ae2614d891baed52c0ae8f6194a04ee5b1c6422f6061318a3639fe63b7671b25 + url: https://files.pythonhosted.org/packages/9e/a1/ab4f2a3b90334c2e7cb795fbc85483a30134078b1bad0a165a34cb827aa7/influxdb_client-1.44.0.tar.gz + sha256: da9bc0cc49de4a0ac844d833c1efa65227ec5a2254e63cdbe07b5d532c0c37f8 build: number: 0 diff --git a/influxdb_client/version.py b/influxdb_client/version.py index e40ba040..237fe182 100644 --- a/influxdb_client/version.py +++ b/influxdb_client/version.py @@ -1,3 +1,3 @@ """Version of the Client that is used in User-Agent header.""" -VERSION = '1.44.0' +VERSION = '1.45.0dev0' From 653af4657265755ff718c2f03339616d036fea3c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Bedn=C3=A1=C5=99?= Date: Wed, 26 Jun 2024 15:00:00 +0200 Subject: [PATCH 02/23] refactor: to timezone specific datetime helper to avoid use deprecated functions (#652) --- CHANGELOG.md | 3 +++ README.md | 10 +++++----- examples/asynchronous.py | 2 +- examples/example.py | 6 +++--- examples/influx_cloud.py | 5 +++-- examples/logging_handler.py | 2 +- examples/write_structured_data.py | 4 ++-- influxdb_client/client/write/point.py | 2 +- tests/test_InfluxDBClientAsync.py | 11 ++++++----- tests/test_MultiprocessingWriter.py | 6 +++--- tests/test_PandasDateTimeHelper.py | 2 +- 11 files changed, 29 insertions(+), 24 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 83a29b5d..8f9df7e5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,8 @@ ## 1.45.0 [unreleased] +### Bug Fixes +1. [#652](https://github.com/influxdata/influxdb-client-python/pull/652): Refactor to `timezone` specific `datetime` helpers to avoid use deprecated functions + ## 1.44.0 [2024-06-24] ### Features diff --git a/README.md b/README.md index ce78bd00..ef4eff86 100644 --- a/README.md +++ b/README.md @@ -392,7 +392,7 @@ The batching is configurable by `write_options`: | **exponential_base** | the base for the exponential retry delay, the next delay is computed using random exponential backoff as a random value within the interval `retry_interval * exponential_base^(attempts-1)` and `retry_interval * exponential_base^(attempts)`. 
Example for `retry_interval=5_000, exponential_base=2, max_retry_delay=125_000, total=5` Retry delays are random distributed values within the ranges of `[5_000-10_000, 10_000-20_000, 20_000-40_000, 40_000-80_000, 80_000-125_000]` | `2` | ``` python -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone import pandas as pd import reactivex as rx @@ -456,7 +456,7 @@ with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") """ Write Pandas DataFrame """ - _now = datetime.utcnow() + _now = datetime.now(tz=timezone.utc) _data_frame = pd.DataFrame(data=[["coyote_creek", 1.0], ["coyote_creek", 2.0]], index=[_now, _now + timedelta(hours=1)], columns=["location", "water_level"]) @@ -923,7 +923,7 @@ The last step is run a python script via: `python3 influx_cloud.py`. Connect to InfluxDB 2.0 - write data and query them """ -from datetime import datetime +from datetime import datetime, timezone from influxdb_client import Point, InfluxDBClient from influxdb_client.client.write_api import SYNCHRONOUS @@ -945,7 +945,7 @@ try: """ Write data by Point structure """ - point = Point(kind).tag('host', host).tag('device', device).field('value', 25.3).time(time=datetime.utcnow()) + point = Point(kind).tag('host', host).tag('device', device).field('value', 25.3).time(time=datetime.now(tz=timezone.utc)) print(f'Writing to InfluxDB cloud: {point.to_line_protocol()} ...') @@ -1407,7 +1407,7 @@ The `influxdb_client.client.query_api_async.QueryApiAsync` supports retrieve dat > > async def main(): > async with InfluxDBClientAsync(url="http://localhost:8086", token="my-token", org="my-org") as client: -> start = datetime.utcfromtimestamp(0) +> start = datetime.fromtimestamp(0) > stop = datetime.now() > # Delete data with location = 'Prague' > successfully = await client.delete_api().delete(start=start, stop=stop, bucket="my-bucket", diff --git a/examples/asynchronous.py b/examples/asynchronous.py index 4205d461..ad0b876c 100644 --- a/examples/asynchronous.py +++ b/examples/asynchronous.py @@ -76,7 +76,7 @@ async def main(): Delete data """ print(f"\n------- Delete data with location = 'Prague' -------\n") - successfully = await client.delete_api().delete(start=datetime.utcfromtimestamp(0), stop=datetime.now(), + successfully = await client.delete_api().delete(start=datetime.fromtimestamp(0), stop=datetime.now(), predicate="location = \"Prague\"", bucket="my-bucket") print(f" > successfully: {successfully}") diff --git a/examples/example.py b/examples/example.py index 0082ade1..f6ac61f6 100644 --- a/examples/example.py +++ b/examples/example.py @@ -1,5 +1,5 @@ import codecs -from datetime import datetime +from datetime import datetime, timezone from influxdb_client import WritePrecision, InfluxDBClient, Point from influxdb_client.client.write_api import SYNCHRONOUS @@ -7,8 +7,8 @@ with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org", debug=False) as client: query_api = client.query_api() - p = Point("my_measurement").tag("location", "Prague").field("temperature", 25.3).time(datetime.utcnow(), - WritePrecision.MS) + p = Point("my_measurement").tag("location", "Prague").field("temperature", 25.3) \ + .time(datetime.now(tz=timezone.utc), WritePrecision.MS) write_api = client.write_api(write_options=SYNCHRONOUS) # write using point structure diff --git a/examples/influx_cloud.py b/examples/influx_cloud.py index 6c8ed6f2..96b0fc3c 100644 --- a/examples/influx_cloud.py +++ b/examples/influx_cloud.py @@ -2,7 +2,7 @@ Connect to InfluxDB 
2.0 - write data and query them """ -from datetime import datetime +from datetime import datetime, timezone from influxdb_client import Point, InfluxDBClient from influxdb_client.client.write_api import SYNCHRONOUS @@ -23,7 +23,8 @@ """ Write data by Point structure """ - point = Point(kind).tag('host', host).tag('device', device).field('value', 25.3).time(time=datetime.utcnow()) + point = Point(kind).tag('host', host).tag('device', device).field('value', 25.3) \ + .time(time=datetime.now(tz=timezone.utc)) print(f'Writing to InfluxDB cloud: {point.to_line_protocol()} ...') diff --git a/examples/logging_handler.py b/examples/logging_handler.py index 08f2ae05..6f875f7b 100644 --- a/examples/logging_handler.py +++ b/examples/logging_handler.py @@ -45,7 +45,7 @@ def use_logger(): Point('my-measurement') .tag('host', 'host1') .field('temperature', 25.3) - .time(datetime.datetime.utcnow(), WritePrecision.MS) + .time(datetime.datetime.now(tz=datetime.timezone.utc), WritePrecision.MS) ) diff --git a/examples/write_structured_data.py b/examples/write_structured_data.py index 26a904f3..14a4e8ae 100644 --- a/examples/write_structured_data.py +++ b/examples/write_structured_data.py @@ -1,6 +1,6 @@ from collections import namedtuple from dataclasses import dataclass -from datetime import datetime +from datetime import datetime, timezone from influxdb_client import InfluxDBClient from influxdb_client.client.write_api import SYNCHRONOUS @@ -37,7 +37,7 @@ class Car: version="2021.06.05.5874", pressure=125, temperature=10, - timestamp=datetime.utcnow()) + timestamp=datetime.now(tz=timezone.utc)) print(sensor) """ diff --git a/influxdb_client/client/write/point.py b/influxdb_client/client/write/point.py index 31d44d5c..cc95d204 100644 --- a/influxdb_client/client/write/point.py +++ b/influxdb_client/client/write/point.py @@ -10,7 +10,7 @@ from influxdb_client.client.util.date_utils import get_date_helper from influxdb_client.domain.write_precision import WritePrecision -EPOCH = datetime.utcfromtimestamp(0).replace(tzinfo=timezone.utc) +EPOCH = datetime.fromtimestamp(0, tz=timezone.utc) DEFAULT_WRITE_PRECISION = WritePrecision.NS diff --git a/tests/test_InfluxDBClientAsync.py b/tests/test_InfluxDBClientAsync.py index 123967a7..20eabd7d 100644 --- a/tests/test_InfluxDBClientAsync.py +++ b/tests/test_InfluxDBClientAsync.py @@ -2,7 +2,7 @@ import logging import unittest import os -from datetime import datetime +from datetime import datetime, timezone from io import StringIO import pytest @@ -202,11 +202,11 @@ async def test_write_empty_data(self): async def test_write_points_different_precision(self): measurement = generate_name("measurement") _point1 = Point(measurement).tag("location", "Prague").field("temperature", 25.3) \ - .time(datetime.utcfromtimestamp(0), write_precision=WritePrecision.S) + .time(datetime.fromtimestamp(0, tz=timezone.utc), write_precision=WritePrecision.S) _point2 = Point(measurement).tag("location", "New York").field("temperature", 24.3) \ - .time(datetime.utcfromtimestamp(1), write_precision=WritePrecision.MS) + .time(datetime.fromtimestamp(1, tz=timezone.utc), write_precision=WritePrecision.MS) _point3 = Point(measurement).tag("location", "Berlin").field("temperature", 24.3) \ - .time(datetime.utcfromtimestamp(2), write_precision=WritePrecision.NS) + .time(datetime.fromtimestamp(2, tz=timezone.utc), write_precision=WritePrecision.NS) await self.client.write_api().write(bucket="my-bucket", record=[_point1, _point2, _point3], write_precision=WritePrecision.NS) query = f''' @@ -228,7 
+228,8 @@ async def test_delete_api(self): measurement = generate_name("measurement") await self._prepare_data(measurement) - successfully = await self.client.delete_api().delete(start=datetime.utcfromtimestamp(0), stop=datetime.utcnow(), + successfully = await self.client.delete_api().delete(start=datetime.fromtimestamp(0), + stop=datetime.now(tz=timezone.utc), predicate="location = \"Prague\"", bucket="my-bucket") self.assertEqual(True, successfully) query = f''' diff --git a/tests/test_MultiprocessingWriter.py b/tests/test_MultiprocessingWriter.py index 940ae6ec..e7996b5f 100644 --- a/tests/test_MultiprocessingWriter.py +++ b/tests/test_MultiprocessingWriter.py @@ -1,6 +1,6 @@ import os import unittest -from datetime import datetime +from datetime import datetime, timezone from influxdb_client import WritePrecision, InfluxDBClient from influxdb_client.client.util.date_utils import get_date_helper @@ -53,7 +53,7 @@ def test_use_context_manager(self): self.assertIsNotNone(writer) def test_pass_parameters(self): - unique = get_date_helper().to_nanoseconds(datetime.utcnow() - datetime.utcfromtimestamp(0)) + unique = get_date_helper().to_nanoseconds(datetime.now(tz=timezone.utc) - datetime.fromtimestamp(0, tz=timezone.utc)) # write data with MultiprocessingWriter(url=self.url, token=self.token, org=self.org, write_options=SYNCHRONOUS) as writer: @@ -69,4 +69,4 @@ def test_pass_parameters(self): self.assertIsNotNone(record) self.assertEqual("a", record["tag"]) self.assertEqual(5, record["_value"]) - self.assertEqual(get_date_helper().to_utc(datetime.utcfromtimestamp(10)), record["_time"]) + self.assertEqual(get_date_helper().to_utc(datetime.fromtimestamp(10, tz=timezone.utc)), record["_time"]) diff --git a/tests/test_PandasDateTimeHelper.py b/tests/test_PandasDateTimeHelper.py index 60017172..2c7e4ce5 100644 --- a/tests/test_PandasDateTimeHelper.py +++ b/tests/test_PandasDateTimeHelper.py @@ -23,7 +23,7 @@ def test_parse_date(self): def test_to_nanoseconds(self): date = self.helper.parse_date('2020-08-07T06:21:57.331249158Z').replace(tzinfo=timezone.utc) - nanoseconds = self.helper.to_nanoseconds(date - datetime.utcfromtimestamp(0).replace(tzinfo=timezone.utc)) + nanoseconds = self.helper.to_nanoseconds(date - datetime.fromtimestamp(0, tz=timezone.utc)) self.assertEqual(nanoseconds, 1596781317331249158) From 74a0fbf95518bec3981f1bd24930154af9433162 Mon Sep 17 00:00:00 2001 From: Jacob Marble Date: Thu, 8 Aug 2024 22:02:21 -0700 Subject: [PATCH 03/23] fix(write-api): accept 201 response to write (#663) InfluxDB v3 will soon return 201 or 204, in cases where InfluxDB v1 and v2 only return 204. --- CHANGELOG.md | 1 + influxdb_client/client/write_api_async.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8f9df7e5..05c37a17 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,7 @@ ### Bug Fixes 1. [#652](https://github.com/influxdata/influxdb-client-python/pull/652): Refactor to `timezone` specific `datetime` helpers to avoid use deprecated functions +1. 
[#663](https://github.com/influxdata/influxdb-client-python/pull/663): Accept HTTP 201 response to write request ## 1.44.0 [2024-06-24] diff --git a/influxdb_client/client/write_api_async.py b/influxdb_client/client/write_api_async.py index 2f32802f..e9e2018b 100644 --- a/influxdb_client/client/write_api_async.py +++ b/influxdb_client/client/write_api_async.py @@ -122,4 +122,4 @@ async def write(self, bucket: str, org: str = None, precision=write_precision, async_req=False, _return_http_data_only=False, content_type="text/plain; charset=utf-8") - return response[1] == 204 + return response[1] in (201, 204) From db1630d52ecd29f2f1008ca25c742fa26ee3777c Mon Sep 17 00:00:00 2001 From: Jakub Bednar Date: Mon, 12 Aug 2024 10:15:34 +0200 Subject: [PATCH 04/23] chore(release): release version 1.45.0 [skip CI] --- CHANGELOG.md | 2 +- influxdb_client/version.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 05c37a17..a07e311e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,4 @@ -## 1.45.0 [unreleased] +## 1.45.0 [2024-08-12] ### Bug Fixes 1. [#652](https://github.com/influxdata/influxdb-client-python/pull/652): Refactor to `timezone` specific `datetime` helpers to avoid use deprecated functions diff --git a/influxdb_client/version.py b/influxdb_client/version.py index 237fe182..cc536e49 100644 --- a/influxdb_client/version.py +++ b/influxdb_client/version.py @@ -1,3 +1,3 @@ """Version of the Client that is used in User-Agent header.""" -VERSION = '1.45.0dev0' +VERSION = '1.45.0' From 0dcc35bc62a3a2cbed3d32d6e50480a88961a819 Mon Sep 17 00:00:00 2001 From: Jakub Bednar Date: Mon, 12 Aug 2024 10:20:01 +0200 Subject: [PATCH 05/23] chore(release): prepare for next development iteration --- CHANGELOG.md | 2 ++ conda/meta.yaml | 6 +++--- influxdb_client/version.py | 2 +- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a07e311e..9dab2d69 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,5 @@ +## 1.46.0 [unreleased] + ## 1.45.0 [2024-08-12] ### Bug Fixes diff --git a/conda/meta.yaml b/conda/meta.yaml index b250ec14..0c626075 100644 --- a/conda/meta.yaml +++ b/conda/meta.yaml @@ -1,5 +1,5 @@ {% set name = "influxdb_client" %} -{% set version = "1.44.0" %} +{% set version = "1.45.0" %} package: @@ -7,8 +7,8 @@ package: version: {{ version }} source: - url: https://files.pythonhosted.org/packages/9e/a1/ab4f2a3b90334c2e7cb795fbc85483a30134078b1bad0a165a34cb827aa7/influxdb_client-1.44.0.tar.gz - sha256: da9bc0cc49de4a0ac844d833c1efa65227ec5a2254e63cdbe07b5d532c0c37f8 + url: https://files.pythonhosted.org/packages/71/cd/a016f327d0669074526b36ae7c1bb84760e3c0d29911f6e8e4046a217f32/influxdb_client-1.45.0.tar.gz + sha256: e24aa0a838f58487b2382c654fa8183fb5ca504af70438a42ca20dd79669a2be build: number: 0 diff --git a/influxdb_client/version.py b/influxdb_client/version.py index cc536e49..4a0a5c92 100644 --- a/influxdb_client/version.py +++ b/influxdb_client/version.py @@ -1,3 +1,3 @@ """Version of the Client that is used in User-Agent header.""" -VERSION = '1.45.0' +VERSION = '1.46.0dev0' From 45e6607a94efbdb23fc607ca7540c74ff0749009 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Bedn=C3=A1=C5=99?= Date: Mon, 12 Aug 2024 21:23:37 +0200 Subject: [PATCH 06/23] fix: multiprocessing example (#664) * fix: multiprocessing example * docs: update CHANGELOG.md --- CHANGELOG.md | 3 + examples/import_data_set_multiprocessing.py | 129 ++++++++++---------- 2 files changed, 68 insertions(+), 64 deletions(-) diff 
--git a/CHANGELOG.md b/CHANGELOG.md index 9dab2d69..923317ab 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,8 @@ ## 1.46.0 [unreleased] +### Examples: +1. [#664](https://github.com/influxdata/influxdb-client-python/pull/664/): Multiprocessing example uses new source of data + ## 1.45.0 [2024-08-12] ### Bug Fixes diff --git a/examples/import_data_set_multiprocessing.py b/examples/import_data_set_multiprocessing.py index 60de64c5..b20b6174 100644 --- a/examples/import_data_set_multiprocessing.py +++ b/examples/import_data_set_multiprocessing.py @@ -4,6 +4,7 @@ https://github.com/toddwschneider/nyc-taxi-data """ import concurrent.futures +import gzip import io import multiprocessing from collections import OrderedDict @@ -92,10 +93,10 @@ def parse_row(row: OrderedDict): return Point("taxi-trip-data") \ .tag("dispatching_base_num", row['dispatching_base_num']) \ - .tag("PULocationID", row['PULocationID']) \ - .tag("DOLocationID", row['DOLocationID']) \ + .tag("PULocationID", row['PUlocationID']) \ + .tag("DOLocationID", row['DOlocationID']) \ .tag("SR_Flag", row['SR_Flag']) \ - .field("dropoff_datetime", row['dropoff_datetime']) \ + .field("dropoff_datetime", row['dropOff_datetime']) \ .time(row['pickup_datetime']) \ .to_line_protocol() @@ -113,7 +114,7 @@ def parse_rows(rows, total_size): counter_.value += len(_parsed_rows) if counter_.value % 10_000 == 0: print('{0:8}{1}'.format(counter_.value, ' - {0:.2f} %' - .format(100 * float(progress_.value) / float(int(total_size))) if total_size else "")) + .format(float(progress_.value) / float(int(total_size))) if total_size else "")) pass queue_.put(_parsed_rows) @@ -141,80 +142,80 @@ def init_counter(counter, progress, queue): progress_ = Value('i', 0) startTime = datetime.now() - url = "https://s3.amazonaws.com/nyc-tlc/trip+data/fhv_tripdata_2019-01.csv" - # url = "file:///Users/bednar/Developer/influxdata/influxdb-client-python/examples/fhv_tripdata_2019-01.csv" + url = "https://github.com/DataTalksClub/nyc-tlc-data/releases/download/fhv/fhv_tripdata_2019-01.csv.gz" """ Open URL and for stream data """ response = urlopen(url) - if response.headers: - content_length = response.headers['Content-length'] - io_wrapper = ProgressTextIOWrapper(response) - io_wrapper.progress = progress_ + # we can't get content length from response because the gzip stream content length is unknown + # so we set it to this value, just for progress display + content_length = 23143223 """ - Start writer as a new process + Open GZIP stream """ - writer = InfluxDBWriter(queue_) - writer.start() + with gzip.open(response, 'rb') as stream: + io_wrapper = ProgressTextIOWrapper(stream, encoding='utf-8') + io_wrapper.progress = progress_ - """ - Create process pool for parallel encoding into LineProtocol - """ - cpu_count = multiprocessing.cpu_count() - with concurrent.futures.ProcessPoolExecutor(cpu_count, initializer=init_counter, - initargs=(counter_, progress_, queue_)) as executor: """ - Converts incoming HTTP stream into sequence of LineProtocol + Start writer as a new process """ - data = rx \ - .from_iterable(DictReader(io_wrapper)) \ - .pipe(ops.buffer_with_count(10_000), - # Parse 10_000 rows into LineProtocol on subprocess - ops.flat_map(lambda rows: executor.submit(parse_rows, rows, content_length))) + writer = InfluxDBWriter(queue_) + writer.start() """ - Write data into InfluxDB + Create process pool for parallel encoding into LineProtocol """ - data.subscribe(on_next=lambda x: None, on_error=lambda ex: print(f'Unexpected error: {ex}')) - - """ - 
Terminate Writer - """ - queue_.put(None) - queue_.join() + cpu_count = multiprocessing.cpu_count() + with concurrent.futures.ProcessPoolExecutor(cpu_count, initializer=init_counter, + initargs=(counter_, progress_, queue_)) as executor: + """ + Converts incoming HTTP stream into sequence of LineProtocol + """ + data = rx \ + .from_iterable(DictReader(io_wrapper)) \ + .pipe(ops.buffer_with_count(10_000), + # Parse 10_000 rows into LineProtocol on subprocess + ops.map(lambda rows: executor.submit(parse_rows, rows, content_length))) + + """ + Write data into InfluxDB + """ + data.subscribe(on_next=lambda x: None, on_error=lambda ex: print(f'Unexpected error: {ex}')) - print() - print(f'Import finished in: {datetime.now() - startTime}') - print() - - """ - Querying 10 pickups from dispatching 'B00008' - """ - query = 'from(bucket:"my-bucket")' \ - '|> range(start: 2019-01-01T00:00:00Z, stop: now()) ' \ - '|> filter(fn: (r) => r._measurement == "taxi-trip-data")' \ - '|> filter(fn: (r) => r.dispatching_base_num == "B00008")' \ - '|> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")' \ - '|> rename(columns: {_time: "pickup_datetime"})' \ - '|> drop(columns: ["_start", "_stop"])|> limit(n:10, offset: 0)' - - client = InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org", debug=False) - result = client.query_api().query(query=query) + """ + Terminate Writer + """ + queue_.put(None) + queue_.join() - """ - Processing results - """ - print() - print("=== Querying 10 pickups from dispatching 'B00008' ===") - print() - for table in result: - for record in table.records: - print( - f'Dispatching: {record["dispatching_base_num"]} pickup: {record["pickup_datetime"]} dropoff: {record["dropoff_datetime"]}') + print() + print(f'Import finished in: {datetime.now() - startTime}') + print() - """ - Close client - """ - client.close() + """ + Querying 10 pickups from dispatching 'B00008' + """ + query = 'from(bucket:"my-bucket")' \ + '|> range(start: 2019-01-01T00:00:00Z, stop: now()) ' \ + '|> filter(fn: (r) => r._measurement == "taxi-trip-data")' \ + '|> filter(fn: (r) => r.dispatching_base_num == "B00008")' \ + '|> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")' \ + '|> rename(columns: {_time: "pickup_datetime"})' \ + '|> drop(columns: ["_start", "_stop"])|> limit(n:10, offset: 0)' + + with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org", debug=False) as client: + result = client.query_api().query(query=query) + + """ + Processing results + """ + print() + print("=== Querying 10 pickups from dispatching 'B00008' ===") + print() + for table in result: + for record in table.records: + print( + f'Dispatching: {record["dispatching_base_num"]} pickup: {record["pickup_datetime"]} dropoff: {record["dropoff_datetime"]}') From 7ad95339bfb08f25e7bf8e4da2b1bd379667b9ec Mon Sep 17 00:00:00 2001 From: karel-rehor Date: Wed, 21 Aug 2024 09:36:23 +0200 Subject: [PATCH 07/23] feat: add `headers` field to `InfluxDBError` and add example of use (#665) --- CHANGELOG.md | 1 + examples/README.md | 1 + examples/http_error_handling.py | 126 +++++++++++++++++++++++++++ influxdb_client/client/exceptions.py | 2 + tests/test_InfluxDBClientAsync.py | 19 ++++ tests/test_WriteApi.py | 37 ++++++++ 6 files changed, 186 insertions(+) create mode 100644 examples/http_error_handling.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 923317ab..e22bf238 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,7 @@ ### Examples: 1. 
[#664](https://github.com/influxdata/influxdb-client-python/pull/664/): Multiprocessing example uses new source of data +1. [#665](https://github.com/influxdata/influxdb-client-python/pull/665): Shows how to leverage header fields in errors returned on write. ## 1.45.0 [2024-08-12] diff --git a/examples/README.md b/examples/README.md index 1678d00e..2b42ffd7 100644 --- a/examples/README.md +++ b/examples/README.md @@ -15,6 +15,7 @@ - manually download [NYC TLC Trip Record Data](https://www1.nyc.gov/site/tlc/about/tlc-trip-record-data.page) - install Apache Arrow `pip install pyarrow` dependency - [write_batching_by_bytes_count.py](write_batching_by_bytes_count.py) - How to use RxPY to prepare batches by maximum bytes count. +- [http_error_handling.py](http_error_handling.py) - How to leverage HttpHeader information when errors are returned on write. ## Queries - [query.py](query.py) - How to query data into `FluxTable`s, `Stream` and `CSV` diff --git a/examples/http_error_handling.py b/examples/http_error_handling.py new file mode 100644 index 00000000..c125a7ff --- /dev/null +++ b/examples/http_error_handling.py @@ -0,0 +1,126 @@ +""" +Illustrates getting header values from Errors that may occur on write. + +To test against cloud set the following environment variables: + INFLUX_URL + INFLUX_TOKEN + INFLUX_DATABASE + INFLUX_ORG + +...otherwise will run against a standard OSS endpoint. +""" +import asyncio +import os +from typing import MutableMapping + +from influxdb_client import InfluxDBClient +from influxdb_client.client.exceptions import InfluxDBError +from influxdb_client.client.influxdb_client_async import InfluxDBClientAsync +from influxdb_client.client.write_api import SYNCHRONOUS +from influxdb_client.rest import ApiException + + +def get_envar(key, default): + try: + return os.environ[key] + except: + return default + + +class Config(object): + + def __init__(self): + self.url = get_envar("INFLUX_URL", "http://localhost:8086") + self.token = get_envar("INFLUX_TOKEN", "my-token") + self.bucket = get_envar("INFLUX_DATABASE", "my-bucket") + self.org = get_envar("INFLUX_ORG", "my-org") + + def __str__(self): + return (f"config:\n" + f" url: {self.url}\n" + f" token: ****redacted*****\n" + f" bucket: {self.bucket}\n" + f" org: {self.org}\n" + ) + + +# To encapsulate functions used in batch writing +class BatchCB(object): + + def success(self, conf: (str, str, str), data: str): + print(f"Write success: {conf}, data: {data}") + + def error(self, conf: (str, str, str), data: str, exception: InfluxDBError): + print(f"\nBatch -> Write failed: {conf}, data: {data}, error: {exception.message}") + report_headers(exception.headers) + + def retry(self, conf: (str, str, str), data: str, exception: InfluxDBError): + print(f"Write failed but retryable: {conf}, data: {data}, error: {exception}") + + +# simple reporter that server is available +def report_ping(ping: bool): + if not ping: + raise ValueError("InfluxDB: Failed to ping server") + else: + print("InfluxDB: ready") + + +# report some useful expected header fields +def report_headers(headers: MutableMapping[str, str]): + print(" Date: ", headers.get("Date")) + print(" X-Influxdb-Build: ", headers.get("X-Influxdb-Build")) + print(" X-Influxdb-Version: ", headers.get("X-Influxdb-Version")) # OSS version, Cloud should be None + print(" X-Platform-Error-Code: ", headers.get("X-Platform-Error-Code")) # OSS invalid, Cloud should be None + print(" Retry-After: ", headers.get("Retry-After")) # Should be None + print(" Trace-Id: ", 
headers.get("Trace-Id")) # OSS should be None, Cloud should return value + + +# try a write using a synchronous call +def use_sync(conf: Config): + print("Using sync") + with InfluxDBClient(url=conf.url, token=conf.token, org=conf.org) as client: + report_ping(client.ping()) + try: + client.write_api(write_options=SYNCHRONOUS).write(bucket=conf.bucket, record="cpu,location=G4 usage=") + except ApiException as ae: + print("\nSync -> Caught ApiException: ", ae.message) + report_headers(ae.headers) + + print("Sync write done") + + +# try a write using batch API +def use_batch(conf: Config): + print("Using batch") + with InfluxDBClient(url=conf.url, token=conf.token, org=conf.org) as client: + cb = BatchCB() + with client.write_api(success_callback=cb.success, + error_callback=cb.error, + retry_callback=cb.retry) as write_api: + write_api.write(bucket=conf.bucket, record="cpu,location=G9 usage=") + print("Batch write sent") + print("Batch write done") + + +# try a write using async.io +async def use_async(conf: Config): + print("Using async") + async with InfluxDBClientAsync(url=conf.url, token=conf.token, org=conf.org) as client: + report_ping(await client.ping()) + try: + await client.write_api().write(bucket=conf.bucket, record="cpu,location=G7 usage=") + except InfluxDBError as ie: + print("\nAsync -> Caught InfluxDBError: ", ie.message) + report_headers(ie.headers) + print("Async write done") + + +if __name__ == "__main__": + conf = Config() + print(conf) + use_sync(conf) + print("\n Continuing...\n") + use_batch(conf) + print("\n Continuing...\n") + asyncio.run(use_async(conf)) diff --git a/influxdb_client/client/exceptions.py b/influxdb_client/client/exceptions.py index 2ca235c8..bfa453e2 100644 --- a/influxdb_client/client/exceptions.py +++ b/influxdb_client/client/exceptions.py @@ -16,8 +16,10 @@ def __init__(self, response: HTTPResponse = None, message: str = None): self.response = response self.message = self._get_message(response) if isinstance(response, HTTPResponse): # response is HTTPResponse + self.headers = response.headers self.retry_after = response.headers.get('Retry-After') else: # response is RESTResponse + self.headers = response.getheaders() self.retry_after = response.getheader('Retry-After') else: self.response = None diff --git a/tests/test_InfluxDBClientAsync.py b/tests/test_InfluxDBClientAsync.py index 20eabd7d..7f8c6214 100644 --- a/tests/test_InfluxDBClientAsync.py +++ b/tests/test_InfluxDBClientAsync.py @@ -1,5 +1,6 @@ import asyncio import logging +import re import unittest import os from datetime import datetime, timezone @@ -390,6 +391,24 @@ async def test_query_exception_propagation(self): await self.client.query_api().query("buckets()", "my-org") self.assertEqual("unauthorized access", e.value.message) + @async_test + async def test_write_exception_propagation(self): + await self.client.close() + self.client = InfluxDBClientAsync(url="http://localhost:8086", token="wrong", org="my-org") + + with pytest.raises(InfluxDBError) as e: + await self.client.write_api().write(bucket="my_bucket", + record="temperature,location=hic cels=") + self.assertEqual("unauthorized access", e.value.message) + headers = e.value.headers + self.assertIsNotNone(headers) + self.assertIsNotNone(headers.get("Content-Length")) + self.assertIsNotNone(headers.get("Date")) + self.assertIsNotNone(headers.get("X-Platform-Error-Code")) + self.assertIn("application/json", headers.get("Content-Type")) + self.assertTrue(re.compile("^v.*").match(headers.get("X-Influxdb-Version"))) + 
self.assertEqual("OSS", headers.get("X-Influxdb-Build")) + @async_test @aioresponses() async def test_parse_utf8_two_bytes_character(self, mocked): diff --git a/tests/test_WriteApi.py b/tests/test_WriteApi.py index 474bf394..b2cc7ca7 100644 --- a/tests/test_WriteApi.py +++ b/tests/test_WriteApi.py @@ -3,12 +3,16 @@ from __future__ import absolute_import import datetime +import json +import logging import os +import re import sys import unittest from collections import namedtuple from datetime import timedelta from multiprocessing.pool import ApplyResult +from types import SimpleNamespace import httpretty import pytest @@ -190,6 +194,17 @@ def test_write_error(self): self.assertEqual(400, exception.status) self.assertEqual("Bad Request", exception.reason) + # assert headers + self.assertIsNotNone(exception.headers) + self.assertIsNotNone(exception.headers.get("Content-Length")) + self.assertIsNotNone(exception.headers.get("Date")) + self.assertIsNotNone(exception.headers.get("X-Platform-Error-Code")) + self.assertIn("application/json", exception.headers.get("Content-Type")) + self.assertTrue(re.compile("^v.*").match(exception.headers.get("X-Influxdb-Version"))) + self.assertEqual("OSS", exception.headers.get("X-Influxdb-Build")) + # assert body + b = json.loads(exception.body, object_hook=lambda d: SimpleNamespace(**d)) + self.assertTrue(re.compile("^unable to parse.*invalid field format").match(b.message)) def test_write_dictionary(self): _bucket = self.create_test_bucket() @@ -609,6 +624,28 @@ def test_write_result(self): self.assertEqual(None, result.get()) self.delete_test_bucket(_bucket) + def test_write_error(self): + _bucket = self.create_test_bucket() + + _record = "h2o_feet,location=coyote_creek level\\ water_level=" + result = self.write_client.write(_bucket.name, self.org, _record) + + with self.assertRaises(ApiException) as cm: + result.get() + self.assertEqual(400, cm.exception.status) + self.assertEqual("Bad Request", cm.exception.reason) + # assert headers + self.assertIsNotNone(cm.exception.headers) + self.assertIsNotNone(cm.exception.headers.get("Content-Length")) + self.assertIsNotNone(cm.exception.headers.get("Date")) + self.assertIsNotNone(cm.exception.headers.get("X-Platform-Error-Code")) + self.assertIn("application/json", cm.exception.headers.get("Content-Type")) + self.assertTrue(re.compile("^v.*").match(cm.exception.headers.get("X-Influxdb-Version"))) + self.assertEqual("OSS", cm.exception.headers.get("X-Influxdb-Build")) + # assert body + b = json.loads(cm.exception.body, object_hook=lambda d: SimpleNamespace(**d)) + self.assertTrue(re.compile("^unable to parse.*missing field value").match(b.message)) + def test_write_dictionaries(self): bucket = self.create_test_bucket() From 63949b53efd8c19ad954dca82c0fd09fe26af1e1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Bedn=C3=A1=C5=99?= Date: Mon, 2 Sep 2024 12:49:43 +0200 Subject: [PATCH 08/23] fix: add py.typed to the package definition (#667) --- CHANGELOG.md | 3 +++ setup.py | 1 + 2 files changed, 4 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index e22bf238..8006956a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,8 @@ ## 1.46.0 [unreleased] +### Bug Fixes +1. [#667](https://github.com/influxdata/influxdb-client-python/pull/667): Missing `py.typed` in distribution package + ### Examples: 1. [#664](https://github.com/influxdata/influxdb-client-python/pull/664/): Multiprocessing example uses new source of data 1. 
[#665](https://github.com/influxdata/influxdb-client-python/pull/665): Shows how to leverage header fields in errors returned on write. diff --git a/setup.py b/setup.py index 5596713f..6d4a34cb 100644 --- a/setup.py +++ b/setup.py @@ -66,6 +66,7 @@ extras_require={'extra': extra_requires, 'ciso': ciso_requires, 'async': async_requires, 'test': test_requires}, long_description_content_type="text/markdown", packages=find_packages(exclude=('tests*',)), + package_data={'influxdb_client': ['py.typed']}, test_suite='tests', python_requires='>=3.7', include_package_data=True, From dfd815d92dd26817396b9594c79c6c6fd977bd36 Mon Sep 17 00:00:00 2001 From: Jakub Bednar Date: Fri, 13 Sep 2024 08:57:42 +0200 Subject: [PATCH 09/23] chore(release): release version 1.46.0 [skip CI] --- CHANGELOG.md | 2 +- influxdb_client/version.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8006956a..742634f7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,4 @@ -## 1.46.0 [unreleased] +## 1.46.0 [2024-09-13] ### Bug Fixes 1. [#667](https://github.com/influxdata/influxdb-client-python/pull/667): Missing `py.typed` in distribution package diff --git a/influxdb_client/version.py b/influxdb_client/version.py index 4a0a5c92..413edc0d 100644 --- a/influxdb_client/version.py +++ b/influxdb_client/version.py @@ -1,3 +1,3 @@ """Version of the Client that is used in User-Agent header.""" -VERSION = '1.46.0dev0' +VERSION = '1.46.0' From 70ce8cb765ec742a96b665e6d41c97e7cd59ba8a Mon Sep 17 00:00:00 2001 From: Jakub Bednar Date: Fri, 13 Sep 2024 09:01:22 +0200 Subject: [PATCH 10/23] chore(release): prepare for next development iteration --- CHANGELOG.md | 2 ++ conda/meta.yaml | 6 +++--- influxdb_client/version.py | 2 +- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 742634f7..ebaccef3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,5 @@ +## 1.47.0 [unreleased] + ## 1.46.0 [2024-09-13] ### Bug Fixes diff --git a/conda/meta.yaml b/conda/meta.yaml index 0c626075..2595c51e 100644 --- a/conda/meta.yaml +++ b/conda/meta.yaml @@ -1,5 +1,5 @@ {% set name = "influxdb_client" %} -{% set version = "1.45.0" %} +{% set version = "1.46.0" %} package: @@ -7,8 +7,8 @@ package: version: {{ version }} source: - url: https://files.pythonhosted.org/packages/71/cd/a016f327d0669074526b36ae7c1bb84760e3c0d29911f6e8e4046a217f32/influxdb_client-1.45.0.tar.gz - sha256: e24aa0a838f58487b2382c654fa8183fb5ca504af70438a42ca20dd79669a2be + url: https://files.pythonhosted.org/packages/53/9e/4bd499eff06eab47f7995178623d508703d2b4fedab1a3544b04ef06fb0c/influxdb_client-1.46.0.tar.gz + sha256: d5b5f3787db8ad75e64bf069fdc4d441e43b1a1d57f2c11082af309ef0b9722c build: number: 0 diff --git a/influxdb_client/version.py b/influxdb_client/version.py index 413edc0d..76745307 100644 --- a/influxdb_client/version.py +++ b/influxdb_client/version.py @@ -1,3 +1,3 @@ """Version of the Client that is used in User-Agent header.""" -VERSION = '1.46.0' +VERSION = '1.47.0dev0' From 3d70dbd66e434d5ac58e8d018e566d3089223db3 Mon Sep 17 00:00:00 2001 From: Vitaly Chait Date: Tue, 24 Sep 2024 15:24:49 +0300 Subject: [PATCH 11/23] fix: url attribute type validation (#672) --- CHANGELOG.md | 3 +++ influxdb_client/client/_base.py | 2 ++ tests/test_InfluxDBClient.py | 29 +++++++++++++++++++++++++++++ 3 files changed, 34 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index ebaccef3..6937e242 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,8 @@ ## 1.47.0 
[unreleased] +### Bug Fixes +1. [#672](https://github.com/influxdata/influxdb-client-python/pull/672): Adding type validation to url attribute in client object + ## 1.46.0 [2024-09-13] ### Bug Fixes diff --git a/influxdb_client/client/_base.py b/influxdb_client/client/_base.py index 8dcf75e9..d4f17901 100644 --- a/influxdb_client/client/_base.py +++ b/influxdb_client/client/_base.py @@ -53,6 +53,8 @@ def __init__(self, url, token, debug=None, timeout=10_000, enable_gzip=False, or self.default_tags = default_tags self.conf = _Configuration() + if not isinstance(self.url, str): + raise ValueError('"url" attribute is not str instance') if self.url.endswith("/"): self.conf.host = self.url[:-1] else: diff --git a/tests/test_InfluxDBClient.py b/tests/test_InfluxDBClient.py index 7fdf834f..228f391b 100644 --- a/tests/test_InfluxDBClient.py +++ b/tests/test_InfluxDBClient.py @@ -323,6 +323,35 @@ def test_version(self): version = self.client.version() self.assertTrue(len(version) > 0) + def test_url_attribute(self): + # Wrong URL attribute + wrong_types = [ + None, + True, False, + 123, 123.5, + dict({"url" : "http://localhost:8086"}), + list(["http://localhost:8086"]), + tuple(("http://localhost:8086")) + ] + correct_types = [ + "http://localhost:8086" + ] + for url_type in wrong_types: + try: + client_not_running = InfluxDBClient(url=url_type, token="my-token", debug=True) + status = True + except ValueError as e: + status = False + self.assertFalse(status) + for url_type in correct_types: + try: + client_not_running = InfluxDBClient(url=url_type, token="my-token", debug=True) + status = True + except ValueError as e: + status = False + self.assertTrue(status) + + def test_build(self): build = self.client.build() self.assertEqual('oss', build.lower()) From 28a4a048345f18d7dcc64ed85e5b9fe91899f0a9 Mon Sep 17 00:00:00 2001 From: Daniel <158782574+youarecode@users.noreply.github.com> Date: Tue, 8 Oct 2024 05:44:24 -0300 Subject: [PATCH 12/23] fix: type linting at client.flux_table.FluxTable (#677) --- CHANGELOG.md | 3 ++- influxdb_client/client/flux_table.py | 4 ++-- setup.py | 1 - 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6937e242..1511f9f9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,7 +1,8 @@ ## 1.47.0 [unreleased] ### Bug Fixes -1. [#672](https://github.com/influxdata/influxdb-client-python/pull/672): Adding type validation to url attribute in client object +1. [#672](https://github.com/influxdata/influxdb-client-python/pull/672): Add type validation to url attribute in client object +1. 
[#674](https://github.com/influxdata/influxdb-client-python/pull/674): Add type linting to client.flux_table.FluxTable, remove duplicated `from pathlib import Path` at setup.py ## 1.46.0 [2024-09-13] diff --git a/influxdb_client/client/flux_table.py b/influxdb_client/client/flux_table.py index 98a83159..5fd9a061 100644 --- a/influxdb_client/client/flux_table.py +++ b/influxdb_client/client/flux_table.py @@ -46,8 +46,8 @@ class FluxTable(FluxStructure): def __init__(self) -> None: """Initialize defaults.""" - self.columns = [] - self.records = [] + self.columns: List[FluxColumn] = [] + self.records: List[FluxRecord] = [] def get_group_key(self): """ diff --git a/setup.py b/setup.py index 6d4a34cb..cda0d087 100644 --- a/setup.py +++ b/setup.py @@ -44,7 +44,6 @@ 'aiocsv>=1.2.2' ] -from pathlib import Path this_directory = Path(__file__).parent long_description = (this_directory / "README.md").read_text() From 7e01edbe6ce61e4c4bdd18addf8c3a64f32385a3 Mon Sep 17 00:00:00 2001 From: karel-rehor Date: Wed, 9 Oct 2024 09:57:10 +0200 Subject: [PATCH 13/23] fix: async write prec where DEFAULT_PRECISION should not be used (#675) * fix: (WIP) issue 669 write precision to default in async API * chore: fix lint issues * docs: update CHANGELOG.md * chore: improve indexing of range --- CHANGELOG.md | 6 +- influxdb_client/client/write_api_async.py | 27 ++-- tests/test_InfluxDBClientAsync.py | 149 ++++++++++++++++++++-- 3 files changed, 159 insertions(+), 23 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1511f9f9..e7d08c81 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,10 @@ ## 1.47.0 [unreleased] ### Bug Fixes -1. [#672](https://github.com/influxdata/influxdb-client-python/pull/672): Add type validation to url attribute in client object -1. [#674](https://github.com/influxdata/influxdb-client-python/pull/674): Add type linting to client.flux_table.FluxTable, remove duplicated `from pathlib import Path` at setup.py + +1. [#672](https://github.com/influxdata/influxdb-client-python/pull/672): Adding type validation to url attribute in client object +2. [#674](https://github.com/influxdata/influxdb-client-python/pull/674): Add type linting to client.flux_table.FluxTable, remove duplicated `from pathlib import Path` at setup.py +3. 
[#675](https://github.com/influxdata/influxdb-client-python/pull/675): Ensures WritePrecision in Point is preferred to `DEFAULT_PRECISION` ## 1.46.0 [2024-09-13] diff --git a/influxdb_client/client/write_api_async.py b/influxdb_client/client/write_api_async.py index e9e2018b..38937eca 100644 --- a/influxdb_client/client/write_api_async.py +++ b/influxdb_client/client/write_api_async.py @@ -1,5 +1,6 @@ """Collect and async write time series data to InfluxDB Cloud or InfluxDB OSS.""" import logging +from asyncio import ensure_future, gather from collections import defaultdict from typing import Union, Iterable, NamedTuple @@ -114,12 +115,20 @@ async def write(self, bucket: str, org: str = None, self._append_default_tags(record) payloads = defaultdict(list) - self._serialize(record, write_precision, payloads, precision_from_point=False, **kwargs) - - # joint list by \n - body = b'\n'.join(payloads[write_precision]) - response = await self._write_service.post_write_async(org=org, bucket=bucket, body=body, - precision=write_precision, async_req=False, - _return_http_data_only=False, - content_type="text/plain; charset=utf-8") - return response[1] in (201, 204) + self._serialize(record, write_precision, payloads, precision_from_point=True, **kwargs) + + futures = [] + for payload_precision, payload_line in payloads.items(): + futures.append(ensure_future + (self._write_service.post_write_async(org=org, bucket=bucket, + body=b'\n'.join(payload_line), + precision=payload_precision, async_req=False, + _return_http_data_only=False, + content_type="text/plain; charset=utf-8"))) + + results = await gather(*futures, return_exceptions=True) + for result in results: + if isinstance(result, Exception): + raise result + + return False not in [re[1] in (201, 204) for re in results] diff --git a/tests/test_InfluxDBClientAsync.py b/tests/test_InfluxDBClientAsync.py index 7f8c6214..cb0586b9 100644 --- a/tests/test_InfluxDBClientAsync.py +++ b/tests/test_InfluxDBClientAsync.py @@ -1,11 +1,15 @@ import asyncio +import dateutil.parser import logging +import math import re +import time import unittest import os from datetime import datetime, timezone from io import StringIO +import pandas import pytest import warnings from aioresponses import aioresponses @@ -199,30 +203,151 @@ async def test_write_empty_data(self): self.assertEqual(True, response) + def gen_fractional_utc(self, nano, precision) -> str: + raw_sec = nano / 1_000_000_000 + if precision == WritePrecision.NS: + rem = f"{nano % 1_000_000_000}".rjust(9,"0").rstrip("0") + return (datetime.fromtimestamp(math.floor(raw_sec), tz=timezone.utc) + .isoformat() + .replace("+00:00", "") + f".{rem}Z") + #f".{rem}Z")) + elif precision == WritePrecision.US: + # rem = f"{round(nano / 1_000) % 1_000_000}"#.ljust(6,"0") + return (datetime.fromtimestamp(round(raw_sec,6), tz=timezone.utc) + .isoformat() + .replace("+00:00","") + .strip("0") + "Z" + ) + elif precision == WritePrecision.MS: + #rem = f"{round(nano / 1_000_000) % 1_000}".rjust(3, "0") + return (datetime.fromtimestamp(round(raw_sec,3), tz=timezone.utc) + .isoformat() + .replace("+00:00","") + .strip("0") + "Z" + ) + elif precision == WritePrecision.S: + return (datetime.fromtimestamp(round(raw_sec), tz=timezone.utc) + .isoformat() + .replace("+00:00","Z")) + else: + raise ValueError(f"Unknown precision: {precision}") + + @async_test async def test_write_points_different_precision(self): + now_ns = time.time_ns() + now_us = now_ns / 1_000 + now_ms = now_us / 1_000 + now_s = now_ms / 1_000 + + now_date_s = 
self.gen_fractional_utc(now_ns, WritePrecision.S) + now_date_ms = self.gen_fractional_utc(now_ns, WritePrecision.MS) + now_date_us = self.gen_fractional_utc(now_ns, WritePrecision.US) + now_date_ns = self.gen_fractional_utc(now_ns, WritePrecision.NS) + + points = { + WritePrecision.S: [], + WritePrecision.MS: [], + WritePrecision.US: [], + WritePrecision.NS: [] + } + + expected = {} + measurement = generate_name("measurement") - _point1 = Point(measurement).tag("location", "Prague").field("temperature", 25.3) \ - .time(datetime.fromtimestamp(0, tz=timezone.utc), write_precision=WritePrecision.S) - _point2 = Point(measurement).tag("location", "New York").field("temperature", 24.3) \ - .time(datetime.fromtimestamp(1, tz=timezone.utc), write_precision=WritePrecision.MS) - _point3 = Point(measurement).tag("location", "Berlin").field("temperature", 24.3) \ - .time(datetime.fromtimestamp(2, tz=timezone.utc), write_precision=WritePrecision.NS) - await self.client.write_api().write(bucket="my-bucket", record=[_point1, _point2, _point3], + # basic date-time value + points[WritePrecision.S].append(Point(measurement).tag("method", "SecDateTime").field("temperature", 25.3) \ + .time(datetime.fromtimestamp(round(now_s), tz=timezone.utc), write_precision=WritePrecision.S)) + expected['SecDateTime'] = now_date_s + points[WritePrecision.MS].append(Point(measurement).tag("method", "MilDateTime").field("temperature", 24.3) \ + .time(datetime.fromtimestamp(round(now_s,3), tz=timezone.utc), write_precision=WritePrecision.MS)) + expected['MilDateTime'] = now_date_ms + points[WritePrecision.US].append(Point(measurement).tag("method", "MicDateTime").field("temperature", 24.3) \ + .time(datetime.fromtimestamp(round(now_s,6), tz=timezone.utc), write_precision=WritePrecision.US)) + expected['MicDateTime'] = now_date_us + # N.B. datetime does not handle nanoseconds +# points[WritePrecision.NS].append(Point(measurement).tag("method", "NanDateTime").field("temperature", 24.3) \ +# .time(datetime.fromtimestamp(now_s, tz=timezone.utc), write_precision=WritePrecision.NS)) + + # long timestamps based on POSIX time + points[WritePrecision.S].append(Point(measurement).tag("method", "SecPosix").field("temperature", 24.3) \ + .time(round(now_s), write_precision=WritePrecision.S)) + expected['SecPosix'] = now_date_s + points[WritePrecision.MS].append(Point(measurement).tag("method", "MilPosix").field("temperature", 24.3) \ + .time(round(now_ms), write_precision=WritePrecision.MS)) + expected['MilPosix'] = now_date_ms + points[WritePrecision.US].append(Point(measurement).tag("method", "MicPosix").field("temperature", 24.3) \ + .time(round(now_us), write_precision=WritePrecision.US)) + expected['MicPosix'] = now_date_us + points[WritePrecision.NS].append(Point(measurement).tag("method", "NanPosix").field("temperature", 24.3) \ + .time(now_ns, write_precision=WritePrecision.NS)) + expected['NanPosix'] = now_date_ns + + # ISO Zulu datetime with ms, us and ns e.g. 
"2024-09-27T13:17:16.412399728Z" + points[WritePrecision.S].append(Point(measurement).tag("method", "SecDTZulu").field("temperature", 24.3) \ + .time(now_date_s, write_precision=WritePrecision.S)) + expected['SecDTZulu'] = now_date_s + points[WritePrecision.MS].append(Point(measurement).tag("method", "MilDTZulu").field("temperature", 24.3) \ + .time(now_date_ms, write_precision=WritePrecision.MS)) + expected['MilDTZulu'] = now_date_ms + points[WritePrecision.US].append(Point(measurement).tag("method", "MicDTZulu").field("temperature", 24.3) \ + .time(now_date_us, write_precision=WritePrecision.US)) + expected['MicDTZulu'] = now_date_us + # This keeps resulting in micro second resolution in response +# points[WritePrecision.NS].append(Point(measurement).tag("method", "NanDTZulu").field("temperature", 24.3) \ +# .time(now_date_ns, write_precision=WritePrecision.NS)) + + recs = [x for x in [v for v in points.values()]] + + await self.client.write_api().write(bucket="my-bucket", record=recs, write_precision=WritePrecision.NS) query = f''' from(bucket:"my-bucket") |> range(start: 0) |> filter(fn: (r) => r["_measurement"] == "{measurement}") - |> keep(columns: ["_time"]) + |> keep(columns: ["method","_time"]) ''' query_api = self.client.query_api() + # ensure calls fully processed on server + await asyncio.sleep(1) + raw = await query_api.query_raw(query) - self.assertEqual(8, len(raw.splitlines())) - self.assertEqual(',,0,1970-01-01T00:00:02Z', raw.splitlines()[4]) - self.assertEqual(',,0,1970-01-01T00:00:01Z', raw.splitlines()[5]) - self.assertEqual(',,0,1970-01-01T00:00:00Z', raw.splitlines()[6]) + linesRaw = raw.splitlines()[4:] + + lines = [] + for lnr in linesRaw: + lines.append(lnr[2:].split(",")) + + def get_time_for_method(lines, method): + for l in lines: + if l[2] == method: + return l[1] + return "" + + self.assertEqual(15, len(raw.splitlines())) + + for key in expected: + t = get_time_for_method(lines,key) + comp_time = dateutil.parser.isoparse(get_time_for_method(lines,key)) + target_time = dateutil.parser.isoparse(expected[key]) + self.assertEqual(target_time.date(), comp_time.date()) + self.assertEqual(target_time.hour, comp_time.hour) + self.assertEqual(target_time.second,comp_time.second) + dif = abs(target_time.microsecond - comp_time.microsecond) + if key[:3] == "Sec": + # Already tested + pass + elif key[:3] == "Mil": + # may be slight rounding differences + self.assertLess(dif, 1500, f"failed to match timestamp for {key} {target_time} != {comp_time}") + elif key[:3] == "Mic": + # may be slight rounding differences + self.assertLess(dif, 150, f"failed to match timestamp for {key} {target_time} != {comp_time}") + elif key[:3] == "Nan": + self.assertEqual(expected[key], get_time_for_method(lines, key)) + else: + raise Exception(f"Unhandled key {key}") @async_test async def test_delete_api(self): From 06b71146b20d2f3e7d40eac4fe5d2d81e4b02c62 Mon Sep 17 00:00:00 2001 From: Jakub Bednar Date: Tue, 22 Oct 2024 07:56:31 +0200 Subject: [PATCH 14/23] chore(release): release version 1.47.0 [skip CI] --- CHANGELOG.md | 2 +- influxdb_client/version.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e7d08c81..727c6ea1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,4 @@ -## 1.47.0 [unreleased] +## 1.47.0 [2024-10-22] ### Bug Fixes diff --git a/influxdb_client/version.py b/influxdb_client/version.py index 76745307..19339327 100644 --- a/influxdb_client/version.py +++ b/influxdb_client/version.py @@ -1,3 +1,3 @@ """Version of 
the Client that is used in User-Agent header.""" -VERSION = '1.47.0dev0' +VERSION = '1.47.0' From 012c50aae5a72160dfe381f4002c40934a4ab880 Mon Sep 17 00:00:00 2001 From: Jakub Bednar Date: Tue, 22 Oct 2024 08:02:30 +0200 Subject: [PATCH 15/23] chore(release): prepare for next development iteration --- CHANGELOG.md | 2 ++ conda/meta.yaml | 6 +++--- influxdb_client/version.py | 2 +- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 727c6ea1..c7e0c837 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,5 @@ +## 1.48.0 [unreleased] + ## 1.47.0 [2024-10-22] ### Bug Fixes diff --git a/conda/meta.yaml b/conda/meta.yaml index 2595c51e..c7598938 100644 --- a/conda/meta.yaml +++ b/conda/meta.yaml @@ -1,5 +1,5 @@ {% set name = "influxdb_client" %} -{% set version = "1.46.0" %} +{% set version = "1.47.0" %} package: @@ -7,8 +7,8 @@ package: version: {{ version }} source: - url: https://files.pythonhosted.org/packages/53/9e/4bd499eff06eab47f7995178623d508703d2b4fedab1a3544b04ef06fb0c/influxdb_client-1.46.0.tar.gz - sha256: d5b5f3787db8ad75e64bf069fdc4d441e43b1a1d57f2c11082af309ef0b9722c + url: https://files.pythonhosted.org/packages/f0/d7/07b6d9c02b975ba7961427af5a40c910871a97f543b4f5762112084cea48/influxdb_client-1.47.0.tar.gz + sha256: 549f2c0ad458bbf79de1291ad5b07b823d80a3bcdbe77b4f0b436461aa008e2b build: number: 0 diff --git a/influxdb_client/version.py b/influxdb_client/version.py index 19339327..0cde499d 100644 --- a/influxdb_client/version.py +++ b/influxdb_client/version.py @@ -1,3 +1,3 @@ """Version of the Client that is used in User-Agent header.""" -VERSION = '1.47.0' +VERSION = '1.48.0dev0' From 44dd00eef8da9416ff9eebf6cb536dfbc4bcc5dc Mon Sep 17 00:00:00 2001 From: karel-rehor Date: Mon, 25 Nov 2024 14:49:34 +0100 Subject: [PATCH 16/23] fix: catch CancelledError and TimeoutError and add note about timeout (#679) * chore: add explanatory note to CancelledError. * docs: update CHANGELOG.md and README.md * chore: remove import of asyncio in non-async API --- CHANGELOG.md | 4 ++++ README.md | 7 +++++++ influxdb_client/client/flux_csv_parser.py | 6 +++++- 3 files changed, 16 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c7e0c837..35a88ac3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,9 @@ ## 1.48.0 [unreleased] +### Bug Fixes + +1. [#679](https://github.com/influxdata/influxdb-client-python/pull/679): Add note to caught errors about need to check client timeout. + ## 1.47.0 [2024-10-22] ### Bug Fixes diff --git a/README.md b/README.md index ef4eff86..5b541dcf 100644 --- a/README.md +++ b/README.md @@ -1313,6 +1313,13 @@ All async APIs are available via `influxdb_client.client.influxdb_client_async.I and also check to readiness of the InfluxDB via `/ping` endpoint: +The `InfluxDBClientAsync` constructor accepts a number of __configuration properties__. Most useful among these are: + +* `connection_pool_maxsize` - The total number of simultaneous connections. Defaults to `multiprocessing.cpu_count() * 5`. +* `enable_gzip` - enable gzip compression during `write` and `query` calls. Defaults to `false`. +* `proxy` - URL of an HTTP proxy to be used. +* `timeout` - The maximum number of milliseconds for handling HTTP requests from initial handshake to handling response data. This is passed directly to the underlying transport library. If large amounts of data are anticipated, for example from `query_api.query_stream(...)`, this should be increased to avoid `TimeoutError` or `CancelledError`. 
Defaults to 10_000 ms. + > ``` python > import asyncio > diff --git a/influxdb_client/client/flux_csv_parser.py b/influxdb_client/client/flux_csv_parser.py index 7a73e3f8..99e68094 100644 --- a/influxdb_client/client/flux_csv_parser.py +++ b/influxdb_client/client/flux_csv_parser.py @@ -1,6 +1,5 @@ """Parsing response from InfluxDB to FluxStructures or DataFrame.""" - import base64 import codecs import csv as csv_parser @@ -147,6 +146,11 @@ async def _parse_flux_response_async(self): df = self._prepare_data_frame() if not self._is_profiler_table(metadata.table): yield df + except BaseException as e: + e_type = type(e).__name__ + if "CancelledError" in e_type or "TimeoutError" in e_type: + e.add_note("Stream cancelled during read. Recommended: Check Influxdb client `timeout` setting.") + raise finally: self._close() From c8d806f1bb3357dec5bf62b0fe2eb84e1789b485 Mon Sep 17 00:00:00 2001 From: Jakub Bednar Date: Wed, 27 Nov 2024 09:24:29 +0100 Subject: [PATCH 17/23] chore(release): release version 1.48.0 [skip CI] --- CHANGELOG.md | 2 +- influxdb_client/version.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 35a88ac3..7b8b3c58 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,4 @@ -## 1.48.0 [unreleased] +## 1.48.0 [2024-11-27] ### Bug Fixes diff --git a/influxdb_client/version.py b/influxdb_client/version.py index 0cde499d..2008b9ce 100644 --- a/influxdb_client/version.py +++ b/influxdb_client/version.py @@ -1,3 +1,3 @@ """Version of the Client that is used in User-Agent header.""" -VERSION = '1.48.0dev0' +VERSION = '1.48.0' From 2d0adb9f1b73587643bd49f4eb323e7cd413d1c1 Mon Sep 17 00:00:00 2001 From: Jakub Bednar Date: Wed, 27 Nov 2024 09:29:29 +0100 Subject: [PATCH 18/23] chore(release): prepare for next development iteration --- CHANGELOG.md | 2 ++ conda/meta.yaml | 6 +++--- influxdb_client/version.py | 2 +- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7b8b3c58..ffb5f768 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,5 @@ +## 1.49.0 [unreleased] + ## 1.48.0 [2024-11-27] ### Bug Fixes diff --git a/conda/meta.yaml b/conda/meta.yaml index c7598938..af5027bd 100644 --- a/conda/meta.yaml +++ b/conda/meta.yaml @@ -1,5 +1,5 @@ {% set name = "influxdb_client" %} -{% set version = "1.47.0" %} +{% set version = "1.48.0" %} package: @@ -7,8 +7,8 @@ package: version: {{ version }} source: - url: https://files.pythonhosted.org/packages/f0/d7/07b6d9c02b975ba7961427af5a40c910871a97f543b4f5762112084cea48/influxdb_client-1.47.0.tar.gz - sha256: 549f2c0ad458bbf79de1291ad5b07b823d80a3bcdbe77b4f0b436461aa008e2b + url: https://files.pythonhosted.org/packages/11/47/b756380917cb4b968bd871fc006128e2cc9897fb1ab4bcf7d108f9601e78/influxdb_client-1.48.0.tar.gz + sha256: 414d5b5eff7d2b6b453f33e2826ea9872ea04a11996ba9c8604b0c1df57c8559 build: number: 0 diff --git a/influxdb_client/version.py b/influxdb_client/version.py index 2008b9ce..a4ac1780 100644 --- a/influxdb_client/version.py +++ b/influxdb_client/version.py @@ -1,3 +1,3 @@ """Version of the Client that is used in User-Agent header.""" -VERSION = '1.48.0' +VERSION = '1.49.0dev0' From ab16384b7e9931da8b74f9a19af89939c0a3b673 Mon Sep 17 00:00:00 2001 From: karel-rehor Date: Wed, 4 Dec 2024 11:19:19 +0100 Subject: [PATCH 19/23] chore: add type checks for Authorization with new example. (#682) * chore: add type checks for Authorization with new example. 
* docs: update CHANGELOG.md and examples/README.md --- CHANGELOG.md | 8 ++ examples/README.md | 1 + examples/authorizations.py | 103 +++++++++++++++++++ influxdb_client/client/authorizations_api.py | 4 +- influxdb_client/domain/authorization.py | 4 + tests/test_AuthorizationApi.py | 19 ++++ 6 files changed, 138 insertions(+), 1 deletion(-) create mode 100644 examples/authorizations.py diff --git a/CHANGELOG.md b/CHANGELOG.md index ffb5f768..3470d909 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,13 @@ ## 1.49.0 [unreleased] +### Bug Fixes + +1. [#682](https://github.com/influxdata/influxdb-client-python/pull/682): Check core types when creating Authentication instances. + +### Examples + +1. [#682](https://github.com/influxdata/influxdb-client-python/pull/682): New example for working with Authentication API. + ## 1.48.0 [2024-11-27] ### Bug Fixes diff --git a/examples/README.md b/examples/README.md index 2b42ffd7..7d3a5eea 100644 --- a/examples/README.md +++ b/examples/README.md @@ -28,6 +28,7 @@ - [monitoring_and_alerting.py](monitoring_and_alerting.py) - How to create the Check with Slack notification. - [task_example.py](task_example.py) - How to create a Task by API - [templates_management.py](templates_management.py) - How to use Templates and Stack API +- [authorizations.py](authorizations.py) - How to create and use authorizations. ## InfluxDB Cloud diff --git a/examples/authorizations.py b/examples/authorizations.py new file mode 100644 index 00000000..5857f624 --- /dev/null +++ b/examples/authorizations.py @@ -0,0 +1,103 @@ +import os + +from influxdb_client import InfluxDBClient, BucketRetentionRules, PermissionResource, Permission, Authorization, \ + WriteOptions +from influxdb_client.client.write_api import WriteType +from influxdb_client.rest import ApiException + +HOST_URL = os.environ.get("INFLUX_HOST") if os.environ.get("INFLUX_HOST") is not None else "http://localhost:8086" +TOKEN = os.environ.get("INFLUX_TOKEN") if os.environ.get("INFLUX_TOKEN") is not None else "my-token" +ORG = os.environ.get("INFLUX_ORG") if os.environ.get("INFLUX_ORG") is not None else "my-org" +SYS_BUCKET = os.environ.get("INFLUX_DB") if os.environ.get("INFLUX_DB") is not None else "my-bucket" +BUCKET = "special-bucket" + + +def create_auths(): + # Create authorizations with an initial client using all-access permissions + with InfluxDBClient(url=HOST_URL, token=TOKEN, org=ORG, debug=False) as globalClient: + bucket_rules = BucketRetentionRules(type="expire", every_seconds=3600) + bucket = globalClient.buckets_api().create_bucket(bucket_name=BUCKET, + retention_rules=bucket_rules, + org=ORG) + + bucket_permission_resource_r = PermissionResource(org=ORG, + org_id=bucket.org_id, + type="buckets", + id=bucket.id) + bucket_permission_resource_w = PermissionResource(org=ORG, + org_id=bucket.org_id, + type="buckets", + id=bucket.id) + read_bucket = Permission(action="read", resource=bucket_permission_resource_r) + write_bucket = Permission(action="write", resource=bucket_permission_resource_w) + permissions = [read_bucket, write_bucket] + auth_payload = Authorization(org_id=bucket.org_id, + permissions=permissions, + description="Shared bucket auth from Authorization object", + id="auth1_base") + auth_api = globalClient.authorizations_api() + # use keyword arguments + auth1 = auth_api.create_authorization(authorization=auth_payload) + # or use positional arguments + auth2 = auth_api.create_authorization(bucket.org_id, permissions) + + return auth1, auth2 + + +def try_sys_bucket(client): + 
print("starting to write") + + w_api = client.write_api(write_options=WriteOptions(write_type=WriteType.synchronous)) + try: + w_api.write(bucket=SYS_BUCKET, record="cpu,host=r2d2 use=3.14") + except ApiException as ae: + print(f"Write to {SYS_BUCKET} failed (as expected) due to:") + print(ae) + + +def try_restricted_bucket(client): + print("starting to write") + w_api = client.write_api(write_options=WriteOptions(write_type=WriteType.synchronous)) + + w_api.write(bucket=BUCKET, record="cpu,host=r2d2 usage=3.14") + print("written") + print("now query") + q_api = client.query_api() + query = f''' + from(bucket:"{BUCKET}") + |> range(start: -5m) + |> filter(fn: (r) => r["_measurement"] == "cpu")''' + + tables = q_api.query(query=query, org=ORG) + for table in tables: + for record in table.records: + print(record["_time"].isoformat(sep="T") + " | " + record["host"] + " | " + record["_field"] + "=" + str(record["_value"])) + + +def main(): + """ + a1 is generated using a local Authorization instance + a2 is generated using local permissions and an internally created Authorization + :return: void + """ + print("=== Setting up authorizations ===") + a1, a2 = create_auths() + + print("=== Using a1 authorization ===") + client1 = InfluxDBClient(url=HOST_URL, token=a1.token, org=ORG, debug=False) + print(" --- Try System Bucket ---") + try_sys_bucket(client1) + print(" --- Try Special Bucket ---") + try_restricted_bucket(client1) + print() + + print("=== Using a2 authorization ===") + client2 = InfluxDBClient(url=HOST_URL, token=a2.token, org=ORG, debug=False) + print(" --- Try System Bucket ---") + try_sys_bucket(client2) + print(" --- Try Special Bucket ---") + try_restricted_bucket(client2) + + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/influxdb_client/client/authorizations_api.py b/influxdb_client/client/authorizations_api.py index b7179b62..05be6ecd 100644 --- a/influxdb_client/client/authorizations_api.py +++ b/influxdb_client/client/authorizations_api.py @@ -11,7 +11,7 @@ def __init__(self, influxdb_client): self._influxdb_client = influxdb_client self._authorizations_service = AuthorizationsService(influxdb_client.api_client) - def create_authorization(self, org_id=None, permissions: list = None, + def create_authorization(self, org_id: str = None, permissions: list = None, authorization: Authorization = None) -> Authorization: """ Create an authorization. 
@@ -23,6 +23,8 @@ def create_authorization(self, org_id=None, permissions: list = None, """ if authorization is not None: + if not isinstance(authorization, Authorization): + raise TypeError(f"Attempt to use non-Authorization value for authorization: {authorization}") return self._authorizations_service.post_authorizations(authorization_post_request=authorization) # if org_id is not None and permissions is not None: diff --git a/influxdb_client/domain/authorization.py b/influxdb_client/domain/authorization.py index 67a0bfd3..aef38d9c 100644 --- a/influxdb_client/domain/authorization.py +++ b/influxdb_client/domain/authorization.py @@ -82,8 +82,12 @@ def __init__(self, created_at=None, updated_at=None, org_id=None, permissions=No if updated_at is not None: self.updated_at = updated_at if org_id is not None: + if not isinstance(org_id, str): + raise TypeError("org_id must be a string.") self.org_id = org_id if permissions is not None: + if not isinstance(permissions, list): + raise TypeError("permissions must be a list.") self.permissions = permissions if id is not None: self.id = id diff --git a/tests/test_AuthorizationApi.py b/tests/test_AuthorizationApi.py index 8b1850d9..036f0d60 100644 --- a/tests/test_AuthorizationApi.py +++ b/tests/test_AuthorizationApi.py @@ -45,6 +45,25 @@ def test_createAuthorization(self): self.assertEqual(authorization.links["user"], "/api/v2/users/" + self.user.id) + def test_AuthorizationTypeAssert(self): + self.assertRaisesRegex(TypeError, "org_id must be a string.", Authorization, org_id={}) + self.assertRaisesRegex(TypeError, "permissions must be a list.", Authorization, permissions={}) + + def test_createAuthorizationWrongTypes(self): + user_resource = PermissionResource(org_id=self.organization.id, type="users") + read_users = Permission(action="read", resource=user_resource) + + org_resource = PermissionResource(org_id=self.organization.id, type="orgs") + write_organizations = Permission(action="write", resource=org_resource) + + permissions = [read_users, write_organizations] + self.assertRaisesRegex(TypeError, "org_id must be a string.", + self.authorizations_api.create_authorization, permissions) + self.assertRaisesRegex(TypeError, "permissions must be a list", + self.authorizations_api.create_authorization, "123456789ABCDEF0", "Foo") + self.assertRaisesRegex(TypeError, "Attempt to use non-Authorization value for authorization: Foo", + self.authorizations_api.create_authorization, "123456789ABCDEF0", permissions, "Foo") + def test_authorizationDescription(self): organization = self.my_organization From 74013566a9df3e41dc1ef67cda0cbd0f6b83c733 Mon Sep 17 00:00:00 2001 From: Daniel O'Connor Date: Sat, 15 Feb 2025 18:48:39 +1030 Subject: [PATCH 20/23] docs: minor docstring typo/grammar correction (#687) --- influxdb_client/client/influxdb_client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/influxdb_client/client/influxdb_client.py b/influxdb_client/client/influxdb_client.py index 6079aac0..cbae75a9 100644 --- a/influxdb_client/client/influxdb_client.py +++ b/influxdb_client/client/influxdb_client.py @@ -265,7 +265,7 @@ def retry(self, conf: (str, str, str), data: str, exception: InfluxDBError): :param write_options: Write API configuration :param point_settings: settings to store default tags - :key success_callback: The callable ``callback`` to run after successfully writen a batch. + :key success_callback: The callable ``callback`` to run after having successfully written a batch. 
The callable must accept two arguments: - `Tuple`: ``(bucket, organization, precision)`` @@ -273,7 +273,7 @@ def retry(self, conf: (str, str, str), data: str, exception: InfluxDBError): **[batching mode]** - :key error_callback: The callable ``callback`` to run after unsuccessfully writen a batch. + :key error_callback: The callable ``callback`` to run after having unsuccessfully written a batch. The callable must accept three arguments: - `Tuple`: ``(bucket, organization, precision)`` From 9001fea11a6908d2f0590fde58cf9caa459bcb76 Mon Sep 17 00:00:00 2001 From: Jakub Bednar Date: Thu, 22 May 2025 11:28:21 +0200 Subject: [PATCH 21/23] chore(release): release version 1.49.0 [skip CI] --- CHANGELOG.md | 2 +- influxdb_client/version.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3470d909..9eea3a41 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,4 @@ -## 1.49.0 [unreleased] +## 1.49.0 [2025-05-22] ### Bug Fixes diff --git a/influxdb_client/version.py b/influxdb_client/version.py index a4ac1780..9c626c0c 100644 --- a/influxdb_client/version.py +++ b/influxdb_client/version.py @@ -1,3 +1,3 @@ """Version of the Client that is used in User-Agent header.""" -VERSION = '1.49.0dev0' +VERSION = '1.49.0' From 4ce3746cffbcad51cd2a16214b47b4579f86333e Mon Sep 17 00:00:00 2001 From: Jakub Bednar Date: Thu, 22 May 2025 13:52:04 +0200 Subject: [PATCH 22/23] chore(release): prepare for next development iteration --- CHANGELOG.md | 2 ++ conda/meta.yaml | 6 +++--- influxdb_client/version.py | 2 +- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9eea3a41..af872d37 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,5 @@ +## 1.50.0 [unreleased] + ## 1.49.0 [2025-05-22] ### Bug Fixes diff --git a/conda/meta.yaml b/conda/meta.yaml index af5027bd..33a0c26c 100644 --- a/conda/meta.yaml +++ b/conda/meta.yaml @@ -1,5 +1,5 @@ {% set name = "influxdb_client" %} -{% set version = "1.48.0" %} +{% set version = "1.49.0" %} package: @@ -7,8 +7,8 @@ package: version: {{ version }} source: - url: https://files.pythonhosted.org/packages/11/47/b756380917cb4b968bd871fc006128e2cc9897fb1ab4bcf7d108f9601e78/influxdb_client-1.48.0.tar.gz - sha256: 414d5b5eff7d2b6b453f33e2826ea9872ea04a11996ba9c8604b0c1df57c8559 + url: https://files.pythonhosted.org/packages/2a/f3/9c418215cf399529175ed5b198d15a21c2e29f28d90932107634b375c9ee/influxdb_client-1.49.0.tar.gz + sha256: 4a53a218adef6ac9458bfbd31fa08c76194f70310c6b4e01f53d804bd2c48e03 build: number: 0 diff --git a/influxdb_client/version.py b/influxdb_client/version.py index 9c626c0c..03ca288e 100644 --- a/influxdb_client/version.py +++ b/influxdb_client/version.py @@ -1,3 +1,3 @@ """Version of the Client that is used in User-Agent header.""" -VERSION = '1.49.0' +VERSION = '1.50.0dev0' From feb97eef067013881e798b322f90a83e27d07366 Mon Sep 17 00:00:00 2001 From: sonnh <46211823+NguyenHoangSon96@users.noreply.github.com> Date: Tue, 3 Jun 2025 20:12:37 +0700 Subject: [PATCH 23/23] feat: move setuptools to build dependency (#696) --- CHANGELOG.md | 4 ++++ pyproject.toml | 3 +++ setup.py | 1 - 3 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 pyproject.toml diff --git a/CHANGELOG.md b/CHANGELOG.md index af872d37..3da5e8ef 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,9 @@ ## 1.50.0 [unreleased] +### Features + +1. [696](https://github.com/influxdata/influxdb-client-python/pull/696): Move "setuptools" package to build dependency. 
+ ## 1.49.0 [2025-05-22] ### Bug Fixes diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..20c12656 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,3 @@ +[build-system] +requires = ["setuptools>=21.0.0"] +build-backend = "setuptools.build_meta" \ No newline at end of file diff --git a/setup.py b/setup.py index cda0d087..76c2748c 100644 --- a/setup.py +++ b/setup.py @@ -8,7 +8,6 @@ 'reactivex >= 4.0.4', 'certifi >= 14.05.14', 'python_dateutil >= 2.5.3', - 'setuptools >= 21.0.0', 'urllib3 >= 1.26.0' ] pFad - Phonifier reborn
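For reference, a minimal sketch (not part of the patches above) of how the async `timeout` option documented in PATCH 16 might be used to avoid `TimeoutError`/`CancelledError` while streaming large query results; the URL, token, org, bucket, and the 60-second timeout are placeholder values:

```python
import asyncio

from influxdb_client.client.influxdb_client_async import InfluxDBClientAsync


async def main():
    # timeout is given in milliseconds and covers the request from the initial
    # handshake until the response data has been handled; raise it when
    # streaming large result sets to avoid TimeoutError/CancelledError.
    async with InfluxDBClientAsync(url="http://localhost:8086", token="my-token",
                                   org="my-org", timeout=60_000) as client:
        query_api = client.query_api()
        records = await query_api.query_stream('from(bucket:"my-bucket") |> range(start: -1h)')
        async for record in records:
            print(record)


if __name__ == "__main__":
    asyncio.run(main())
```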
