documentation".
diff --git a/docs/source/examples.rst b/docs/source/examples.rst
index fdda62a9..841ad8b1 100644
--- a/docs/source/examples.rst
+++ b/docs/source/examples.rst
@@ -31,3 +31,9 @@ Tutorials - UDP
.. literalinclude:: ../../examples/tutorial_udp.py
:language: python
+
+Tutorials - Authorization by Token
+==================================
+
+.. literalinclude:: ../../examples/tutorial_authorization.py
+ :language: python
diff --git a/examples/tutorial_authorization.py b/examples/tutorial_authorization.py
new file mode 100644
index 00000000..9d9a800f
--- /dev/null
+++ b/examples/tutorial_authorization.py
@@ -0,0 +1,32 @@
+# -*- coding: utf-8 -*-
+"""Tutorial how to authorize InfluxDB client by custom Authorization token."""
+
+import argparse
+from influxdb import InfluxDBClient
+
+
+def main(token='my-token'):
+ """Instantiate a connection to the InfluxDB."""
+ client = InfluxDBClient(username=None, password=None,
+ headers={"Authorization": token})
+
+ print("Use authorization token: " + token)
+
+ version = client.ping()
+ print("Successfully connected to InfluxDB: " + version)
+ pass
+
+
+def parse_args():
+ """Parse the args from main."""
+ parser = argparse.ArgumentParser(
+ description='example code to play with InfluxDB')
+ parser.add_argument('--token', type=str, required=False,
+ default='my-token',
+ help='Authorization token for the proxy that is ahead of the InfluxDB.')
+ return parser.parse_args()
+
+
+if __name__ == '__main__':
+ args = parse_args()
+ main(token=args.token)
diff --git a/influxdb/__init__.py b/influxdb/__init__.py
index 56f2f619..e66f80ea 100644
--- a/influxdb/__init__.py
+++ b/influxdb/__init__.py
@@ -18,4 +18,4 @@
]
-__version__ = '5.3.0'
+__version__ = '5.3.2'
diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py
index ec58cebb..907db2cb 100644
--- a/influxdb/_dataframe_client.py
+++ b/influxdb/_dataframe_client.py
@@ -152,7 +152,8 @@ def query(self,
chunked=False,
chunk_size=0,
method="GET",
- dropna=True):
+ dropna=True,
+ data_frame_index=None):
"""
Query data into a DataFrame.
@@ -181,6 +182,8 @@ def query(self,
containing all results within that chunk
:param chunk_size: Size of each chunk to tell InfluxDB to use.
:param dropna: drop columns where all values are missing
+ :param data_frame_index: the list of columns that
+ are used as DataFrame index
:returns: the queried data
:rtype: :class:`~.ResultSet`
"""
@@ -196,13 +199,14 @@ def query(self,
results = super(DataFrameClient, self).query(query, **query_args)
if query.strip().upper().startswith("SELECT"):
if len(results) > 0:
- return self._to_dataframe(results, dropna)
+ return self._to_dataframe(results, dropna,
+ data_frame_index=data_frame_index)
else:
return {}
else:
return results
- def _to_dataframe(self, rs, dropna=True):
+ def _to_dataframe(self, rs, dropna=True, data_frame_index=None):
result = defaultdict(list)
if isinstance(rs, list):
return map(self._to_dataframe, rs,
@@ -216,10 +220,15 @@ def _to_dataframe(self, rs, dropna=True):
key = (name, tuple(sorted(tags.items())))
df = pd.DataFrame(data)
df.time = pd.to_datetime(df.time)
- df.set_index('time', inplace=True)
- if df.index.tzinfo is None:
- df.index = df.index.tz_localize('UTC')
- df.index.name = None
+
+ if data_frame_index:
+ df.set_index(data_frame_index, inplace=True)
+ else:
+ df.set_index('time', inplace=True)
+ if df.index.tzinfo is None:
+ df.index = df.index.tz_localize('UTC')
+ df.index.name = None
+
result[key].append(df)
for key, data in result.items():
df = pd.concat(data).sort_index()
@@ -363,10 +372,10 @@ def _convert_dataframe_to_lines(self,
# Make array of timestamp ints
if isinstance(dataframe.index, pd.PeriodIndex):
- time = ((dataframe.index.to_timestamp().values.astype(np.int64) /
+ time = ((dataframe.index.to_timestamp().values.astype(np.int64) //
precision_factor).astype(np.int64).astype(str))
else:
- time = ((pd.to_datetime(dataframe.index).values.astype(np.int64) /
+ time = ((pd.to_datetime(dataframe.index).values.astype(np.int64) //
precision_factor).astype(np.int64).astype(str))
# If tag columns exist, make an array of formatted tag keys and values
diff --git a/influxdb/client.py b/influxdb/client.py
index 404e14be..c535a3f1 100644
--- a/influxdb/client.py
+++ b/influxdb/client.py
@@ -20,6 +20,7 @@
import msgpack
import requests
import requests.exceptions
+from requests.adapters import HTTPAdapter
from six.moves.urllib.parse import urlparse
from influxdb.line_protocol import make_lines, quote_ident, quote_literal
@@ -87,6 +88,11 @@ class InfluxDBClient(object):
:param headers: headers to add to Requests, will add 'Content-Type'
and 'Accept' unless these are already present, defaults to {}
:type headers: dict
+ :param socket_options: use custom tcp socket options,
+ If not specified, then defaults are loaded from
+ ``HTTPConnection.default_socket_options``
+ :type socket_options: list
+
:raises ValueError: if cert is provided but ssl is disabled (set to False)
"""
@@ -109,6 +115,7 @@ def __init__(self,
gzip=False,
session=None,
headers=None,
+ socket_options=None,
):
"""Construct a new InfluxDBClient object."""
self.__host = host
@@ -128,9 +135,10 @@ def __init__(self,
session = requests.Session()
self._session = session
- adapter = requests.adapters.HTTPAdapter(
+ adapter = _SocketOptionsAdapter(
pool_connections=int(pool_size),
- pool_maxsize=int(pool_size)
+ pool_maxsize=int(pool_size),
+ socket_options=socket_options
)
if use_udp:
@@ -179,7 +187,7 @@ def __init__(self,
def __enter__(self):
"""Enter function as used by context manager."""
- pass
+ return self
def __exit__(self, _exc_type, _exc_value, _traceback):
"""Exit function as used by context manager."""
@@ -328,10 +336,14 @@ def request(self, url, method='GET', params=None, data=None, stream=False,
_try = 0
while retry:
try:
+ if "Authorization" in headers:
+ auth = (None, None)
+ else:
+ auth = (self._username, self._password)
response = self._session.request(
method=method,
url=url,
- auth=(self._username, self._password),
+ auth=auth if None not in auth else None,
params=params,
data=data,
stream=stream,
@@ -383,7 +395,7 @@ def write(self, data, params=None, expected_response_code=204,
:param data: the data to be written
:type data: (if protocol is 'json') dict
(if protocol is 'line') sequence of line protocol strings
- or single string
+ or single string
:param params: additional parameters for the request, defaults to None
:type params: dict
:param expected_response_code: the expected response code of the write
@@ -559,8 +571,9 @@ def write_points(self,
:param points: the list of points to be written in the database
:type points: list of dictionaries, each dictionary represents a point
:type points: (if protocol is 'json') list of dicts, where each dict
- represents a point.
- (if protocol is 'line') sequence of line protocol strings.
+ represents a point.
+ (if protocol is 'line') sequence of line protocol strings.
+
:param time_precision: Either 's', 'm', 'ms' or 'u', defaults to None
:type time_precision: str
:param database: the database to write the points to. Defaults to
@@ -625,7 +638,7 @@ def _batches(iterable, size):
# http://code.activestate.com/recipes/303279-getting-items-in-batches/
iterator = iter(iterable)
while True:
- try: # Try get the first element in the iterator...
+ try:  # Try to get the first element in the iterator...
head = (next(iterator),)
except StopIteration:
return # ...so that we can stop if there isn't one
@@ -860,7 +873,7 @@ def alter_retention_policy(self, name, database=None,
query_string = (
"ALTER RETENTION POLICY {0} ON {1}"
).format(quote_ident(name),
- quote_ident(database or self._database), shard_duration)
+ quote_ident(database or self._database))
if duration:
query_string += " DURATION {0}".format(duration)
if shard_duration:
@@ -958,7 +971,7 @@ def drop_user(self, username):
:param username: the username to drop
:type username: str
"""
- text = "DROP USER {0}".format(quote_ident(username), method="POST")
+ text = "DROP USER {0}".format(quote_ident(username))
self.query(text, method="POST")
def set_user_password(self, username, password):
@@ -1248,3 +1261,16 @@ def _msgpack_parse_hook(code, data):
timestamp += datetime.timedelta(microseconds=(epoch_ns / 1000))
return timestamp.isoformat() + 'Z'
return msgpack.ExtType(code, data)
+
+
+class _SocketOptionsAdapter(HTTPAdapter):
+ """_SocketOptionsAdapter injects socket_options into HTTP Adapter."""
+
+ def __init__(self, *args, **kwargs):
+ self.socket_options = kwargs.pop("socket_options", None)
+ super(_SocketOptionsAdapter, self).__init__(*args, **kwargs)
+
+ def init_poolmanager(self, *args, **kwargs):
+ if self.socket_options is not None:
+ kwargs["socket_options"] = self.socket_options
+ super(_SocketOptionsAdapter, self).init_poolmanager(*args, **kwargs)
diff --git a/influxdb/helper.py b/influxdb/helper.py
index 74209354..138cf6e8 100644
--- a/influxdb/helper.py
+++ b/influxdb/helper.py
@@ -82,7 +82,7 @@ def __new__(cls, *args, **kwargs):
allowed_time_precisions = ['h', 'm', 's', 'ms', 'u', 'ns', None]
if cls._time_precision not in allowed_time_precisions:
raise AttributeError(
- 'In {0}, time_precision is set, but invalid use any of {}.'
+ 'In {}, time_precision is set, but invalid use any of {}.'
.format(cls.__name__, ','.join(allowed_time_precisions)))
cls._retention_policy = getattr(_meta, 'retention_policy', None)
diff --git a/influxdb/influxdb08/client.py b/influxdb/influxdb08/client.py
index 965a91db..40c58145 100644
--- a/influxdb/influxdb08/client.py
+++ b/influxdb/influxdb08/client.py
@@ -292,10 +292,10 @@ def write_points(self, data, time_precision='s', *args, **kwargs):
:type batch_size: int
"""
- def list_chunks(l, n):
+ def list_chunks(data_list, n):
"""Yield successive n-sized chunks from l."""
- for i in xrange(0, len(l), n):
- yield l[i:i + n]
+ for i in xrange(0, len(data_list), n):
+ yield data_list[i:i + n]
batch_size = kwargs.get('batch_size')
if batch_size and batch_size > 0:
diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py
index fd3c06bb..115fbc48 100644
--- a/influxdb/tests/client_test.py
+++ b/influxdb/tests/client_test.py
@@ -33,6 +33,7 @@
import requests_mock
from nose.tools import raises
+from urllib3.connection import HTTPConnection
from influxdb import InfluxDBClient
from influxdb.resultset import ResultSet
@@ -1398,7 +1399,7 @@ def test_invalid_port_fails(self):
InfluxDBClient('host', '80/redir', 'username', 'password')
def test_chunked_response(self):
- """Test chunked reponse for TestInfluxDBClient object."""
+ """Test chunked response for TestInfluxDBClient object."""
example_response = \
u'{"results":[{"statement_id":0,"series":[{"columns":["key"],' \
'"values":[["cpu"],["memory"],["iops"],["network"]],"partial":' \
@@ -1427,6 +1428,111 @@ def test_chunked_response(self):
'values': [['qps'], ['uptime'], ['df'], ['mount']]
}]}).__repr__())
+ def test_auth_default(self):
+ """Test auth with default settings."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/ping",
+ status_code=204,
+ headers={'X-Influxdb-Version': '1.2.3'}
+ )
+
+ cli = InfluxDBClient()
+ cli.ping()
+
+ self.assertEqual(m.last_request.headers["Authorization"],
+ "Basic cm9vdDpyb290")
+
+ def test_auth_username_password(self):
+ """Test auth with custom username and password."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/ping",
+ status_code=204,
+ headers={'X-Influxdb-Version': '1.2.3'}
+ )
+
+ cli = InfluxDBClient(username='my-username',
+ password='my-password')
+ cli.ping()
+
+ self.assertEqual(m.last_request.headers["Authorization"],
+ "Basic bXktdXNlcm5hbWU6bXktcGFzc3dvcmQ=")
+
+ def test_auth_username_password_none(self):
+ """Test auth with not defined username or password."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/ping",
+ status_code=204,
+ headers={'X-Influxdb-Version': '1.2.3'}
+ )
+
+ cli = InfluxDBClient(username=None, password=None)
+ cli.ping()
+ self.assertFalse('Authorization' in m.last_request.headers)
+
+ cli = InfluxDBClient(username=None)
+ cli.ping()
+ self.assertFalse('Authorization' in m.last_request.headers)
+
+ cli = InfluxDBClient(password=None)
+ cli.ping()
+ self.assertFalse('Authorization' in m.last_request.headers)
+
+ def test_auth_token(self):
+ """Test auth with custom authorization header."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/ping",
+ status_code=204,
+ headers={'X-Influxdb-Version': '1.2.3'}
+ )
+
+ cli = InfluxDBClient(username=None, password=None,
+ headers={"Authorization": "my-token"})
+ cli.ping()
+ self.assertEqual(m.last_request.headers["Authorization"],
+ "my-token")
+
+ def test_custom_socket_options(self):
+ """Test custom socket options."""
+ test_socket_options = HTTPConnection.default_socket_options + \
+ [(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
+ (socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 60),
+ (socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 15)]
+
+ cli = InfluxDBClient(username=None, password=None,
+ socket_options=test_socket_options)
+
+ self.assertEqual(cli._session.adapters.get("http://").socket_options,
+ test_socket_options)
+ self.assertEqual(cli._session.adapters.get("http://").poolmanager.
+ connection_pool_kw.get("socket_options"),
+ test_socket_options)
+
+ connection_pool = cli._session.adapters.get("http://").poolmanager \
+ .connection_from_url(
+ url="http://localhost:8086")
+ new_connection = connection_pool._new_conn()
+ self.assertEqual(new_connection.socket_options, test_socket_options)
+
+ def test_none_socket_options(self):
+ """Test default socket options."""
+ cli = InfluxDBClient(username=None, password=None)
+ self.assertEqual(cli._session.adapters.get("http://").socket_options,
+ None)
+ connection_pool = cli._session.adapters.get("http://").poolmanager \
+ .connection_from_url(
+ url="http://localhost:8086")
+ new_connection = connection_pool._new_conn()
+ self.assertEqual(new_connection.socket_options,
+ HTTPConnection.default_socket_options)
+
class FakeClient(InfluxDBClient):
"""Set up a fake client instance of InfluxDBClient."""
diff --git a/influxdb/tests/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py
index a80498f3..87b8e0d8 100644
--- a/influxdb/tests/dataframe_client_test.py
+++ b/influxdb/tests/dataframe_client_test.py
@@ -877,7 +877,7 @@ def test_query_into_dataframe(self):
{"measurement": "network",
"tags": {"direction": ""},
"columns": ["time", "value"],
- "values":[["2009-11-10T23:00:00Z", 23422]]
+ "values": [["2009-11-10T23:00:00Z", 23422]]
},
{"measurement": "network",
"tags": {"direction": "in"},
@@ -1240,3 +1240,109 @@ def test_write_points_from_dataframe_with_tags_and_nan_json(self):
cli.write_points(dataframe, 'foo', tags=None, protocol='json',
tag_columns=['tag_one', 'tag_two'])
self.assertEqual(m.last_request.body, expected)
+
+ def test_query_custom_index(self):
+ """Test query with custom indexes."""
+ data = {
+ "results": [
+ {
+ "series": [
+ {
+ "name": "cpu_load_short",
+ "columns": ["time", "value", "host"],
+ "values": [
+ [1, 0.55, "local"],
+ [2, 23422, "local"],
+ [3, 0.64, "local"]
+ ]
+ }
+ ]
+ }
+ ]
+ }
+
+ cli = DataFrameClient('host', 8086, 'username', 'password', 'db')
+ iql = "SELECT value FROM cpu_load_short WHERE region=$region;" \
+ "SELECT count(value) FROM cpu_load_short WHERE region=$region"
+ bind_params = {'region': 'us-west'}
+ with _mocked_session(cli, 'GET', 200, data):
+ result = cli.query(iql, bind_params=bind_params,
+ data_frame_index=["time", "host"])
+
+ _data_frame = result['cpu_load_short']
+ print(_data_frame)
+
+ self.assertListEqual(["time", "host"],
+ list(_data_frame.index.names))
+
+ def test_dataframe_nanosecond_precision(self):
+ """Test nanosecond precision."""
+ for_df_dict = {
+ "nanFloats": [1.1, float('nan'), 3.3, 4.4],
+ "onlyFloats": [1.1, 2.2, 3.3, 4.4],
+ "strings": ['one_one', 'two_two', 'three_three', 'four_four']
+ }
+ df = pd.DataFrame.from_dict(for_df_dict)
+ df['time'] = ['2019-10-04 06:27:19.850557111+00:00',
+ '2019-10-04 06:27:19.850557184+00:00',
+ '2019-10-04 06:27:42.251396864+00:00',
+ '2019-10-04 06:27:42.251396974+00:00']
+ df['time'] = pd.to_datetime(df['time'], unit='ns')
+ df = df.set_index('time')
+
+ expected = (
+ b'foo nanFloats=1.1,onlyFloats=1.1,strings="one_one" 1570170439850557111\n' # noqa E501 line too long
+ b'foo onlyFloats=2.2,strings="two_two" 1570170439850557184\n' # noqa E501 line too long
+ b'foo nanFloats=3.3,onlyFloats=3.3,strings="three_three" 1570170462251396864\n' # noqa E501 line too long
+ b'foo nanFloats=4.4,onlyFloats=4.4,strings="four_four" 1570170462251396974\n' # noqa E501 line too long
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204
+ )
+
+ cli = DataFrameClient(database='db')
+ cli.write_points(df, 'foo', time_precision='n')
+
+ self.assertEqual(m.last_request.body, expected)
+
+ def test_dataframe_nanosecond_precision_one_microsecond(self):
+ """Test nanosecond precision within one microsecond."""
+ # 1 microsecond = 1000 nanoseconds
+ start = np.datetime64('2019-10-04T06:27:19.850557000')
+ end = np.datetime64('2019-10-04T06:27:19.850558000')
+
+ # generate timestamps with nanosecond precision
+ timestamps = np.arange(
+ start,
+ end + np.timedelta64(1, 'ns'),
+ np.timedelta64(1, 'ns')
+ )
+ # generate values
+ values = np.arange(0.0, len(timestamps))
+
+ df = pd.DataFrame({'value': values}, index=timestamps)
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204
+ )
+
+ cli = DataFrameClient(database='db')
+ cli.write_points(df, 'foo', time_precision='n')
+
+ lines = m.last_request.body.decode('utf-8').split('\n')
+ self.assertEqual(len(lines), 1002)
+
+ for index, line in enumerate(lines):
+ if index == 1001:
+ self.assertEqual(line, '')
+ continue
+ self.assertEqual(
+ line,
+ f"foo value={index}.0 157017043985055{7000 + index:04}"
+ )
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 00000000..1b68d94e
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,3 @@
+[build-system]
+requires = ["setuptools>=42", "wheel"]
+build-backend = "setuptools.build_meta"
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
index 548b17c8..a3df3154 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,5 +1,5 @@
python-dateutil>=2.6.0
-pytz
+pytz>=2016.10
requests>=2.17.0
six>=1.10.0
-msgpack
+msgpack>=0.5.0
diff --git a/setup.py b/setup.py
index d44875f6..8ac7d1a7 100755
--- a/setup.py
+++ b/setup.py
@@ -23,6 +23,11 @@
with open('requirements.txt', 'r') as f:
requires = [x.strip() for x in f if x.strip()]
+# Debugging: Print the requires values
+print("install_requires values:")
+for req in requires:
+ print(f"- {req}")
+
with open('test-requirements.txt', 'r') as f:
test_requires = [x.strip() for x in f if x.strip()]
diff --git a/tox.ini b/tox.ini
index ff30ebac..a1005abb 100644
--- a/tox.ini
+++ b/tox.ini
@@ -12,8 +12,8 @@ deps = -r{toxinidir}/requirements.txt
py35: numpy==1.14.6
py36: pandas==0.23.4
py36: numpy==1.15.4
- py37: pandas==0.24.2
- py37: numpy==1.16.2
+ py37: pandas>=0.24.2
+ py37: numpy>=1.16.2
# Only install pandas with non-pypy interpreters
# Testing all combinations would be too expensive
commands = nosetests -v --with-doctest {posargs}
@@ -31,16 +31,16 @@ commands = pydocstyle --count -ve examples influxdb
[testenv:coverage]
deps = -r{toxinidir}/requirements.txt
-r{toxinidir}/test-requirements.txt
- pandas
+ pandas==0.24.2
coverage
numpy
commands = nosetests -v --with-coverage --cover-html --cover-package=influxdb
[testenv:docs]
deps = -r{toxinidir}/requirements.txt
- pandas==0.24.2
- numpy==1.16.2
- Sphinx==1.8.5
+ pandas>=0.24.2
+ numpy>=1.16.2
+ Sphinx>=1.8.5
sphinx_rtd_theme
commands = sphinx-build -b html docs/source docs/build