`_.
+
:param host: hostname to connect to InfluxDB, defaults to 'localhost'
:type host: str
:param port: port to connect to InfluxDB, defaults to 8086
@@ -50,8 +60,12 @@ class InfluxDBClient(object):
:param timeout: number of seconds Requests will wait for your client to
establish a connection, defaults to None
:type timeout: int
- :param retries: number of retries your client will try before aborting,
- defaults to 3. 0 indicates try until success
+ :param retries: number of attempts your client will make before aborting,
+ defaults to 3
+ 0 - try until success
+ 1 - attempt only once (without retry)
+ 2 - maximum two attempts (including one retry)
+ 3 - maximum three attempts (default option)
:type retries: int
:param use_udp: use UDP to connect to InfluxDB, defaults to False
:type use_udp: bool
@@ -61,6 +75,25 @@ class InfluxDBClient(object):
:type proxies: dict
:param path: path of InfluxDB on the server to connect, defaults to ''
:type path: str
+ :param cert: Path to client certificate information to use for mutual TLS
+ authentication. You can specify a local cert to use
+ as a single file containing the private key and the certificate, or as
+        a tuple of both files' paths, defaults to None
+ :type cert: str
+ :param gzip: use gzip content encoding to compress requests
+ :type gzip: bool
+    :param session: use an existing requests.Session for client requests
+        instead of creating a new one, defaults to None
+ :type session: requests.Session
+ :param headers: headers to add to Requests, will add 'Content-Type'
+ and 'Accept' unless these are already present, defaults to {}
+ :type headers: dict
+    :param socket_options: use custom TCP socket options.
+        If not specified, then defaults are loaded from
+ ``HTTPConnection.default_socket_options``
+ :type socket_options: list
+
+ :raises ValueError: if cert is provided but ssl is disabled (set to False)
"""
def __init__(self,
@@ -78,6 +111,11 @@ def __init__(self,
proxies=None,
pool_size=10,
path='',
+ cert=None,
+ gzip=False,
+ session=None,
+ headers=None,
+ socket_options=None,
):
"""Construct a new InfluxDBClient object."""
self.__host = host
@@ -91,11 +129,16 @@ def __init__(self,
self._verify_ssl = verify_ssl
self.__use_udp = use_udp
- self.__udp_port = udp_port
- self._session = requests.Session()
- adapter = requests.adapters.HTTPAdapter(
+ self.__udp_port = int(udp_port)
+
+ if not session:
+ session = requests.Session()
+
+ self._session = session
+ adapter = _SocketOptionsAdapter(
pool_connections=int(pool_size),
- pool_maxsize=int(pool_size)
+ pool_maxsize=int(pool_size),
+ socket_options=socket_options
)
if use_udp:
@@ -120,16 +163,35 @@ def __init__(self,
else:
self._proxies = proxies
+ if cert:
+ if not ssl:
+ raise ValueError(
+ "Client certificate provided but ssl is disabled."
+ )
+ else:
+ self._session.cert = cert
+
self.__baseurl = "{0}://{1}:{2}{3}".format(
self._scheme,
self._host,
self._port,
self._path)
- self._headers = {
- 'Content-Type': 'application/json',
- 'Accept': 'text/plain'
- }
+ if headers is None:
+ headers = {}
+ headers.setdefault('Content-Type', 'application/json')
+ headers.setdefault('Accept', 'application/x-msgpack')
+ self._headers = headers
+
+ self._gzip = gzip
+
+ def __enter__(self):
+ """Enter function as used by context manager."""
+ return self
+
+ def __exit__(self, _exc_type, _exc_value, _traceback):
+ """Exit function as used by context manager."""
+ self.close()
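
With __enter__/__exit__ in place, the client can now be used as a context
manager, closing the underlying requests.Session when the block exits. A
minimal usage sketch (host, port and database are illustrative):

    from influxdb import InfluxDBClient

    # The session is closed automatically, even if the query raises.
    with InfluxDBClient(host='localhost', port=8086, database='db') as client:
        print(client.query('SHOW DATABASES'))
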
@property
def _baseurl(self):
@@ -215,7 +277,7 @@ def switch_user(self, username, password):
self._username = username
self._password = password
- def request(self, url, method='GET', params=None, data=None,
+ def request(self, url, method='GET', params=None, data=None, stream=False,
expected_response_code=200, headers=None):
"""Make a HTTP request to the InfluxDB API.
@@ -227,6 +289,8 @@ def request(self, url, method='GET', params=None, data=None,
:type params: dict
:param data: the data of the request, defaults to None
:type data: str
+ :param stream: True if a query uses chunked responses
+ :type stream: bool
:param expected_response_code: the expected response code of
the request, defaults to 200
:type expected_response_code: int
@@ -250,17 +314,39 @@ def request(self, url, method='GET', params=None, data=None,
if isinstance(data, (dict, list)):
data = json.dumps(data)
+ if self._gzip:
+ # Receive and send compressed data
+ headers.update({
+ 'Accept-Encoding': 'gzip',
+ 'Content-Encoding': 'gzip',
+ })
+ if data is not None:
+                # For Python 2.7 compatibility, use GzipFile
+ compressed = io.BytesIO()
+ with gzip.GzipFile(
+ compresslevel=9,
+ fileobj=compressed,
+ mode='w'
+ ) as f:
+ f.write(data)
+ data = compressed.getvalue()
+
# Try to send the request more than once by default (see #103)
retry = True
_try = 0
while retry:
try:
+ if "Authorization" in headers:
+ auth = (None, None)
+ else:
+ auth = (self._username, self._password)
response = self._session.request(
method=method,
url=url,
- auth=(self._username, self._password),
+ auth=auth if None not in auth else None,
params=params,
data=data,
+ stream=stream,
headers=headers,
proxies=self._proxies,
verify=self._verify_ssl,
@@ -273,17 +359,34 @@ def request(self, url, method='GET', params=None, data=None,
_try += 1
if self._retries != 0:
retry = _try < self._retries
- if method == "POST":
- time.sleep((2 ** _try) * random.random() / 100.0)
if not retry:
raise
+ if method == "POST":
+ time.sleep((2 ** _try) * random.random() / 100.0)
+
+ type_header = response.headers and response.headers.get("Content-Type")
+ if type_header == "application/x-msgpack" and response.content:
+ response._msgpack = msgpack.unpackb(
+ packed=response.content,
+ ext_hook=_msgpack_parse_hook,
+ raw=False)
+ else:
+ response._msgpack = None
+
+ def reformat_error(response):
+ if response._msgpack:
+ return json.dumps(response._msgpack, separators=(',', ':'))
+ else:
+ return response.content
+
# if there's not an error, there must have been a successful response
if 500 <= response.status_code < 600:
- raise InfluxDBServerError(response.content)
+ raise InfluxDBServerError(reformat_error(response))
elif response.status_code == expected_response_code:
return response
else:
- raise InfluxDBClientError(response.content, response.status_code)
+ err_msg = reformat_error(response)
+ raise InfluxDBClientError(err_msg, response.status_code)
def write(self, data, params=None, expected_response_code=204,
protocol='json'):
@@ -292,7 +395,7 @@ def write(self, data, params=None, expected_response_code=204,
:param data: the data to be written
:type data: (if protocol is 'json') dict
(if protocol is 'line') sequence of line protocol strings
- or single string
+ or single string
:param params: additional parameters for the request, defaults to None
:type params: dict
:param expected_response_code: the expected response code of the write
@@ -303,7 +406,7 @@ def write(self, data, params=None, expected_response_code=204,
:returns: True, if the write operation is successful
:rtype: bool
"""
- headers = self._headers
+ headers = self._headers.copy()
headers['Content-Type'] = 'application/octet-stream'
if params:
@@ -330,17 +433,17 @@ def write(self, data, params=None, expected_response_code=204,
@staticmethod
def _read_chunked_response(response, raise_errors=True):
- result_set = {}
for line in response.iter_lines():
if isinstance(line, bytes):
line = line.decode('utf-8')
data = json.loads(line)
+ result_set = {}
for result in data.get('results', []):
for _key in result:
if isinstance(result[_key], list):
result_set.setdefault(
_key, []).extend(result[_key])
- return ResultSet(result_set, raise_errors=raise_errors)
+ yield ResultSet(result_set, raise_errors=raise_errors)
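
Because _read_chunked_response is now a generator, a chunked query yields one
ResultSet per response chunk instead of accumulating everything into a single
ResultSet. A hedged consumption sketch (the query, chunk size and the
handle_point callable are illustrative):

    # Each iteration produces a ResultSet built from one chunk.
    for result_set in client.query('SELECT * FROM cpu_load_short',
                                   chunked=True, chunk_size=10000):
        for point in result_set.get_points():
            handle_point(point)  # hypothetical per-point handler
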
def query(self,
query,
@@ -431,13 +534,15 @@ def query(self,
method=method,
params=params,
data=None,
+ stream=chunked,
expected_response_code=expected_response_code
)
- if chunked:
- return self._read_chunked_response(response)
-
- data = response.json()
+ data = response._msgpack
+ if not data:
+ if chunked:
+ return self._read_chunked_response(response)
+ data = response.json()
results = [
ResultSet(result, raise_errors=raise_errors)
@@ -466,8 +571,9 @@ def write_points(self,
:param points: the list of points to be written in the database
:type points: list of dictionaries, each dictionary represents a point
:type points: (if protocol is 'json') list of dicts, where each dict
- represents a point.
- (if protocol is 'line') sequence of line protocol strings.
+ represents a point.
+ (if protocol is 'line') sequence of line protocol strings.
+
:param time_precision: Either 's', 'm', 'ms' or 'u', defaults to None
:type time_precision: str
:param database: the database to write the points to. Defaults to
@@ -528,8 +634,17 @@ def ping(self):
@staticmethod
def _batches(iterable, size):
- for i in xrange(0, len(iterable), size):
- yield iterable[i:i + size]
+ # Iterate over an iterable producing iterables of batches. Based on:
+ # http://code.activestate.com/recipes/303279-getting-items-in-batches/
+ iterator = iter(iterable)
+ while True:
+ try: # Try get the first element in the iterator...
+ head = (next(iterator),)
+ except StopIteration:
+ return # ...so that we can stop if there isn't one
+ # Otherwise, lazily slice the rest of the batch
+ rest = islice(iterator, size - 1)
+ yield chain(head, rest)
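
Since _batches now walks an iterator with islice/chain instead of slicing a
list, write_points can consume a generator of points without materializing
it. A sketch, assuming a running server and an existing client:

    # 100000 points are produced lazily and sent as two batched writes.
    points = ({"measurement": "cpu", "fields": {"value": float(i)}}
              for i in range(100000))
    client.write_points(points, batch_size=50000)
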
def _write_points(self,
points,
@@ -600,6 +715,40 @@ def get_list_database(self):
"""
return list(self.query("SHOW DATABASES").get_points())
+ def get_list_series(self, database=None, measurement=None, tags=None):
+ """
+        Query SHOW SERIES and return the distinct series in your database.
+
+        FROM and WHERE clauses are optional.
+
+        :param measurement: Show all series from a measurement
+        :type measurement: str
+        :param tags: Show all series that match given tags
+        :type tags: dict
+        :param database: the database from which the series should be
+            shown, defaults to client's current database
+        :type database: str
+ """
+ database = database or self._database
+ query_str = 'SHOW SERIES'
+
+ if measurement:
+ query_str += ' FROM "{0}"'.format(measurement)
+
+ if tags:
+ query_str += ' WHERE ' + ' and '.join(["{0}='{1}'".format(k, v)
+ for k, v in tags.items()])
+
+ return list(
+ itertools.chain.from_iterable(
+ [
+ x.values()
+ for x in (self.query(query_str, database=database)
+ .get_points())
+ ]
+ )
+ )
+
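
The new get_list_series composes a SHOW SERIES statement with optional FROM
and WHERE clauses and flattens the returned keys into a plain list. A usage
sketch (measurement and tag values are illustrative):

    client.get_list_series()                              # all series
    client.get_list_series(measurement='cpu_load_short')  # SHOW SERIES FROM ...
    client.get_list_series(tags={'region': 'us-west'})    # SHOW SERIES WHERE ...
    # e.g. ['cpu_load_short,host=server01,region=us-west']
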
def create_database(self, dbname):
"""Create a new database in InfluxDB.
@@ -724,7 +873,7 @@ def alter_retention_policy(self, name, database=None,
query_string = (
"ALTER RETENTION POLICY {0} ON {1}"
).format(quote_ident(name),
- quote_ident(database or self._database), shard_duration)
+ quote_ident(database or self._database))
if duration:
query_string += " DURATION {0}".format(duration)
if shard_duration:
@@ -822,7 +971,7 @@ def drop_user(self, username):
:param username: the username to drop
:type username: str
"""
- text = "DROP USER {0}".format(quote_ident(username), method="POST")
+ text = "DROP USER {0}".format(quote_ident(username))
self.query(text, method="POST")
def set_user_password(self, username, password):
@@ -1103,3 +1252,25 @@ def _parse_netloc(netloc):
'password': info.password or None,
'host': info.hostname or 'localhost',
'port': info.port or 8086}
+
+
+def _msgpack_parse_hook(code, data):
+ if code == 5:
+ (epoch_s, epoch_ns) = struct.unpack(">QI", data)
+ timestamp = datetime.datetime.utcfromtimestamp(epoch_s)
+ timestamp += datetime.timedelta(microseconds=(epoch_ns / 1000))
+ return timestamp.isoformat() + 'Z'
+ return msgpack.ExtType(code, data)
+
+
+class _SocketOptionsAdapter(HTTPAdapter):
+ """_SocketOptionsAdapter injects socket_options into HTTP Adapter."""
+
+ def __init__(self, *args, **kwargs):
+ self.socket_options = kwargs.pop("socket_options", None)
+ super(_SocketOptionsAdapter, self).__init__(*args, **kwargs)
+
+ def init_poolmanager(self, *args, **kwargs):
+ if self.socket_options is not None:
+ kwargs["socket_options"] = self.socket_options
+ super(_SocketOptionsAdapter, self).init_poolmanager(*args, **kwargs)
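
_SocketOptionsAdapter forwards socket_options into urllib3's pool manager, so
callers can, for example, enable TCP keepalive probes on the client's
long-lived connections. A sketch assuming a Linux host (the TCP_KEEP*
constants are platform dependent):

    import socket
    from urllib3.connection import HTTPConnection
    from influxdb import InfluxDBClient

    keepalive_options = HTTPConnection.default_socket_options + [
        (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),     # enable keepalive
        (socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 60),  # seconds between probes
        (socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 15),    # probes before dropping
    ]
    client = InfluxDBClient(socket_options=keepalive_options)
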
diff --git a/influxdb/dataframe_client.py b/influxdb/dataframe_client.py
index 97258644..babfe0dd 100644
--- a/influxdb/dataframe_client.py
+++ b/influxdb/dataframe_client.py
@@ -25,4 +25,4 @@ def __init__(self, *a, **kw):
raise ImportError("DataFrameClient requires Pandas "
"which couldn't be imported: %s" % self.err)
else:
- from ._dataframe_client import DataFrameClient
+ from ._dataframe_client import DataFrameClient # type: ignore
diff --git a/influxdb/helper.py b/influxdb/helper.py
index e622526d..138cf6e8 100644
--- a/influxdb/helper.py
+++ b/influxdb/helper.py
@@ -41,6 +41,12 @@ class Meta:
# Only applicable if autocommit is True.
autocommit = True
# If True and no bulk_size, then will set bulk_size to 1.
+ retention_policy = 'your_retention_policy'
+ # Specify the retention policy for the data points
+ time_precision = "h"|"m"|s"|"ms"|"u"|"ns"
+ # Default is ns (nanoseconds)
+ # Setting time precision while writing point
+ # You should also make sure time is set in the given precision
"""
@@ -71,6 +77,15 @@ def __new__(cls, *args, **kwargs):
cls.__name__))
cls._autocommit = getattr(_meta, 'autocommit', False)
+ cls._time_precision = getattr(_meta, 'time_precision', None)
+
+ allowed_time_precisions = ['h', 'm', 's', 'ms', 'u', 'ns', None]
+ if cls._time_precision not in allowed_time_precisions:
+ raise AttributeError(
+                'In {}, time_precision is invalid; use one of {}.'
+                .format(cls.__name__,
+                        ', '.join(str(p) for p in allowed_time_precisions)))
+
+ cls._retention_policy = getattr(_meta, 'retention_policy', None)
cls._client = getattr(_meta, 'client', None)
if cls._autocommit and not cls._client:
@@ -116,11 +131,11 @@ def __init__(self, **kw):
keys = set(kw.keys())
# all tags should be passed, and keys - tags should be a subset of keys
- if not(tags <= keys):
+ if not (tags <= keys):
raise NameError(
'Expected arguments to contain all tags {0}, instead got {1}.'
.format(cls._tags, kw.keys()))
- if not(keys - tags <= fields):
+ if not (keys - tags <= fields):
raise NameError('Got arguments not in tags or fields: {0}'
.format(keys - tags - fields))
@@ -143,7 +158,12 @@ def commit(cls, client=None):
"""
if not client:
client = cls._client
- rtn = client.write_points(cls._json_body_())
+
+ rtn = client.write_points(
+ cls._json_body_(),
+ time_precision=cls._time_precision,
+ retention_policy=cls._retention_policy)
+ # will be None if not set and will default to ns
cls._reset_()
return rtn
@@ -154,6 +174,8 @@ def _json_body_(cls):
:return: JSON body of these datapoints.
"""
json = []
+ if not cls.__initialized__:
+ cls._reset_()
for series_name, data in six.iteritems(cls._datapoints):
for point in data:
json_point = {
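
With the helper changes above, a SeriesHelper subclass can pin a retention
policy and a time precision in its Meta; both are forwarded to write_points
on commit. A hedged sketch (the client settings, series name and policy name
are illustrative):

    from influxdb import InfluxDBClient, SeriesHelper

    class CPUSeriesHelper(SeriesHelper):
        class Meta:
            client = InfluxDBClient(database='db')
            series_name = 'cpu.{server_name}'
            fields = ['value', 'time']
            tags = ['server_name']
            bulk_size = 2
            autocommit = True
            retention_policy = 'one_week'  # must exist on the server
            time_precision = 's'           # one of h, m, s, ms, u, ns

    CPUSeriesHelper(server_name='us.east-1', value=0.64)
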
diff --git a/influxdb/influxdb08/client.py b/influxdb/influxdb08/client.py
index 965a91db..40c58145 100644
--- a/influxdb/influxdb08/client.py
+++ b/influxdb/influxdb08/client.py
@@ -292,10 +292,10 @@ def write_points(self, data, time_precision='s', *args, **kwargs):
:type batch_size: int
"""
-        def list_chunks(l, n):
-            """Yield successive n-sized chunks from l."""
-            for i in xrange(0, len(l), n):
-                yield l[i:i + n]
+        def list_chunks(data_list, n):
+            """Yield successive n-sized chunks from data_list."""
+            for i in xrange(0, len(data_list), n):
+                yield data_list[i:i + n]
batch_size = kwargs.get('batch_size')
if batch_size and batch_size > 0:
diff --git a/influxdb/influxdb08/helper.py b/influxdb/influxdb08/helper.py
index f3dec33c..5f2d4614 100644
--- a/influxdb/influxdb08/helper.py
+++ b/influxdb/influxdb08/helper.py
@@ -139,6 +139,8 @@ def _json_body_(cls):
:return: JSON body of the datapoints.
"""
json = []
+ if not cls.__initialized__:
+ cls._reset_()
for series_name, data in six.iteritems(cls._datapoints):
json.append({'name': series_name,
'columns': cls._fields,
diff --git a/influxdb/line_protocol.py b/influxdb/line_protocol.py
index e8816fc0..25dd2ad7 100644
--- a/influxdb/line_protocol.py
+++ b/influxdb/line_protocol.py
@@ -11,11 +11,19 @@
from pytz import UTC
from dateutil.parser import parse
-from six import iteritems, binary_type, text_type, integer_types, PY2
+from six import binary_type, text_type, integer_types, PY2
EPOCH = UTC.localize(datetime.utcfromtimestamp(0))
+def _to_nanos(timestamp):
+ delta = timestamp - EPOCH
+ nanos_in_days = delta.days * 86400 * 10 ** 9
+ nanos_in_seconds = delta.seconds * 10 ** 9
+ nanos_in_micros = delta.microseconds * 10 ** 3
+ return nanos_in_days + nanos_in_seconds + nanos_in_micros
+
+
def _convert_timestamp(timestamp, precision=None):
if isinstance(timestamp, Integral):
return timestamp # assume precision is correct if timestamp is int
@@ -27,19 +35,24 @@ def _convert_timestamp(timestamp, precision=None):
if not timestamp.tzinfo:
timestamp = UTC.localize(timestamp)
- ns = (timestamp - EPOCH).total_seconds() * 1e9
+ ns = _to_nanos(timestamp)
if precision is None or precision == 'n':
return ns
- elif precision == 'u':
- return ns / 1e3
- elif precision == 'ms':
- return ns / 1e6
- elif precision == 's':
- return ns / 1e9
- elif precision == 'm':
- return ns / 1e9 / 60
- elif precision == 'h':
- return ns / 1e9 / 3600
+
+ if precision == 'u':
+ return ns / 10**3
+
+ if precision == 'ms':
+ return ns / 10**6
+
+ if precision == 's':
+ return ns / 10**9
+
+ if precision == 'm':
+ return ns / 10**9 / 60
+
+ if precision == 'h':
+ return ns / 10**9 / 3600
raise ValueError(timestamp)
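
The integer arithmetic in _to_nanos exists because the old expression
(timestamp - EPOCH).total_seconds() * 1e9 runs through a double, whose 53-bit
mantissa cannot represent current epoch nanoseconds (around 1.5e18) exactly,
so the low digits get rounded away. A small demonstration:

    from datetime import datetime
    from pytz import UTC

    EPOCH = UTC.localize(datetime.utcfromtimestamp(0))
    delta = UTC.localize(datetime(2019, 10, 4, 6, 27, 19, 850557)) - EPOCH

    float_ns = int(delta.total_seconds() * 1e9)  # low digits rounded away
    exact_ns = (delta.days * 86400 * 10 ** 9
                + delta.seconds * 10 ** 9
                + delta.microseconds * 10 ** 3)  # exactly ...850557000
    print(float_ns != exact_ns)                  # True
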
@@ -54,6 +67,8 @@ def _escape_tag(tag):
",", "\\,"
).replace(
"=", "\\="
+ ).replace(
+ "\n", "\\n"
)
@@ -89,14 +104,21 @@ def _is_float(value):
def _escape_value(value):
- value = _get_unicode(value)
+ if value is None:
+ return ''
- if isinstance(value, text_type) and value != '':
+ value = _get_unicode(value)
+ if isinstance(value, text_type):
return quote_ident(value)
- elif isinstance(value, integer_types) and not isinstance(value, bool):
+
+ if isinstance(value, integer_types) and not isinstance(value, bool):
return str(value) + 'i'
- elif _is_float(value):
- return repr(value)
+
+ if isinstance(value, bool):
+ return str(value)
+
+ if _is_float(value):
+ return repr(float(value))
return str(value)
@@ -105,15 +127,60 @@ def _get_unicode(data, force=False):
"""Try to return a text aka unicode object from the given data."""
if isinstance(data, binary_type):
return data.decode('utf-8')
- elif data is None:
+
+ if data is None:
return ''
- elif force:
+
+ if force:
if PY2:
return unicode(data)
- else:
- return str(data)
- else:
- return data
+ return str(data)
+
+ return data
+
+
+def make_line(measurement, tags=None, fields=None, time=None, precision=None):
+ """Extract the actual point from a given measurement line."""
+ tags = tags or {}
+ fields = fields or {}
+
+ line = _escape_tag(_get_unicode(measurement))
+
+ # tags should be sorted client-side to take load off server
+ tag_list = []
+ for tag_key in sorted(tags.keys()):
+ key = _escape_tag(tag_key)
+ value = _escape_tag(tags[tag_key])
+
+ if key != '' and value != '':
+ tag_list.append(
+ "{key}={value}".format(key=key, value=value)
+ )
+
+ if tag_list:
+ line += ',' + ','.join(tag_list)
+
+ field_list = []
+ for field_key in sorted(fields.keys()):
+ key = _escape_tag(field_key)
+ value = _escape_value(fields[field_key])
+
+ if key != '' and value != '':
+ field_list.append("{key}={value}".format(
+ key=key,
+ value=value
+ ))
+
+ if field_list:
+ line += ' ' + ','.join(field_list)
+
+ if time is not None:
+ timestamp = _get_unicode(str(int(
+ _convert_timestamp(time, precision)
+ )))
+ line += ' ' + timestamp
+
+ return line
def make_lines(data, precision=None):
@@ -125,48 +192,19 @@ def make_lines(data, precision=None):
lines = []
static_tags = data.get('tags')
for point in data['points']:
- elements = []
-
- # add measurement name
- measurement = _escape_tag(_get_unicode(
- point.get('measurement', data.get('measurement'))))
- key_values = [measurement]
-
- # add tags
if static_tags:
tags = dict(static_tags) # make a copy, since we'll modify
tags.update(point.get('tags') or {})
else:
tags = point.get('tags') or {}
- # tags should be sorted client-side to take load off server
- for tag_key, tag_value in sorted(iteritems(tags)):
- key = _escape_tag(tag_key)
- value = _escape_tag_value(tag_value)
-
- if key != '' and value != '':
- key_values.append(key + "=" + value)
-
- elements.append(','.join(key_values))
-
- # add fields
- field_values = []
- for field_key, field_value in sorted(iteritems(point['fields'])):
- key = _escape_tag(field_key)
- value = _escape_value(field_value)
-
- if key != '' and value != '':
- field_values.append(key + "=" + value)
-
- elements.append(','.join(field_values))
-
- # add timestamp
- if 'time' in point:
- timestamp = _get_unicode(str(int(
- _convert_timestamp(point['time'], precision))))
- elements.append(timestamp)
-
- line = ' '.join(elements)
+ line = make_line(
+ point.get('measurement', data.get('measurement')),
+ tags=tags,
+ fields=point.get('fields'),
+ precision=precision,
+ time=point.get('time')
+ )
lines.append(line)
return '\n'.join(lines) + '\n'
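
make_lines now delegates per-point serialization to the new make_line, which
can also be called directly for a single point. A sketch:

    from influxdb.line_protocol import make_line

    line = make_line(
        'cpu_load_short',
        tags={'host': 'server01', 'region': 'us-west'},
        fields={'value': 0.64},
        time=1257894000000000000,
    )
    # cpu_load_short,host=server01,region=us-west value=0.64 1257894000000000000
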
diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py
index e4cc7e11..115fbc48 100644
--- a/influxdb/tests/client_test.py
+++ b/influxdb/tests/client_test.py
@@ -24,6 +24,8 @@
import unittest
import warnings
+import io
+import gzip
import json
import mock
import requests
@@ -31,6 +33,7 @@
import requests_mock
from nose.tools import raises
+from urllib3.connection import HTTPConnection
from influxdb import InfluxDBClient
from influxdb.resultset import ResultSet
@@ -149,6 +152,14 @@ def test_dsn(self):
**{'ssl': False})
self.assertEqual('http://my.host.fr:1886', cli._baseurl)
+ def test_cert(self):
+ """Test mutual TLS authentication for TestInfluxDBClient object."""
+ cli = InfluxDBClient(ssl=True, cert='/etc/pki/tls/private/dummy.crt')
+ self.assertEqual(cli._session.cert, '/etc/pki/tls/private/dummy.crt')
+
+ with self.assertRaises(ValueError):
+ cli = InfluxDBClient(cert='/etc/pki/tls/private/dummy.crt')
+
def test_switch_database(self):
"""Test switch database in TestInfluxDBClient object."""
cli = InfluxDBClient('host', 8086, 'username', 'password', 'database')
@@ -206,6 +217,71 @@ def test_write_points(self):
m.last_request.body.decode('utf-8'),
)
+ def test_write_gzip(self):
+ """Test write in TestInfluxDBClient object."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204
+ )
+
+ cli = InfluxDBClient(database='db', gzip=True)
+ cli.write(
+ {"database": "mydb",
+ "retentionPolicy": "mypolicy",
+ "points": [{"measurement": "cpu_load_short",
+ "tags": {"host": "server01",
+ "region": "us-west"},
+ "time": "2009-11-10T23:00:00Z",
+ "fields": {"value": 0.64}}]}
+ )
+
+ compressed = io.BytesIO()
+ with gzip.GzipFile(
+ compresslevel=9,
+ fileobj=compressed,
+ mode='w'
+ ) as f:
+ f.write(
+ b"cpu_load_short,host=server01,region=us-west "
+ b"value=0.64 1257894000000000000\n"
+ )
+
+ self.assertEqual(
+ m.last_request.body,
+ compressed.getvalue(),
+ )
+
+ def test_write_points_gzip(self):
+ """Test write points for TestInfluxDBClient object."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204
+ )
+
+ cli = InfluxDBClient(database='db', gzip=True)
+ cli.write_points(
+ self.dummy_points,
+ )
+
+ compressed = io.BytesIO()
+ with gzip.GzipFile(
+ compresslevel=9,
+ fileobj=compressed,
+ mode='w'
+ ) as f:
+ f.write(
+ b'cpu_load_short,host=server01,region=us-west '
+ b'value=0.64 1257894000123456000\n'
+ )
+ self.assertEqual(
+ m.last_request.body,
+ compressed.getvalue(),
+ )
+
def test_write_points_toplevel_attributes(self):
"""Test write points attrs for TestInfluxDBClient object."""
with requests_mock.Mocker() as m:
@@ -257,6 +333,36 @@ def test_write_points_batch(self):
self.assertEqual(expected_last_body,
m.last_request.body.decode('utf-8'))
+ def test_write_points_batch_generator(self):
+ """Test write points batch from a generator for TestInfluxDBClient."""
+ dummy_points = [
+ {"measurement": "cpu_usage", "tags": {"unit": "percent"},
+ "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.34}},
+ {"measurement": "network", "tags": {"direction": "in"},
+ "time": "2009-11-10T23:00:00Z", "fields": {"value": 123.00}},
+ {"measurement": "network", "tags": {"direction": "out"},
+ "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.00}}
+ ]
+ dummy_points_generator = (point for point in dummy_points)
+ expected_last_body = (
+ "network,direction=out,host=server01,region=us-west "
+ "value=12.0 1257894000000000000\n"
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+ cli = InfluxDBClient(database='db')
+ cli.write_points(points=dummy_points_generator,
+ database='db',
+ tags={"host": "server01",
+ "region": "us-west"},
+ batch_size=2)
+ self.assertEqual(m.call_count, 2)
+ self.assertEqual(expected_last_body,
+ m.last_request.body.decode('utf-8'))
+
def test_write_points_udp(self):
"""Test write points UDP for TestInfluxDBClient object."""
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
@@ -465,6 +571,29 @@ def test_query(self):
[{'value': 0.64, 'time': '2009-11-10T23:00:00Z'}]
)
+ def test_query_msgpack(self):
+ """Test query method with a messagepack response."""
+ example_response = bytes(bytearray.fromhex(
+ "81a7726573756c74739182ac73746174656d656e745f696400a673657269"
+ "65739183a46e616d65a161a7636f6c756d6e7392a474696d65a176a67661"
+ "6c7565739192c70c05000000005d26178a019096c8cb3ff0000000000000"
+ ))
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/query",
+ request_headers={"Accept": "application/x-msgpack"},
+ headers={"Content-Type": "application/x-msgpack"},
+ content=example_response
+ )
+ rs = self.cli.query('select * from a')
+
+ self.assertListEqual(
+ list(rs.get_points()),
+ [{'v': 1.0, 'time': '2019-07-10T16:51:22.026253Z'}]
+ )
+
def test_select_into_post(self):
"""Test SELECT.*INTO is POSTed."""
example_response = (
@@ -658,6 +787,66 @@ def test_get_list_measurements(self):
[{'name': 'cpu'}, {'name': 'disk'}]
)
+ def test_get_list_series(self):
+ """Test get a list of series from the database."""
+ data = {'results': [
+ {'series': [
+ {
+ 'values': [
+ ['cpu_load_short,host=server01,region=us-west'],
+ ['memory_usage,host=server02,region=us-east']],
+ 'columns': ['key']
+ }
+ ]}
+ ]}
+
+ with _mocked_session(self.cli, 'get', 200, json.dumps(data)):
+ self.assertListEqual(
+ self.cli.get_list_series(),
+ ['cpu_load_short,host=server01,region=us-west',
+ 'memory_usage,host=server02,region=us-east'])
+
+ def test_get_list_series_with_measurement(self):
+ """Test get a list of series from the database by filter."""
+ data = {'results': [
+ {'series': [
+ {
+ 'values': [
+ ['cpu_load_short,host=server01,region=us-west']],
+ 'columns': ['key']
+ }
+ ]}
+ ]}
+
+ with _mocked_session(self.cli, 'get', 200, json.dumps(data)):
+ self.assertListEqual(
+ self.cli.get_list_series(measurement='cpu_load_short'),
+ ['cpu_load_short,host=server01,region=us-west'])
+
+ def test_get_list_series_with_tags(self):
+ """Test get a list of series from the database by tags."""
+ data = {'results': [
+ {'series': [
+ {
+ 'values': [
+ ['cpu_load_short,host=server01,region=us-west']],
+ 'columns': ['key']
+ }
+ ]}
+ ]}
+
+ with _mocked_session(self.cli, 'get', 200, json.dumps(data)):
+ self.assertListEqual(
+ self.cli.get_list_series(tags={'region': 'us-west'}),
+ ['cpu_load_short,host=server01,region=us-west'])
+
+ @raises(Exception)
+ def test_get_list_series_fails(self):
+ """Test get a list of series from the database but fail."""
+ cli = InfluxDBClient('host', 8086, 'username', 'password')
+ with _mocked_session(cli, 'get', 401):
+ cli.get_list_series()
+
def test_create_retention_policy_default(self):
"""Test create default ret policy for TestInfluxDBClient object."""
example_response = '{"results":[{}]}'
@@ -698,6 +887,49 @@ def test_create_retention_policy(self):
'"db" duration 1d replication 4 shard duration 0s'
)
+ def test_create_retention_policy_shard_duration(self):
+ """Test create retention policy with a custom shard duration."""
+ example_response = '{"results":[{}]}'
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text=example_response
+ )
+ self.cli.create_retention_policy(
+ 'somename2', '1d', 4, database='db',
+ shard_duration='1h'
+ )
+
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'create retention policy "somename2" on '
+ '"db" duration 1d replication 4 shard duration 1h'
+ )
+
+ def test_create_retention_policy_shard_duration_default(self):
+ """Test create retention policy with a default shard duration."""
+ example_response = '{"results":[{}]}'
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text=example_response
+ )
+ self.cli.create_retention_policy(
+ 'somename3', '1d', 4, database='db',
+ shard_duration='1h', default=True
+ )
+
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'create retention policy "somename3" on '
+ '"db" duration 1d replication 4 shard duration 1h '
+ 'default'
+ )
+
def test_alter_retention_policy(self):
"""Test alter retention policy for TestInfluxDBClient object."""
example_response = '{"results":[{}]}'
@@ -1027,7 +1259,7 @@ def test_revoke_privilege_invalid(self):
self.cli.revoke_privilege('', 'testdb', 'test')
def test_get_list_privileges(self):
- """Tst get list of privs for TestInfluxDBClient object."""
+ """Test get list of privs for TestInfluxDBClient object."""
data = {'results': [
{'series': [
{'columns': ['database', 'privilege'],
@@ -1167,18 +1399,13 @@ def test_invalid_port_fails(self):
InfluxDBClient('host', '80/redir', 'username', 'password')
def test_chunked_response(self):
- """Test chunked reponse for TestInfluxDBClient object."""
+ """Test chunked response for TestInfluxDBClient object."""
example_response = \
- u'{"results":[{"statement_id":0,"series":' \
- '[{"name":"cpu","columns":["fieldKey","fieldType"],"values":' \
- '[["value","integer"]]}],"partial":true}]}\n{"results":' \
- '[{"statement_id":0,"series":[{"name":"iops","columns":' \
- '["fieldKey","fieldType"],"values":[["value","integer"]]}],' \
- '"partial":true}]}\n{"results":[{"statement_id":0,"series":' \
- '[{"name":"load","columns":["fieldKey","fieldType"],"values":' \
- '[["value","integer"]]}],"partial":true}]}\n{"results":' \
- '[{"statement_id":0,"series":[{"name":"memory","columns":' \
- '["fieldKey","fieldType"],"values":[["value","integer"]]}]}]}\n'
+ u'{"results":[{"statement_id":0,"series":[{"columns":["key"],' \
+ '"values":[["cpu"],["memory"],["iops"],["network"]],"partial":' \
+ 'true}],"partial":true}]}\n{"results":[{"statement_id":0,' \
+ '"series":[{"columns":["key"],"values":[["qps"],["uptime"],' \
+ '["df"],["mount"]]}]}]}\n'
with requests_mock.Mocker() as m:
m.register_uri(
@@ -1186,23 +1413,125 @@ def test_chunked_response(self):
"http://localhost:8086/query",
text=example_response
)
- response = self.cli.query('show series limit 4 offset 0',
+ response = self.cli.query('show series',
chunked=True, chunk_size=4)
- self.assertTrue(len(response) == 4)
- self.assertEqual(response.__repr__(), ResultSet(
- {'series': [{'values': [['value', 'integer']],
- 'name': 'cpu',
- 'columns': ['fieldKey', 'fieldType']},
- {'values': [['value', 'integer']],
- 'name': 'iops',
- 'columns': ['fieldKey', 'fieldType']},
- {'values': [['value', 'integer']],
- 'name': 'load',
- 'columns': ['fieldKey', 'fieldType']},
- {'values': [['value', 'integer']],
- 'name': 'memory',
- 'columns': ['fieldKey', 'fieldType']}]}
- ).__repr__())
+ res = list(response)
+ self.assertTrue(len(res) == 2)
+ self.assertEqual(res[0].__repr__(), ResultSet(
+ {'series': [{
+ 'columns': ['key'],
+ 'values': [['cpu'], ['memory'], ['iops'], ['network']]
+ }]}).__repr__())
+ self.assertEqual(res[1].__repr__(), ResultSet(
+ {'series': [{
+ 'columns': ['key'],
+ 'values': [['qps'], ['uptime'], ['df'], ['mount']]
+ }]}).__repr__())
+
+ def test_auth_default(self):
+ """Test auth with default settings."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/ping",
+ status_code=204,
+ headers={'X-Influxdb-Version': '1.2.3'}
+ )
+
+ cli = InfluxDBClient()
+ cli.ping()
+
+ self.assertEqual(m.last_request.headers["Authorization"],
+ "Basic cm9vdDpyb290")
+
+ def test_auth_username_password(self):
+ """Test auth with custom username and password."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/ping",
+ status_code=204,
+ headers={'X-Influxdb-Version': '1.2.3'}
+ )
+
+ cli = InfluxDBClient(username='my-username',
+ password='my-password')
+ cli.ping()
+
+ self.assertEqual(m.last_request.headers["Authorization"],
+ "Basic bXktdXNlcm5hbWU6bXktcGFzc3dvcmQ=")
+
+ def test_auth_username_password_none(self):
+ """Test auth with not defined username or password."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/ping",
+ status_code=204,
+ headers={'X-Influxdb-Version': '1.2.3'}
+ )
+
+ cli = InfluxDBClient(username=None, password=None)
+ cli.ping()
+ self.assertFalse('Authorization' in m.last_request.headers)
+
+ cli = InfluxDBClient(username=None)
+ cli.ping()
+ self.assertFalse('Authorization' in m.last_request.headers)
+
+ cli = InfluxDBClient(password=None)
+ cli.ping()
+ self.assertFalse('Authorization' in m.last_request.headers)
+
+ def test_auth_token(self):
+ """Test auth with custom authorization header."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/ping",
+ status_code=204,
+ headers={'X-Influxdb-Version': '1.2.3'}
+ )
+
+ cli = InfluxDBClient(username=None, password=None,
+ headers={"Authorization": "my-token"})
+ cli.ping()
+ self.assertEqual(m.last_request.headers["Authorization"],
+ "my-token")
+
+ def test_custom_socket_options(self):
+ """Test custom socket options."""
+ test_socket_options = HTTPConnection.default_socket_options + \
+ [(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
+ (socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 60),
+ (socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 15)]
+
+ cli = InfluxDBClient(username=None, password=None,
+ socket_options=test_socket_options)
+
+ self.assertEquals(cli._session.adapters.get("http://").socket_options,
+ test_socket_options)
+ self.assertEquals(cli._session.adapters.get("http://").poolmanager.
+ connection_pool_kw.get("socket_options"),
+ test_socket_options)
+
+ connection_pool = cli._session.adapters.get("http://").poolmanager \
+ .connection_from_url(
+ url="http://localhost:8086")
+ new_connection = connection_pool._new_conn()
+ self.assertEquals(new_connection.socket_options, test_socket_options)
+
+ def test_none_socket_options(self):
+ """Test default socket options."""
+ cli = InfluxDBClient(username=None, password=None)
+ self.assertEquals(cli._session.adapters.get("http://").socket_options,
+ None)
+ connection_pool = cli._session.adapters.get("http://").poolmanager \
+ .connection_from_url(
+ url="http://localhost:8086")
+ new_connection = connection_pool._new_conn()
+ self.assertEquals(new_connection.socket_options,
+ HTTPConnection.default_socket_options)
class FakeClient(InfluxDBClient):
diff --git a/influxdb/tests/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py
index cb380ac5..87b8e0d8 100644
--- a/influxdb/tests/dataframe_client_test.py
+++ b/influxdb/tests/dataframe_client_test.py
@@ -13,8 +13,8 @@
import warnings
import requests_mock
-from influxdb.tests import skip_if_pypy, using_pypy
from nose.tools import raises
+from influxdb.tests import skip_if_pypy, using_pypy
from .client_test import _mocked_session
@@ -22,7 +22,7 @@
import pandas as pd
from pandas.util.testing import assert_frame_equal
from influxdb import DataFrameClient
- import numpy
+ import numpy as np
@skip_if_pypy
@@ -389,6 +389,71 @@ def test_write_points_from_dataframe_with_numeric_column_names(self):
self.assertEqual(m.last_request.body, expected)
+ def test_write_points_from_dataframe_with_leading_none_column(self):
+ """write_points detect erroneous leading comma for null first field."""
+ dataframe = pd.DataFrame(
+ dict(
+ first=[1, None, None, 8, 9],
+ second=[2, None, None, None, 10],
+ third=[3, 4.1, None, None, 11],
+ first_tag=["one", None, None, "eight", None],
+ second_tag=["two", None, None, None, None],
+ third_tag=["three", "four", None, None, None],
+ comment=[
+ "All columns filled",
+ "First two of three empty",
+ "All empty",
+ "Last two of three empty",
+ "Empty tags with values",
+ ]
+ ),
+ index=pd.date_range(
+ start=pd.to_datetime('2018-01-01'),
+ freq='1D',
+ periods=5,
+ )
+ )
+ expected = (
+ b'foo,first_tag=one,second_tag=two,third_tag=three'
+ b' comment="All columns filled",first=1.0,second=2.0,third=3.0'
+ b' 1514764800000000000\n'
+ b'foo,third_tag=four'
+ b' comment="First two of three empty",third=4.1'
+ b' 1514851200000000000\n'
+ b'foo comment="All empty" 1514937600000000000\n'
+ b'foo,first_tag=eight'
+ b' comment="Last two of three empty",first=8.0'
+ b' 1515024000000000000\n'
+ b'foo'
+ b' comment="Empty tags with values",first=9.0,second=10.0'
+ b',third=11.0'
+ b' 1515110400000000000\n'
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+
+ cli = DataFrameClient(database='db')
+
+ colnames = [
+ "first_tag",
+ "second_tag",
+ "third_tag",
+ "comment",
+ "first",
+ "second",
+ "third"
+ ]
+ cli.write_points(dataframe.loc[:, colnames], 'foo',
+ tag_columns=[
+ "first_tag",
+ "second_tag",
+ "third_tag"])
+
+ self.assertEqual(m.last_request.body, expected)
+
def test_write_points_from_dataframe_with_numeric_precision(self):
"""Test write points from df with numeric precision."""
now = pd.Timestamp('1970-01-01 00:00+00:00')
@@ -397,7 +462,7 @@ def test_write_points_from_dataframe_with_numeric_precision(self):
["2", 2, 2.2222222222222]],
index=[now, now + timedelta(hours=1)])
- if numpy.lib.NumpyVersion(numpy.__version__) <= '1.13.3':
+ if np.lib.NumpyVersion(np.__version__) <= '1.13.3':
expected_default_precision = (
b'foo,hello=there 0=\"1\",1=1i,2=1.11111111111 0\n'
b'foo,hello=there 0=\"2\",1=2i,2=2.22222222222 3600000000000\n'
@@ -812,7 +877,7 @@ def test_query_into_dataframe(self):
{"measurement": "network",
"tags": {"direction": ""},
"columns": ["time", "value"],
- "values":[["2009-11-10T23:00:00Z", 23422]]
+ "values": [["2009-11-10T23:00:00Z", 23422]]
},
{"measurement": "network",
"tags": {"direction": "in"},
@@ -903,6 +968,98 @@ def test_multiquery_into_dataframe(self):
for k in e:
assert_frame_equal(e[k], r[k])
+ def test_multiquery_into_dataframe_dropna(self):
+ """Test multiquery into df for TestDataFrameClient object."""
+ data = {
+ "results": [
+ {
+ "series": [
+ {
+ "name": "cpu_load_short",
+ "columns": ["time", "value", "value2", "value3"],
+ "values": [
+ ["2015-01-29T21:55:43.702900257Z",
+ 0.55, 0.254, np.NaN],
+ ["2015-01-29T21:55:43.702900257Z",
+ 23422, 122878, np.NaN],
+ ["2015-06-11T20:46:02Z",
+ 0.64, 0.5434, np.NaN]
+ ]
+ }
+ ]
+ }, {
+ "series": [
+ {
+ "name": "cpu_load_short",
+ "columns": ["time", "count"],
+ "values": [
+ ["1970-01-01T00:00:00Z", 3]
+ ]
+ }
+ ]
+ }
+ ]
+ }
+
+ pd1 = pd.DataFrame(
+ [[0.55, 0.254, np.NaN],
+ [23422.0, 122878, np.NaN],
+ [0.64, 0.5434, np.NaN]],
+ columns=['value', 'value2', 'value3'],
+ index=pd.to_datetime([
+ "2015-01-29 21:55:43.702900257+0000",
+ "2015-01-29 21:55:43.702900257+0000",
+ "2015-06-11 20:46:02+0000"]))
+
+ if pd1.index.tzinfo is None:
+ pd1.index = pd1.index.tz_localize('UTC')
+
+ pd1_dropna = pd.DataFrame(
+ [[0.55, 0.254], [23422.0, 122878], [0.64, 0.5434]],
+ columns=['value', 'value2'],
+ index=pd.to_datetime([
+ "2015-01-29 21:55:43.702900257+0000",
+ "2015-01-29 21:55:43.702900257+0000",
+ "2015-06-11 20:46:02+0000"]))
+
+ if pd1_dropna.index.tzinfo is None:
+ pd1_dropna.index = pd1_dropna.index.tz_localize('UTC')
+
+ pd2 = pd.DataFrame(
+ [[3]], columns=['count'],
+ index=pd.to_datetime(["1970-01-01 00:00:00+00:00"]))
+
+ if pd2.index.tzinfo is None:
+ pd2.index = pd2.index.tz_localize('UTC')
+
+ expected_dropna_true = [
+ {'cpu_load_short': pd1_dropna},
+ {'cpu_load_short': pd2}]
+ expected_dropna_false = [
+ {'cpu_load_short': pd1},
+ {'cpu_load_short': pd2}]
+
+ cli = DataFrameClient('host', 8086, 'username', 'password', 'db')
+ iql = "SELECT value FROM cpu_load_short WHERE region=$region;" \
+ "SELECT count(value) FROM cpu_load_short WHERE region=$region"
+ bind_params = {'region': 'us-west'}
+
+ for dropna in [True, False]:
+ with _mocked_session(cli, 'GET', 200, data):
+ result = cli.query(iql, bind_params=bind_params, dropna=dropna)
+ expected = \
+ expected_dropna_true if dropna else expected_dropna_false
+ for r, e in zip(result, expected):
+ for k in e:
+ assert_frame_equal(e[k], r[k])
+
+ # test default value (dropna = True)
+ with _mocked_session(cli, 'GET', 200, data):
+ result = cli.query(iql, bind_params=bind_params)
+ for r, e in zip(result, expected_dropna_true):
+ for k in e:
+ assert_frame_equal(e[k], r[k])
+
def test_query_with_empty_result(self):
"""Test query with empty results in TestDataFrameClient object."""
cli = DataFrameClient('host', 8086, 'username', 'password', 'db')
@@ -967,3 +1124,225 @@ def test_dsn_constructor(self):
client = DataFrameClient.from_dsn('influxdb://localhost:8086')
self.assertIsInstance(client, DataFrameClient)
self.assertEqual('http://localhost:8086', client._baseurl)
+
+ def test_write_points_from_dataframe_with_nan_line(self):
+ """Test write points from dataframe with Nan lines."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.DataFrame(data=[["1", 1, np.inf], ["2", 2, np.nan]],
+ index=[now, now + timedelta(hours=1)],
+ columns=["column_one", "column_two",
+ "column_three"])
+ expected = (
+ b"foo column_one=\"1\",column_two=1i 0\n"
+ b"foo column_one=\"2\",column_two=2i "
+ b"3600000000000\n"
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+
+ cli = DataFrameClient(database='db')
+
+ cli.write_points(dataframe, 'foo', protocol='line')
+ self.assertEqual(m.last_request.body, expected)
+
+ cli.write_points(dataframe, 'foo', tags=None, protocol='line')
+ self.assertEqual(m.last_request.body, expected)
+
+ def test_write_points_from_dataframe_with_nan_json(self):
+ """Test write points from json with NaN lines."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.DataFrame(data=[["1", 1, np.inf], ["2", 2, np.nan]],
+ index=[now, now + timedelta(hours=1)],
+ columns=["column_one", "column_two",
+ "column_three"])
+ expected = (
+ b"foo column_one=\"1\",column_two=1i 0\n"
+ b"foo column_one=\"2\",column_two=2i "
+ b"3600000000000\n"
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+
+ cli = DataFrameClient(database='db')
+
+ cli.write_points(dataframe, 'foo', protocol='json')
+ self.assertEqual(m.last_request.body, expected)
+
+ cli.write_points(dataframe, 'foo', tags=None, protocol='json')
+ self.assertEqual(m.last_request.body, expected)
+
+ def test_write_points_from_dataframe_with_tags_and_nan_line(self):
+ """Test write points from dataframe with NaN lines and tags."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.DataFrame(data=[['blue', 1, "1", 1, np.inf],
+ ['red', 0, "2", 2, np.nan]],
+ index=[now, now + timedelta(hours=1)],
+ columns=["tag_one", "tag_two", "column_one",
+ "column_two", "column_three"])
+ expected = (
+ b"foo,tag_one=blue,tag_two=1 "
+ b"column_one=\"1\",column_two=1i "
+ b"0\n"
+ b"foo,tag_one=red,tag_two=0 "
+ b"column_one=\"2\",column_two=2i "
+ b"3600000000000\n"
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+
+ cli = DataFrameClient(database='db')
+
+ cli.write_points(dataframe, 'foo', protocol='line',
+ tag_columns=['tag_one', 'tag_two'])
+ self.assertEqual(m.last_request.body, expected)
+
+ cli.write_points(dataframe, 'foo', tags=None, protocol='line',
+ tag_columns=['tag_one', 'tag_two'])
+ self.assertEqual(m.last_request.body, expected)
+
+ def test_write_points_from_dataframe_with_tags_and_nan_json(self):
+ """Test write points from json with NaN lines and tags."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.DataFrame(data=[['blue', 1, "1", 1, np.inf],
+ ['red', 0, "2", 2, np.nan]],
+ index=[now, now + timedelta(hours=1)],
+ columns=["tag_one", "tag_two", "column_one",
+ "column_two", "column_three"])
+ expected = (
+ b"foo,tag_one=blue,tag_two=1 "
+ b"column_one=\"1\",column_two=1i "
+ b"0\n"
+ b"foo,tag_one=red,tag_two=0 "
+ b"column_one=\"2\",column_two=2i "
+ b"3600000000000\n"
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+
+ cli = DataFrameClient(database='db')
+
+ cli.write_points(dataframe, 'foo', protocol='json',
+ tag_columns=['tag_one', 'tag_two'])
+ self.assertEqual(m.last_request.body, expected)
+
+ cli.write_points(dataframe, 'foo', tags=None, protocol='json',
+ tag_columns=['tag_one', 'tag_two'])
+ self.assertEqual(m.last_request.body, expected)
+
+ def test_query_custom_index(self):
+ """Test query with custom indexes."""
+ data = {
+ "results": [
+ {
+ "series": [
+ {
+ "name": "cpu_load_short",
+ "columns": ["time", "value", "host"],
+ "values": [
+ [1, 0.55, "local"],
+ [2, 23422, "local"],
+ [3, 0.64, "local"]
+ ]
+ }
+ ]
+ }
+ ]
+ }
+
+ cli = DataFrameClient('host', 8086, 'username', 'password', 'db')
+ iql = "SELECT value FROM cpu_load_short WHERE region=$region;" \
+ "SELECT count(value) FROM cpu_load_short WHERE region=$region"
+ bind_params = {'region': 'us-west'}
+ with _mocked_session(cli, 'GET', 200, data):
+ result = cli.query(iql, bind_params=bind_params,
+ data_frame_index=["time", "host"])
+
+ _data_frame = result['cpu_load_short']
+ print(_data_frame)
+
+ self.assertListEqual(["time", "host"],
+ list(_data_frame.index.names))
+
+ def test_dataframe_nanosecond_precision(self):
+ """Test nanosecond precision."""
+ for_df_dict = {
+ "nanFloats": [1.1, float('nan'), 3.3, 4.4],
+ "onlyFloats": [1.1, 2.2, 3.3, 4.4],
+ "strings": ['one_one', 'two_two', 'three_three', 'four_four']
+ }
+ df = pd.DataFrame.from_dict(for_df_dict)
+ df['time'] = ['2019-10-04 06:27:19.850557111+00:00',
+ '2019-10-04 06:27:19.850557184+00:00',
+ '2019-10-04 06:27:42.251396864+00:00',
+ '2019-10-04 06:27:42.251396974+00:00']
+ df['time'] = pd.to_datetime(df['time'], unit='ns')
+ df = df.set_index('time')
+
+ expected = (
+ b'foo nanFloats=1.1,onlyFloats=1.1,strings="one_one" 1570170439850557111\n' # noqa E501 line too long
+ b'foo onlyFloats=2.2,strings="two_two" 1570170439850557184\n' # noqa E501 line too long
+ b'foo nanFloats=3.3,onlyFloats=3.3,strings="three_three" 1570170462251396864\n' # noqa E501 line too long
+ b'foo nanFloats=4.4,onlyFloats=4.4,strings="four_four" 1570170462251396974\n' # noqa E501 line too long
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204
+ )
+
+ cli = DataFrameClient(database='db')
+ cli.write_points(df, 'foo', time_precision='n')
+
+ self.assertEqual(m.last_request.body, expected)
+
+ def test_dataframe_nanosecond_precision_one_microsecond(self):
+ """Test nanosecond precision within one microsecond."""
+ # 1 microsecond = 1000 nanoseconds
+ start = np.datetime64('2019-10-04T06:27:19.850557000')
+ end = np.datetime64('2019-10-04T06:27:19.850558000')
+
+ # generate timestamps with nanosecond precision
+ timestamps = np.arange(
+ start,
+ end + np.timedelta64(1, 'ns'),
+ np.timedelta64(1, 'ns')
+ )
+ # generate values
+ values = np.arange(0.0, len(timestamps))
+
+ df = pd.DataFrame({'value': values}, index=timestamps)
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204
+ )
+
+ cli = DataFrameClient(database='db')
+ cli.write_points(df, 'foo', time_precision='n')
+
+ lines = m.last_request.body.decode('utf-8').split('\n')
+ self.assertEqual(len(lines), 1002)
+
+ for index, line in enumerate(lines):
+ if index == 1001:
+ self.assertEqual(line, '')
+ continue
+ self.assertEqual(
+ line,
+ f"foo value={index}.0 157017043985055{7000 + index:04}"
+ )
diff --git a/influxdb/tests/helper_test.py b/influxdb/tests/helper_test.py
index 6f24e85d..6737f921 100644
--- a/influxdb/tests/helper_test.py
+++ b/influxdb/tests/helper_test.py
@@ -47,6 +47,14 @@ class Meta:
TestSeriesHelper.MySeriesHelper = MySeriesHelper
+ def setUp(self):
+ """Check that MySeriesHelper has empty datapoints."""
+ super(TestSeriesHelper, self).setUp()
+ self.assertEqual(
+ TestSeriesHelper.MySeriesHelper._json_body_(),
+ [],
+ 'Resetting helper in teardown did not empty datapoints.')
+
def tearDown(self):
"""Deconstruct the TestSeriesHelper object."""
super(TestSeriesHelper, self).tearDown()
@@ -310,8 +318,19 @@ class Meta:
series_name = 'events.stats.{server_name}'
+ class InvalidTimePrecision(SeriesHelper):
+ """Define instance of SeriesHelper for invalid time precision."""
+
+ class Meta:
+ """Define metadata for InvalidTimePrecision."""
+
+ series_name = 'events.stats.{server_name}'
+ time_precision = "ks"
+ fields = ['time', 'server_name']
+ autocommit = True
+
for cls in [MissingMeta, MissingClient, MissingFields,
- MissingSeriesName]:
+ MissingSeriesName, InvalidTimePrecision]:
self.assertRaises(
AttributeError, cls, **{'time': 159,
'server_name': 'us.east-1'})
@@ -365,3 +384,54 @@ class Meta:
.format(WarnBulkSizeNoEffect))
self.assertIn('has no affect', str(w[-1].message),
'Warning message did not contain "has not affect".')
+
+ def testSeriesWithRetentionPolicy(self):
+ """Test that the data is saved with the specified retention policy."""
+ my_policy = 'my_policy'
+
+ class RetentionPolicySeriesHelper(SeriesHelper):
+
+ class Meta:
+ client = InfluxDBClient()
+ series_name = 'events.stats.{server_name}'
+ fields = ['some_stat', 'time']
+ tags = ['server_name', 'other_tag']
+ bulk_size = 2
+ autocommit = True
+ retention_policy = my_policy
+
+ fake_write_points = mock.MagicMock()
+ RetentionPolicySeriesHelper(
+ server_name='us.east-1', some_stat=159, other_tag='gg')
+ RetentionPolicySeriesHelper._client.write_points = fake_write_points
+ RetentionPolicySeriesHelper(
+ server_name='us.east-1', some_stat=158, other_tag='aa')
+
+ kall = fake_write_points.call_args
+ args, kwargs = kall
+ self.assertTrue('retention_policy' in kwargs)
+ self.assertEqual(kwargs['retention_policy'], my_policy)
+
+ def testSeriesWithoutRetentionPolicy(self):
+ """Test that the data is saved without any retention policy."""
+ class NoRetentionPolicySeriesHelper(SeriesHelper):
+
+ class Meta:
+ client = InfluxDBClient()
+ series_name = 'events.stats.{server_name}'
+ fields = ['some_stat', 'time']
+ tags = ['server_name', 'other_tag']
+ bulk_size = 2
+ autocommit = True
+
+ fake_write_points = mock.MagicMock()
+ NoRetentionPolicySeriesHelper(
+ server_name='us.east-1', some_stat=159, other_tag='gg')
+ NoRetentionPolicySeriesHelper._client.write_points = fake_write_points
+ NoRetentionPolicySeriesHelper(
+ server_name='us.east-1', some_stat=158, other_tag='aa')
+
+ kall = fake_write_points.call_args
+ args, kwargs = kall
+ self.assertTrue('retention_policy' in kwargs)
+ self.assertEqual(kwargs['retention_policy'], None)
diff --git a/influxdb/tests/server_tests/base.py b/influxdb/tests/server_tests/base.py
index f4bd3ff9..45a9ec80 100644
--- a/influxdb/tests/server_tests/base.py
+++ b/influxdb/tests/server_tests/base.py
@@ -36,6 +36,15 @@ def _setup_influxdb_server(inst):
database='db')
+def _setup_gzip_client(inst):
+ inst.cli = InfluxDBClient('localhost',
+ inst.influxd_inst.http_port,
+ 'root',
+ '',
+ database='db',
+ gzip=True)
+
+
def _teardown_influxdb_server(inst):
remove_tree = sys.exc_info() == (None, None, None)
inst.influxd_inst.close(remove_tree=remove_tree)
@@ -51,8 +60,15 @@ class SingleTestCaseWithServerMixin(object):
# 'influxdb_template_conf' attribute must be set
# on the TestCase class or instance.
- setUp = _setup_influxdb_server
- tearDown = _teardown_influxdb_server
+ @classmethod
+ def setUp(cls):
+ """Set up an instance of the SingleTestCaseWithServerMixin."""
+ _setup_influxdb_server(cls)
+
+ @classmethod
+ def tearDown(cls):
+ """Tear down an instance of the SingleTestCaseWithServerMixin."""
+ _teardown_influxdb_server(cls)
class ManyTestCasesWithServerMixin(object):
@@ -82,3 +98,41 @@ def tearDownClass(cls):
def tearDown(self):
"""Deconstruct an instance of ManyTestCasesWithServerMixin."""
self.cli.drop_database('db')
+
+
+class SingleTestCaseWithServerGzipMixin(object):
+ """Define the single testcase with server with gzip client mixin.
+
+    Same as the SingleTestCaseWithServerMixin but the InfluxDBClient has
+    gzip=True.
+ """
+
+ @classmethod
+ def setUp(cls):
+ """Set up an instance of the SingleTestCaseWithServerGzipMixin."""
+ _setup_influxdb_server(cls)
+ _setup_gzip_client(cls)
+
+ @classmethod
+ def tearDown(cls):
+ """Tear down an instance of the SingleTestCaseWithServerMixin."""
+ _teardown_influxdb_server(cls)
+
+
+class ManyTestCasesWithServerGzipMixin(object):
+ """Define the many testcase with server with gzip client mixin.
+
+ Same as the ManyTestCasesWithServerMixin but the InfluxDBClient has
+ gzip=True.
+ """
+
+ @classmethod
+ def setUpClass(cls):
+ """Set up an instance of the ManyTestCasesWithServerGzipMixin."""
+ _setup_influxdb_server(cls)
+ _setup_gzip_client(cls)
+
+ @classmethod
+ def tearDown(cls):
+ """Tear down an instance of the SingleTestCaseWithServerMixin."""
+ _teardown_influxdb_server(cls)
diff --git a/influxdb/tests/server_tests/client_test_with_server.py b/influxdb/tests/server_tests/client_test_with_server.py
index fda3f720..a0263243 100644
--- a/influxdb/tests/server_tests/client_test_with_server.py
+++ b/influxdb/tests/server_tests/client_test_with_server.py
@@ -26,6 +26,8 @@
from influxdb.tests import skip_if_pypy, using_pypy, skip_server_tests
from influxdb.tests.server_tests.base import ManyTestCasesWithServerMixin
from influxdb.tests.server_tests.base import SingleTestCaseWithServerMixin
+from influxdb.tests.server_tests.base import ManyTestCasesWithServerGzipMixin
+from influxdb.tests.server_tests.base import SingleTestCaseWithServerGzipMixin
# By default, raise exceptions on warnings
warnings.simplefilter('error', FutureWarning)
@@ -450,6 +452,33 @@ def test_write_points_batch(self):
self.assertIn(12, net_out['series'][0]['values'][0])
self.assertIn(12.34, cpu['series'][0]['values'][0])
+ def test_write_points_batch_generator(self):
+ """Test writing points in a batch from a generator."""
+ dummy_points = [
+ {"measurement": "cpu_usage", "tags": {"unit": "percent"},
+ "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.34}},
+ {"measurement": "network", "tags": {"direction": "in"},
+ "time": "2009-11-10T23:00:00Z", "fields": {"value": 123.00}},
+ {"measurement": "network", "tags": {"direction": "out"},
+ "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.00}}
+ ]
+ dummy_points_generator = (point for point in dummy_points)
+ self.cli.write_points(points=dummy_points_generator,
+ tags={"host": "server01",
+ "region": "us-west"},
+ batch_size=2)
+ time.sleep(5)
+ net_in = self.cli.query("SELECT value FROM network "
+ "WHERE direction=$dir",
+ bind_params={'dir': 'in'}
+ ).raw
+ net_out = self.cli.query("SELECT value FROM network "
+ "WHERE direction='out'").raw
+ cpu = self.cli.query("SELECT value FROM cpu_usage").raw
+ self.assertIn(123, net_in['series'][0]['values'][0])
+ self.assertIn(12, net_out['series'][0]['values'][0])
+ self.assertIn(12.34, cpu['series'][0]['values'][0])
+
def test_query(self):
"""Test querying data back from server."""
self.assertIs(True, self.cli.write_points(dummy_point))
@@ -817,6 +846,64 @@ def test_query_multiple_series(self):
]
self.cli.write_points(pts)
+ def test_get_list_series(self):
+ """Test get a list of series from the database."""
+ dummy_points = [
+ {
+ "measurement": "cpu_load_short",
+ "tags": {
+ "host": "server01",
+ "region": "us-west"
+ },
+ "time": "2009-11-10T23:00:00.123456Z",
+ "fields": {
+ "value": 0.64
+ }
+ }
+ ]
+
+ dummy_points_2 = [
+ {
+ "measurement": "memory_usage",
+ "tags": {
+ "host": "server02",
+ "region": "us-east"
+ },
+ "time": "2009-11-10T23:00:00.123456Z",
+ "fields": {
+ "value": 80
+ }
+ }
+ ]
+
+ self.cli.write_points(dummy_points)
+ self.cli.write_points(dummy_points_2)
+
+ self.assertEquals(
+ self.cli.get_list_series(),
+ ['cpu_load_short,host=server01,region=us-west',
+ 'memory_usage,host=server02,region=us-east']
+ )
+
+ self.assertEquals(
+ self.cli.get_list_series(measurement='memory_usage'),
+ ['memory_usage,host=server02,region=us-east']
+ )
+
+ self.assertEquals(
+ self.cli.get_list_series(tags={'host': 'server02'}),
+ ['memory_usage,host=server02,region=us-east'])
+
+ self.assertEquals(
+ self.cli.get_list_series(
+ measurement='cpu_load_short', tags={'host': 'server02'}),
+ [])
+
@skip_server_tests
class UdpTests(ManyTestCasesWithServerMixin, unittest.TestCase):
@@ -855,3 +942,25 @@ def test_write_points_udp(self):
],
list(rsp['cpu_load_short'])
)
+
+
+# Run the tests again, but with gzip enabled this time
+@skip_server_tests
+class GzipSimpleTests(SimpleTests, SingleTestCaseWithServerGzipMixin):
+ """Repeat the simple tests with InfluxDBClient where gzip=True."""
+
+ pass
+
+
+@skip_server_tests
+class GzipCommonTests(CommonTests, ManyTestCasesWithServerGzipMixin):
+ """Repeat the common tests with InfluxDBClient where gzip=True."""
+
+ pass
+
+
+@skip_server_tests
+class GzipUdpTests(UdpTests, ManyTestCasesWithServerGzipMixin):
+ """Repeat the UDP tests with InfluxDBClient where gzip=True."""
+
+ pass
diff --git a/influxdb/tests/server_tests/influxdb_instance.py b/influxdb/tests/server_tests/influxdb_instance.py
index 1dcd7567..2dd823ff 100644
--- a/influxdb/tests/server_tests/influxdb_instance.py
+++ b/influxdb/tests/server_tests/influxdb_instance.py
@@ -7,7 +7,7 @@
from __future__ import unicode_literals
import datetime
-import distutils
+import distutils.spawn
import os
import tempfile
import shutil
diff --git a/influxdb/tests/test_line_protocol.py b/influxdb/tests/test_line_protocol.py
index a3d84793..5b344990 100644
--- a/influxdb/tests/test_line_protocol.py
+++ b/influxdb/tests/test_line_protocol.py
@@ -6,10 +6,12 @@
from __future__ import print_function
from __future__ import unicode_literals
-from datetime import datetime
import unittest
-from pytz import UTC, timezone
+from datetime import datetime
+from decimal import Decimal
+
+from pytz import UTC, timezone
from influxdb import line_protocol
@@ -42,7 +44,7 @@ def test_make_lines(self):
self.assertEqual(
line_protocol.make_lines(data),
- 'test,backslash_tag=C:\\\\ ,integer_tag=2,string_tag=hello '
+ 'test,backslash_tag=C:\\\\,integer_tag=2,string_tag=hello '
'bool_val=True,float_val=1.1,int_val=1i,string_val="hello!"\n'
)
@@ -115,6 +117,45 @@ def test_make_lines_unicode(self):
'test,unicode_tag=\'Привет!\' unicode_val="Привет!"\n'
)
+ def test_make_lines_empty_field_string(self):
+ """Test make lines with an empty string field."""
+ data = {
+ "points": [
+ {
+ "measurement": "test",
+ "fields": {
+ "string": "",
+ }
+ }
+ ]
+ }
+
+ self.assertEqual(
+ line_protocol.make_lines(data),
+ 'test string=""\n'
+ )
+
+ def test_tag_value_newline(self):
+ """Test make lines with tag value contains newline."""
+ data = {
+ "tags": {
+ "t1": "line1\nline2"
+ },
+ "points": [
+ {
+ "measurement": "test",
+ "fields": {
+ "val": "hello"
+ }
+ }
+ ]
+ }
+
+ self.assertEqual(
+ line_protocol.make_lines(data),
+ 'test,t1=line1\\nline2 val="hello"\n'
+ )
+
def test_quote_ident(self):
"""Test quote indentation in TestLineProtocol object."""
self.assertEqual(
@@ -145,3 +186,20 @@ def test_float_with_long_decimal_fraction(self):
line_protocol.make_lines(data),
'test float_val=1.0000000000000009\n'
)
+
+ def test_float_with_long_decimal_fraction_as_type_decimal(self):
+ """Ensure precision is preserved when casting Decimal into strings."""
+ data = {
+ "points": [
+ {
+ "measurement": "test",
+ "fields": {
+ "float_val": Decimal(0.8289445733333332),
+ }
+ }
+ ]
+ }
+ self.assertEqual(
+ line_protocol.make_lines(data),
+ 'test float_val=0.8289445733333332\n'
+ )
diff --git a/mypy.ini b/mypy.ini
new file mode 100644
index 00000000..308aa62d
--- /dev/null
+++ b/mypy.ini
@@ -0,0 +1,8 @@
+[mypy]
+ignore_missing_imports = True
+warn_unused_ignores = True
+warn_unused_configs = True
+warn_redundant_casts = True
+warn_no_return = True
+no_implicit_optional = True
+strict_equality = True
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 00000000..1b68d94e
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,3 @@
+[build-system]
+requires = ["setuptools>=42", "wheel"]
+build-backend = "setuptools.build_meta"
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
index db5f6f85..a3df3154 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,4 +1,5 @@
python-dateutil>=2.6.0
-pytz
+pytz>=2016.10
requests>=2.17.0
six>=1.10.0
+msgpack>=0.5.0
diff --git a/tox.ini b/tox.ini
index 4a1921e2..a1005abb 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,5 +1,5 @@
[tox]
-envlist = py27, py35, py36, py37, pypy, pypy3, flake8, pep257, coverage, docs
+envlist = py27, py35, py36, py37, pypy, pypy3, flake8, pep257, coverage, docs, mypy
[testenv]
passenv = INFLUXDB_PYTHON_INFLUXD_PATH
@@ -12,8 +12,8 @@ deps = -r{toxinidir}/requirements.txt
py35: numpy==1.14.6
py36: pandas==0.23.4
py36: numpy==1.15.4
- py37: pandas==0.24.2
- py37: numpy==1.16.2
+ py37: pandas>=0.24.2
+ py37: numpy>=1.16.2
# Only install pandas with non-pypy interpreters
# Testing all combinations would be too expensive
commands = nosetests -v --with-doctest {posargs}
@@ -31,19 +31,24 @@ commands = pydocstyle --count -ve examples influxdb
[testenv:coverage]
deps = -r{toxinidir}/requirements.txt
-r{toxinidir}/test-requirements.txt
- pandas
+ pandas==0.24.2
coverage
numpy
commands = nosetests -v --with-coverage --cover-html --cover-package=influxdb
[testenv:docs]
deps = -r{toxinidir}/requirements.txt
- pandas==0.24.2
- numpy==1.16.2
- Sphinx==1.8.5
+ pandas>=0.24.2
+ numpy>=1.16.2
+ Sphinx>=1.8.5
sphinx_rtd_theme
commands = sphinx-build -b html docs/source docs/build
+[testenv:mypy]
+deps = -r{toxinidir}/test-requirements.txt
+ mypy==0.720
+commands = mypy --config-file mypy.ini -p influxdb
+
[flake8]
ignore = W503,W504,W605,N802,F821,E402
# W503: Line break occurred before a binary operator