diff --git a/.travis.yml b/.travis.yml
index 7f3d4a5d..8c660b67 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -4,14 +4,16 @@ python:
   - "2.7"
   - "3.5"
   - "3.6"
-  - "pypy-5.3.1"
+  - "pypy"
   - "pypy3"
 
 env:
-  - INFLUXDB_VER=1.2.4
-  - INFLUXDB_VER=1.3.9
-  - INFLUXDB_VER=1.4.2
-  - INFLUXDB_VER=1.5.4
+  - INFLUXDB_VER=1.2.4  # 2017-05-08
+  - INFLUXDB_VER=1.3.9  # 2018-01-19
+  - INFLUXDB_VER=1.4.3  # 2018-01-30
+  - INFLUXDB_VER=1.5.4  # 2018-06-22
+  - INFLUXDB_VER=1.6.4  # 2018-10-24
+  - INFLUXDB_VER=1.7.4  # 2019-02-14
 
 addons:
   apt:
@@ -20,7 +22,31 @@ addons:
 
 matrix:
   include:
-    - python: 2.7
+    - python: 3.7
+      dist: xenial
+      sudo: true
+      env: INFLUXDB_VER=1.2.4
+    - python: 3.7
+      dist: xenial
+      sudo: true
+      env: INFLUXDB_VER=1.3.9
+    - python: 3.7
+      dist: xenial
+      sudo: true
+      env: INFLUXDB_VER=1.4.3
+    - python: 3.7
+      dist: xenial
+      sudo: true
+      env: INFLUXDB_VER=1.5.4
+    - python: 3.7
+      dist: xenial
+      sudo: true
+      env: INFLUXDB_VER=1.6.4
+    - python: 3.7
+      dist: xenial
+      sudo: true
+      env: INFLUXDB_VER=1.7.4
+    - python: 3.6
       env: TOX_ENV=pep257
     - python: 3.6
       env: TOX_ENV=docs
@@ -31,7 +57,7 @@ matrix:
 
 install:
   - pip install tox-travis
-  - pip install setuptools==20.6.6
+  - pip install setuptools
  - pip install coveralls
   - mkdir -p "influxdb_install/${INFLUXDB_VER}"
   - if [ -n "${INFLUXDB_VER}" ] ; then wget "https://dl.influxdata.com/influxdb/releases/influxdb_${INFLUXDB_VER}_amd64.deb" ; fi
diff --git a/CHANGELOG.md b/CHANGELOG.md
index c156b678..7f1503b5 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,6 +5,27 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
 and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
 
 ## [Unreleased]
+
+### Added
+- Add `get_list_continuous_queries`, `drop_continuous_query`, and `create_continuous_query` management methods for
+  continuous queries (#681 thx @lukaszdudek-silvair)
+- `query()` now accepts a `bind_params` argument for parameter binding (#678 thx @clslgrnc)
+
+### Changed
+- Add `consistency` parameter to `InfluxDBClient.write_points` (#643 thx @RonRothman)
+- Update test suite to add support for Python 3.7 and InfluxDB v1.6.4 and v1.7.4 (#692 thx @clslgrnc)
+- Update `classifiers` tuple to a list in setup.py (#697 thx @Hanaasagi)
+- Update documentation to clarify that an empty `delete_series` call does not delete all series
+
+### Removed
+
+## [v5.2.2] - 2019-03-14
+### Added
+
+### Changed
+- Fix 'TypeError: Already tz-aware' introduced with recent versions of pandas (#671, #676, thx @f4bsch @clslgrnc)
+
+## [v5.2.1] - 2018-12-07
 ### Added
 
 ### Changed
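
The headline entry above is parameter binding. As a minimal sketch of the new API (assuming a running local server and the `example` database used by the tutorials):

    from influxdb import InfluxDBClient

    client = InfluxDBClient(database='example')
    # $host is bound server-side instead of being interpolated into the string
    result = client.query(
        'SELECT "value" FROM "cpu_load_short" WHERE "host" = $host',
        bind_params={'host': 'server01'})
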
diff --git a/CODEOWNERS b/CODEOWNERS
new file mode 100644
index 00000000..0acbd7c8
--- /dev/null
+++ b/CODEOWNERS
@@ -0,0 +1 @@
+* @aviau @xginn8 @sebito91
diff --git a/README.rst b/README.rst
index d4f9611c..026171b2 100644
--- a/README.rst
+++ b/README.rst
@@ -39,7 +39,7 @@ InfluxDB is an open-source distributed time series database, find more about Inf
 InfluxDB pre v1.1.0 users
 -------------------------
 
-This module is tested with InfluxDB versions: v1.2.4, v1.3.9, v1.4.2, and v1.5.4.
+This module is tested with InfluxDB versions: v1.2.4, v1.3.9, v1.4.3, v1.5.4, v1.6.4, and v1.7.4.
 
 Those users still on InfluxDB v0.8.x may still use the legacy client by importing ``from influxdb.influxdb08 import InfluxDBClient``.
@@ -59,7 +59,7 @@ On Debian/Ubuntu, you can install it with this command::
 Dependencies
 ------------
 
-The influxdb-python distribution is supported and tested on Python 2.7, 3.5, 3.6, PyPy and PyPy3.
+The influxdb-python distribution is supported and tested on Python 2.7, 3.5, 3.6, 3.7, PyPy and PyPy3.
 
 **Note:** Python versions <3.5 are currently untested. See ``.travis.yml``.
diff --git a/docs/source/examples.rst b/docs/source/examples.rst
index 2c85fbda..fdda62a9 100644
--- a/docs/source/examples.rst
+++ b/docs/source/examples.rst
@@ -25,3 +25,9 @@ Tutorials - SeriesHelper
 
 .. literalinclude:: ../../examples/tutorial_serieshelper.py
     :language: python
+
+Tutorials - UDP
+===============
+
+.. literalinclude:: ../../examples/tutorial_udp.py
+    :language: python
diff --git a/examples/tutorial.py b/examples/tutorial.py
index 4083bfc5..12cd49c1 100644
--- a/examples/tutorial.py
+++ b/examples/tutorial.py
@@ -13,7 +13,9 @@ def main(host='localhost', port=8086):
     dbname = 'example'
     dbuser = 'smly'
     dbuser_password = 'my_secret_password'
-    query = 'select value from cpu_load_short;'
+    query = 'select Float_value from cpu_load_short;'
+    query_where = 'select Int_value from cpu_load_short where host=$host;'
+    bind_params = {'host': 'server01'}
     json_body = [
         {
             "measurement": "cpu_load_short",
@@ -50,6 +52,11 @@ def main(host='localhost', port=8086):
 
     print("Result: {0}".format(result))
 
+    print("Querying data: " + query_where)
+    result = client.query(query_where, bind_params=bind_params)
+
+    print("Result: {0}".format(result))
+
     print("Switch user: " + user)
     client.switch_user(user, password)
 
diff --git a/examples/tutorial_udp.py b/examples/tutorial_udp.py
new file mode 100644
index 00000000..517ae858
--- /dev/null
+++ b/examples/tutorial_udp.py
@@ -0,0 +1,66 @@
+# -*- coding: utf-8 -*-
+"""Example for sending batch information to InfluxDB via UDP."""
+
+"""
+INFO: In order to use UDP, one should enable the UDP service from the
+`influxdb.conf` under section
+    [[udp]]
+        enabled = true
+        bind-address = ":8089"  # port number for sending data via UDP
+        database = "udp1"       # name of database to be stored
+    [[udp]]
+        enabled = true
+        bind-address = ":8090"
+        database = "udp2"
+"""
+
+
+import argparse
+
+from influxdb import InfluxDBClient
+
+
+def main(uport):
+    """Instantiate a connection to the InfluxDB."""
+    # NOTE: the structure of the UDP packet is different from that of
+    #       data sent via HTTP
+    json_body = {
+        "tags": {
+            "host": "server01",
+            "region": "us-west"
+        },
+        "time": "2009-11-10T23:00:00Z",
+        "points": [{
+            "measurement": "cpu_load_short",
+            "fields": {
+                "value": 0.64
+            }
+        },
+            {
+                "measurement": "cpu_load_short",
+                "fields": {
+                    "value": 0.67
+                }
+        }]
+    }
+
+    # make `use_udp` True and add the `udp_port` number from `influxdb.conf`;
+    # no need to mention the database name since it is already configured
+    client = InfluxDBClient(use_udp=True, udp_port=uport)
+
+    # Instead of `write_points` use `send_packet`
+    client.send_packet(json_body)
+
+
+def parse_args():
+    """Parse the args."""
+    parser = argparse.ArgumentParser(
+        description='example code to play with InfluxDB along with UDP Port')
+    parser.add_argument('--uport', type=int, required=True,
+                        help='UDP port of InfluxDB')
+    return parser.parse_args()
+
+
+if __name__ == '__main__':
+    args = parse_args()
+    main(uport=args.uport)
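
Assuming the `[[udp]]` block sketched in the module docstring has been enabled in `influxdb.conf` and InfluxDB restarted, the new example can be exercised against the first listener with:

    $ python examples/tutorial_udp.py --uport 8089

Writes then land in whichever database is mapped to that port (`udp1` for `:8089` in the sample config).
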
diff --git a/influxdb/__init__.py b/influxdb/__init__.py
index 03f74581..288880b1 100644
--- a/influxdb/__init__.py
+++ b/influxdb/__init__.py
@@ -18,4 +18,4 @@
 ]
 
 
-__version__ = '5.2.0'
+__version__ = '5.2.2'
diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py
index 06da7ac4..33a8b562 100644
--- a/influxdb/_dataframe_client.py
+++ b/influxdb/_dataframe_client.py
@@ -142,6 +142,7 @@ def write_points(self,
     def query(self,
               query,
               params=None,
+              bind_params=None,
               epoch=None,
               expected_response_code=200,
               database=None,
@@ -153,8 +154,18 @@ def query(self,
         """
         Query data into a DataFrame.
 
+        .. danger::
+            In order to avoid injection vulnerabilities (similar to `SQL
+            injection <https://www.owasp.org/index.php/SQL_Injection>`_
+            vulnerabilities), do not directly include untrusted data into the
+            ``query`` parameter, use ``bind_params`` instead.
+
         :param query: the actual query string
         :param params: additional parameters for the request, defaults to {}
+        :param bind_params: bind parameters for the query:
+            any variable in the query written as ``'$var_name'`` will be
+            replaced with ``bind_params['var_name']``. Only works in the
+            ``WHERE`` clause and takes precedence over ``params['params']``
         :param epoch: response timestamps to be in epoch format either 'h',
             'm', 's', 'ms', 'u', or 'ns', defaults to `None` which is
             RFC3339 UTC format with nanosecond precision
@@ -172,6 +183,7 @@ def query(self,
         :rtype: :class:`~.ResultSet`
         """
         query_args = dict(params=params,
+                          bind_params=bind_params,
                           epoch=epoch,
                           expected_response_code=expected_response_code,
                           raise_errors=raise_errors,
@@ -202,7 +214,8 @@ def _to_dataframe(self, rs, dropna=True):
             df = pd.DataFrame(data)
             df.time = pd.to_datetime(df.time)
             df.set_index('time', inplace=True)
-            df.index = df.index.tz_localize('UTC')
+            if df.index.tzinfo is None:
+                df.index = df.index.tz_localize('UTC')
             df.index.name = None
             result[key].append(df)
         for key, data in result.items():
@@ -213,8 +226,8 @@ def _to_dataframe(self, rs, dropna=True):
 
         return result
 
-    @staticmethod
-    def _convert_dataframe_to_json(dataframe,
+    def _convert_dataframe_to_json(self,
+                                   dataframe,
                                    measurement,
                                    tags=None,
                                    tag_columns=None,
@@ -257,14 +270,16 @@ def _convert_dataframe_to_json(dataframe,
             "h": 1e9 * 3600,
         }.get(time_precision, 1)
 
+        dataframe_zip = zip(dataframe.index,
+                            self._todict_dropna(dataframe[tag_columns]),
+                            self._todict_dropna(dataframe[field_columns]))
+
         points = [
             {'measurement': measurement,
              'tags': dict(list(tag.items()) + list(tags.items())),
              'fields': rec,
              'time': np.int64(ts.value / precision_factor)}
-            for ts, tag, rec in zip(dataframe.index,
-                                    dataframe[tag_columns].to_dict('record'),
-                                    dataframe[field_columns].to_dict('record'))
+            for ts, tag, rec in dataframe_zip
         ]
 
         return points
@@ -386,6 +401,12 @@ def _convert_dataframe_to_lines(self,
         points = (measurement + tags + ' ' + fields + ' ' + time).tolist()
         return points
 
+    @staticmethod
+    def _todict_dropna(dataframe):
+        return [{k: v for k, v in m.items()
+                 if pd.notnull(v)}
+                for m in dataframe.to_dict('record')]
+
     @staticmethod
     def _stringify_dataframe(dframe, numeric_precision, datatype='field'):
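
A side effect of routing the JSON protocol through the new `_todict_dropna` helper is that NaN-valued cells are omitted from a point's fields rather than serialized. A minimal sketch of the behaviour (hypothetical measurement and column names, assuming a local server):

    import pandas as pd

    from influxdb import DataFrameClient

    df = pd.DataFrame({'temperature': [21.5, float('nan')],
                       'humidity': [40.0, 38.5]},
                      index=pd.to_datetime(['2019-03-01', '2019-03-02']))

    client = DataFrameClient(database='example')
    # with protocol='json' the second point carries only the humidity field;
    # its NaN temperature is dropped instead of being written
    client.write_points(df, 'weather', protocol='json')
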
diff --git a/influxdb/client.py b/influxdb/client.py
index 8f8b14ae..8ac557d3 100644
--- a/influxdb/client.py
+++ b/influxdb/client.py
@@ -345,6 +345,7 @@ def _read_chunked_response(response, raise_errors=True):
     def query(self,
               query,
               params=None,
+              bind_params=None,
               epoch=None,
               expected_response_code=200,
               database=None,
@@ -354,6 +355,12 @@ def query(self,
               method="GET"):
         """Send a query to InfluxDB.
 
+        .. danger::
+            In order to avoid injection vulnerabilities (similar to `SQL
+            injection <https://www.owasp.org/index.php/SQL_Injection>`_
+            vulnerabilities), do not directly include untrusted data into the
+            ``query`` parameter, use ``bind_params`` instead.
+
         :param query: the actual query string
         :type query: str
 
@@ -361,6 +368,12 @@ def query(self,
             defaults to {}
         :type params: dict
 
+        :param bind_params: bind parameters for the query:
+            any variable in the query written as ``'$var_name'`` will be
+            replaced with ``bind_params['var_name']``. Only works in the
+            ``WHERE`` clause and takes precedence over ``params['params']``
+        :type bind_params: dict
+
         :param epoch: response timestamps to be in epoch format either 'h',
             'm', 's', 'ms', 'u', or 'ns', defaults to `None` which is
             RFC3339 UTC format with nanosecond precision
@@ -394,6 +407,11 @@ def query(self,
         if params is None:
             params = {}
 
+        if bind_params is not None:
+            params_dict = json.loads(params.get('params', '{}'))
+            params_dict.update(bind_params)
+            params['params'] = json.dumps(params_dict)
+
         params['q'] = query
         params['db'] = database or self._database
 
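
The merge above also pins down the documented precedence: `bind_params` is JSON-encoded into the `params['params']` request argument that InfluxDB itself understands, and the `dict.update` call gives `bind_params` the last word on conflicts. A sketch with hypothetical values:

    import json

    # both calls reach the server with params={"host": "server01"}; where the
    # two sources collide, the bind_params entry wins
    client.query('SELECT "value" FROM "cpu" WHERE "host" = $host',
                 params={'params': json.dumps({'host': 'server02'})},
                 bind_params={'host': 'server01'})
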
@@ -440,7 +458,8 @@ def write_points(self,
                      retention_policy=None,
                      tags=None,
                      batch_size=None,
-                     protocol='json'
+                     protocol='json',
+                     consistency=None
                      ):
         """Write to multiple time series names.
 
@@ -468,6 +487,9 @@ def write_points(self,
         :type batch_size: int
         :param protocol: Protocol for writing data. Either 'line' or 'json'.
         :type protocol: str
+        :param consistency: Consistency for the points.
+            One of {'any', 'one', 'quorum', 'all'}.
+        :type consistency: str
 
         :returns: True, if the operation is successful
         :rtype: bool
@@ -480,14 +502,16 @@ def write_points(self,
                                         time_precision=time_precision,
                                         database=database,
                                         retention_policy=retention_policy,
-                                        tags=tags, protocol=protocol)
+                                        tags=tags, protocol=protocol,
+                                        consistency=consistency)
             return True
 
         return self._write_points(points=points,
                                   time_precision=time_precision,
                                   database=database,
                                   retention_policy=retention_policy,
-                                  tags=tags, protocol=protocol)
+                                  tags=tags, protocol=protocol,
+                                  consistency=consistency)
 
     def ping(self):
         """Check connectivity to InfluxDB.
@@ -513,12 +537,16 @@ def _write_points(self,
                       database,
                       retention_policy,
                       tags,
-                      protocol='json'):
+                      protocol='json',
+                      consistency=None):
         if time_precision not in ['n', 'u', 'ms', 's', 'm', 'h', None]:
             raise ValueError(
                 "Invalid time precision is given. "
                 "(use 'n', 'u', 'ms', 's', 'm' or 'h')")
 
+        if consistency not in ['any', 'one', 'quorum', 'all', None]:
+            raise ValueError('Invalid consistency: {}'.format(consistency))
+
         if protocol == 'json':
             data = {
                 'points': points
@@ -533,6 +561,9 @@ def _write_points(self,
             'db': database or self._database
         }
 
+        if consistency is not None:
+            params['consistency'] = consistency
+
         if time_precision is not None:
             params['precision'] = time_precision
 
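
The `consistency` keyword travels to the server as the `consistency` query-string argument of the `/write` endpoint, and `_write_points` rejects unknown values before any request is made. A minimal sketch:

    points = [{'measurement': 'cpu_load_short', 'fields': {'value': 0.64}}]

    client.write_points(points, consistency='quorum')  # any, one, quorum, all
    client.write_points(points, consistency='boo')     # ValueError, no request sent
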
@@ -809,7 +840,9 @@ def set_user_password(self, username, password):
     def delete_series(self, database=None, measurement=None, tags=None):
         """Delete series from a database.
 
-        Series can be filtered by measurement and tags.
+        Series must be filtered by measurement and/or tags.
+        This method cannot be used to delete all series; use
+        `drop_database` instead.
 
         :param database: the database from which the series should be
             deleted, defaults to client's current database
@@ -908,6 +941,98 @@ def get_list_privileges(self, username):
         text = "SHOW GRANTS FOR {0}".format(quote_ident(username))
         return list(self.query(text).get_points())
 
+    def get_list_continuous_queries(self):
+        """Get the list of continuous queries in InfluxDB.
+
+        :return: all CQs in InfluxDB
+        :rtype: list of dictionaries
+
+        :Example:
+
+        ::
+
+            >> cqs = client.get_list_continuous_queries()
+            >> cqs
+            [
+                {
+                    u'db1': []
+                },
+                {
+                    u'db2': [
+                        {
+                            u'name': u'vampire',
+                            u'query': u'CREATE CONTINUOUS QUERY vampire ON '
+                                       'mydb BEGIN SELECT count(dracula) INTO '
+                                       'mydb.autogen.all_of_them FROM '
+                                       'mydb.autogen.one GROUP BY time(5m) END'
+                        }
+                    ]
+                }
+            ]
+        """
+        query_string = "SHOW CONTINUOUS QUERIES"
+        return [{sk[0]: list(p)} for sk, p in self.query(query_string).items()]
+
+    def create_continuous_query(self, name, select, database=None,
+                                resample_opts=None):
+        r"""Create a continuous query for a database.
+
+        :param name: the name of continuous query to create
+        :type name: str
+        :param select: select statement for the continuous query
+        :type select: str
+        :param database: the database for which the continuous query is
+            created. Defaults to current client's database
+        :type database: str
+        :param resample_opts: resample options
+        :type resample_opts: str
+
+        :Example:
+
+        ::
+
+            >> select_clause = 'SELECT mean("value") INTO "cpu_mean" ' \
+            ...                'FROM "cpu" GROUP BY time(1m)'
+            >> client.create_continuous_query(
+            ...     'cpu_mean', select_clause, 'db_name', 'EVERY 10s FOR 2m'
+            ... )
+            >> client.get_list_continuous_queries()
+            [
+                {
+                    'db_name': [
+                        {
+                            'name': 'cpu_mean',
+                            'query': 'CREATE CONTINUOUS QUERY "cpu_mean" '
+                                     'ON "db_name" '
+                                     'RESAMPLE EVERY 10s FOR 2m '
+                                     'BEGIN SELECT mean("value") '
+                                     'INTO "cpu_mean" FROM "cpu" '
+                                     'GROUP BY time(1m) END'
+                        }
+                    ]
+                }
+            ]
+        """
+        query_string = (
+            "CREATE CONTINUOUS QUERY {0} ON {1}{2} BEGIN {3} END"
+        ).format(quote_ident(name), quote_ident(database or self._database),
+                 ' RESAMPLE ' + resample_opts if resample_opts else '', select)
+        self.query(query_string)
+
+    def drop_continuous_query(self, name, database=None):
+        """Drop an existing continuous query for a database.
+
+        :param name: the name of continuous query to drop
+        :type name: str
+        :param database: the database for which the continuous query is
+            dropped. Defaults to current client's database
+        :type database: str
+        """
+        query_string = (
+            "DROP CONTINUOUS QUERY {0} ON {1}"
+        ).format(quote_ident(name), quote_ident(database or self._database))
+        self.query(query_string)
+
     def send_packet(self, packet, protocol='json', time_precision=None):
         """Send an UDP packet.
 
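
Since the reworded `delete_series` docstring is easy to skim past, a minimal sketch of the intended usage (measurement and tag names borrowed from the tutorials):

    # delete the series of one measurement, optionally narrowed by tags
    client.delete_series(measurement='cpu_load_short')
    client.delete_series(measurement='cpu_load_short',
                         tags={'host': 'server01'})

    # wiping a whole database is deliberately a different, louder call
    client.drop_database('example')
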
diff --git a/influxdb/tests/__init__.py b/influxdb/tests/__init__.py
index adf2f20c..f7c5dfb9 100644
--- a/influxdb/tests/__init__.py
+++ b/influxdb/tests/__init__.py
@@ -12,10 +12,10 @@
 import unittest
 
 using_pypy = hasattr(sys, "pypy_version_info")
-skipIfPYpy = unittest.skipIf(using_pypy, "Skipping this test on pypy.")
+skip_if_pypy = unittest.skipIf(using_pypy, "Skipping this test on pypy.")
 
 _skip_server_tests = os.environ.get(
     'INFLUXDB_PYTHON_SKIP_SERVER_TESTS', None) == 'True'
-skipServerTests = unittest.skipIf(_skip_server_tests,
-                                  "Skipping server tests...")
+skip_server_tests = unittest.skipIf(_skip_server_tests,
+                                    "Skipping server tests...")
diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py
index e27eef17..e4cc7e11 100644
--- a/influxdb/tests/client_test.py
+++ b/influxdb/tests/client_test.py
@@ -337,6 +337,23 @@ def test_write_points_with_precision(self):
             m.last_request.body,
         )
 
+    def test_write_points_with_consistency(self):
+        """Test write points with consistency for TestInfluxDBClient object."""
+        with requests_mock.Mocker() as m:
+            m.register_uri(
+                requests_mock.POST,
+                'http://localhost:8086/write',
+                status_code=204
+            )
+
+            cli = InfluxDBClient(database='db')
+
+            cli.write_points(self.dummy_points, consistency='any')
+            self.assertEqual(
+                m.last_request.qs,
+                {'db': ['db'], 'consistency': ['any']}
+            )
+
     def test_write_points_with_precision_udp(self):
         """Test write points with precision for TestInfluxDBClient object."""
         s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
@@ -409,6 +426,15 @@ def test_write_points_bad_precision(self):
                 time_precision='g'
             )
 
+    def test_write_points_bad_consistency(self):
+        """Test write points w/bad consistency value."""
+        cli = InfluxDBClient()
+        with self.assertRaises(ValueError):
+            cli.write_points(
+                self.dummy_points,
+                consistency='boo'
+            )
+
     @raises(Exception)
     def test_write_points_with_precision_fails(self):
         """Test write points w/precision fail for TestInfluxDBClient object."""
@@ -1027,6 +1053,114 @@ def test_get_list_privileges_fails(self):
         with _mocked_session(cli, 'get', 401):
             cli.get_list_privileges('test')
 
+    def test_get_list_continuous_queries(self):
+        """Test getting a list of continuous queries."""
+        data = {
+            "results": [
+                {
+                    "statement_id": 0,
+                    "series": [
+                        {
+                            "name": "testdb01",
+                            "columns": ["name", "query"],
+                            "values": [["testname01", "testquery01"],
+                                       ["testname02", "testquery02"]]
+                        },
+                        {
+                            "name": "testdb02",
+                            "columns": ["name", "query"],
+                            "values": [["testname03", "testquery03"]]
+                        },
+                        {
+                            "name": "testdb03",
+                            "columns": ["name", "query"]
+                        }
+                    ]
+                }
+            ]
+        }
+
+        with _mocked_session(self.cli, 'get', 200, json.dumps(data)):
+            self.assertListEqual(
+                self.cli.get_list_continuous_queries(),
+                [
+                    {
+                        'testdb01': [
+                            {'name': 'testname01', 'query': 'testquery01'},
+                            {'name': 'testname02', 'query': 'testquery02'}
+                        ]
+                    },
+                    {
+                        'testdb02': [
+                            {'name': 'testname03', 'query': 'testquery03'}
+                        ]
+                    },
+                    {
+                        'testdb03': []
+                    }
+                ]
+            )
+
+    @raises(Exception)
+    def test_get_list_continuous_queries_fails(self):
+        """Test failing to get a list of continuous queries."""
+        with _mocked_session(self.cli, 'get', 400):
+            self.cli.get_list_continuous_queries()
+
+    def test_create_continuous_query(self):
+        """Test continuous query creation."""
+        data = {"results": [{}]}
+        with requests_mock.Mocker() as m:
+            m.register_uri(
+                requests_mock.GET,
+                "http://localhost:8086/query",
+                text=json.dumps(data)
+            )
+            query = 'SELECT count("value") INTO "6_months"."events" FROM ' \
+                    '"events" GROUP BY time(10m)'
+            self.cli.create_continuous_query('cq_name', query, 'db_name')
+            self.assertEqual(
+                m.last_request.qs['q'][0],
+                'create continuous query "cq_name" on "db_name" begin select '
+                'count("value") into "6_months"."events" from "events" group '
+                'by time(10m) end'
+            )
+            self.cli.create_continuous_query('cq_name', query, 'db_name',
+                                             'EVERY 10s FOR 2m')
+            self.assertEqual(
+                m.last_request.qs['q'][0],
+                'create continuous query "cq_name" on "db_name" resample '
+                'every 10s for 2m begin select count("value") into '
+                '"6_months"."events" from "events" group by time(10m) end'
+            )
+
+    @raises(Exception)
+    def test_create_continuous_query_fails(self):
+        """Test failing to create a continuous query."""
+        with _mocked_session(self.cli, 'get', 400):
+            self.cli.create_continuous_query('cq_name', 'select', 'db_name')
+
+    def test_drop_continuous_query(self):
+        """Test dropping a continuous query."""
+        data = {"results": [{}]}
+        with requests_mock.Mocker() as m:
+            m.register_uri(
+                requests_mock.GET,
+                "http://localhost:8086/query",
+                text=json.dumps(data)
+            )
+            self.cli.drop_continuous_query('cq_name', 'db_name')
+            self.assertEqual(
+                m.last_request.qs['q'][0],
+                'drop continuous query "cq_name" on "db_name"'
+            )
+
+    @raises(Exception)
+    def test_drop_continuous_query_fails(self):
+        """Test failing to drop a continuous query."""
+        with _mocked_session(self.cli, 'get', 400):
+            self.cli.drop_continuous_query('cq_name', 'db_name')
+
     def test_invalid_port_fails(self):
         """Test invalid port fail for TestInfluxDBClient object."""
         with self.assertRaises(ValueError):
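
One detail worth flagging in the new assertions: requests_mock hands back the captured query string through urllib's `parse_qs`, so every parameter value arrives as a list — hence `{'db': ['db'], 'consistency': ['any']}` rather than scalar values:

    from urllib.parse import parse_qs

    parse_qs('db=db&consistency=any')
    # -> {'db': ['db'], 'consistency': ['any']}
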
diff --git a/influxdb/tests/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py
index 72447c89..6cb47083 100644
--- a/influxdb/tests/dataframe_client_test.py
+++ b/influxdb/tests/dataframe_client_test.py
@@ -13,7 +13,7 @@
 import warnings
 
 import requests_mock
-from influxdb.tests import skipIfPYpy, using_pypy
+from influxdb.tests import skip_if_pypy, using_pypy
 from nose.tools import raises
 
 from .client_test import _mocked_session
@@ -22,9 +22,10 @@
     import pandas as pd
     from pandas.util.testing import assert_frame_equal
     from influxdb import DataFrameClient
+    import numpy
 
 
-@skipIfPYpy
+@skip_if_pypy
 class TestDataFrameClient(unittest.TestCase):
     """Set up a test DataFrameClient object."""
 
@@ -33,6 +34,35 @@ def setUp(self):
         # By default, raise exceptions on warnings
         warnings.simplefilter('error', FutureWarning)
 
+    def test_write_points_with_nan_values(self):
+        """Test write points with NaN values."""
+        now = pd.Timestamp('1970-01-01 00:00+00:00')
+        dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
+                                 index=[now, now + timedelta(hours=1)],
+                                 columns=["column_one", "column_two",
+                                          "column_three"])
+
+        dataframe.replace('2', float("NaN"))
+
+        expected = (
+            b"foo column_one=\"1\",column_two=1i,column_three=1.0 0\n"
+            b"foo column_one=\"2\",column_two=2i,column_three=2.0 "
+            b"3600000000000\n"
+        )
+
+        with requests_mock.Mocker() as m:
+            m.register_uri(requests_mock.POST,
+                           "http://localhost:8086/write",
+                           status_code=204)
+
+            cli = DataFrameClient(database='db')
+
+            cli.write_points(dataframe, 'foo')
+            self.assertEqual(m.last_request.body, expected)
+
+            cli.write_points(dataframe, 'foo', tags=None)
+            self.assertEqual(m.last_request.body, expected)
+
     def test_write_points_from_dataframe(self):
         """Test write points from df in TestDataFrameClient object."""
         now = pd.Timestamp('1970-01-01 00:00+00:00')
@@ -396,10 +426,16 @@ def test_write_points_from_dataframe_with_numeric_precision(self):
                                       ["2", 2, 2.2222222222222]],
                                 index=[now, now + timedelta(hours=1)])
 
-        expected_default_precision = (
-            b'foo,hello=there 0=\"1\",1=1i,2=1.11111111111 0\n'
-            b'foo,hello=there 0=\"2\",1=2i,2=2.22222222222 3600000000000\n'
-        )
+        if numpy.lib.NumpyVersion(numpy.__version__) <= '1.13.3':
+            expected_default_precision = (
+                b'foo,hello=there 0=\"1\",1=1i,2=1.11111111111 0\n'
+                b'foo,hello=there 0=\"2\",1=2i,2=2.22222222222 3600000000000\n'
+            )
+        else:
+            expected_default_precision = (
+                b'foo,hello=there 0=\"1\",1=1i,2=1.1111111111111 0\n'
+                b'foo,hello=there 0=\"2\",1=2i,2=2.2222222222222 3600000000000\n'  # noqa E501 line too long
+            )
 
         expected_specified_precision = (
             b'foo,hello=there 0=\"1\",1=1i,2=1.1111 0\n'
@@ -419,6 +455,9 @@ def test_write_points_from_dataframe_with_numeric_precision(self):
             cli = DataFrameClient(database='db')
             cli.write_points(dataframe, "foo", {"hello": "there"})
 
+            print(expected_default_precision)
+            print(m.last_request.body)
+
             self.assertEqual(m.last_request.body, expected_default_precision)
 
             cli = DataFrameClient(database='db')
@@ -818,13 +857,15 @@ def test_query_into_dataframe(self):
         pd1 = pd.DataFrame(
             [[23422]], columns=['value'],
             index=pd.to_datetime(["2009-11-10T23:00:00Z"]))
-        pd1.index = pd1.index.tz_localize('UTC')
+        if pd1.index.tzinfo is None:
+            pd1.index = pd1.index.tz_localize('UTC')
         pd2 = pd.DataFrame(
             [[23422], [23422], [23422]], columns=['value'],
             index=pd.to_datetime(["2009-11-10T23:00:00Z",
                                   "2009-11-10T23:00:00Z",
                                   "2009-11-10T23:00:00Z"]))
-        pd2.index = pd2.index.tz_localize('UTC')
+        if pd2.index.tzinfo is None:
+            pd2.index = pd2.index.tz_localize('UTC')
         expected = {
             ('network', (('direction', ''),)): pd1,
             ('network', (('direction', 'in'),)): pd2
@@ -837,7 +878,7 @@ def test_query_into_dataframe(self):
             assert_frame_equal(expected[k], result[k])
 
     def test_multiquery_into_dataframe(self):
-        """Test multiquyer into df for TestDataFrameClient object."""
+        """Test multiquery into df for TestDataFrameClient object."""
         data = {
             "results": [
                 {
@@ -871,18 +912,22 @@ def test_multiquery_into_dataframe(self):
             index=pd.to_datetime([
                 "2015-01-29 21:55:43.702900257+0000",
                 "2015-01-29 21:55:43.702900257+0000",
-                "2015-06-11 20:46:02+0000"])).tz_localize('UTC')
+                "2015-06-11 20:46:02+0000"]))
+        if pd1.index.tzinfo is None:
+            pd1.index = pd1.index.tz_localize('UTC')
         pd2 = pd.DataFrame(
             [[3]], columns=['count'],
-            index=pd.to_datetime(["1970-01-01 00:00:00+00:00"]))\
-            .tz_localize('UTC')
+            index=pd.to_datetime(["1970-01-01 00:00:00+00:00"]))
+        if pd2.index.tzinfo is None:
+            pd2.index = pd2.index.tz_localize('UTC')
         expected = [{'cpu_load_short': pd1}, {'cpu_load_short': pd2}]
 
         cli = DataFrameClient('host', 8086, 'username', 'password', 'db')
-        iql = "SELECT value FROM cpu_load_short WHERE region='us-west';"\
-              "SELECT count(value) FROM cpu_load_short WHERE region='us-west'"
+        iql = "SELECT value FROM cpu_load_short WHERE region=$region;"\
+              "SELECT count(value) FROM cpu_load_short WHERE region=$region"
+        bind_params = {'region': 'us-west'}
         with _mocked_session(cli, 'GET', 200, data):
-            result = cli.query(iql)
+            result = cli.query(iql, bind_params=bind_params)
             for r, e in zip(result, expected):
                 for k in e:
                     assert_frame_equal(e[k], r[k])
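
The version gate above exists because numpy 1.14 switched float printing to the shortest repr that round-trips, which changes how pandas stringifies field values; older builds used a fixed-width significant-digit format. Roughly:

    import numpy as np

    # numpy <= 1.13: '2.22222222222'   (truncated, fixed significant digits)
    # numpy >= 1.14: '2.2222222222222' (shortest round-trip repr)
    str(np.float64(2.2222222222222))
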
diff --git a/influxdb/tests/influxdb08/dataframe_client_test.py b/influxdb/tests/influxdb08/dataframe_client_test.py
index 6e6fa2cc..0a766af0 100644
--- a/influxdb/tests/influxdb08/dataframe_client_test.py
+++ b/influxdb/tests/influxdb08/dataframe_client_test.py
@@ -12,7 +12,7 @@
 
 from nose.tools import raises
 
-from influxdb.tests import skipIfPYpy, using_pypy
+from influxdb.tests import skip_if_pypy, using_pypy
 
 from .client_test import _mocked_session
 
@@ -22,7 +22,7 @@
     from influxdb.influxdb08 import DataFrameClient
 
 
-@skipIfPYpy
+@skip_if_pypy
 class TestDataFrameClient(unittest.TestCase):
     """Define the DataFrameClient test object."""
 
diff --git a/influxdb/tests/server_tests/client_test_with_server.py b/influxdb/tests/server_tests/client_test_with_server.py
index 2f8a2097..fda3f720 100644
--- a/influxdb/tests/server_tests/client_test_with_server.py
+++ b/influxdb/tests/server_tests/client_test_with_server.py
@@ -23,7 +23,7 @@
 from influxdb import InfluxDBClient
 from influxdb.exceptions import InfluxDBClientError
 
-from influxdb.tests import skipIfPYpy, using_pypy, skipServerTests
+from influxdb.tests import skip_if_pypy, using_pypy, skip_server_tests
 from influxdb.tests.server_tests.base import ManyTestCasesWithServerMixin
 from influxdb.tests.server_tests.base import SingleTestCaseWithServerMixin
 
@@ -82,7 +82,7 @@ def point(series_name, timestamp=None, tags=None, **fields):
 ]
 
 if not using_pypy:
-    dummy_pointDF = {
+    dummy_point_df = {
         "measurement": "cpu_load_short",
         "tags": {"host": "server01",
                  "region": "us-west"},
@@ -90,7 +90,7 @@ def point(series_name, timestamp=None, tags=None, **fields):
             [[0.64]], columns=['value'],
             index=pd.to_datetime(["2009-11-10T23:00:00Z"]))
     }
-    dummy_pointsDF = [{
+    dummy_points_df = [{
         "measurement": "cpu_load_short",
         "tags": {"host": "server01", "region": "us-west"},
         "dataframe": pd.DataFrame(
@@ -120,7 +120,7 @@ def point(series_name, timestamp=None, tags=None, **fields):
 ]
 
 
-@skipServerTests
+@skip_server_tests
 class SimpleTests(SingleTestCaseWithServerMixin, unittest.TestCase):
     """Define the class of simple tests."""
 
@@ -267,7 +267,7 @@ def test_invalid_port_fails(self):
             InfluxDBClient('host', '80/redir', 'username', 'password')
 
 
-@skipServerTests
+@skip_server_tests
 class CommonTests(ManyTestCasesWithServerMixin,
                   unittest.TestCase):
     """Define a class to handle common tests for the server."""
@@ -293,15 +293,15 @@ def test_write_points(self):
         """Test writing points to the server."""
         self.assertIs(True, self.cli.write_points(dummy_point))
 
-    @skipIfPYpy
+    @skip_if_pypy
     def test_write_points_DF(self):
         """Test writing points with dataframe."""
         self.assertIs(
             True, self.cliDF.write_points(
-                dummy_pointDF['dataframe'],
-                dummy_pointDF['measurement'],
-                dummy_pointDF['tags']
+                dummy_point_df['dataframe'],
+                dummy_point_df['measurement'],
+                dummy_point_df['tags']
             )
         )
 
@@ -342,7 +342,7 @@ def test_write_points_check_read_DF(self):
         rsp = self.cliDF.query('SELECT * FROM cpu_load_short')
         assert_frame_equal(
             rsp['cpu_load_short'],
-            dummy_pointDF['dataframe']
+            dummy_point_df['dataframe']
         )
 
         # Query with Tags
@@ -351,7 +351,7 @@ def test_write_points_check_read_DF(self):
         assert_frame_equal(
             rsp[('cpu_load_short',
                  (('host', 'server01'), ('region', 'us-west')))],
-            dummy_pointDF['dataframe']
+            dummy_point_df['dataframe']
         )
 
     def test_write_multiple_points_different_series(self):
@@ -407,21 +407,21 @@ def test_write_multiple_points_different_series_DF(self):
         for i in range(2):
             self.assertIs(
                 True, self.cliDF.write_points(
-                    dummy_pointsDF[i]['dataframe'],
-                    dummy_pointsDF[i]['measurement'],
-                    dummy_pointsDF[i]['tags']))
+                    dummy_points_df[i]['dataframe'],
+                    dummy_points_df[i]['measurement'],
+                    dummy_points_df[i]['tags']))
         time.sleep(1)
         rsp = self.cliDF.query('SELECT * FROM cpu_load_short')
         assert_frame_equal(
             rsp['cpu_load_short'],
-            dummy_pointsDF[0]['dataframe']
+            dummy_points_df[0]['dataframe']
         )
 
         rsp = self.cliDF.query('SELECT * FROM memory')
         assert_frame_equal(
             rsp['memory'],
-            dummy_pointsDF[1]['dataframe']
+            dummy_points_df[1]['dataframe']
         )
 
     def test_write_points_batch(self):
@@ -440,7 +440,9 @@ def test_write_points_batch(self):
                               batch_size=2)
         time.sleep(5)
         net_in = self.cli.query("SELECT value FROM network "
-                                "WHERE direction='in'").raw
+                                "WHERE direction=$dir",
+                                bind_params={'dir': 'in'}
+                                ).raw
         net_out = self.cli.query("SELECT value FROM network "
                                 "WHERE direction='out'").raw
         cpu = self.cli.query("SELECT value FROM cpu_usage").raw
@@ -720,6 +722,36 @@ def test_drop_retention_policy(self):
             rsp
         )
 
+    def test_create_continuous_query(self):
+        """Test continuous query creation."""
+        self.cli.create_retention_policy('some_rp', '1d', 1)
+        query = 'select count("value") into "some_rp"."events" from ' \
+                '"events" group by time(10m)'
+        self.cli.create_continuous_query('test_cq', query, 'db')
+        cqs = self.cli.get_list_continuous_queries()
+        expected_cqs = [
+            {
+                'db': [
+                    {
+                        'name': 'test_cq',
+                        'query': 'CREATE CONTINUOUS QUERY test_cq ON db '
+                                 'BEGIN SELECT count(value) INTO '
+                                 'db.some_rp.events FROM db.autogen.events '
+                                 'GROUP BY time(10m) END'
+                    }
+                ]
+            }
+        ]
+        self.assertEqual(cqs, expected_cqs)
+
+    def test_drop_continuous_query(self):
+        """Test continuous query drop."""
+        self.test_create_continuous_query()
+        self.cli.drop_continuous_query('test_cq', 'db')
+        cqs = self.cli.get_list_continuous_queries()
+        expected_cqs = [{'db': []}]
+        self.assertEqual(cqs, expected_cqs)
+
     def test_issue_143(self):
         """Test for PR#143 from repo."""
         pt = partial(point, 'a_series_name', timestamp='2015-03-30T16:16:37Z')
@@ -786,7 +818,7 @@ def test_query_multiple_series(self):
         self.cli.write_points(pts)
 
 
-@skipServerTests
+@skip_server_tests
 class UdpTests(ManyTestCasesWithServerMixin,
                unittest.TestCase):
     """Define a class to test UDP series."""
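
The expected string in `test_create_continuous_query` is not a typo: InfluxDB stores a canonicalized form of whatever statement it was given, expanding measurements to `database.retention_policy.measurement` and dropping redundant quoting, so the assertion compares against the rewritten query:

    # submitted select clause:
    #   select count("value") into "some_rp"."events" from "events" group by time(10m)
    # as reported back by SHOW CONTINUOUS QUERIES:
    #   CREATE CONTINUOUS QUERY test_cq ON db BEGIN SELECT count(value) INTO
    #   db.some_rp.events FROM db.autogen.events GROUP BY time(10m) END
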
diff --git a/setup.py b/setup.py
index cd6e4e9b..d44875f6 100755
--- a/setup.py
+++ b/setup.py
@@ -42,7 +42,7 @@
     tests_require=test_requires,
     install_requires=requires,
     extras_require={'test': test_requires},
-    classifiers=(
+    classifiers=[
         'Development Status :: 3 - Alpha',
         'Intended Audience :: Developers',
         'License :: OSI Approved :: MIT License',
@@ -55,5 +55,5 @@
         'Programming Language :: Python :: 3.6',
         'Topic :: Software Development :: Libraries',
         'Topic :: Software Development :: Libraries :: Python Modules',
-    ),
+    ],
 )
diff --git a/tox.ini b/tox.ini
index d0d87fec..4a1921e2 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,14 +1,21 @@
 [tox]
-envlist = py27, py35, py36, pypy, pypy3, flake8, pep257, coverage, docs
+envlist = py27, py35, py36, py37, pypy, pypy3, flake8, pep257, coverage, docs
 
 [testenv]
 passenv = INFLUXDB_PYTHON_INFLUXD_PATH
 setenv = INFLUXDB_PYTHON_SKIP_SERVER_TESTS=False
 deps = -r{toxinidir}/requirements.txt
        -r{toxinidir}/test-requirements.txt
-       py27,py34,py35,py36: pandas==0.20.1
-       py27,py34,py35,py36: numpy==1.13.3
+       py27: pandas==0.21.1
+       py27: numpy==1.13.3
+       py35: pandas==0.22.0
+       py35: numpy==1.14.6
+       py36: pandas==0.23.4
+       py36: numpy==1.15.4
+       py37: pandas==0.24.2
+       py37: numpy==1.16.2
 # Only install pandas with non-pypy interpreters
+# Testing all combinations would be too expensive
 commands = nosetests -v --with-doctest {posargs}
 
 [testenv:flake8]
@@ -26,19 +33,22 @@ deps = -r{toxinidir}/requirements.txt
        -r{toxinidir}/test-requirements.txt
        pandas
        coverage
-       numpy==1.13.3
+       numpy
 commands = nosetests -v --with-coverage --cover-html --cover-package=influxdb
 
 [testenv:docs]
 deps = -r{toxinidir}/requirements.txt
-       pandas==0.20.1
-       numpy==1.13.3
-       Sphinx==1.5.5
+       pandas==0.24.2
+       numpy==1.16.2
+       Sphinx==1.8.5
        sphinx_rtd_theme
 commands = sphinx-build -b html docs/source docs/build
 
 [flake8]
-ignore = N802,F821,E402
-# E402: module level import not at top of file
+ignore = W503,W504,W605,N802,F821,E402
+# W503: line break occurred before a binary operator
+# W504: line break occurred after a binary operator
+# W605: invalid escape sequence
 # N802: nosetests's setUp function
 # F821: false positive in influxdb/dataframe_client.py
+# E402: module level import not at top of file
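
With the matrix above, the pandas/numpy pins now vary per interpreter instead of sharing one set, at the cost of not testing every combination (as the new comment notes). A single environment can still be exercised locally, assuming tox is installed:

    $ tox -e py37     # runs the suite against pandas==0.24.2 / numpy==1.16.2
    $ tox -e flake8   # style checks with the expanded ignore list
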