From 3e6db3ef19b3890a1bc45ea815ed75361478b2a4 Mon Sep 17 00:00:00 2001
From: aviau
Date: Thu, 19 Mar 2015 12:17:33 -0400
Subject: [PATCH 1/2] Bring back 0.8.x support

---
 README.rst                                    |  10 +-
 influxdb/influxdb08/__init__.py               |  11 +
 influxdb/influxdb08/chunked_json.py           |  21 +
 influxdb/influxdb08/client.py                 | 786 ++++++++++++++++++
 influxdb/influxdb08/dataframe_client.py       | 146 ++++
 influxdb/influxdb08/helper.py                 | 150 ++++
 tests/influxdb/influxdb08/__init__.py         |   1 +
 tests/influxdb/influxdb08/client_test.py      | 692 +++++++++++++++
 .../influxdb08/dataframe_client_test.py       | 288 +++++++
 tests/influxdb/influxdb08/helper_test.py      | 194 +++++
 10 files changed, 2292 insertions(+), 7 deletions(-)
 create mode 100644 influxdb/influxdb08/__init__.py
 create mode 100644 influxdb/influxdb08/chunked_json.py
 create mode 100644 influxdb/influxdb08/client.py
 create mode 100644 influxdb/influxdb08/dataframe_client.py
 create mode 100644 influxdb/influxdb08/helper.py
 create mode 100644 tests/influxdb/influxdb08/__init__.py
 create mode 100644 tests/influxdb/influxdb08/client_test.py
 create mode 100644 tests/influxdb/influxdb08/dataframe_client_test.py
 create mode 100644 tests/influxdb/influxdb08/helper_test.py

diff --git a/README.rst b/README.rst
index bf307dbb..5afd83c5 100644
--- a/README.rst
+++ b/README.rst
@@ -36,14 +36,10 @@ InfluxDB is an open-source distributed time series database, find more about Inf

 .. _installation:

-InfluxDB < 0.9.0
-================
+InfluxDB v0.8.x users
+=====================

-This library only supports InfluxDB>=0.9.0. Users of previous versions of InfluxDB may use the influxdb_0.8 branch.
-
-You may install it from pip with the following command::
-
-    $ pip install https://github.com/influxdb/influxdb-python/archive/influxdb_0.8.zip
+InfluxDB >= 0.9.0 brings many breaking changes to the API. InfluxDB 0.8.x users may keep using the legacy client by importing ``from influxdb.influxdb08 import InfluxDBClient`` instead.
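+
+As a quick, minimal sketch (the host, credentials and the ``example_db``
+database below are placeholders, and the server is assumed to run
+InfluxDB 0.8.x)::
+
+    from influxdb.influxdb08 import InfluxDBClient
+
+    client = InfluxDBClient('localhost', 8086, 'root', 'root', 'example_db')
+    client.write_points([{
+        "name": "cpu_load",
+        "columns": ["value"],
+        "points": [[0.64]]
+    }])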
Installation
============
diff --git a/influxdb/influxdb08/__init__.py b/influxdb/influxdb08/__init__.py
new file mode 100644
index 00000000..6ba218e3
--- /dev/null
+++ b/influxdb/influxdb08/__init__.py
@@ -0,0 +1,11 @@
+# -*- coding: utf-8 -*-
+from .client import InfluxDBClient
+from .dataframe_client import DataFrameClient
+from .helper import SeriesHelper
+
+
+__all__ = [
+    'InfluxDBClient',
+    'DataFrameClient',
+    'SeriesHelper',
+]
diff --git a/influxdb/influxdb08/chunked_json.py b/influxdb/influxdb08/chunked_json.py
new file mode 100644
index 00000000..50d304f1
--- /dev/null
+++ b/influxdb/influxdb08/chunked_json.py
@@ -0,0 +1,21 @@
+# -*- coding: utf-8 -*-
+
+#
+# Author: Adrian Sampson
+# Source: https://gist.github.com/sampsyo/920215
+#
+
+import json
+
+_decoder = json.JSONDecoder()
+
+
+def loads(s):
+    """A generator reading a sequence of JSON values from a string."""
+    while s:
+        s = s.strip()
+        obj, pos = _decoder.raw_decode(s)
+        if not pos:
+            raise ValueError('no JSON object found at %i' % pos)
+        yield obj
+        s = s[pos:]
diff --git a/influxdb/influxdb08/client.py b/influxdb/influxdb08/client.py
new file mode 100644
index 00000000..f17921b7
--- /dev/null
+++ b/influxdb/influxdb08/client.py
@@ -0,0 +1,786 @@
+# -*- coding: utf-8 -*-
+"""
+Python client for InfluxDB
+"""
+import json
+import socket
+import requests
+import requests.exceptions
+import warnings
+
+from influxdb import chunked_json
+
+try:
+    xrange
+except NameError:
+    xrange = range
+
+session = requests.Session()
+
+
+class InfluxDBClientError(Exception):
+    "Raised when an error occurs in the request"
+    def __init__(self, content, code):
+        super(InfluxDBClientError, self).__init__(
+            "{0}: {1}".format(code, content))
+        self.content = content
+        self.code = code
+
+
+class InfluxDBClient(object):
+
+    """
+    The ``InfluxDBClient`` object holds information necessary to connect
+    to InfluxDB. Requests can be made to InfluxDB directly through the client.
+
+    :param host: hostname to connect to InfluxDB, defaults to 'localhost'
+    :type host: string
+    :param port: port to connect to InfluxDB, defaults to 8086
+    :type port: int
+    :param username: user to connect, defaults to 'root'
+    :type username: string
+    :param password: password of the user, defaults to 'root'
+    :type password: string
+    :param database: database name to connect to, defaults to None
+    :type database: string
+    :param ssl: use https instead of http to connect to InfluxDB, defaults to
+        False
+    :type ssl: boolean
+    :param verify_ssl: verify SSL certificates for HTTPS requests, defaults to
+        False
+    :type verify_ssl: boolean
+    :param timeout: number of seconds Requests will wait for your client to
+        establish a connection, defaults to None
+    :type timeout: int
+    :param use_udp: use UDP to connect to InfluxDB, defaults to False
+    :type use_udp: boolean
+    :param udp_port: UDP port to connect to InfluxDB, defaults to 4444
+    :type udp_port: int
+    """
+
+    def __init__(self,
+                 host='localhost',
+                 port=8086,
+                 username='root',
+                 password='root',
+                 database=None,
+                 ssl=False,
+                 verify_ssl=False,
+                 timeout=None,
+                 use_udp=False,
+                 udp_port=4444):
+        """
+        Construct a new InfluxDBClient object.
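+
+        Example (a minimal sketch; the host, credentials and database name
+        below are placeholders)::
+
+            client = InfluxDBClient('localhost', 8086, 'root', 'root',
+                                    'example_db')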
+ """ + self._host = host + self._port = port + self._username = username + self._password = password + self._database = database + self._timeout = timeout + + self._verify_ssl = verify_ssl + + self.use_udp = use_udp + self.udp_port = udp_port + if use_udp: + self.udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + + self._scheme = "http" + + if ssl is True: + self._scheme = "https" + + self._baseurl = "{0}://{1}:{2}".format( + self._scheme, + self._host, + self._port) + + self._headers = { + 'Content-type': 'application/json', + 'Accept': 'text/plain'} + + # Change member variables + + def switch_database(self, database): + """ + switch_database() + + Change client database. + + :param database: the new database name to switch to + :type database: string + """ + self._database = database + + def switch_db(self, database): + """ + DEPRECATED. Change client database. + + """ + warnings.warn( + "switch_db is deprecated, and will be removed " + "in future versions. Please use " + "``InfluxDBClient.switch_database(database)`` instead.", + FutureWarning) + return self.switch_database(database) + + def switch_user(self, username, password): + """ + switch_user() + + Change client username. + + :param username: the new username to switch to + :type username: string + :param password: the new password to switch to + :type password: string + """ + self._username = username + self._password = password + + def request(self, url, method='GET', params=None, data=None, + expected_response_code=200): + """ + Make a http request to API + """ + url = "{0}/{1}".format(self._baseurl, url) + + if params is None: + params = {} + + auth = { + 'u': self._username, + 'p': self._password + } + + params.update(auth) + + if data is not None and not isinstance(data, str): + data = json.dumps(data) + + # Try to send the request a maximum of three times. (see #103) + # TODO (aviau): Make this configurable. + for i in range(0, 3): + try: + response = session.request( + method=method, + url=url, + params=params, + data=data, + headers=self._headers, + verify=self._verify_ssl, + timeout=self._timeout + ) + break + except requests.exceptions.ConnectionError as e: + if i < 2: + continue + else: + raise e + + if response.status_code == expected_response_code: + return response + else: + raise InfluxDBClientError(response.content, response.status_code) + + def write(self, data): + """ Provided as convenience for influxdb v0.9.0, this may change. """ + self.request( + url="write", + method='POST', + params=None, + data=data, + expected_response_code=200 + ) + return True + + # Writing Data + # + # Assuming you have a database named foo_production you can write data + # by doing a POST to /db/foo_production/series?u=some_user&p=some_password + # with a JSON body of points. + + def write_points(self, data, time_precision='s', *args, **kwargs): + """ + Write to multiple time series names. + + :param data: A list of dicts. + :param time_precision: [Optional, default 's'] Either 's', 'm', 'ms' + or 'u'. + :param batch_size: [Optional] Value to write the points in batches + instead of all at one time. Useful for when doing data dumps from + one database to another or when doing a massive write operation + :type batch_size: int + """ + + def list_chunks(l, n): + """ Yield successive n-sized chunks from l. 
+ """ + for i in xrange(0, len(l), n): + yield l[i:i + n] + + batch_size = kwargs.get('batch_size') + if batch_size: + for item in data: + name = item.get('name') + columns = item.get('columns') + point_list = item.get('points') + + for batch in list_chunks(point_list, batch_size): + item = [{ + "points": batch, + "name": name, + "columns": columns + }] + self._write_points( + data=item, + time_precision=time_precision) + + return True + + return self._write_points(data=data, time_precision=time_precision) + + def write_points_with_precision(self, data, time_precision='s'): + """ + DEPRECATED. Write to multiple time series names + + """ + warnings.warn( + "write_points_with_precision is deprecated, and will be removed " + "in future versions. Please use " + "``InfluxDBClient.write_points(time_precision='..')`` instead.", + FutureWarning) + return self._write_points(data=data, time_precision=time_precision) + + def _write_points(self, data, time_precision): + if time_precision not in ['s', 'm', 'ms', 'u']: + raise Exception( + "Invalid time precision is given. (use 's', 'm', 'ms' or 'u')") + + if self.use_udp and time_precision != 's': + raise Exception( + "InfluxDB only supports seconds precision for udp writes" + ) + + url = "db/{0}/series".format(self._database) + + params = { + 'time_precision': time_precision + } + + if self.use_udp: + self.send_packet(data) + else: + self.request( + url=url, + method='POST', + params=params, + data=data, + expected_response_code=200 + ) + + return True + + # One Time Deletes + + def delete_points(self, name): + """ + Delete an entire series + """ + url = "db/{0}/series/{1}".format(self._database, name) + + self.request( + url=url, + method='DELETE', + expected_response_code=204 + ) + + return True + + # Regularly Scheduled Deletes + + def create_scheduled_delete(self, json_body): + """ + TODO: Create scheduled delete + + 2013-11-08: This endpoint has not been implemented yet in ver0.0.8, + but it is documented in http://influxdb.org/docs/api/http.html. + See also: src/api/http/api.go:l57 + """ + raise NotImplementedError() + + # get list of deletes + # curl http://localhost:8086/db/site_dev/scheduled_deletes + # + # remove a regularly scheduled delete + # curl -X DELETE http://localhost:8086/db/site_dev/scheduled_deletes/:id + + def get_list_scheduled_delete(self): + """ + TODO: Get list of scheduled deletes + + 2013-11-08: This endpoint has not been implemented yet in ver0.0.8, + but it is documented in http://influxdb.org/docs/api/http.html. + See also: src/api/http/api.go:l57 + """ + raise NotImplementedError() + + def remove_scheduled_delete(self, delete_id): + """ + TODO: Remove scheduled delete + + 2013-11-08: This endpoint has not been implemented yet in ver0.0.8, + but it is documented in http://influxdb.org/docs/api/http.html. + See also: src/api/http/api.go:l57 + """ + raise NotImplementedError() + + def query(self, query, time_precision='s', chunked=False): + """ + Quering data + + :param time_precision: [Optional, default 's'] Either 's', 'm', 'ms' + or 'u'. + :param chunked: [Optional, default=False] True if the data shall be + retrieved in chunks, False otherwise. + """ + return self._query(query, time_precision=time_precision, + chunked=chunked) + + # Querying Data + # + # GET db/:name/series. It takes five parameters + def _query(self, query, time_precision='s', chunked=False): + if time_precision not in ['s', 'm', 'ms', 'u']: + raise Exception( + "Invalid time precision is given. 
+
+        if chunked is True:
+            chunked_param = 'true'
+        else:
+            chunked_param = 'false'
+
+        # Build the URL of the series to query
+        url = "db/{0}/series".format(self._database)
+
+        params = {
+            'q': query,
+            'time_precision': time_precision,
+            'chunked': chunked_param
+        }
+
+        response = self.request(
+            url=url,
+            method='GET',
+            params=params,
+            expected_response_code=200
+        )
+
+        if chunked:
+            return list(chunked_json.loads(response.content.decode()))
+        else:
+            return response.json()
+
+    # Creating and Dropping Databases
+    #
+    # ### create a database
+    # curl -X POST http://localhost:8086/db -d '{"name": "site_development"}'
+    #
+    # ### drop a database
+    # curl -X DELETE http://localhost:8086/db/site_development
+
+    def create_database(self, database):
+        """
+        create_database()
+
+        Create a database on the InfluxDB server.
+
+        :param database: the name of the database to create
+        :type database: string
+        :rtype: boolean
+        """
+        url = "db"
+
+        data = {'name': database}
+
+        self.request(
+            url=url,
+            method='POST',
+            data=data,
+            expected_response_code=201
+        )
+
+        return True
+
+    def delete_database(self, database):
+        """
+        delete_database()
+
+        Drop a database on the InfluxDB server.
+
+        :param database: the name of the database to delete
+        :type database: string
+        :rtype: boolean
+        """
+        url = "db/{0}".format(database)
+
+        self.request(
+            url=url,
+            method='DELETE',
+            expected_response_code=204
+        )
+
+        return True
+
+    # ### get list of databases
+    # curl -X GET http://localhost:8086/db
+
+    def get_list_database(self):
+        """
+        Get the list of databases.
+        """
+        url = "db"
+
+        response = self.request(
+            url=url,
+            method='GET',
+            expected_response_code=200
+        )
+
+        return response.json()
+
+    def get_database_list(self):
+        """
+        DEPRECATED. Get the list of databases.
+
+        """
+        warnings.warn(
+            "get_database_list is deprecated, and will be removed "
+            "in future versions. Please use "
+            "``InfluxDBClient.get_list_database`` instead.",
+            FutureWarning)
+        return self.get_list_database()
+
+    def delete_series(self, series):
+        """
+        delete_series()
+
+        Drop a series on the InfluxDB server.
+ + :param series: the name of the series to delete + :type series: string + :rtype: boolean + """ + url = "db/{0}/series/{1}".format( + self._database, + series + ) + + self.request( + url=url, + method='DELETE', + expected_response_code=204 + ) + + return True + + def get_list_series(self): + """ + Get a list of all time series in a database + """ + + response = self._query('list series') + + series_list = [] + for series in response[0]['points']: + series_list.append(series[1]) + + return series_list + + def get_list_continuous_queries(self): + """ + Get a list of continuous queries + """ + + response = self._query('list continuous queries') + queries_list = [] + for query in response[0]['points']: + queries_list.append(query[2]) + + return queries_list + + # Security + # get list of cluster admins + # curl http://localhost:8086/cluster_admins?u=root&p=root + + # add cluster admin + # curl -X POST http://localhost:8086/cluster_admins?u=root&p=root \ + # -d '{"name": "paul", "password": "i write teh docz"}' + + # update cluster admin password + # curl -X POST http://localhost:8086/cluster_admins/paul?u=root&p=root \ + # -d '{"password": "new pass"}' + + # delete cluster admin + # curl -X DELETE http://localhost:8086/cluster_admins/paul?u=root&p=root + + # Database admins, with a database name of site_dev + # get list of database admins + # curl http://localhost:8086/db/site_dev/admins?u=root&p=root + + # add database admin + # curl -X POST http://localhost:8086/db/site_dev/admins?u=root&p=root \ + # -d '{"name": "paul", "password": "i write teh docz"}' + + # update database admin password + # curl -X POST http://localhost:8086/db/site_dev/admins/paul?u=root&p=root\ + # -d '{"password": "new pass"}' + + # delete database admin + # curl -X DELETE \ + # http://localhost:8086/db/site_dev/admins/paul?u=root&p=root + + def get_list_cluster_admins(self): + """ + Get list of cluster admins + """ + response = self.request( + url="cluster_admins", + method='GET', + expected_response_code=200 + ) + + return response.json() + + def add_cluster_admin(self, new_username, new_password): + """ + Add cluster admin + """ + data = { + 'name': new_username, + 'password': new_password + } + + self.request( + url="cluster_admins", + method='POST', + data=data, + expected_response_code=200 + ) + + return True + + def update_cluster_admin_password(self, username, new_password): + """ + Update cluster admin password + """ + url = "cluster_admins/{0}".format(username) + + data = { + 'password': new_password + } + + self.request( + url=url, + method='POST', + data=data, + expected_response_code=200 + ) + + return True + + def delete_cluster_admin(self, username): + """ + Delete cluster admin + """ + url = "cluster_admins/{0}".format(username) + + self.request( + url=url, + method='DELETE', + expected_response_code=200 + ) + + return True + + def set_database_admin(self, username): + """ + Set user as database admin + """ + return self.alter_database_admin(username, True) + + def unset_database_admin(self, username): + """ + Unset user as database admin + """ + return self.alter_database_admin(username, False) + + def alter_database_admin(self, username, is_admin): + url = "db/{0}/users/{1}".format(self._database, username) + + data = {'admin': is_admin} + + self.request( + url=url, + method='POST', + data=data, + expected_response_code=200 + ) + + return True + + def get_list_database_admins(self): + """ + TODO: Get list of database admins + + 2013-11-08: This endpoint has not been implemented yet in ver0.0.8, + 
but it is documented in http://influxdb.org/docs/api/http.html.
+        See also: src/api/http/api.go:l57
+        """
+        raise NotImplementedError()
+
+    def add_database_admin(self, new_username, new_password):
+        """
+        TODO: Add database admin.
+
+        2013-11-08: This endpoint has not been implemented yet in ver0.0.8,
+        but it is documented in http://influxdb.org/docs/api/http.html.
+        See also: src/api/http/api.go:l57
+        """
+        raise NotImplementedError()
+
+    def update_database_admin_password(self, username, new_password):
+        """
+        TODO: Update database admin password.
+
+        2013-11-08: This endpoint has not been implemented yet in ver0.0.8,
+        but it is documented in http://influxdb.org/docs/api/http.html.
+        See also: src/api/http/api.go:l57
+        """
+        raise NotImplementedError()
+
+    def delete_database_admin(self, username):
+        """
+        TODO: Delete database admin.
+
+        2013-11-08: This endpoint has not been implemented yet in ver0.0.8,
+        but it is documented in http://influxdb.org/docs/api/http.html.
+        See also: src/api/http/api.go:l57
+        """
+        raise NotImplementedError()
+
+    ###
+    # Limiting User Access
+
+    # Database users
+    # get list of database users
+    # curl http://localhost:8086/db/site_dev/users?u=root&p=root
+
+    # add database user
+    # curl -X POST http://localhost:8086/db/site_dev/users?u=root&p=root \
+    #       -d '{"name": "paul", "password": "i write teh docz"}'
+
+    # update database user password
+    # curl -X POST http://localhost:8086/db/site_dev/users/paul?u=root&p=root \
+    #       -d '{"password": "new pass"}'
+
+    # delete database user
+    # curl -X DELETE http://localhost:8086/db/site_dev/users/paul?u=root&p=root
+
+    def get_database_users(self):
+        """
+        Get list of database users.
+        """
+        url = "db/{0}/users".format(self._database)
+
+        response = self.request(
+            url=url,
+            method='GET',
+            expected_response_code=200
+        )
+
+        return response.json()
+
+    def add_database_user(self, new_username, new_password, permissions=None):
+        """
+        Add database user.
+
+        :param permissions: A ``(readFrom, writeTo)`` tuple
+        """
+        url = "db/{0}/users".format(self._database)
+
+        data = {
+            'name': new_username,
+            'password': new_password
+        }
+
+        if permissions:
+            try:
+                data['readFrom'], data['writeTo'] = permissions
+            except (ValueError, TypeError):
+                raise TypeError(
+                    "'permissions' must be (readFrom, writeTo) tuple"
+                )
+
+        self.request(
+            url=url,
+            method='POST',
+            data=data,
+            expected_response_code=200
+        )
+
+        return True
+
+    def update_database_user_password(self, username, new_password):
+        """
+        Update database user password.
+        """
+        url = "db/{0}/users/{1}".format(self._database, username)
+
+        data = {
+            'password': new_password
+        }
+
+        self.request(
+            url=url,
+            method='POST',
+            data=data,
+            expected_response_code=200
+        )
+
+        if username == self._username:
+            self._password = new_password
+
+        return True
+
+    def delete_database_user(self, username):
+        """
+        Delete database user.
+        """
+        url = "db/{0}/users/{1}".format(self._database, username)
+
+        self.request(
+            url=url,
+            method='DELETE',
+            expected_response_code=200
+        )
+
+        return True
+
+    # update the user by POSTing to db/site_dev/users/paul
+
+    def update_permission(self, username, json_body):
+        """
+        TODO: Update read/write permission.
+
+        2013-11-08: This endpoint has not been implemented yet in ver0.0.8,
+        but it is documented in http://influxdb.org/docs/api/http.html.
See also: src/api/http/api.go:l57
+        """
+        raise NotImplementedError()
+
+    def send_packet(self, packet):
+        data = json.dumps(packet)
+        byte = data.encode('utf-8')
+        self.udp_socket.sendto(byte, (self._host, self.udp_port))
diff --git a/influxdb/influxdb08/dataframe_client.py b/influxdb/influxdb08/dataframe_client.py
new file mode 100644
index 00000000..641b60c1
--- /dev/null
+++ b/influxdb/influxdb08/dataframe_client.py
@@ -0,0 +1,146 @@
+# -*- coding: utf-8 -*-
+"""
+DataFrame client for InfluxDB
+"""
+import math
+import warnings
+
+from .client import InfluxDBClient
+
+
+class DataFrameClient(InfluxDBClient):
+    """
+    The ``DataFrameClient`` object holds information necessary to connect
+    to InfluxDB. Requests can be made to InfluxDB directly through the client.
+    The client reads and writes from pandas DataFrames.
+    """
+
+    def __init__(self, *args, **kwargs):
+        super(DataFrameClient, self).__init__(*args, **kwargs)
+        try:
+            global pd
+            import pandas as pd
+        except ImportError as ex:
+            raise ImportError(
+                'DataFrameClient requires Pandas, "{ex}" problem importing'
+                .format(ex=str(ex))
+            )
+
+        self.EPOCH = pd.Timestamp('1970-01-01 00:00:00.000+00:00')
+
+    def write_points(self, data, *args, **kwargs):
+        """
+        Write to multiple time series names.
+
+        :param data: A dictionary mapping series names to pandas DataFrames
+        :param time_precision: [Optional, default 's'] Either 's', 'm', 'ms'
+            or 'u'.
+        :param batch_size: [Optional] Value to write the points in batches
+            instead of all at one time. Useful for when doing data dumps from
+            one database to another or when doing a massive write operation
+        :type batch_size: int
+        """
+
+        batch_size = kwargs.get('batch_size')
+        time_precision = kwargs.get('time_precision', 's')
+        if batch_size:
+            kwargs.pop('batch_size')  # don't hand over to InfluxDBClient
+            for key, data_frame in data.items():
+                number_batches = int(math.ceil(
+                    len(data_frame) / float(batch_size)))
+                for batch in range(number_batches):
+                    start_index = batch * batch_size
+                    end_index = (batch + 1) * batch_size
+                    # Avoid rebinding the ``data`` dict we are iterating over.
+                    batch_json = [self._convert_dataframe_to_json(
+                        name=key,
+                        dataframe=data_frame.ix[start_index:end_index].copy(),
+                        time_precision=time_precision)]
+                    InfluxDBClient.write_points(self, batch_json,
+                                                *args, **kwargs)
+            return True
+        else:
+            data = [self._convert_dataframe_to_json(
+                name=key, dataframe=dataframe, time_precision=time_precision)
+                for key, dataframe in data.items()]
+            return InfluxDBClient.write_points(self, data, *args, **kwargs)
+
+    def write_points_with_precision(self, data, time_precision='s'):
+        """
+        DEPRECATED. Write to multiple time series names.
+
+        """
+        warnings.warn(
+            "write_points_with_precision is deprecated, and will be removed "
+            "in future versions. Please use "
+            "``DataFrameClient.write_points(time_precision='..')`` instead.",
+            FutureWarning)
+        return self.write_points(data, time_precision=time_precision)
+
+    def query(self, query, time_precision='s', chunked=False):
+        """
+        Query data into DataFrames.
+
+        Returns a DataFrame for a single time series and a map for multiple
+        time series with the time series as value and its name as key.
+
+        :param time_precision: [Optional, default 's'] Either 's', 'm', 'ms'
+            or 'u'.
+        :param chunked: [Optional, default=False] True if the data shall be
+            retrieved in chunks, False otherwise.
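+
+        Example (a sketch; assumes a series named ``foo`` exists on the
+        connected database)::
+
+            df = client.query('select * from foo')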
+ + """ + result = InfluxDBClient.query(self, query=query, + time_precision=time_precision, + chunked=chunked) + if len(result) == 0: + return result + elif len(result) == 1: + return self._to_dataframe(result[0], time_precision) + else: + return {time_series['name']: self._to_dataframe(time_series, + time_precision) + for time_series in result} + + def _to_dataframe(self, json_result, time_precision): + dataframe = pd.DataFrame(data=json_result['points'], + columns=json_result['columns']) + if 'sequence_number' in dataframe.keys(): + dataframe.sort(['time', 'sequence_number'], inplace=True) + else: + dataframe.sort(['time'], inplace=True) + pandas_time_unit = time_precision + if time_precision == 'm': + pandas_time_unit = 'ms' + elif time_precision == 'u': + pandas_time_unit = 'us' + dataframe.index = pd.to_datetime(list(dataframe['time']), + unit=pandas_time_unit, + utc=True) + del dataframe['time'] + return dataframe + + def _convert_dataframe_to_json(self, dataframe, name, time_precision='s'): + if not isinstance(dataframe, pd.DataFrame): + raise TypeError('Must be DataFrame, but type was: {}.' + .format(type(dataframe))) + if not (isinstance(dataframe.index, pd.tseries.period.PeriodIndex) or + isinstance(dataframe.index, pd.tseries.index.DatetimeIndex)): + raise TypeError('Must be DataFrame with DatetimeIndex or \ + PeriodIndex.') + dataframe.index = dataframe.index.to_datetime() + if dataframe.index.tzinfo is None: + dataframe.index = dataframe.index.tz_localize('UTC') + dataframe['time'] = [self._datetime_to_epoch(dt, time_precision) + for dt in dataframe.index] + data = {'name': name, + 'columns': [str(column) for column in dataframe.columns], + 'points': list([list(x) for x in dataframe.values])} + return data + + def _datetime_to_epoch(self, datetime, time_precision='s'): + seconds = (datetime - self.EPOCH).total_seconds() + if time_precision == 's': + return seconds + elif time_precision == 'm' or time_precision == 'ms': + return seconds * 1000 + elif time_precision == 'u': + return seconds * 1000000 diff --git a/influxdb/influxdb08/helper.py b/influxdb/influxdb08/helper.py new file mode 100644 index 00000000..b2f8f8bb --- /dev/null +++ b/influxdb/influxdb08/helper.py @@ -0,0 +1,150 @@ +# -*- coding: utf-8 -*- +""" +Helper class for InfluxDB +""" +from collections import namedtuple, defaultdict +from warnings import warn + +import six + + +class SeriesHelper(object): + + """ + Subclassing this helper eases writing data points in bulk. + All data points are immutable, insuring they do not get overwritten. + Each subclass can write to its own database. + The time series names can also be based on one or more defined fields. + + Annotated example:: + + class MySeriesHelper(SeriesHelper): + class Meta: + # Meta class stores time series helper configuration. + series_name = 'events.stats.{server_name}' + # Series name must be a string, curly brackets for dynamic use. + fields = ['time', 'server_name'] + # Defines all the fields in this time series. + ### Following attributes are optional. ### + client = TestSeriesHelper.client + # Client should be an instance of InfluxDBClient. + :warning: Only used if autocommit is True. + bulk_size = 5 + # Defines the number of data points to write simultaneously. + # Only applicable if autocommit is True. + autocommit = True + # If True and no bulk_size, then will set bulk_size to 1. + + """ + __initialized__ = False + + def __new__(cls, *args, **kwargs): + """ + Initializes class attributes for subsequent constructor calls. 
+
+        :note: *args and **kwargs are not explicitly used in this function,
+            but needed for Python 2 compatibility.
+        """
+        if not cls.__initialized__:
+            cls.__initialized__ = True
+            try:
+                _meta = getattr(cls, 'Meta')
+            except AttributeError:
+                raise AttributeError(
+                    'Missing Meta class in {}.'.format(
+                        cls.__name__))
+
+            for attr in ['series_name', 'fields']:
+                try:
+                    setattr(cls, '_' + attr, getattr(_meta, attr))
+                except AttributeError:
+                    raise AttributeError(
+                        'Missing {} in {} Meta class.'.format(
+                            attr,
+                            cls.__name__))
+
+            cls._autocommit = getattr(_meta, 'autocommit', False)
+
+            cls._client = getattr(_meta, 'client', None)
+            if cls._autocommit and not cls._client:
+                raise AttributeError(
+                    'In {}, autocommit is set to True, but no client is set.'
+                    .format(cls.__name__))
+
+            try:
+                cls._bulk_size = getattr(_meta, 'bulk_size')
+                if cls._bulk_size < 1 and cls._autocommit:
+                    warn(
+                        'Definition of bulk_size in {} forced to 1, '
+                        'was less than 1.'.format(cls.__name__))
+                    cls._bulk_size = 1
+            except AttributeError:
+                cls._bulk_size = -1
+            else:
+                if not cls._autocommit:
+                    warn(
+                        'Definition of bulk_size in {} has no effect because'
+                        ' autocommit is false.'.format(cls.__name__))
+
+            cls._datapoints = defaultdict(list)
+            cls._type = namedtuple(cls.__name__, cls._fields)
+
+        return super(SeriesHelper, cls).__new__(cls)
+
+    def __init__(self, **kw):
+        """
+        Constructor call creates a new data point. All fields must be present.
+
+        :note: Data points are written when `bulk_size` is reached per Helper.
+        :warning: Data points are *immutable* (`namedtuples`).
+        """
+        cls = self.__class__
+
+        if sorted(cls._fields) != sorted(kw.keys()):
+            raise NameError(
+                'Expected {0}, got {1}.'.format(
+                    cls._fields,
+                    kw.keys()))
+
+        cls._datapoints[cls._series_name.format(**kw)].append(cls._type(**kw))
+
+        if cls._autocommit and \
+                sum(len(series) for series in cls._datapoints.values()) \
+                >= cls._bulk_size:
+            cls.commit()
+
+    @classmethod
+    def commit(cls, client=None):
+        """
+        Commit everything from datapoints via the client.
+
+        :param client: InfluxDBClient instance for writing points to InfluxDB.
+        :attention: any provided client will supersede the class client.
+        :return: result of client.write_points.
+        """
+        if not client:
+            client = cls._client
+        rtn = client.write_points(cls._json_body_())
+        cls._reset_()
+        return rtn
+
+    @classmethod
+    def _json_body_(cls):
+        """
+        :return: JSON body of these datapoints.
+        """
+        json = []
+        for series_name, data in six.iteritems(cls._datapoints):
+            json.append({'name': series_name,
+                         'columns': cls._fields,
+                         'points': [[point.__dict__[k] for k in cls._fields]
+                                    for point in data]
+                         })
+        return json
+
+    @classmethod
+    def _reset_(cls):
+        """
+        Reset data storage.
+ """ + cls._datapoints = defaultdict(list) diff --git a/tests/influxdb/influxdb08/__init__.py b/tests/influxdb/influxdb08/__init__.py new file mode 100644 index 00000000..40a96afc --- /dev/null +++ b/tests/influxdb/influxdb08/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/tests/influxdb/influxdb08/client_test.py b/tests/influxdb/influxdb08/client_test.py new file mode 100644 index 00000000..f5631388 --- /dev/null +++ b/tests/influxdb/influxdb08/client_test.py @@ -0,0 +1,692 @@ +# -*- coding: utf-8 -*- +""" +unit tests +""" +import json +import requests +import requests.exceptions +import socket +import unittest +import requests_mock +from nose.tools import raises +from mock import patch +import warnings +import mock + +from influxdb.influxdb08 import InfluxDBClient +from influxdb.influxdb08.client import session + + +def _build_response_object(status_code=200, content=""): + resp = requests.Response() + resp.status_code = status_code + resp._content = content.encode("utf8") + return resp + + +def _mocked_session(method="GET", status_code=200, content=""): + + method = method.upper() + + def request(*args, **kwargs): + c = content + + # Check method + assert method == kwargs.get('method', 'GET') + + if method == 'POST': + data = kwargs.get('data', None) + + if data is not None: + # Data must be a string + assert isinstance(data, str) + + # Data must be a JSON string + assert c == json.loads(data, strict=True) + + c = data + + # Anyway, Content must be a JSON string (or empty string) + if not isinstance(c, str): + c = json.dumps(c) + + return _build_response_object(status_code=status_code, content=c) + + mocked = patch.object( + session, + 'request', + side_effect=request + ) + + return mocked + + +class TestInfluxDBClient(unittest.TestCase): + + def setUp(self): + # By default, raise exceptions on warnings + warnings.simplefilter('error', FutureWarning) + + self.dummy_points = [ + { + "points": [ + ["1", 1, 1.0], + ["2", 2, 2.0] + ], + "name": "foo", + "columns": ["column_one", "column_two", "column_three"] + } + ] + + def test_scheme(self): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'database') + assert cli._baseurl == 'http://host:8086' + + cli = InfluxDBClient( + 'host', 8086, 'username', 'password', 'database', ssl=True + ) + assert cli._baseurl == 'https://host:8086' + + def test_switch_database(self): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'database') + cli.switch_database('another_database') + assert cli._database == 'another_database' + + @raises(FutureWarning) + def test_switch_db_deprecated(self): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'database') + cli.switch_db('another_database') + assert cli._database == 'another_database' + + def test_switch_user(self): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'database') + cli.switch_user('another_username', 'another_password') + assert cli._username == 'another_username' + assert cli._password == 'another_password' + + def test_write(self): + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.POST, + "http://localhost:8086/write" + ) + cli = InfluxDBClient(database='db') + cli.write( + {"database": "mydb", + "retentionPolicy": "mypolicy", + "points": [{"name": "cpu_load_short", + "tags": {"host": "server01", + "region": "us-west"}, + "timestamp": "2009-11-10T23:00:00Z", + "values": {"value": 0.64}}]} + ) + + self.assertEqual( + json.loads(m.last_request.body), + {"database": "mydb", + "retentionPolicy": "mypolicy", + 
"points": [{"name": "cpu_load_short", + "tags": {"host": "server01", + "region": "us-west"}, + "timestamp": "2009-11-10T23:00:00Z", + "values": {"value": 0.64}}]} + ) + + def test_write_points(self): + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.POST, + "http://localhost:8086/db/db/series" + ) + + cli = InfluxDBClient(database='db') + cli.write_points( + self.dummy_points + ) + + self.assertListEqual( + json.loads(m.last_request.body), + self.dummy_points + ) + + def test_write_points_string(self): + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.POST, + "http://localhost:8086/db/db/series" + ) + + cli = InfluxDBClient(database='db') + cli.write_points( + str(json.dumps(self.dummy_points)) + ) + + self.assertListEqual( + json.loads(m.last_request.body), + self.dummy_points + ) + + def test_write_points_batch(self): + with _mocked_session('post', 200, self.dummy_points): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + assert cli.write_points( + data=self.dummy_points, + batch_size=2 + ) is True + + def test_write_points_udp(self): + s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + s.bind(('0.0.0.0', 4444)) + + cli = InfluxDBClient( + 'localhost', 8086, 'root', 'root', + 'test', use_udp=True, udp_port=4444 + ) + cli.write_points(self.dummy_points) + + received_data, addr = s.recvfrom(1024) + + assert self.dummy_points == \ + json.loads(received_data.decode(), strict=True) + + def test_write_bad_precision_udp(self): + cli = InfluxDBClient( + 'localhost', 8086, 'root', 'root', + 'test', use_udp=True, udp_port=4444 + ) + + with self.assertRaisesRegexp( + Exception, + "InfluxDB only supports seconds precision for udp writes" + ): + cli.write_points( + self.dummy_points, + time_precision='ms' + ) + + @raises(Exception) + def test_write_points_fails(self): + with _mocked_session('post', 500): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + cli.write_points([]) + + def test_write_points_with_precision(self): + with _mocked_session('post', 200, self.dummy_points): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + assert cli.write_points(self.dummy_points) is True + + def test_write_points_bad_precision(self): + cli = InfluxDBClient() + with self.assertRaisesRegexp( + Exception, + "Invalid time precision is given. 
\(use 's', 'm', 'ms' or 'u'\)" + ): + cli.write_points( + self.dummy_points, + time_precision='g' + ) + + @raises(Exception) + def test_write_points_with_precision_fails(self): + with _mocked_session('post', 500): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + cli.write_points_with_precision([]) + + def test_delete_points(self): + with _mocked_session('delete', 204) as mocked: + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + assert cli.delete_points("foo") is True + + assert len(mocked.call_args_list) == 1 + args, kwds = mocked.call_args_list[0] + + assert kwds['params'] == {'u': 'username', 'p': 'password'} + assert kwds['url'] == 'http://host:8086/db/db/series/foo' + + @raises(Exception) + def test_delete_points_with_wrong_name(self): + with _mocked_session('delete', 400): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + cli.delete_points("nonexist") + + @raises(NotImplementedError) + def test_create_scheduled_delete(self): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + cli.create_scheduled_delete([]) + + @raises(NotImplementedError) + def test_get_list_scheduled_delete(self): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + cli.get_list_scheduled_delete() + + @raises(NotImplementedError) + def test_remove_scheduled_delete(self): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + cli.remove_scheduled_delete(1) + + def test_query(self): + data = [ + { + "name": "foo", + "columns": ["time", "sequence_number", "column_one"], + "points": [ + [1383876043, 16, "2"], [1383876043, 15, "1"], + [1383876035, 14, "2"], [1383876035, 13, "1"] + ] + } + ] + with _mocked_session('get', 200, data): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + result = cli.query('select column_one from foo;') + assert len(result[0]['points']) == 4 + + def test_query_chunked(self): + cli = InfluxDBClient(database='db') + example_object = { + 'points': [ + [1415206250119, 40001, 667], + [1415206244555, 30001, 7], + [1415206228241, 20001, 788], + [1415206212980, 10001, 555], + [1415197271586, 10001, 23] + ], + 'name': 'foo', + 'columns': [ + 'time', + 'sequence_number', + 'val' + ] + } + example_response = \ + json.dumps(example_object) + json.dumps(example_object) + + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.GET, + "http://localhost:8086/db/db/series", + text=example_response + ) + + self.assertListEqual( + cli.query('select * from foo', chunked=True), + [example_object, example_object] + ) + + @raises(Exception) + def test_query_fail(self): + with _mocked_session('get', 401): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + cli.query('select column_one from foo;') + + def test_query_bad_precision(self): + cli = InfluxDBClient() + with self.assertRaisesRegexp( + Exception, + "Invalid time precision is given. 
\(use 's', 'm', 'ms' or 'u'\)" + ): + cli.query('select column_one from foo', time_precision='g') + + def test_create_database(self): + with _mocked_session('post', 201, {"name": "new_db"}): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + assert cli.create_database('new_db') is True + + @raises(Exception) + def test_create_database_fails(self): + with _mocked_session('post', 401): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + cli.create_database('new_db') + + def test_delete_database(self): + with _mocked_session('delete', 204): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + assert cli.delete_database('old_db') is True + + @raises(Exception) + def test_delete_database_fails(self): + with _mocked_session('delete', 401): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + cli.delete_database('old_db') + + def test_get_list_database(self): + data = [ + {"name": "a_db"} + ] + with _mocked_session('get', 200, data): + cli = InfluxDBClient('host', 8086, 'username', 'password') + assert len(cli.get_list_database()) == 1 + assert cli.get_list_database()[0]['name'] == 'a_db' + + @raises(Exception) + def test_get_list_database_fails(self): + with _mocked_session('get', 401): + cli = InfluxDBClient('host', 8086, 'username', 'password') + cli.get_list_database() + + @raises(FutureWarning) + def test_get_database_list_deprecated(self): + data = [ + {"name": "a_db"} + ] + with _mocked_session('get', 200, data): + cli = InfluxDBClient('host', 8086, 'username', 'password') + assert len(cli.get_database_list()) == 1 + assert cli.get_database_list()[0]['name'] == 'a_db' + + def test_delete_series(self): + with _mocked_session('delete', 204): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + cli.delete_series('old_series') + + @raises(Exception) + def test_delete_series_fails(self): + with _mocked_session('delete', 401): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + cli.delete_series('old_series') + + def test_get_series_list(self): + cli = InfluxDBClient(database='db') + + with requests_mock.Mocker() as m: + example_response = \ + '[{"name":"list_series_result","columns":' \ + '["time","name"],"points":[[0,"foo"],[0,"bar"]]}]' + + m.register_uri( + requests_mock.GET, + "http://localhost:8086/db/db/series", + text=example_response + ) + + self.assertListEqual( + cli.get_list_series(), + ['foo', 'bar'] + ) + + def test_get_continuous_queries(self): + cli = InfluxDBClient(database='db') + + with requests_mock.Mocker() as m: + + # Tip: put this in a json linter! + example_response = '[ { "name": "continuous queries", "columns"' \ + ': [ "time", "id", "query" ], "points": [ [ ' \ + '0, 1, "select foo(bar,95) from \\"foo_bar' \ + 's\\" group by time(5m) into response_times.' 
\ + 'percentiles.5m.95" ], [ 0, 2, "select perce' \ + 'ntile(value,95) from \\"response_times\\" g' \ + 'roup by time(5m) into response_times.percen' \ + 'tiles.5m.95" ] ] } ]' + + m.register_uri( + requests_mock.GET, + "http://localhost:8086/db/db/series", + text=example_response + ) + + self.assertListEqual( + cli.get_list_continuous_queries(), + [ + 'select foo(bar,95) from "foo_bars" group ' + 'by time(5m) into response_times.percentiles.5m.95', + + 'select percentile(value,95) from "response_times" group ' + 'by time(5m) into response_times.percentiles.5m.95' + ] + ) + + def test_get_list_cluster_admins(self): + pass + + def test_add_cluster_admin(self): + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.POST, + "http://localhost:8086/cluster_admins" + ) + + cli = InfluxDBClient(database='db') + cli.add_cluster_admin( + new_username='paul', + new_password='laup' + ) + + self.assertDictEqual( + json.loads(m.last_request.body), + { + 'name': 'paul', + 'password': 'laup' + } + ) + + def test_update_cluster_admin_password(self): + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.POST, + "http://localhost:8086/cluster_admins/paul" + ) + + cli = InfluxDBClient(database='db') + cli.update_cluster_admin_password( + username='paul', + new_password='laup' + ) + + self.assertDictEqual( + json.loads(m.last_request.body), + {'password': 'laup'} + ) + + def test_delete_cluster_admin(self): + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.DELETE, + "http://localhost:8086/cluster_admins/paul", + status_code=200, + ) + + cli = InfluxDBClient(database='db') + cli.delete_cluster_admin(username='paul') + + self.assertIsNone(m.last_request.body) + + def test_set_database_admin(self): + pass + + def test_unset_database_admin(self): + pass + + def test_alter_database_admin(self): + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.POST, + "http://localhost:8086/db/db/users/paul" + ) + + cli = InfluxDBClient(database='db') + cli.alter_database_admin( + username='paul', + is_admin=False + ) + + self.assertDictEqual( + json.loads(m.last_request.body), + { + 'admin': False + } + ) + + @raises(NotImplementedError) + def test_get_list_database_admins(self): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + cli.get_list_database_admins() + + @raises(NotImplementedError) + def test_add_database_admin(self): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + cli.add_database_admin('admin', 'admin_secret_password') + + @raises(NotImplementedError) + def test_update_database_admin_password(self): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + cli.update_database_admin_password('admin', 'admin_secret_password') + + @raises(NotImplementedError) + def test_delete_database_admin(self): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + cli.delete_database_admin('admin') + + def test_get_database_users(self): + cli = InfluxDBClient('localhost', 8086, 'username', 'password', 'db') + + example_response = \ + '[{"name":"paul","isAdmin":false,"writeTo":".*","readFrom":".*"},'\ + '{"name":"bobby","isAdmin":false,"writeTo":".*","readFrom":".*"}]' + + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.GET, + "http://localhost:8086/db/db/users", + text=example_response + ) + users = cli.get_database_users() + + self.assertEqual(json.loads(example_response), users) + + def test_add_database_user(self): + with requests_mock.Mocker() as m: + m.register_uri( + 
requests_mock.POST,
+                "http://localhost:8086/db/db/users"
+            )
+            cli = InfluxDBClient(database='db')
+            cli.add_database_user(
+                new_username='paul',
+                new_password='laup',
+                permissions=('.*', '.*')
+            )
+
+            self.assertDictEqual(
+                json.loads(m.last_request.body),
+                {
+                    'writeTo': '.*',
+                    'password': 'laup',
+                    'readFrom': '.*',
+                    'name': 'paul'
+                }
+            )
+
+    def test_add_database_user_bad_permissions(self):
+        cli = InfluxDBClient()
+
+        with self.assertRaisesRegexp(
+                Exception,
+                "'permissions' must be \(readFrom, writeTo\) tuple"
+        ):
+            cli.add_database_user(
+                new_password='paul',
+                new_username='paul',
+                permissions=('hello', 'hello', 'hello')
+            )
+
+    def test_update_database_user_password(self):
+        with requests_mock.Mocker() as m:
+            m.register_uri(
+                requests_mock.POST,
+                "http://localhost:8086/db/db/users/paul"
+            )
+
+            cli = InfluxDBClient(database='db')
+            cli.update_database_user_password(
+                username='paul',
+                new_password='laup'
+            )
+
+            self.assertDictEqual(
+                json.loads(m.last_request.body),
+                {'password': 'laup'}
+            )
+
+    def test_update_database_user_password_current_user(self):
+        cli = InfluxDBClient(
+            username='root',
+            password='hello',
+            database='database'
+        )
+        with requests_mock.Mocker() as m:
+            m.register_uri(
+                requests_mock.POST,
+                "http://localhost:8086/db/database/users/root"
+            )
+
+            cli.update_database_user_password(
+                username='root',
+                new_password='bye'
+            )
+
+            self.assertEqual(cli._password, 'bye')
+
+    def test_delete_database_user(self):
+        with requests_mock.Mocker() as m:
+            m.register_uri(
+                requests_mock.DELETE,
+                "http://localhost:8086/db/db/users/paul"
+            )
+
+            cli = InfluxDBClient(database='db')
+            cli.delete_database_user(username='paul')
+
+            self.assertIsNone(m.last_request.body)
+
+    @raises(NotImplementedError)
+    def test_update_permission(self):
+        cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
+        cli.update_permission('admin', [])
+
+    @mock.patch('requests.Session.request')
+    def test_request_retry(self, mock_request):
+        """Tests that two connection errors will be handled"""
+
+        class CustomMock(object):
+            i = 0
+
+            def connection_error(self, *args, **kwargs):
+                self.i += 1
+
+                if self.i < 3:
+                    raise requests.exceptions.ConnectionError
+                else:
+                    r = requests.Response()
+                    r.status_code = 200
+                    return r
+
+        mock_request.side_effect = CustomMock().connection_error
+
+        cli = InfluxDBClient(database='db')
+        cli.write_points(
+            self.dummy_points
+        )
+
+    @mock.patch('requests.Session.request')
+    def test_request_retry_raises(self, mock_request):
+        """Tests that three connection errors will not be handled"""
+
+        class CustomMock(object):
+            i = 0
+
+            def connection_error(self, *args, **kwargs):
+                self.i += 1
+
+                if self.i < 4:
+                    raise requests.exceptions.ConnectionError
+                else:
+                    r = requests.Response()
+                    r.status_code = 200
+                    return r
+
+        mock_request.side_effect = CustomMock().connection_error
+
+        cli = InfluxDBClient(database='db')
+
+        with self.assertRaises(requests.exceptions.ConnectionError):
+            cli.write_points(self.dummy_points)
diff --git a/tests/influxdb/influxdb08/dataframe_client_test.py b/tests/influxdb/influxdb08/dataframe_client_test.py
new file mode 100644
index 00000000..9fc54b9e
--- /dev/null
+++ b/tests/influxdb/influxdb08/dataframe_client_test.py
@@ -0,0 +1,288 @@
+# -*- coding: utf-8 -*-
+"""
+unit tests for the DataFrame client
+"""
+from .client_test import _mocked_session
+
+import unittest
+import json
+import requests_mock
+from nose.tools import raises
+from datetime import timedelta
+from tests import skipIfPYpy, using_pypy
+import
copy +import warnings + +if not using_pypy: + import pandas as pd + from pandas.util.testing import assert_frame_equal + from influxdb.influxdb08 import DataFrameClient + + +@skipIfPYpy +class TestDataFrameClient(unittest.TestCase): + + def setUp(self): + # By default, raise exceptions on warnings + warnings.simplefilter('error', FutureWarning) + + def test_write_points_from_dataframe(self): + now = pd.Timestamp('1970-01-01 00:00+00:00') + dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], + index=[now, now + timedelta(hours=1)], + columns=["column_one", "column_two", + "column_three"]) + points = [ + { + "points": [ + ["1", 1, 1.0, 0], + ["2", 2, 2.0, 3600] + ], + "name": "foo", + "columns": ["column_one", "column_two", "column_three", "time"] + } + ] + + with requests_mock.Mocker() as m: + m.register_uri(requests_mock.POST, + "http://localhost:8086/db/db/series") + + cli = DataFrameClient(database='db') + cli.write_points({"foo": dataframe}) + + self.assertListEqual(json.loads(m.last_request.body), points) + + def test_write_points_from_dataframe_in_batches(self): + now = pd.Timestamp('1970-01-01 00:00+00:00') + dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], + index=[now, now + timedelta(hours=1)], + columns=["column_one", "column_two", + "column_three"]) + with requests_mock.Mocker() as m: + m.register_uri(requests_mock.POST, + "http://localhost:8086/db/db/series") + + cli = DataFrameClient(database='db') + assert cli.write_points({"foo": dataframe}, + batch_size=1) is True + + def test_write_points_from_dataframe_with_numeric_column_names(self): + now = pd.Timestamp('1970-01-01 00:00+00:00') + # df with numeric column names + dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], + index=[now, now + timedelta(hours=1)]) + points = [ + { + "points": [ + ["1", 1, 1.0, 0], + ["2", 2, 2.0, 3600] + ], + "name": "foo", + "columns": ['0', '1', '2', "time"] + } + ] + + with requests_mock.Mocker() as m: + m.register_uri(requests_mock.POST, + "http://localhost:8086/db/db/series") + + cli = DataFrameClient(database='db') + cli.write_points({"foo": dataframe}) + + self.assertListEqual(json.loads(m.last_request.body), points) + + def test_write_points_from_dataframe_with_period_index(self): + dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], + index=[pd.Period('1970-01-01'), + pd.Period('1970-01-02')], + columns=["column_one", "column_two", + "column_three"]) + points = [ + { + "points": [ + ["1", 1, 1.0, 0], + ["2", 2, 2.0, 86400] + ], + "name": "foo", + "columns": ["column_one", "column_two", "column_three", "time"] + } + ] + + with requests_mock.Mocker() as m: + m.register_uri(requests_mock.POST, + "http://localhost:8086/db/db/series") + + cli = DataFrameClient(database='db') + cli.write_points({"foo": dataframe}) + + self.assertListEqual(json.loads(m.last_request.body), points) + + def test_write_points_from_dataframe_with_time_precision(self): + now = pd.Timestamp('1970-01-01 00:00+00:00') + dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], + index=[now, now + timedelta(hours=1)], + columns=["column_one", "column_two", + "column_three"]) + points = [ + { + "points": [ + ["1", 1, 1.0, 0], + ["2", 2, 2.0, 3600] + ], + "name": "foo", + "columns": ["column_one", "column_two", "column_three", "time"] + } + ] + + points_ms = copy.deepcopy(points) + points_ms[0]["points"][1][-1] = 3600 * 1000 + + points_us = copy.deepcopy(points) + points_us[0]["points"][1][-1] = 3600 * 1000000 + + with requests_mock.Mocker() as m: + 
m.register_uri(requests_mock.POST, + "http://localhost:8086/db/db/series") + + cli = DataFrameClient(database='db') + + cli.write_points({"foo": dataframe}, time_precision='s') + self.assertListEqual(json.loads(m.last_request.body), points) + + cli.write_points({"foo": dataframe}, time_precision='m') + self.assertListEqual(json.loads(m.last_request.body), points_ms) + + cli.write_points({"foo": dataframe}, time_precision='u') + self.assertListEqual(json.loads(m.last_request.body), points_us) + + @raises(TypeError) + def test_write_points_from_dataframe_fails_without_time_index(self): + dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], + columns=["column_one", "column_two", + "column_three"]) + + with requests_mock.Mocker() as m: + m.register_uri(requests_mock.POST, + "http://localhost:8086/db/db/series") + + cli = DataFrameClient(database='db') + cli.write_points({"foo": dataframe}) + + @raises(TypeError) + def test_write_points_from_dataframe_fails_with_series(self): + now = pd.Timestamp('1970-01-01 00:00+00:00') + dataframe = pd.Series(data=[1.0, 2.0], + index=[now, now + timedelta(hours=1)]) + + with requests_mock.Mocker() as m: + m.register_uri(requests_mock.POST, + "http://localhost:8086/db/db/series") + + cli = DataFrameClient(database='db') + cli.write_points({"foo": dataframe}) + + def test_query_into_dataframe(self): + data = [ + { + "name": "foo", + "columns": ["time", "sequence_number", "column_one"], + "points": [ + [3600, 16, 2], [3600, 15, 1], + [0, 14, 2], [0, 13, 1] + ] + } + ] + # dataframe sorted ascending by time first, then sequence_number + dataframe = pd.DataFrame(data=[[13, 1], [14, 2], [15, 1], [16, 2]], + index=pd.to_datetime([0, 0, + 3600, 3600], + unit='s', utc=True), + columns=['sequence_number', 'column_one']) + with _mocked_session('get', 200, data): + cli = DataFrameClient('host', 8086, 'username', 'password', 'db') + result = cli.query('select column_one from foo;') + assert_frame_equal(dataframe, result) + + def test_query_multiple_time_series(self): + data = [ + { + "name": "series1", + "columns": ["time", "mean", "min", "max", "stddev"], + "points": [[0, 323048, 323048, 323048, 0]] + }, + { + "name": "series2", + "columns": ["time", "mean", "min", "max", "stddev"], + "points": [[0, -2.8233, -2.8503, -2.7832, 0.0173]] + }, + { + "name": "series3", + "columns": ["time", "mean", "min", "max", "stddev"], + "points": [[0, -0.01220, -0.01220, -0.01220, 0]] + } + ] + dataframes = { + 'series1': pd.DataFrame(data=[[323048, 323048, 323048, 0]], + index=pd.to_datetime([0], unit='s', + utc=True), + columns=['mean', 'min', 'max', 'stddev']), + 'series2': pd.DataFrame(data=[[-2.8233, -2.8503, -2.7832, 0.0173]], + index=pd.to_datetime([0], unit='s', + utc=True), + columns=['mean', 'min', 'max', 'stddev']), + 'series3': pd.DataFrame(data=[[-0.01220, -0.01220, -0.01220, 0]], + index=pd.to_datetime([0], unit='s', + utc=True), + columns=['mean', 'min', 'max', 'stddev']) + } + with _mocked_session('get', 200, data): + cli = DataFrameClient('host', 8086, 'username', 'password', 'db') + result = cli.query("""select mean(value), min(value), max(value), + stddev(value) from series1, series2, series3""") + assert dataframes.keys() == result.keys() + for key in dataframes.keys(): + assert_frame_equal(dataframes[key], result[key]) + + def test_query_with_empty_result(self): + with _mocked_session('get', 200, []): + cli = DataFrameClient('host', 8086, 'username', 'password', 'db') + result = cli.query('select column_one from foo;') + assert result == [] + + def 
test_list_series(self): + response = [ + { + 'columns': ['time', 'name'], + 'name': 'list_series_result', + 'points': [[0, 'seriesA'], [0, 'seriesB']] + } + ] + with _mocked_session('get', 200, response): + cli = DataFrameClient('host', 8086, 'username', 'password', 'db') + series_list = cli.get_list_series() + assert series_list == ['seriesA', 'seriesB'] + + def test_datetime_to_epoch(self): + timestamp = pd.Timestamp('2013-01-01 00:00:00.000+00:00') + cli = DataFrameClient('host', 8086, 'username', 'password', 'db') + + self.assertEqual( + cli._datetime_to_epoch(timestamp), + 1356998400.0 + ) + self.assertEqual( + cli._datetime_to_epoch(timestamp, time_precision='s'), + 1356998400.0 + ) + self.assertEqual( + cli._datetime_to_epoch(timestamp, time_precision='m'), + 1356998400000.0 + ) + self.assertEqual( + cli._datetime_to_epoch(timestamp, time_precision='ms'), + 1356998400000.0 + ) + self.assertEqual( + cli._datetime_to_epoch(timestamp, time_precision='u'), + 1356998400000000.0 + ) diff --git a/tests/influxdb/influxdb08/helper_test.py b/tests/influxdb/influxdb08/helper_test.py new file mode 100644 index 00000000..3f546a99 --- /dev/null +++ b/tests/influxdb/influxdb08/helper_test.py @@ -0,0 +1,194 @@ +# -*- coding: utf-8 -*- + +import unittest +import warnings + +import mock +from influxdb.influxdb08 import SeriesHelper, InfluxDBClient +from requests.exceptions import ConnectionError + + +class TestSeriesHelper(unittest.TestCase): + + @classmethod + def setUpClass(cls): + super(TestSeriesHelper, cls).setUpClass() + + TestSeriesHelper.client = InfluxDBClient( + 'host', + 8086, + 'username', + 'password', + 'database' + ) + + class MySeriesHelper(SeriesHelper): + + class Meta: + client = TestSeriesHelper.client + series_name = 'events.stats.{server_name}' + fields = ['time', 'server_name'] + bulk_size = 5 + autocommit = True + + TestSeriesHelper.MySeriesHelper = MySeriesHelper + + def test_auto_commit(self): + """ + Tests that write_points is called after the right number of events + """ + class AutoCommitTest(SeriesHelper): + + class Meta: + series_name = 'events.stats.{server_name}' + fields = ['time', 'server_name'] + bulk_size = 5 + client = InfluxDBClient() + autocommit = True + + fake_write_points = mock.MagicMock() + AutoCommitTest(server_name='us.east-1', time=159) + AutoCommitTest._client.write_points = fake_write_points + AutoCommitTest(server_name='us.east-1', time=158) + AutoCommitTest(server_name='us.east-1', time=157) + AutoCommitTest(server_name='us.east-1', time=156) + self.assertFalse(fake_write_points.called) + AutoCommitTest(server_name='us.east-1', time=3443) + self.assertTrue(fake_write_points.called) + + def testSingleSeriesName(self): + """ + Tests JSON conversion when there is only one series name. 
+ """ + TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=159) + TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=158) + TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=157) + TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=156) + expectation = [{'points': [[159, 'us.east-1'], + [158, 'us.east-1'], + [157, 'us.east-1'], + [156, 'us.east-1']], + 'name': 'events.stats.us.east-1', + 'columns': ['time', 'server_name']}] + + rcvd = TestSeriesHelper.MySeriesHelper._json_body_() + self.assertTrue(all([el in expectation for el in rcvd]) and + all([el in rcvd for el in expectation]), + 'Invalid JSON body of time series returned from ' + '_json_body_ for one series name: {}.'.format(rcvd)) + TestSeriesHelper.MySeriesHelper._reset_() + self.assertEqual( + TestSeriesHelper.MySeriesHelper._json_body_(), + [], + 'Resetting helper did not empty datapoints.') + + def testSeveralSeriesNames(self): + ''' + Tests JSON conversion when there is only one series name. + ''' + TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=159) + TestSeriesHelper.MySeriesHelper(server_name='fr.paris-10', time=158) + TestSeriesHelper.MySeriesHelper(server_name='lu.lux', time=157) + TestSeriesHelper.MySeriesHelper(server_name='uk.london', time=156) + expectation = [{'points': [[157, 'lu.lux']], + 'name': 'events.stats.lu.lux', + 'columns': ['time', 'server_name']}, + {'points': [[156, 'uk.london']], + 'name': 'events.stats.uk.london', + 'columns': ['time', 'server_name']}, + {'points': [[158, 'fr.paris-10']], + 'name': 'events.stats.fr.paris-10', + 'columns': ['time', 'server_name']}, + {'points': [[159, 'us.east-1']], + 'name': 'events.stats.us.east-1', + 'columns': ['time', 'server_name']}] + + rcvd = TestSeriesHelper.MySeriesHelper._json_body_() + self.assertTrue(all([el in expectation for el in rcvd]) and + all([el in rcvd for el in expectation]), + 'Invalid JSON body of time series returned from ' + '_json_body_ for several series names: {}.' + .format(rcvd)) + TestSeriesHelper.MySeriesHelper._reset_() + self.assertEqual( + TestSeriesHelper.MySeriesHelper._json_body_(), + [], + 'Resetting helper did not empty datapoints.') + + def testInvalidHelpers(self): + ''' + Tests errors in invalid helpers. + ''' + class MissingMeta(SeriesHelper): + pass + + class MissingClient(SeriesHelper): + + class Meta: + series_name = 'events.stats.{server_name}' + fields = ['time', 'server_name'] + autocommit = True + + class MissingSeriesName(SeriesHelper): + + class Meta: + fields = ['time', 'server_name'] + + class MissingFields(SeriesHelper): + + class Meta: + series_name = 'events.stats.{server_name}' + + for cls in [MissingMeta, MissingClient, MissingFields, + MissingSeriesName]: + self.assertRaises( + AttributeError, cls, **{'time': 159, + 'server_name': 'us.east-1'}) + + def testWarnBulkSizeZero(self): + """ + Tests warning for an invalid bulk size. + """ + class WarnBulkSizeZero(SeriesHelper): + + class Meta: + client = TestSeriesHelper.client + series_name = 'events.stats.{server_name}' + fields = ['time', 'server_name'] + bulk_size = 0 + autocommit = True + + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + try: + WarnBulkSizeZero(time=159, server_name='us.east-1') + except ConnectionError: + # Server defined in the client is invalid, we're testing + # the warning only. + pass + self.assertEqual(len(w), 1, + '{} call should have generated one warning.' 
+                         .format(WarnBulkSizeZero))
+        self.assertIn('forced to 1', str(w[-1].message),
+                      'Warning message did not contain "forced to 1".')
+
+    def testWarnBulkSizeNoEffect(self):
+        """
+        Tests warning for a set bulk size when autocommit is False.
+        """
+        class WarnBulkSizeNoEffect(SeriesHelper):
+
+            class Meta:
+                series_name = 'events.stats.{server_name}'
+                fields = ['time', 'server_name']
+                bulk_size = 5
+                autocommit = False
+
+        with warnings.catch_warnings(record=True) as w:
+            warnings.simplefilter("always")
+            WarnBulkSizeNoEffect(time=159, server_name='us.east-1')
+        self.assertEqual(len(w), 1,
+                         '{} call should have generated one warning.'
+                         .format(WarnBulkSizeNoEffect))
+        self.assertIn('has no affect', str(w[-1].message),
+                      'Warning message did not contain "has no affect".')

From f95bd8212ae4221162cc5452704e620cf57e45d5 Mon Sep 17 00:00:00 2001
From: aviau
Date: Thu, 19 Mar 2015 16:33:27 -0400
Subject: [PATCH 2/2] Run UDP tests on random ports

---
 tests/influxdb/client_test.py            | 6 ++++--
 tests/influxdb/influxdb08/client_test.py | 6 ++++--
 2 files changed, 8 insertions(+), 4 deletions(-)

diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py
index 7b73e63f..69858c42 100644
--- a/tests/influxdb/client_test.py
+++ b/tests/influxdb/client_test.py
@@ -8,6 +8,7 @@
 import socket
 import unittest
 import requests_mock
+import random
 from nose.tools import raises
 from mock import patch
 import warnings
@@ -160,11 +161,12 @@ def test_write_points_batch(self):

     def test_write_points_udp(self):
         s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
-        s.bind(('0.0.0.0', 4444))
+        port = random.randint(4000, 8000)
+        s.bind(('0.0.0.0', port))

         cli = InfluxDBClient(
             'localhost', 8086, 'root', 'root',
-            'test', use_udp=True, udp_port=4444
+            'test', use_udp=True, udp_port=port
         )

         cli.write_points(self.dummy_points)
diff --git a/tests/influxdb/influxdb08/client_test.py b/tests/influxdb/influxdb08/client_test.py
index f5631388..ca83004a 100644
--- a/tests/influxdb/influxdb08/client_test.py
+++ b/tests/influxdb/influxdb08/client_test.py
@@ -8,6 +8,7 @@
 import socket
 import unittest
 import requests_mock
+import random
 from nose.tools import raises
 from mock import patch
 import warnings
@@ -176,11 +177,12 @@ def test_write_points_batch(self):

     def test_write_points_udp(self):
         s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
-        s.bind(('0.0.0.0', 4444))
+        port = random.randint(4000, 8000)
+        s.bind(('0.0.0.0', port))

         cli = InfluxDBClient(
             'localhost', 8086, 'root', 'root',
-            'test', use_udp=True, udp_port=4444
+            'test', use_udp=True, udp_port=port
         )

         cli.write_points(self.dummy_points)
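
Editor's note on the port selection in PATCH 2/2: random.randint(4000, 8000) shrinks the
collision window between parallel test runs but can still pick a port that is already in
use. A minimal sketch of an alternative, not part of either patch, is to bind to port 0
and let the OS assign a free ephemeral port (variable names here are illustrative only):

    import socket

    # Binding to port 0 asks the OS for any unused ephemeral port;
    # getsockname() then reports the port that was actually assigned.
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    s.bind(('0.0.0.0', 0))
    port = s.getsockname()[1]  # pass this as udp_port to the client under test

This removes the collision risk entirely, at the cost of the port no longer falling in a
predictable range.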