+ >> cli = InfluxDBClient.from_dsn('udp+influxdb://username:pass@\
+ ... localhost:8086/databasename', timeout=5, udp_port=159)
+ >> print('{0._baseurl} - {0.use_udp} {0.udp_port}'.format(cli))
+ http://localhost:8086 - True 159
+
+ :param dsn: data source name
+ :type dsn: string
+ :param **kwargs: additional parameters for InfluxDBClient.
+ :type **kwargs: dict
+ :note: parameters provided in **kwargs may override dsn parameters.
+        :note: when using "udp+influxdb" the specified port (if any) will be
+            used for the TCP connection; specify the UDP port with the
+            additional ``udp_port`` parameter (see the example above).
+ :raise ValueError: if the provided DSN has any unexpected value.
+
+ """
+ init_args = {}
+ conn_params = urlparse(dsn)
+ scheme_info = conn_params.scheme.split('+')
+
+ if len(scheme_info) == 1:
+ scheme = scheme_info[0]
+ modifier = None
+ else:
+ modifier, scheme = scheme_info
+
+ if scheme != 'influxdb':
+ raise ValueError('Unknown scheme "{0}".'.format(scheme))
+
+ if modifier:
+ if modifier == 'udp':
+ init_args['use_udp'] = True
+ elif modifier == 'https':
+ init_args['ssl'] = True
+ else:
+ raise ValueError('Unknown modifier "{0}".'.format(modifier))
+
+ if conn_params.hostname:
+ init_args['host'] = conn_params.hostname
+ if conn_params.port:
+ init_args['port'] = conn_params.port
+ if conn_params.username:
+ init_args['username'] = conn_params.username
+ if conn_params.password:
+ init_args['password'] = conn_params.password
+ if conn_params.path and len(conn_params.path) > 1:
+ init_args['database'] = conn_params.path[1:]
+
+ init_args.update(kwargs)
+
+ return InfluxDBClient(**init_args)
+
+ # Change member variables
+
+ def switch_database(self, database):
+ """Change client database.
+
+ :param database: the new database name to switch to
+ :type database: string
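+
+        A minimal usage sketch (connection values are illustrative)::
+
+            client = InfluxDBClient('localhost', 8086, 'root', 'root', 'one')
+            client.switch_database('two')
+            # subsequent requests now target database "two"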
+ """
+ self._database = database
+
+ def switch_db(self, database):
+ """Change client database.
+
+ DEPRECATED.
+ """
+ warnings.warn(
+ "switch_db is deprecated, and will be removed "
+ "in future versions. Please use "
+ "``InfluxDBClient.switch_database(database)`` instead.",
+ FutureWarning)
+ return self.switch_database(database)
+
+ def switch_user(self, username, password):
+ """Change client username.
+
+ :param username: the new username to switch to
+ :type username: string
+ :param password: the new password to switch to
+ :type password: string
+ """
+ self._username = username
+ self._password = password
+
+ def request(self, url, method='GET', params=None, data=None,
+ expected_response_code=200):
+        """Make an HTTP request to the API."""
+ url = "{0}/{1}".format(self._baseurl, url)
+
+ if params is None:
+ params = {}
+
+ auth = {
+ 'u': self._username,
+ 'p': self._password
+ }
+
+ params.update(auth)
+
+ if data is not None and not isinstance(data, str):
+ data = json.dumps(data)
+
+ retry = True
+ _try = 0
+ # Try to send the request more than once by default (see #103)
+ while retry:
+ try:
+ response = session.request(
+ method=method,
+ url=url,
+ params=params,
+ data=data,
+ headers=self._headers,
+ verify=self._verify_ssl,
+ timeout=self._timeout
+ )
+ break
+ except (requests.exceptions.ConnectionError,
+ requests.exceptions.Timeout):
+ _try += 1
+ if self._retries != 0:
+ retry = _try < self._retries
+        else:
+            # while/else: every attempt raised, so no ``break`` was reached
+            # and the request never succeeded.
+            raise requests.exceptions.ConnectionError
+
+ if response.status_code == expected_response_code:
+ return response
+ else:
+ raise InfluxDBClientError(response.content, response.status_code)
+
+ def write(self, data):
+        """Provided as a convenience for InfluxDB v0.9.0; this may change."""
+ self.request(
+ url="write",
+ method='POST',
+ params=None,
+ data=data,
+ expected_response_code=200
+ )
+ return True
+
+ # Writing Data
+ #
+ # Assuming you have a database named foo_production you can write data
+ # by doing a POST to /db/foo_production/series?u=some_user&p=some_password
+ # with a JSON body of points.
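+    #
+    # For illustration only (user, password and database are placeholders):
+    # curl -X POST \
+    #   'http://localhost:8086/db/foo_production/series?u=some_user&p=some_password' \
+    #   -d '[{"name": "cpu_load_short", "columns": ["value"], "points": [[12]]}]'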
+
+ def write_points(self, data, time_precision='s', *args, **kwargs):
+ """Write to multiple time series names.
+
+ An example data blob is:
+
+ data = [
+ {
+ "points": [
+ [
+ 12
+ ]
+ ],
+ "name": "cpu_load_short",
+ "columns": [
+ "value"
+ ]
+ }
+ ]
+
+ :param data: A list of dicts in InfluxDB 0.8.x data format.
+ :param time_precision: [Optional, default 's'] Either 's', 'm', 'ms'
+ or 'u'.
+ :param batch_size: [Optional] Value to write the points in batches
+ instead of all at one time. Useful for when doing data dumps from
+ one database to another or when doing a massive write operation
+ :type batch_size: int
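+
+        A call sketch (``data`` as in the blob above; values illustrative)::
+
+            client.write_points(data, time_precision='s', batch_size=2)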
+
+ """
+ def list_chunks(data_list, n):
+            """Yield successive n-sized chunks from data_list."""
+ for i in xrange(0, len(data_list), n):
+ yield data_list[i:i + n]
+
+ batch_size = kwargs.get('batch_size')
+ if batch_size and batch_size > 0:
+ for item in data:
+ name = item.get('name')
+ columns = item.get('columns')
+ point_list = item.get('points', [])
+
+ for batch in list_chunks(point_list, batch_size):
+ item = [{
+ "points": batch,
+ "name": name,
+ "columns": columns
+ }]
+ self._write_points(
+ data=item,
+ time_precision=time_precision)
+ return True
+
+ return self._write_points(data=data,
+ time_precision=time_precision)
+
+ def write_points_with_precision(self, data, time_precision='s'):
+ """Write to multiple time series names.
+
+ DEPRECATED.
+ """
+ warnings.warn(
+ "write_points_with_precision is deprecated, and will be removed "
+ "in future versions. Please use "
+ "``InfluxDBClient.write_points(time_precision='..')`` instead.",
+ FutureWarning)
+ return self._write_points(data=data, time_precision=time_precision)
+
+ def _write_points(self, data, time_precision):
+ if time_precision not in ['s', 'm', 'ms', 'u']:
+ raise Exception(
+ "Invalid time precision is given. (use 's', 'm', 'ms' or 'u')")
+
+ if self._use_udp and time_precision != 's':
+ raise Exception(
+ "InfluxDB only supports seconds precision for udp writes"
+ )
+
+ url = "db/{0}/series".format(self._database)
+
+ params = {
+ 'time_precision': time_precision
+ }
+
+ if self._use_udp:
+ self.send_packet(data)
+ else:
+ self.request(
+ url=url,
+ method='POST',
+ params=params,
+ data=data,
+ expected_response_code=200
+ )
+
+ return True
+
+ # One Time Deletes
+
+ def delete_points(self, name):
+ """Delete an entire series."""
+ url = "db/{0}/series/{1}".format(self._database, name)
+
+ self.request(
+ url=url,
+ method='DELETE',
+ expected_response_code=204
+ )
+
+ return True
+
+ # Regularly Scheduled Deletes
+
+ def create_scheduled_delete(self, json_body):
+        """Create a scheduled delete on the database.
+
+ 2013-11-08: This endpoint has not been implemented yet in ver0.0.8,
+ but it is documented in http://influxdb.org/docs/api/http.html.
+ See also: src/api/http/api.go:l57
+
+ """
+ raise NotImplementedError()
+
+ # get list of deletes
+ # curl http://localhost:8086/db/site_dev/scheduled_deletes
+ #
+ # remove a regularly scheduled delete
+ # curl -X DELETE http://localhost:8086/db/site_dev/scheduled_deletes/:id
+
+ def get_list_scheduled_delete(self):
+ """Get list of scheduled deletes.
+
+ 2013-11-08: This endpoint has not been implemented yet in ver0.0.8,
+ but it is documented in http://influxdb.org/docs/api/http.html.
+ See also: src/api/http/api.go:l57
+
+ """
+ raise NotImplementedError()
+
+ def remove_scheduled_delete(self, delete_id):
+ """Remove scheduled delete.
+
+ 2013-11-08: This endpoint has not been implemented yet in ver0.0.8,
+ but it is documented in http://influxdb.org/docs/api/http.html.
+ See also: src/api/http/api.go:l57
+
+ """
+ raise NotImplementedError()
+
+ def query(self, query, time_precision='s', chunked=False):
+ """Query data from the influxdb v0.8 database.
+
+        :param query: the actual query string to send to the database
+        :param time_precision: [Optional, default 's'] Either 's', 'm', 'ms'
+            or 'u'.
+ :param chunked: [Optional, default=False] True if the data shall be
+ retrieved in chunks, False otherwise.
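+
+        A usage sketch (assumes a reachable InfluxDB v0.8 server)::
+
+            result = client.query('select value from cpu_load_short;')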
+ """
+ return self._query(query, time_precision=time_precision,
+ chunked=chunked)
+
+ # Querying Data
+ #
+ # GET db/:name/series. It takes five parameters
+ def _query(self, query, time_precision='s', chunked=False):
+ if time_precision not in ['s', 'm', 'ms', 'u']:
+ raise Exception(
+ "Invalid time precision is given. (use 's', 'm', 'ms' or 'u')")
+
+ if chunked is True:
+ chunked_param = 'true'
+ else:
+ chunked_param = 'false'
+
+ # Build the URL of the series to query
+ url = "db/{0}/series".format(self._database)
+
+ params = {
+ 'q': query,
+ 'time_precision': time_precision,
+ 'chunked': chunked_param
+ }
+
+ response = self.request(
+ url=url,
+ method='GET',
+ params=params,
+ expected_response_code=200
+ )
+
+ if chunked:
+ try:
+ decoded = chunked_json.loads(response.content.decode())
+ except UnicodeDecodeError:
+ decoded = chunked_json.loads(response.content.decode('utf-8'))
+
+ return list(decoded)
+
+ return response.json()
+
+ # Creating and Dropping Databases
+ #
+ # ### create a database
+ # curl -X POST http://localhost:8086/db -d '{"name": "site_development"}'
+ #
+ # ### drop a database
+ # curl -X DELETE http://localhost:8086/db/site_development
+
+ def create_database(self, database):
+ """Create a database on the InfluxDB server.
+
+ :param database: the name of the database to create
+ :type database: string
+ :rtype: boolean
+ """
+ url = "db"
+
+ data = {'name': database}
+
+ self.request(
+ url=url,
+ method='POST',
+ data=data,
+ expected_response_code=201
+ )
+
+ return True
+
+ def delete_database(self, database):
+ """Drop a database on the InfluxDB server.
+
+ :param database: the name of the database to delete
+ :type database: string
+ :rtype: boolean
+ """
+ url = "db/{0}".format(database)
+
+ self.request(
+ url=url,
+ method='DELETE',
+ expected_response_code=204
+ )
+
+ return True
+
+ # ### get list of databases
+ # curl -X GET http://localhost:8086/db
+
+ def get_list_database(self):
+ """Get the list of databases."""
+ url = "db"
+
+ response = self.request(
+ url=url,
+ method='GET',
+ expected_response_code=200
+ )
+
+ return response.json()
+
+ def get_database_list(self):
+ """Get the list of databases.
+
+ DEPRECATED.
+ """
+ warnings.warn(
+ "get_database_list is deprecated, and will be removed "
+ "in future versions. Please use "
+ "``InfluxDBClient.get_list_database`` instead.",
+ FutureWarning)
+ return self.get_list_database()
+
+ def delete_series(self, series):
+ """Drop a series on the InfluxDB server.
+
+ :param series: the name of the series to delete
+ :type series: string
+ :rtype: boolean
+ """
+ url = "db/{0}/series/{1}".format(
+ self._database,
+ series
+ )
+
+ self.request(
+ url=url,
+ method='DELETE',
+ expected_response_code=204
+ )
+
+ return True
+
+ def get_list_series(self):
+ """Get a list of all time series in a database."""
+ response = self._query('list series')
+ return [series[1] for series in response[0]['points']]
+
+ def get_list_continuous_queries(self):
+ """Get a list of continuous queries."""
+ response = self._query('list continuous queries')
+ return [query[2] for query in response[0]['points']]
+
+ # Security
+ # get list of cluster admins
+ # curl http://localhost:8086/cluster_admins?u=root&p=root
+
+ # add cluster admin
+ # curl -X POST http://localhost:8086/cluster_admins?u=root&p=root \
+ # -d '{"name": "paul", "password": "i write teh docz"}'
+
+ # update cluster admin password
+ # curl -X POST http://localhost:8086/cluster_admins/paul?u=root&p=root \
+ # -d '{"password": "new pass"}'
+
+ # delete cluster admin
+ # curl -X DELETE http://localhost:8086/cluster_admins/paul?u=root&p=root
+
+ # Database admins, with a database name of site_dev
+ # get list of database admins
+ # curl http://localhost:8086/db/site_dev/admins?u=root&p=root
+
+ # add database admin
+ # curl -X POST http://localhost:8086/db/site_dev/admins?u=root&p=root \
+ # -d '{"name": "paul", "password": "i write teh docz"}'
+
+ # update database admin password
+ # curl -X POST http://localhost:8086/db/site_dev/admins/paul?u=root&p=root\
+ # -d '{"password": "new pass"}'
+
+ # delete database admin
+ # curl -X DELETE \
+ # http://localhost:8086/db/site_dev/admins/paul?u=root&p=root
+
+ def get_list_cluster_admins(self):
+ """Get list of cluster admins."""
+ response = self.request(
+ url="cluster_admins",
+ method='GET',
+ expected_response_code=200
+ )
+
+ return response.json()
+
+ def add_cluster_admin(self, new_username, new_password):
+ """Add cluster admin."""
+ data = {
+ 'name': new_username,
+ 'password': new_password
+ }
+
+ self.request(
+ url="cluster_admins",
+ method='POST',
+ data=data,
+ expected_response_code=200
+ )
+
+ return True
+
+ def update_cluster_admin_password(self, username, new_password):
+ """Update cluster admin password."""
+ url = "cluster_admins/{0}".format(username)
+
+ data = {
+ 'password': new_password
+ }
+
+ self.request(
+ url=url,
+ method='POST',
+ data=data,
+ expected_response_code=200
+ )
+
+ return True
+
+ def delete_cluster_admin(self, username):
+ """Delete cluster admin."""
+ url = "cluster_admins/{0}".format(username)
+
+ self.request(
+ url=url,
+ method='DELETE',
+ expected_response_code=200
+ )
+
+ return True
+
+ def set_database_admin(self, username):
+ """Set user as database admin."""
+ return self.alter_database_admin(username, True)
+
+ def unset_database_admin(self, username):
+ """Unset user as database admin."""
+ return self.alter_database_admin(username, False)
+
+ def alter_database_admin(self, username, is_admin):
+ """Alter the database admin."""
+ url = "db/{0}/users/{1}".format(self._database, username)
+
+ data = {'admin': is_admin}
+
+ self.request(
+ url=url,
+ method='POST',
+ data=data,
+ expected_response_code=200
+ )
+
+ return True
+
+ def get_list_database_admins(self):
+ """Get list of database admins.
+
+ 2013-11-08: This endpoint has not been implemented yet in ver0.0.8,
+ but it is documented in http://influxdb.org/docs/api/http.html.
+ See also: src/api/http/api.go:l57
+
+ """
+ raise NotImplementedError()
+
+ def add_database_admin(self, new_username, new_password):
+        """Add database admin.
+
+ 2013-11-08: This endpoint has not been implemented yet in ver0.0.8,
+ but it is documented in http://influxdb.org/docs/api/http.html.
+ See also: src/api/http/api.go:l57
+
+ """
+ raise NotImplementedError()
+
+ def update_database_admin_password(self, username, new_password):
+ """Update database admin password.
+
+ 2013-11-08: This endpoint has not been implemented yet in ver0.0.8,
+ but it is documented in http://influxdb.org/docs/api/http.html.
+ See also: src/api/http/api.go:l57
+
+ """
+ raise NotImplementedError()
+
+ def delete_database_admin(self, username):
+ """Delete database admin.
+
+ 2013-11-08: This endpoint has not been implemented yet in ver0.0.8,
+ but it is documented in http://influxdb.org/docs/api/http.html.
+ See also: src/api/http/api.go:l57
+
+ """
+ raise NotImplementedError()
+
+ ###
+ # Limiting User Access
+
+ # Database users
+ # get list of database users
+ # curl http://localhost:8086/db/site_dev/users?u=root&p=root
+
+ # add database user
+ # curl -X POST http://localhost:8086/db/site_dev/users?u=root&p=root \
+ # -d '{"name": "paul", "password": "i write teh docz"}'
+
+ # update database user password
+ # curl -X POST http://localhost:8086/db/site_dev/users/paul?u=root&p=root \
+ # -d '{"password": "new pass"}'
+
+ # delete database user
+ # curl -X DELETE http://localhost:8086/db/site_dev/users/paul?u=root&p=root
+
+ def get_database_users(self):
+ """Get list of database users."""
+ url = "db/{0}/users".format(self._database)
+
+ response = self.request(
+ url=url,
+ method='GET',
+ expected_response_code=200
+ )
+
+ return response.json()
+
+ def add_database_user(self, new_username, new_password, permissions=None):
+ """Add database user.
+
+ :param permissions: A ``(readFrom, writeTo)`` tuple
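+
+        A usage sketch (the permission regexes here are illustrative)::
+
+            client.add_database_user('paul', 'secret',
+                                     permissions=('^$', '.*'))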
+ """
+ url = "db/{0}/users".format(self._database)
+
+ data = {
+ 'name': new_username,
+ 'password': new_password
+ }
+
+ if permissions:
+ try:
+ data['readFrom'], data['writeTo'] = permissions
+ except (ValueError, TypeError):
+ raise TypeError(
+ "'permissions' must be (readFrom, writeTo) tuple"
+ )
+
+ self.request(
+ url=url,
+ method='POST',
+ data=data,
+ expected_response_code=200
+ )
+
+ return True
+
+ def update_database_user_password(self, username, new_password):
+ """Update password."""
+ return self.alter_database_user(username, new_password)
+
+ def alter_database_user(self, username, password=None, permissions=None):
+ """Alter a database user and/or their permissions.
+
+ :param permissions: A ``(readFrom, writeTo)`` tuple
+ :raise TypeError: if permissions cannot be read.
+ :raise ValueError: if neither password nor permissions provided.
+ """
+ url = "db/{0}/users/{1}".format(self._database, username)
+
+ if not password and not permissions:
+ raise ValueError("Nothing to alter for user {0}.".format(username))
+
+ data = {}
+
+ if password:
+ data['password'] = password
+
+ if permissions:
+ try:
+ data['readFrom'], data['writeTo'] = permissions
+ except (ValueError, TypeError):
+ raise TypeError(
+ "'permissions' must be (readFrom, writeTo) tuple"
+ )
+
+ self.request(
+ url=url,
+ method='POST',
+ data=data,
+ expected_response_code=200
+ )
+
+ if username == self._username:
+ self._password = password
+
+ return True
+
+ def delete_database_user(self, username):
+ """Delete database user."""
+ url = "db/{0}/users/{1}".format(self._database, username)
+
+ self.request(
+ url=url,
+ method='DELETE',
+ expected_response_code=200
+ )
+
+ return True
+
+ # update the user by POSTing to db/site_dev/users/paul
+
+ def update_permission(self, username, json_body):
+ """Update read/write permission.
+
+ 2013-11-08: This endpoint has not been implemented yet in ver0.0.8,
+ but it is documented in http://influxdb.org/docs/api/http.html.
+ See also: src/api/http/api.go:l57
+
+ """
+ raise NotImplementedError()
+
+ def send_packet(self, packet):
+ """Send a UDP packet along the wire."""
+ data = json.dumps(packet)
+ byte = data.encode('utf-8')
+ self.udp_socket.sendto(byte, (self._host, self._udp_port))
diff --git a/influxdb/influxdb08/dataframe_client.py b/influxdb/influxdb08/dataframe_client.py
new file mode 100644
index 00000000..2867125d
--- /dev/null
+++ b/influxdb/influxdb08/dataframe_client.py
@@ -0,0 +1,177 @@
+# -*- coding: utf-8 -*-
+"""DataFrame client for InfluxDB v0.8."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+import math
+import warnings
+
+from .client import InfluxDBClient
+
+
+class DataFrameClient(InfluxDBClient):
+    """Primary definition of the DataFrameClient for v0.8.
+
+ The ``DataFrameClient`` object holds information necessary to connect
+ to InfluxDB. Requests can be made to InfluxDB directly through the client.
+ The client reads and writes from pandas DataFrames.
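+
+    A minimal sketch (connection values and data are illustrative)::
+
+        import pandas as pd
+
+        client = DataFrameClient('localhost', 8086, 'root', 'root', 'example')
+        df = pd.DataFrame([[0.64]], columns=['value'],
+                          index=pd.to_datetime(['2009-11-10 23:00:00']))
+        client.write_points({'cpu_load_short': df})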
+ """
+
+ def __init__(self, ignore_nan=True, *args, **kwargs):
+ """Initialize an instance of the DataFrameClient."""
+ super(DataFrameClient, self).__init__(*args, **kwargs)
+
+ try:
+ global pd
+ import pandas as pd
+ except ImportError as ex:
+ raise ImportError('DataFrameClient requires Pandas, '
+ '"{ex}" problem importing'.format(ex=str(ex)))
+
+ self.EPOCH = pd.Timestamp('1970-01-01 00:00:00.000+00:00')
+ self.ignore_nan = ignore_nan
+
+ def write_points(self, data, *args, **kwargs):
+ """Write to multiple time series names.
+
+ :param data: A dictionary mapping series names to pandas DataFrames
+ :param time_precision: [Optional, default 's'] Either 's', 'm', 'ms'
+ or 'u'.
+ :param batch_size: [Optional] Value to write the points in batches
+ instead of all at one time. Useful for when doing data dumps from
+ one database to another or when doing a massive write operation
+ :type batch_size: int
+ """
+ batch_size = kwargs.get('batch_size')
+ time_precision = kwargs.get('time_precision', 's')
+ if batch_size:
+ kwargs.pop('batch_size') # don't hand over to InfluxDBClient
+ for key, data_frame in data.items():
+ number_batches = int(math.ceil(
+ len(data_frame) / float(batch_size)))
+ for batch in range(number_batches):
+ start_index = batch * batch_size
+ end_index = (batch + 1) * batch_size
+ outdata = [
+ self._convert_dataframe_to_json(
+ name=key,
+ dataframe=data_frame
+ .iloc[start_index:end_index].copy(),
+ time_precision=time_precision)]
+ InfluxDBClient.write_points(self, outdata, *args, **kwargs)
+ return True
+
+ outdata = [
+ self._convert_dataframe_to_json(name=key, dataframe=dataframe,
+ time_precision=time_precision)
+ for key, dataframe in data.items()]
+ return InfluxDBClient.write_points(self, outdata, *args, **kwargs)
+
+ def write_points_with_precision(self, data, time_precision='s'):
+ """Write to multiple time series names.
+
+        DEPRECATED.
+ """
+ warnings.warn(
+ "write_points_with_precision is deprecated, and will be removed "
+ "in future versions. Please use "
+ "``DataFrameClient.write_points(time_precision='..')`` instead.",
+ FutureWarning)
+        return self.write_points(data, time_precision=time_precision)
+
+ def query(self, query, time_precision='s', chunked=False):
+ """Query data into DataFrames.
+
+        Returns a DataFrame for a single time series, or a dict mapping each
+        series name to its DataFrame when the query returns multiple series.
+
+ :param time_precision: [Optional, default 's'] Either 's', 'm', 'ms'
+ or 'u'.
+ :param chunked: [Optional, default=False] True if the data shall be
+ retrieved in chunks, False otherwise.
+ """
+ result = InfluxDBClient.query(self, query=query,
+ time_precision=time_precision,
+ chunked=chunked)
+ if len(result) == 0:
+ return result
+ elif len(result) == 1:
+ return self._to_dataframe(result[0], time_precision)
+ else:
+ ret = {}
+ for time_series in result:
+ ret[time_series['name']] = self._to_dataframe(time_series,
+ time_precision)
+ return ret
+
+ @staticmethod
+ def _to_dataframe(json_result, time_precision):
+ dataframe = pd.DataFrame(data=json_result['points'],
+ columns=json_result['columns'])
+ if 'sequence_number' in dataframe.keys():
+ dataframe.sort_values(['time', 'sequence_number'], inplace=True)
+ else:
+ dataframe.sort_values(['time'], inplace=True)
+
+ pandas_time_unit = time_precision
+ if time_precision == 'm':
+ pandas_time_unit = 'ms'
+ elif time_precision == 'u':
+ pandas_time_unit = 'us'
+
+ dataframe.index = pd.to_datetime(list(dataframe['time']),
+ unit=pandas_time_unit,
+ utc=True)
+ del dataframe['time']
+ return dataframe
+
+ def _convert_dataframe_to_json(self, dataframe, name, time_precision='s'):
+ if not isinstance(dataframe, pd.DataFrame):
+ raise TypeError('Must be DataFrame, but type was: {0}.'
+ .format(type(dataframe)))
+ if not (isinstance(dataframe.index, pd.PeriodIndex) or
+ isinstance(dataframe.index, pd.DatetimeIndex)):
+            raise TypeError('Must be DataFrame with DatetimeIndex or '
+                            'PeriodIndex.')
+
+ if isinstance(dataframe.index, pd.PeriodIndex):
+ dataframe.index = dataframe.index.to_timestamp()
+ else:
+ dataframe.index = pd.to_datetime(dataframe.index)
+
+ if dataframe.index.tzinfo is None:
+ dataframe.index = dataframe.index.tz_localize('UTC')
+ dataframe['time'] = [self._datetime_to_epoch(dt, time_precision)
+ for dt in dataframe.index]
+ data = {'name': name,
+ 'columns': [str(column) for column in dataframe.columns],
+ 'points': [self._convert_array(x) for x in dataframe.values]}
+ return data
+
+ def _convert_array(self, array):
+ try:
+ global np
+ import numpy as np
+ except ImportError as ex:
+ raise ImportError('DataFrameClient requires Numpy, '
+ '"{ex}" problem importing'.format(ex=str(ex)))
+
+ if self.ignore_nan:
+ number_types = (int, float, np.number)
+ condition = (all(isinstance(el, number_types) for el in array) and
+ np.isnan(array))
+ return list(np.where(condition, None, array))
+
+ return list(array)
+
+ def _datetime_to_epoch(self, datetime, time_precision='s'):
+ seconds = (datetime - self.EPOCH).total_seconds()
+ if time_precision == 's':
+ return seconds
+ elif time_precision == 'm' or time_precision == 'ms':
+ return seconds * 1000
+ elif time_precision == 'u':
+ return seconds * 1000000
diff --git a/influxdb/influxdb08/helper.py b/influxdb/influxdb08/helper.py
new file mode 100644
index 00000000..5f2d4614
--- /dev/null
+++ b/influxdb/influxdb08/helper.py
@@ -0,0 +1,155 @@
+# -*- coding: utf-8 -*-
+"""Helper class for InfluxDB for v0.8."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from collections import namedtuple, defaultdict
+from warnings import warn
+
+import six
+
+
+class SeriesHelper(object):
+ """Define the SeriesHelper object for InfluxDB v0.8.
+
+ Subclassing this helper eases writing data points in bulk.
+ All data points are immutable, ensuring they do not get overwritten.
+ Each subclass can write to its own database.
+ The time series names can also be based on one or more defined fields.
+
+ Annotated example::
+
+ class MySeriesHelper(SeriesHelper):
+ class Meta:
+ # Meta class stores time series helper configuration.
+ series_name = 'events.stats.{server_name}'
+ # Series name must be a string, curly brackets for dynamic use.
+ fields = ['time', 'server_name']
+ # Defines all the fields in this time series.
+ ### Following attributes are optional. ###
+ client = TestSeriesHelper.client
+ # Client should be an instance of InfluxDBClient.
+                # Only used if autocommit is True.
+ bulk_size = 5
+ # Defines the number of data points to write simultaneously.
+ # Only applicable if autocommit is True.
+ autocommit = True
+ # If True and no bulk_size, then will set bulk_size to 1.
+
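+        To record points via the helper (a sketch; the field names follow
+        the Meta definition above)::
+
+            MySeriesHelper(server_name='us.east-1', time=159)
+            MySeriesHelper.commit()  # only needed when autocommit is False
+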
+ """
+
+ __initialized__ = False
+
+ def __new__(cls, *args, **kwargs):
+ """Initialize class attributes for subsequent constructor calls.
+
+ :note: *args and **kwargs are not explicitly used in this function,
+ but needed for Python 2 compatibility.
+ """
+ if not cls.__initialized__:
+ cls.__initialized__ = True
+ try:
+ _meta = getattr(cls, 'Meta')
+ except AttributeError:
+ raise AttributeError(
+ 'Missing Meta class in {0}.'.format(
+ cls.__name__))
+
+ for attr in ['series_name', 'fields']:
+ try:
+ setattr(cls, '_' + attr, getattr(_meta, attr))
+ except AttributeError:
+ raise AttributeError(
+ 'Missing {0} in {1} Meta class.'.format(
+ attr,
+ cls.__name__))
+
+ cls._autocommit = getattr(_meta, 'autocommit', False)
+
+ cls._client = getattr(_meta, 'client', None)
+ if cls._autocommit and not cls._client:
+ raise AttributeError(
+ 'In {0}, autocommit is set to True, but no client is set.'
+ .format(cls.__name__))
+
+ try:
+ cls._bulk_size = getattr(_meta, 'bulk_size')
+ if cls._bulk_size < 1 and cls._autocommit:
+ warn(
+ 'Definition of bulk_size in {0} forced to 1, '
+ 'was less than 1.'.format(cls.__name__))
+ cls._bulk_size = 1
+ except AttributeError:
+ cls._bulk_size = -1
+ else:
+ if not cls._autocommit:
+ warn(
+                        'Definition of bulk_size in {0} has no effect '
+                        'because autocommit is false.'.format(cls.__name__))
+
+ cls._datapoints = defaultdict(list)
+ cls._type = namedtuple(cls.__name__, cls._fields)
+
+ return super(SeriesHelper, cls).__new__(cls)
+
+ def __init__(self, **kw):
+ """Create a new data point.
+
+ All fields must be present.
+
+        :note: Data points are written once `bulk_size` is reached, per
+            Helper subclass.
+ :warning: Data points are *immutable* (`namedtuples`).
+ """
+ cls = self.__class__
+
+ if sorted(cls._fields) != sorted(kw.keys()):
+ raise NameError(
+ 'Expected {0}, got {1}.'.format(
+ cls._fields,
+ kw.keys()))
+
+ cls._datapoints[cls._series_name.format(**kw)].append(cls._type(**kw))
+
+ if cls._autocommit and \
+ sum(len(series) for series in cls._datapoints.values()) \
+ >= cls._bulk_size:
+ cls.commit()
+
+ @classmethod
+ def commit(cls, client=None):
+ """Commit everything from datapoints via the client.
+
+ :param client: InfluxDBClient instance for writing points to InfluxDB.
+ :attention: any provided client will supersede the class client.
+ :return: result of client.write_points.
+ """
+ if not client:
+ client = cls._client
+ rtn = client.write_points(cls._json_body_())
+ cls._reset_()
+ return rtn
+
+ @classmethod
+ def _json_body_(cls):
+ """Return JSON body of the datapoints.
+
+ :return: JSON body of the datapoints.
+ """
+ json = []
+ if not cls.__initialized__:
+ cls._reset_()
+ for series_name, data in six.iteritems(cls._datapoints):
+ json.append({'name': series_name,
+ 'columns': cls._fields,
+ 'points': [[getattr(point, k) for k in cls._fields]
+ for point in data]
+ })
+ return json
+
+ @classmethod
+ def _reset_(cls):
+ """Reset data storage."""
+ cls._datapoints = defaultdict(list)
diff --git a/influxdb/line_protocol.py b/influxdb/line_protocol.py
new file mode 100644
index 00000000..25dd2ad7
--- /dev/null
+++ b/influxdb/line_protocol.py
@@ -0,0 +1,210 @@
+# -*- coding: utf-8 -*-
+"""Define the line_protocol handler."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from datetime import datetime
+from numbers import Integral
+
+from pytz import UTC
+from dateutil.parser import parse
+from six import binary_type, text_type, integer_types, PY2
+
+EPOCH = UTC.localize(datetime.utcfromtimestamp(0))
+
+
+def _to_nanos(timestamp):
+ delta = timestamp - EPOCH
+ nanos_in_days = delta.days * 86400 * 10 ** 9
+ nanos_in_seconds = delta.seconds * 10 ** 9
+ nanos_in_micros = delta.microseconds * 10 ** 3
+ return nanos_in_days + nanos_in_seconds + nanos_in_micros
+
+
+def _convert_timestamp(timestamp, precision=None):
+ if isinstance(timestamp, Integral):
+ return timestamp # assume precision is correct if timestamp is int
+
+ if isinstance(_get_unicode(timestamp), text_type):
+ timestamp = parse(timestamp)
+
+ if isinstance(timestamp, datetime):
+ if not timestamp.tzinfo:
+ timestamp = UTC.localize(timestamp)
+
+ ns = _to_nanos(timestamp)
+ if precision is None or precision == 'n':
+ return ns
+
+ if precision == 'u':
+ return ns / 10**3
+
+ if precision == 'ms':
+ return ns / 10**6
+
+ if precision == 's':
+ return ns / 10**9
+
+ if precision == 'm':
+ return ns / 10**9 / 60
+
+ if precision == 'h':
+ return ns / 10**9 / 3600
+
+ raise ValueError(timestamp)
+
+
+def _escape_tag(tag):
+ tag = _get_unicode(tag, force=True)
+ return tag.replace(
+ "\\", "\\\\"
+ ).replace(
+ " ", "\\ "
+ ).replace(
+ ",", "\\,"
+ ).replace(
+ "=", "\\="
+ ).replace(
+ "\n", "\\n"
+ )
+
+
+def _escape_tag_value(value):
+ ret = _escape_tag(value)
+ if ret.endswith('\\'):
+ ret += ' '
+ return ret
+
+
+def quote_ident(value):
+    """Return the given value as a double-quoted, escaped identifier."""
+ return "\"{}\"".format(value
+ .replace("\\", "\\\\")
+ .replace("\"", "\\\"")
+ .replace("\n", "\\n"))
+
+
+def quote_literal(value):
+ """Quote provided literal."""
+ return "'{}'".format(value
+ .replace("\\", "\\\\")
+ .replace("'", "\\'"))
+
+
+def _is_float(value):
+ try:
+ float(value)
+ except (TypeError, ValueError):
+ return False
+
+ return True
+
+
+def _escape_value(value):
+ if value is None:
+ return ''
+
+ value = _get_unicode(value)
+ if isinstance(value, text_type):
+ return quote_ident(value)
+
+ if isinstance(value, integer_types) and not isinstance(value, bool):
+ return str(value) + 'i'
+
+ if isinstance(value, bool):
+ return str(value)
+
+ if _is_float(value):
+ return repr(float(value))
+
+ return str(value)
+
+
+def _get_unicode(data, force=False):
+ """Try to return a text aka unicode object from the given data."""
+ if isinstance(data, binary_type):
+ return data.decode('utf-8')
+
+ if data is None:
+ return ''
+
+ if force:
+ if PY2:
+ return unicode(data)
+ return str(data)
+
+ return data
+
+
+def make_line(measurement, tags=None, fields=None, time=None, precision=None):
+    """Build a line protocol line from measurement, tags, fields and time."""
+ tags = tags or {}
+ fields = fields or {}
+
+ line = _escape_tag(_get_unicode(measurement))
+
+ # tags should be sorted client-side to take load off server
+ tag_list = []
+ for tag_key in sorted(tags.keys()):
+ key = _escape_tag(tag_key)
+ value = _escape_tag(tags[tag_key])
+
+ if key != '' and value != '':
+ tag_list.append(
+ "{key}={value}".format(key=key, value=value)
+ )
+
+ if tag_list:
+ line += ',' + ','.join(tag_list)
+
+ field_list = []
+ for field_key in sorted(fields.keys()):
+ key = _escape_tag(field_key)
+ value = _escape_value(fields[field_key])
+
+ if key != '' and value != '':
+ field_list.append("{key}={value}".format(
+ key=key,
+ value=value
+ ))
+
+ if field_list:
+ line += ' ' + ','.join(field_list)
+
+ if time is not None:
+ timestamp = _get_unicode(str(int(
+ _convert_timestamp(time, precision)
+ )))
+ line += ' ' + timestamp
+
+ return line
+
+
+def make_lines(data, precision=None):
+    """Serialize the given dict of points into line protocol.
+
+ Extracts the points from the given dict and returns a Unicode string
+ matching the line protocol introduced in InfluxDB 0.9.0.
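+
+    For example (a sketch; the float formatting comes from _escape_value)::
+
+        make_lines({'points': [{'measurement': 'cpu',
+                                'tags': {'host': 'a'},
+                                'fields': {'value': 0.64}}]})
+        # -> 'cpu,host=a value=0.64\n'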
+ """
+ lines = []
+ static_tags = data.get('tags')
+ for point in data['points']:
+ if static_tags:
+ tags = dict(static_tags) # make a copy, since we'll modify
+ tags.update(point.get('tags') or {})
+ else:
+ tags = point.get('tags') or {}
+
+ line = make_line(
+ point.get('measurement', data.get('measurement')),
+ tags=tags,
+ fields=point.get('fields'),
+ precision=precision,
+ time=point.get('time')
+ )
+ lines.append(line)
+
+ return '\n'.join(lines) + '\n'
diff --git a/influxdb/resultset.py b/influxdb/resultset.py
new file mode 100644
index 00000000..ba4f3c13
--- /dev/null
+++ b/influxdb/resultset.py
@@ -0,0 +1,206 @@
+# -*- coding: utf-8 -*-
+"""Module to prepare the resultset."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+import warnings
+
+from influxdb.exceptions import InfluxDBClientError
+
+_sentinel = object()
+
+
+class ResultSet(object):
+ """A wrapper around a single InfluxDB query result."""
+
+ def __init__(self, series, raise_errors=True):
+ """Initialize the ResultSet."""
+ self._raw = series
+ self._error = self._raw.get('error', None)
+
+ if self.error is not None and raise_errors is True:
+ raise InfluxDBClientError(self.error)
+
+ @property
+ def raw(self):
+ """Raw JSON from InfluxDB."""
+ return self._raw
+
+ @raw.setter
+ def raw(self, value):
+ self._raw = value
+
+ @property
+ def error(self):
+ """Error returned by InfluxDB."""
+ return self._error
+
+ def __getitem__(self, key):
+ """Retrieve the series name or specific set based on key.
+
+        :param key: Either a series name, or a tags_dict, or
+            a 2-tuple(series_name, tags_dict).
+            If the series name is None (or not given) then every series
+            matching the given tags (if any) will yield its points
+            one after the other.
+            To get the points of every series in this resultset,
+            provide None as the key.
+        :return: A generator yielding `Point`s matching the given key.
+        NB: the order in which the points are yielded is undefined and
+            may change.
+ """
+ warnings.warn(
+            ("ResultSet's ``__getitem__`` method will be deprecated. Use "
+             "``get_points`` instead."),
+ DeprecationWarning
+ )
+
+ if isinstance(key, tuple):
+ if len(key) != 2:
+ raise TypeError('only 2-tuples allowed')
+
+ name = key[0]
+ tags = key[1]
+
+ if not isinstance(tags, dict) and tags is not None:
+ raise TypeError('tags should be a dict')
+ elif isinstance(key, dict):
+ name = None
+ tags = key
+ else:
+ name = key
+ tags = None
+
+ return self.get_points(name, tags)
+
+ def get_points(self, measurement=None, tags=None):
+ """Return a generator for all the points that match the given filters.
+
+ :param measurement: The measurement name
+ :type measurement: str
+
+ :param tags: Tags to look for
+ :type tags: dict
+
+ :return: Points generator
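+
+        A usage sketch (measurement and tag values are illustrative)::
+
+            for point in rs.get_points('cpu_load_short',
+                                       tags={'host': 'server01'}):
+                print(point)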
+ """
+ # Raise error if measurement is not str or bytes
+ if not isinstance(measurement,
+ (bytes, type(b''.decode()), type(None))):
+            raise TypeError('measurement must be a str or None')
+
+ for series in self._get_series():
+ series_name = series.get('measurement',
+ series.get('name', 'results'))
+ if series_name is None:
+                # This is a "system" query, or a query which
+                # doesn't return a name attribute
+                # (like 'show retention policies').
+ if tags is None:
+ for item in self._get_points_for_series(series):
+ yield item
+
+ elif measurement in (None, series_name):
+                # By default, if no tags were provided then
+                # we match every returned series.
+ series_tags = series.get('tags', {})
+ for item in self._get_points_for_series(series):
+ if tags is None or \
+ self._tag_matches(item, tags) or \
+ self._tag_matches(series_tags, tags):
+ yield item
+
+ def __repr__(self):
+ """Representation of ResultSet object."""
+ items = []
+
+ for item in self.items():
+ items.append("'%s': %s" % (item[0], list(item[1])))
+
+ return "ResultSet({%s})" % ", ".join(items)
+
+ def __iter__(self):
+ """Yield one dict instance per series result."""
+ for key in self.keys():
+ yield list(self.__getitem__(key))
+
+ @staticmethod
+ def _tag_matches(tags, filter):
+ """Check if all key/values in filter match in tags."""
+ for tag_name, tag_value in filter.items():
+            # Use _sentinel as the default rather than None, since None
+            # can be a valid series tag value: a series with no such tag
+            # may report it as null/None.
+ series_tag_value = tags.get(tag_name, _sentinel)
+ if series_tag_value != tag_value:
+ return False
+
+ return True
+
+ def _get_series(self):
+ """Return all series."""
+ return self.raw.get('series', [])
+
+ def __len__(self):
+ """Return the len of the keys in the ResultSet."""
+ return len(self.keys())
+
+ def keys(self):
+ """Return the list of keys in the ResultSet.
+
+ :return: List of keys. Keys are tuples (series_name, tags)
+ """
+ keys = []
+ for series in self._get_series():
+ keys.append(
+ (series.get('measurement',
+ series.get('name', 'results')),
+ series.get('tags', None))
+ )
+ return keys
+
+ def items(self):
+ """Return the set of items from the ResultSet.
+
+ :return: List of tuples, (key, generator)
+ """
+ items = []
+ for series in self._get_series():
+ series_key = (series.get('measurement',
+ series.get('name', 'results')),
+ series.get('tags', None))
+ items.append(
+ (series_key, self._get_points_for_series(series))
+ )
+ return items
+
+ def _get_points_for_series(self, series):
+ """Return generator of dict from columns and values of a series.
+
+ :param series: One series
+ :return: Generator of dicts
+ """
+ for point in series.get('values', []):
+ yield self.point_from_cols_vals(
+ series['columns'],
+ point
+ )
+
+ @staticmethod
+ def point_from_cols_vals(cols, vals):
+ """Create a dict from columns and values lists.
+
+ :param cols: List of columns
+ :param vals: List of values
+ :return: Dict where keys are columns.
+ """
+ point = {}
+ for col_index, col_name in enumerate(cols):
+ point[col_name] = vals[col_index]
+
+ return point
diff --git a/influxdb/tests/__init__.py b/influxdb/tests/__init__.py
new file mode 100644
index 00000000..f7c5dfb9
--- /dev/null
+++ b/influxdb/tests/__init__.py
@@ -0,0 +1,21 @@
+# -*- coding: utf-8 -*-
+"""Configure the tests package for InfluxDBClient."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+import sys
+import os
+
+import unittest
+
+using_pypy = hasattr(sys, "pypy_version_info")
+skip_if_pypy = unittest.skipIf(using_pypy, "Skipping this test on pypy.")
+
+_skip_server_tests = os.environ.get(
+ 'INFLUXDB_PYTHON_SKIP_SERVER_TESTS',
+ None) == 'True'
+skip_server_tests = unittest.skipIf(_skip_server_tests,
+ "Skipping server tests...")
diff --git a/influxdb/tests/chunked_json_test.py b/influxdb/tests/chunked_json_test.py
new file mode 100644
index 00000000..f633bcb1
--- /dev/null
+++ b/influxdb/tests/chunked_json_test.py
@@ -0,0 +1,51 @@
+# -*- coding: utf-8 -*-
+"""Chunked JSON test."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+import unittest
+
+from influxdb import chunked_json
+
+
+class TestChunkJson(unittest.TestCase):
+ """Set up the TestChunkJson object."""
+
+ @classmethod
+ def setUpClass(cls):
+ """Initialize the TestChunkJson object."""
+ super(TestChunkJson, cls).setUpClass()
+
+ def test_load(self):
+ """Test reading a sequence of JSON values from a string."""
+ example_response = \
+ '{"results": [{"series": [{"measurement": "sdfsdfsdf", ' \
+ '"columns": ["time", "value"], "values": ' \
+ '[["2009-11-10T23:00:00Z", 0.64]]}]}, {"series": ' \
+ '[{"measurement": "cpu_load_short", "columns": ["time", "value"],'\
+ '"values": [["2009-11-10T23:00:00Z", 0.64]]}]}]}'
+
+ res = list(chunked_json.loads(example_response))
+
+ self.assertListEqual(
+ [
+ {
+ 'results': [
+ {'series': [{
+ 'values': [['2009-11-10T23:00:00Z', 0.64]],
+ 'measurement': 'sdfsdfsdf',
+ 'columns':
+ ['time', 'value']}]},
+ {'series': [{
+ 'values': [['2009-11-10T23:00:00Z', 0.64]],
+ 'measurement': 'cpu_load_short',
+ 'columns': ['time', 'value']}]}
+ ]
+ }
+ ],
+ res
+ )
diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py
new file mode 100644
index 00000000..115fbc48
--- /dev/null
+++ b/influxdb/tests/client_test.py
@@ -0,0 +1,1557 @@
+# -*- coding: utf-8 -*-
+"""Unit tests for the InfluxDBClient.
+
+NB/WARNING:
+This module implements tests for the InfluxDBClient class
+but does so
+ + without any server instance running
+ + by mocking all the expected responses.
+
+So any change of (response format from) the server will **NOT** be
+detected by this module.
+
+See client_test_with_server.py for tests against a running server instance.
+
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+import random
+import socket
+import unittest
+import warnings
+
+import io
+import gzip
+import json
+import mock
+import requests
+import requests.exceptions
+import requests_mock
+
+from nose.tools import raises
+from urllib3.connection import HTTPConnection
+
+from influxdb import InfluxDBClient
+from influxdb.resultset import ResultSet
+
+
+def _build_response_object(status_code=200, content=""):
+ resp = requests.Response()
+ resp.status_code = status_code
+ resp._content = content.encode("utf8")
+ return resp
+
+
+def _mocked_session(cli, method="GET", status_code=200, content=""):
+ method = method.upper()
+
+ def request(*args, **kwargs):
+ """Request content from the mocked session."""
+ c = content
+
+ # Check method
+ assert method == kwargs.get('method', 'GET')
+
+ if method == 'POST':
+ data = kwargs.get('data', None)
+
+ if data is not None:
+ # Data must be a string
+ assert isinstance(data, str)
+
+ # Data must be a JSON string
+ assert c == json.loads(data, strict=True)
+
+ c = data
+
+ # Anyway, Content must be a JSON string (or empty string)
+ if not isinstance(c, str):
+ c = json.dumps(c)
+
+ return _build_response_object(status_code=status_code, content=c)
+
+ return mock.patch.object(cli._session, 'request', side_effect=request)
+
+
+class TestInfluxDBClient(unittest.TestCase):
+ """Set up the TestInfluxDBClient object."""
+
+ def setUp(self):
+ """Initialize an instance of TestInfluxDBClient object."""
+ # By default, raise exceptions on warnings
+ warnings.simplefilter('error', FutureWarning)
+
+ self.cli = InfluxDBClient('localhost', 8086, 'username', 'password')
+ self.dummy_points = [
+ {
+ "measurement": "cpu_load_short",
+ "tags": {
+ "host": "server01",
+ "region": "us-west"
+ },
+ "time": "2009-11-10T23:00:00.123456Z",
+ "fields": {
+ "value": 0.64
+ }
+ }
+ ]
+
+ self.dsn_string = 'influxdb://uSr:pWd@my.host.fr:1886/db'
+
+ def test_scheme(self):
+        """Test URL scheme handling for TestInfluxDBClient object."""
+ cli = InfluxDBClient('host', 8086, 'username', 'password', 'database')
+ self.assertEqual('http://host:8086', cli._baseurl)
+
+ cli = InfluxDBClient(
+ 'host', 8086, 'username', 'password', 'database', ssl=True
+ )
+ self.assertEqual('https://host:8086', cli._baseurl)
+
+ cli = InfluxDBClient(
+ 'host', 8086, 'username', 'password', 'database', ssl=True,
+ path="somepath"
+ )
+ self.assertEqual('https://host:8086/somepath', cli._baseurl)
+
+ cli = InfluxDBClient(
+ 'host', 8086, 'username', 'password', 'database', ssl=True,
+ path=None
+ )
+ self.assertEqual('https://host:8086', cli._baseurl)
+
+ cli = InfluxDBClient(
+ 'host', 8086, 'username', 'password', 'database', ssl=True,
+ path="/somepath"
+ )
+ self.assertEqual('https://host:8086/somepath', cli._baseurl)
+
+ def test_dsn(self):
+        """Test DSN parsing for TestInfluxDBClient object."""
+ cli = InfluxDBClient.from_dsn('influxdb://192.168.0.1:1886')
+ self.assertEqual('http://192.168.0.1:1886', cli._baseurl)
+
+ cli = InfluxDBClient.from_dsn(self.dsn_string)
+ self.assertEqual('http://my.host.fr:1886', cli._baseurl)
+ self.assertEqual('uSr', cli._username)
+ self.assertEqual('pWd', cli._password)
+ self.assertEqual('db', cli._database)
+ self.assertFalse(cli._use_udp)
+
+ cli = InfluxDBClient.from_dsn('udp+' + self.dsn_string)
+ self.assertTrue(cli._use_udp)
+
+ cli = InfluxDBClient.from_dsn('https+' + self.dsn_string)
+ self.assertEqual('https://my.host.fr:1886', cli._baseurl)
+
+ cli = InfluxDBClient.from_dsn('https+' + self.dsn_string,
+ **{'ssl': False})
+ self.assertEqual('http://my.host.fr:1886', cli._baseurl)
+
+ def test_cert(self):
+ """Test mutual TLS authentication for TestInfluxDBClient object."""
+ cli = InfluxDBClient(ssl=True, cert='/etc/pki/tls/private/dummy.crt')
+ self.assertEqual(cli._session.cert, '/etc/pki/tls/private/dummy.crt')
+
+ with self.assertRaises(ValueError):
+ cli = InfluxDBClient(cert='/etc/pki/tls/private/dummy.crt')
+
+ def test_switch_database(self):
+ """Test switch database in TestInfluxDBClient object."""
+ cli = InfluxDBClient('host', 8086, 'username', 'password', 'database')
+ cli.switch_database('another_database')
+ self.assertEqual('another_database', cli._database)
+
+ def test_switch_user(self):
+ """Test switch user in TestInfluxDBClient object."""
+ cli = InfluxDBClient('host', 8086, 'username', 'password', 'database')
+ cli.switch_user('another_username', 'another_password')
+ self.assertEqual('another_username', cli._username)
+ self.assertEqual('another_password', cli._password)
+
+ def test_write(self):
+ """Test write in TestInfluxDBClient object."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204
+ )
+ cli = InfluxDBClient(database='db')
+ cli.write(
+ {"database": "mydb",
+ "retentionPolicy": "mypolicy",
+ "points": [{"measurement": "cpu_load_short",
+ "tags": {"host": "server01",
+ "region": "us-west"},
+ "time": "2009-11-10T23:00:00Z",
+ "fields": {"value": 0.64}}]}
+ )
+
+ self.assertEqual(
+ m.last_request.body,
+ b"cpu_load_short,host=server01,region=us-west "
+ b"value=0.64 1257894000000000000\n",
+ )
+
+ def test_write_points(self):
+ """Test write points for TestInfluxDBClient object."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204
+ )
+
+ cli = InfluxDBClient(database='db')
+ cli.write_points(
+ self.dummy_points,
+ )
+ self.assertEqual(
+ 'cpu_load_short,host=server01,region=us-west '
+ 'value=0.64 1257894000123456000\n',
+ m.last_request.body.decode('utf-8'),
+ )
+
+ def test_write_gzip(self):
+ """Test write in TestInfluxDBClient object."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204
+ )
+
+ cli = InfluxDBClient(database='db', gzip=True)
+ cli.write(
+ {"database": "mydb",
+ "retentionPolicy": "mypolicy",
+ "points": [{"measurement": "cpu_load_short",
+ "tags": {"host": "server01",
+ "region": "us-west"},
+ "time": "2009-11-10T23:00:00Z",
+ "fields": {"value": 0.64}}]}
+ )
+
+ compressed = io.BytesIO()
+ with gzip.GzipFile(
+ compresslevel=9,
+ fileobj=compressed,
+ mode='w'
+ ) as f:
+ f.write(
+ b"cpu_load_short,host=server01,region=us-west "
+ b"value=0.64 1257894000000000000\n"
+ )
+
+ self.assertEqual(
+ m.last_request.body,
+ compressed.getvalue(),
+ )
+
+ def test_write_points_gzip(self):
+ """Test write points for TestInfluxDBClient object."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204
+ )
+
+ cli = InfluxDBClient(database='db', gzip=True)
+ cli.write_points(
+ self.dummy_points,
+ )
+
+ compressed = io.BytesIO()
+ with gzip.GzipFile(
+ compresslevel=9,
+ fileobj=compressed,
+ mode='w'
+ ) as f:
+ f.write(
+ b'cpu_load_short,host=server01,region=us-west '
+ b'value=0.64 1257894000123456000\n'
+ )
+ self.assertEqual(
+ m.last_request.body,
+ compressed.getvalue(),
+ )
+
+ def test_write_points_toplevel_attributes(self):
+ """Test write points attrs for TestInfluxDBClient object."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204
+ )
+
+ cli = InfluxDBClient(database='db')
+ cli.write_points(
+ self.dummy_points,
+ database='testdb',
+ tags={"tag": "hello"},
+ retention_policy="somepolicy"
+ )
+ self.assertEqual(
+ 'cpu_load_short,host=server01,region=us-west,tag=hello '
+ 'value=0.64 1257894000123456000\n',
+ m.last_request.body.decode('utf-8'),
+ )
+
+ def test_write_points_batch(self):
+ """Test write points batch for TestInfluxDBClient object."""
+ dummy_points = [
+ {"measurement": "cpu_usage", "tags": {"unit": "percent"},
+ "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.34}},
+ {"measurement": "network", "tags": {"direction": "in"},
+ "time": "2009-11-10T23:00:00Z", "fields": {"value": 123.00}},
+ {"measurement": "network", "tags": {"direction": "out"},
+ "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.00}}
+ ]
+ expected_last_body = (
+ "network,direction=out,host=server01,region=us-west "
+ "value=12.0 1257894000000000000\n"
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+ cli = InfluxDBClient(database='db')
+ cli.write_points(points=dummy_points,
+ database='db',
+ tags={"host": "server01",
+ "region": "us-west"},
+ batch_size=2)
+ self.assertEqual(m.call_count, 2)
+ self.assertEqual(expected_last_body,
+ m.last_request.body.decode('utf-8'))
+
+ def test_write_points_batch_generator(self):
+ """Test write points batch from a generator for TestInfluxDBClient."""
+ dummy_points = [
+ {"measurement": "cpu_usage", "tags": {"unit": "percent"},
+ "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.34}},
+ {"measurement": "network", "tags": {"direction": "in"},
+ "time": "2009-11-10T23:00:00Z", "fields": {"value": 123.00}},
+ {"measurement": "network", "tags": {"direction": "out"},
+ "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.00}}
+ ]
+ dummy_points_generator = (point for point in dummy_points)
+ expected_last_body = (
+ "network,direction=out,host=server01,region=us-west "
+ "value=12.0 1257894000000000000\n"
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+ cli = InfluxDBClient(database='db')
+ cli.write_points(points=dummy_points_generator,
+ database='db',
+ tags={"host": "server01",
+ "region": "us-west"},
+ batch_size=2)
+ self.assertEqual(m.call_count, 2)
+ self.assertEqual(expected_last_body,
+ m.last_request.body.decode('utf-8'))
+
+ def test_write_points_udp(self):
+ """Test write points UDP for TestInfluxDBClient object."""
+ s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+ port = random.randint(4000, 8000)
+ s.bind(('0.0.0.0', port))
+
+ cli = InfluxDBClient(
+ 'localhost', 8086, 'root', 'root',
+ 'test', use_udp=True, udp_port=port
+ )
+ cli.write_points(self.dummy_points)
+
+ received_data, addr = s.recvfrom(1024)
+
+ self.assertEqual(
+ 'cpu_load_short,host=server01,region=us-west '
+ 'value=0.64 1257894000123456000\n',
+ received_data.decode()
+ )
+
+ @raises(Exception)
+ def test_write_points_fails(self):
+ """Test write points fail for TestInfluxDBClient object."""
+ cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
+ with _mocked_session(cli, 'post', 500):
+ cli.write_points([])
+
+ def test_write_points_with_precision(self):
+ """Test write points with precision for TestInfluxDBClient object."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204
+ )
+
+ cli = InfluxDBClient(database='db')
+
+ cli.write_points(self.dummy_points, time_precision='n')
+ self.assertEqual(
+ b'cpu_load_short,host=server01,region=us-west '
+ b'value=0.64 1257894000123456000\n',
+ m.last_request.body,
+ )
+
+ cli.write_points(self.dummy_points, time_precision='u')
+ self.assertEqual(
+ b'cpu_load_short,host=server01,region=us-west '
+ b'value=0.64 1257894000123456\n',
+ m.last_request.body,
+ )
+
+ cli.write_points(self.dummy_points, time_precision='ms')
+ self.assertEqual(
+ b'cpu_load_short,host=server01,region=us-west '
+ b'value=0.64 1257894000123\n',
+ m.last_request.body,
+ )
+
+ cli.write_points(self.dummy_points, time_precision='s')
+ self.assertEqual(
+ b"cpu_load_short,host=server01,region=us-west "
+ b"value=0.64 1257894000\n",
+ m.last_request.body,
+ )
+
+ cli.write_points(self.dummy_points, time_precision='m')
+ self.assertEqual(
+ b'cpu_load_short,host=server01,region=us-west '
+ b'value=0.64 20964900\n',
+ m.last_request.body,
+ )
+
+ cli.write_points(self.dummy_points, time_precision='h')
+ self.assertEqual(
+ b'cpu_load_short,host=server01,region=us-west '
+ b'value=0.64 349415\n',
+ m.last_request.body,
+ )
+
+ def test_write_points_with_consistency(self):
+ """Test write points with consistency for TestInfluxDBClient object."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ 'http://localhost:8086/write',
+ status_code=204
+ )
+
+ cli = InfluxDBClient(database='db')
+
+ cli.write_points(self.dummy_points, consistency='any')
+ self.assertEqual(
+ m.last_request.qs,
+ {'db': ['db'], 'consistency': ['any']}
+ )
+
+ def test_write_points_with_precision_udp(self):
+ """Test write points with precision for TestInfluxDBClient object."""
+ s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+ port = random.randint(4000, 8000)
+ s.bind(('0.0.0.0', port))
+
+ cli = InfluxDBClient(
+ 'localhost', 8086, 'root', 'root',
+ 'test', use_udp=True, udp_port=port
+ )
+
+ cli.write_points(self.dummy_points, time_precision='n')
+ received_data, addr = s.recvfrom(1024)
+ self.assertEqual(
+ b'cpu_load_short,host=server01,region=us-west '
+ b'value=0.64 1257894000123456000\n',
+ received_data,
+ )
+
+ cli.write_points(self.dummy_points, time_precision='u')
+ received_data, addr = s.recvfrom(1024)
+ self.assertEqual(
+ b'cpu_load_short,host=server01,region=us-west '
+ b'value=0.64 1257894000123456\n',
+ received_data,
+ )
+
+ cli.write_points(self.dummy_points, time_precision='ms')
+ received_data, addr = s.recvfrom(1024)
+ self.assertEqual(
+ b'cpu_load_short,host=server01,region=us-west '
+ b'value=0.64 1257894000123\n',
+ received_data,
+ )
+
+ cli.write_points(self.dummy_points, time_precision='s')
+ received_data, addr = s.recvfrom(1024)
+ self.assertEqual(
+ b"cpu_load_short,host=server01,region=us-west "
+ b"value=0.64 1257894000\n",
+ received_data,
+ )
+
+ cli.write_points(self.dummy_points, time_precision='m')
+ received_data, addr = s.recvfrom(1024)
+ self.assertEqual(
+ b'cpu_load_short,host=server01,region=us-west '
+ b'value=0.64 20964900\n',
+ received_data,
+ )
+
+ cli.write_points(self.dummy_points, time_precision='h')
+ received_data, addr = s.recvfrom(1024)
+ self.assertEqual(
+ b'cpu_load_short,host=server01,region=us-west '
+ b'value=0.64 349415\n',
+ received_data,
+ )
+
+ def test_write_points_bad_precision(self):
+        """Test write points w/bad precision for TestInfluxDBClient."""
+ cli = InfluxDBClient()
+ with self.assertRaisesRegexp(
+ Exception,
+ "Invalid time precision is given. "
+ "\(use 'n', 'u', 'ms', 's', 'm' or 'h'\)"
+ ):
+ cli.write_points(
+ self.dummy_points,
+ time_precision='g'
+ )
+
+ def test_write_points_bad_consistency(self):
+ """Test write points w/bad consistency value."""
+ cli = InfluxDBClient()
+ with self.assertRaises(ValueError):
+ cli.write_points(
+ self.dummy_points,
+ consistency='boo'
+ )
+
+ @raises(Exception)
+ def test_write_points_with_precision_fails(self):
+ """Test write points w/precision fail for TestInfluxDBClient object."""
+ cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
+ with _mocked_session(cli, 'post', 500):
+ cli.write_points_with_precision([])
+
+ def test_query(self):
+ """Test query method for TestInfluxDBClient object."""
+ example_response = (
+ '{"results": [{"series": [{"measurement": "sdfsdfsdf", '
+ '"columns": ["time", "value"], "values": '
+ '[["2009-11-10T23:00:00Z", 0.64]]}]}, {"series": '
+ '[{"measurement": "cpu_load_short", "columns": ["time", "value"], '
+ '"values": [["2009-11-10T23:00:00Z", 0.64]]}]}]}'
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/query",
+ text=example_response
+ )
+ rs = self.cli.query('select * from foo')
+
+ self.assertListEqual(
+ list(rs[0].get_points()),
+ [{'value': 0.64, 'time': '2009-11-10T23:00:00Z'}]
+ )
+
+ def test_query_msgpack(self):
+ """Test query method with a messagepack response."""
+ example_response = bytes(bytearray.fromhex(
+ "81a7726573756c74739182ac73746174656d656e745f696400a673657269"
+ "65739183a46e616d65a161a7636f6c756d6e7392a474696d65a176a67661"
+ "6c7565739192c70c05000000005d26178a019096c8cb3ff0000000000000"
+ ))
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/query",
+ request_headers={"Accept": "application/x-msgpack"},
+ headers={"Content-Type": "application/x-msgpack"},
+ content=example_response
+ )
+ rs = self.cli.query('select * from a')
+
+ self.assertListEqual(
+ list(rs.get_points()),
+ [{'v': 1.0, 'time': '2019-07-10T16:51:22.026253Z'}]
+ )
+
+ def test_select_into_post(self):
+ """Test SELECT.*INTO is POSTed."""
+ example_response = (
+ '{"results": [{"series": [{"measurement": "sdfsdfsdf", '
+ '"columns": ["time", "value"], "values": '
+ '[["2009-11-10T23:00:00Z", 0.64]]}]}, {"series": '
+ '[{"measurement": "cpu_load_short", "columns": ["time", "value"], '
+ '"values": [["2009-11-10T23:00:00Z", 0.64]]}]}]}'
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text=example_response
+ )
+ rs = self.cli.query('select * INTO newmeas from foo')
+
+ self.assertListEqual(
+ list(rs[0].get_points()),
+ [{'value': 0.64, 'time': '2009-11-10T23:00:00Z'}]
+ )
+
+ @unittest.skip('Not implemented for 0.9')
+ def test_query_chunked(self):
+ """Test chunked query for TestInfluxDBClient object."""
+ cli = InfluxDBClient(database='db')
+ example_object = {
+ 'points': [
+ [1415206250119, 40001, 667],
+ [1415206244555, 30001, 7],
+ [1415206228241, 20001, 788],
+ [1415206212980, 10001, 555],
+ [1415197271586, 10001, 23]
+ ],
+ 'measurement': 'foo',
+ 'columns': [
+ 'time',
+ 'sequence_number',
+ 'val'
+ ]
+ }
+ example_response = \
+ json.dumps(example_object) + json.dumps(example_object)
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/db/db/series",
+ text=example_response
+ )
+
+ self.assertListEqual(
+ cli.query('select * from foo', chunked=True),
+ [example_object, example_object]
+ )
+
+ @raises(Exception)
+ def test_query_fail(self):
+ """Test query failed for TestInfluxDBClient object."""
+ with _mocked_session(self.cli, 'get', 401):
+ self.cli.query('select column_one from foo;')
+
+ def test_ping(self):
+ """Test ping querying InfluxDB version."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/ping",
+ status_code=204,
+ headers={'X-Influxdb-Version': '1.2.3'}
+ )
+ version = self.cli.ping()
+ self.assertEqual(version, '1.2.3')
+
+ def test_create_database(self):
+ """Test create database for TestInfluxDBClient object."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text='{"results":[{}]}'
+ )
+ self.cli.create_database('new_db')
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'create database "new_db"'
+ )
+
+ def test_create_numeric_named_database(self):
+ """Test create db w/numeric name for TestInfluxDBClient object."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text='{"results":[{}]}'
+ )
+ self.cli.create_database('123')
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'create database "123"'
+ )
+
+ @raises(Exception)
+ def test_create_database_fails(self):
+ """Test create database fail for TestInfluxDBClient object."""
+ with _mocked_session(self.cli, 'post', 401):
+ self.cli.create_database('new_db')
+
+ def test_drop_database(self):
+ """Test drop database for TestInfluxDBClient object."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text='{"results":[{}]}'
+ )
+ self.cli.drop_database('new_db')
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'drop database "new_db"'
+ )
+
+ def test_drop_measurement(self):
+ """Test drop measurement for TestInfluxDBClient object."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text='{"results":[{}]}'
+ )
+ self.cli.drop_measurement('new_measurement')
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'drop measurement "new_measurement"'
+ )
+
+ def test_drop_numeric_named_database(self):
+ """Test drop numeric db for TestInfluxDBClient object."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text='{"results":[{}]}'
+ )
+ self.cli.drop_database('123')
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'drop database "123"'
+ )
+
+ def test_get_list_database(self):
+ """Test get list of databases for TestInfluxDBClient object."""
+ data = {'results': [
+ {'series': [
+ {'name': 'databases',
+ 'values': [
+ ['new_db_1'],
+ ['new_db_2']],
+ 'columns': ['name']}]}
+ ]}
+
+ with _mocked_session(self.cli, 'get', 200, json.dumps(data)):
+ self.assertListEqual(
+ self.cli.get_list_database(),
+ [{'name': 'new_db_1'}, {'name': 'new_db_2'}]
+ )
+
+ @raises(Exception)
+ def test_get_list_database_fails(self):
+ """Test get list of dbs fail for TestInfluxDBClient object."""
+ cli = InfluxDBClient('host', 8086, 'username', 'password')
+ with _mocked_session(cli, 'get', 401):
+ cli.get_list_database()
+
+ def test_get_list_measurements(self):
+ """Test get list of measurements for TestInfluxDBClient object."""
+ data = {
+ "results": [{
+ "series": [
+ {"name": "measurements",
+ "columns": ["name"],
+ "values": [["cpu"], ["disk"]
+ ]}]}
+ ]
+ }
+
+ with _mocked_session(self.cli, 'get', 200, json.dumps(data)):
+ self.assertListEqual(
+ self.cli.get_list_measurements(),
+ [{'name': 'cpu'}, {'name': 'disk'}]
+ )
+
+ def test_get_list_series(self):
+ """Test get a list of series from the database."""
+ data = {'results': [
+ {'series': [
+ {
+ 'values': [
+ ['cpu_load_short,host=server01,region=us-west'],
+ ['memory_usage,host=server02,region=us-east']],
+ 'columns': ['key']
+ }
+ ]}
+ ]}
+
+ with _mocked_session(self.cli, 'get', 200, json.dumps(data)):
+ self.assertListEqual(
+ self.cli.get_list_series(),
+ ['cpu_load_short,host=server01,region=us-west',
+ 'memory_usage,host=server02,region=us-east'])
+
+ def test_get_list_series_with_measurement(self):
+ """Test get a list of series from the database by filter."""
+ data = {'results': [
+ {'series': [
+ {
+ 'values': [
+ ['cpu_load_short,host=server01,region=us-west']],
+ 'columns': ['key']
+ }
+ ]}
+ ]}
+
+ with _mocked_session(self.cli, 'get', 200, json.dumps(data)):
+ self.assertListEqual(
+ self.cli.get_list_series(measurement='cpu_load_short'),
+ ['cpu_load_short,host=server01,region=us-west'])
+
+ def test_get_list_series_with_tags(self):
+ """Test get a list of series from the database by tags."""
+ data = {'results': [
+ {'series': [
+ {
+ 'values': [
+ ['cpu_load_short,host=server01,region=us-west']],
+ 'columns': ['key']
+ }
+ ]}
+ ]}
+
+ with _mocked_session(self.cli, 'get', 200, json.dumps(data)):
+ self.assertListEqual(
+ self.cli.get_list_series(tags={'region': 'us-west'}),
+ ['cpu_load_short,host=server01,region=us-west'])
+
+ @raises(Exception)
+ def test_get_list_series_fails(self):
+ """Test get a list of series from the database but fail."""
+ cli = InfluxDBClient('host', 8086, 'username', 'password')
+ with _mocked_session(cli, 'get', 401):
+ cli.get_list_series()
+
+ def test_create_retention_policy_default(self):
+ """Test create default ret policy for TestInfluxDBClient object."""
+ example_response = '{"results":[{}]}'
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text=example_response
+ )
+ self.cli.create_retention_policy(
+ 'somename', '1d', 4, default=True, database='db'
+ )
+
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'create retention policy "somename" on '
+ '"db" duration 1d replication 4 shard duration 0s default'
+ )
+
+ def test_create_retention_policy(self):
+ """Test create retention policy for TestInfluxDBClient object."""
+ example_response = '{"results":[{}]}'
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text=example_response
+ )
+ self.cli.create_retention_policy(
+ 'somename', '1d', 4, database='db'
+ )
+
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'create retention policy "somename" on '
+ '"db" duration 1d replication 4 shard duration 0s'
+ )
+
+ def test_create_retention_policy_shard_duration(self):
+ """Test create retention policy with a custom shard duration."""
+ example_response = '{"results":[{}]}'
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text=example_response
+ )
+ self.cli.create_retention_policy(
+ 'somename2', '1d', 4, database='db',
+ shard_duration='1h'
+ )
+
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'create retention policy "somename2" on '
+ '"db" duration 1d replication 4 shard duration 1h'
+ )
+
+ def test_create_retention_policy_shard_duration_default(self):
+ """Test create retention policy with a default shard duration."""
+ example_response = '{"results":[{}]}'
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text=example_response
+ )
+ self.cli.create_retention_policy(
+ 'somename3', '1d', 4, database='db',
+ shard_duration='1h', default=True
+ )
+
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'create retention policy "somename3" on '
+ '"db" duration 1d replication 4 shard duration 1h '
+ 'default'
+ )
+
+ def test_alter_retention_policy(self):
+ """Test alter retention policy for TestInfluxDBClient object."""
+ example_response = '{"results":[{}]}'
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text=example_response
+ )
+ # Test alter duration
+ self.cli.alter_retention_policy('somename', 'db',
+ duration='4d')
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'alter retention policy "somename" on "db" duration 4d'
+ )
+ # Test alter replication
+ self.cli.alter_retention_policy('somename', 'db',
+ replication=4)
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'alter retention policy "somename" on "db" replication 4'
+ )
+
+ # Test alter shard duration
+ self.cli.alter_retention_policy('somename', 'db',
+ shard_duration='1h')
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'alter retention policy "somename" on "db" shard duration 1h'
+ )
+
+ # Test alter default
+ self.cli.alter_retention_policy('somename', 'db',
+ default=True)
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'alter retention policy "somename" on "db" default'
+ )
+
+ @raises(Exception)
+ def test_alter_retention_policy_invalid(self):
+ """Test invalid alter ret policy for TestInfluxDBClient object."""
+ cli = InfluxDBClient('host', 8086, 'username', 'password')
+ with _mocked_session(cli, 'get', 400):
+            cli.alter_retention_policy('somename', 'db')
+
+ def test_drop_retention_policy(self):
+ """Test drop retention policy for TestInfluxDBClient object."""
+ example_response = '{"results":[{}]}'
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text=example_response
+ )
+ self.cli.drop_retention_policy('somename', 'db')
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'drop retention policy "somename" on "db"'
+ )
+
+ @raises(Exception)
+ def test_drop_retention_policy_fails(self):
+ """Test failed drop ret policy for TestInfluxDBClient object."""
+ cli = InfluxDBClient('host', 8086, 'username', 'password')
+ with _mocked_session(cli, 'delete', 401):
+ cli.drop_retention_policy('default', 'db')
+
+ def test_get_list_retention_policies(self):
+ """Test get retention policies for TestInfluxDBClient object."""
+ example_response = \
+ '{"results": [{"series": [{"values": [["fsfdsdf", "24h0m0s", 2]],'\
+ ' "columns": ["name", "duration", "replicaN"]}]}]}'
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/query",
+ text=example_response
+ )
+ self.assertListEqual(
+ self.cli.get_list_retention_policies("db"),
+ [{'duration': '24h0m0s',
+ 'name': 'fsfdsdf', 'replicaN': 2}]
+ )
+
+ @mock.patch('requests.Session.request')
+ def test_request_retry(self, mock_request):
+ """Test that two connection errors will be handled."""
+ class CustomMock(object):
+ """Create custom mock object for test."""
+
+ def __init__(self):
+ self.i = 0
+
+ def connection_error(self, *args, **kwargs):
+ """Handle a connection error for the CustomMock object."""
+ self.i += 1
+
+ if self.i < 3:
+ raise requests.exceptions.ConnectionError
+
+ r = requests.Response()
+ r.status_code = 204
+ return r
+
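+        # Fail twice, then succeed: two errors stay within the client's
+        # default retry allowance, so write_points should not raise.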
+ mock_request.side_effect = CustomMock().connection_error
+
+ cli = InfluxDBClient(database='db')
+ cli.write_points(
+ self.dummy_points
+ )
+
+ @mock.patch('requests.Session.request')
+ def test_request_retry_raises(self, mock_request):
+ """Test that three requests errors will not be handled."""
+ class CustomMock(object):
+ """Create custom mock object for test."""
+
+ def __init__(self):
+ self.i = 0
+
+ def connection_error(self, *args, **kwargs):
+ """Handle a connection error for the CustomMock object."""
+ self.i += 1
+
+ if self.i < 4:
+ raise requests.exceptions.HTTPError
+ else:
+ r = requests.Response()
+ r.status_code = 200
+ return r
+
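+        # More failures than the default retry allowance, so the final
+        # exception is expected to propagate to the caller.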
+ mock_request.side_effect = CustomMock().connection_error
+
+ cli = InfluxDBClient(database='db')
+
+ with self.assertRaises(requests.exceptions.HTTPError):
+ cli.write_points(self.dummy_points)
+
+ @mock.patch('requests.Session.request')
+ def test_random_request_retry(self, mock_request):
+ """Test that a random number of connection errors will be handled."""
+ class CustomMock(object):
+ """Create custom mock object for test."""
+
+ def __init__(self, retries):
+ self.i = 0
+ self.retries = retries
+
+ def connection_error(self, *args, **kwargs):
+ """Handle a connection error for the CustomMock object."""
+ self.i += 1
+
+ if self.i < self.retries:
+ raise requests.exceptions.ConnectionError
+ else:
+ r = requests.Response()
+ r.status_code = 204
+ return r
+
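+        # Fail retries - 1 times and succeed on the last permitted
+        # attempt; the client below is configured with the same limit.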
+ retries = random.randint(1, 5)
+ mock_request.side_effect = CustomMock(retries).connection_error
+
+ cli = InfluxDBClient(database='db', retries=retries)
+ cli.write_points(self.dummy_points)
+
+ @mock.patch('requests.Session.request')
+ def test_random_request_retry_raises(self, mock_request):
+ """Test a random number of conn errors plus one will not be handled."""
+ class CustomMock(object):
+ """Create custom mock object for test."""
+
+ def __init__(self, retries):
+ self.i = 0
+ self.retries = retries
+
+ def connection_error(self, *args, **kwargs):
+ """Handle a connection error for the CustomMock object."""
+ self.i += 1
+
+ if self.i < self.retries + 1:
+ raise requests.exceptions.ConnectionError
+ else:
+ r = requests.Response()
+ r.status_code = 200
+ return r
+
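+        # Fail once more than the configured limit allows, so the
+        # ConnectionError should escape write_points.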
+ retries = random.randint(1, 5)
+ mock_request.side_effect = CustomMock(retries).connection_error
+
+ cli = InfluxDBClient(database='db', retries=retries)
+
+ with self.assertRaises(requests.exceptions.ConnectionError):
+ cli.write_points(self.dummy_points)
+
+ def test_get_list_users(self):
+ """Test get users for TestInfluxDBClient object."""
+ example_response = (
+ '{"results":[{"series":[{"columns":["user","admin"],'
+ '"values":[["test",false]]}]}]}'
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/query",
+ text=example_response
+ )
+
+ self.assertListEqual(
+ self.cli.get_list_users(),
+ [{'user': 'test', 'admin': False}]
+ )
+
+ def test_get_list_users_empty(self):
+ """Test get empty userlist for TestInfluxDBClient object."""
+ example_response = (
+ '{"results":[{"series":[{"columns":["user","admin"]}]}]}'
+ )
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/query",
+ text=example_response
+ )
+
+ self.assertListEqual(self.cli.get_list_users(), [])
+
+ def test_grant_admin_privileges(self):
+ """Test grant admin privs for TestInfluxDBClient object."""
+ example_response = '{"results":[{}]}'
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text=example_response
+ )
+ self.cli.grant_admin_privileges('test')
+
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'grant all privileges to "test"'
+ )
+
+ @raises(Exception)
+ def test_grant_admin_privileges_invalid(self):
+ """Test grant invalid admin privs for TestInfluxDBClient object."""
+ cli = InfluxDBClient('host', 8086, 'username', 'password')
+ with _mocked_session(cli, 'get', 400):
+            cli.grant_admin_privileges('')
+
+ def test_revoke_admin_privileges(self):
+ """Test revoke admin privs for TestInfluxDBClient object."""
+ example_response = '{"results":[{}]}'
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text=example_response
+ )
+ self.cli.revoke_admin_privileges('test')
+
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'revoke all privileges from "test"'
+ )
+
+ @raises(Exception)
+ def test_revoke_admin_privileges_invalid(self):
+ """Test revoke invalid admin privs for TestInfluxDBClient object."""
+ cli = InfluxDBClient('host', 8086, 'username', 'password')
+ with _mocked_session(cli, 'get', 400):
+            cli.revoke_admin_privileges('')
+
+ def test_grant_privilege(self):
+ """Test grant privs for TestInfluxDBClient object."""
+ example_response = '{"results":[{}]}'
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text=example_response
+ )
+ self.cli.grant_privilege('read', 'testdb', 'test')
+
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'grant read on "testdb" to "test"'
+ )
+
+ @raises(Exception)
+ def test_grant_privilege_invalid(self):
+ """Test grant invalid privs for TestInfluxDBClient object."""
+ cli = InfluxDBClient('host', 8086, 'username', 'password')
+ with _mocked_session(cli, 'get', 400):
+            cli.grant_privilege('', 'testdb', 'test')
+
+ def test_revoke_privilege(self):
+ """Test revoke privs for TestInfluxDBClient object."""
+ example_response = '{"results":[{}]}'
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text=example_response
+ )
+ self.cli.revoke_privilege('read', 'testdb', 'test')
+
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'revoke read on "testdb" from "test"'
+ )
+
+ @raises(Exception)
+ def test_revoke_privilege_invalid(self):
+ """Test revoke invalid privs for TestInfluxDBClient object."""
+ cli = InfluxDBClient('host', 8086, 'username', 'password')
+ with _mocked_session(cli, 'get', 400):
+            cli.revoke_privilege('', 'testdb', 'test')
+
+ def test_get_list_privileges(self):
+ """Test get list of privs for TestInfluxDBClient object."""
+ data = {'results': [
+ {'series': [
+ {'columns': ['database', 'privilege'],
+ 'values': [
+ ['db1', 'READ'],
+ ['db2', 'ALL PRIVILEGES'],
+ ['db3', 'NO PRIVILEGES']]}
+ ]}
+ ]}
+
+ with _mocked_session(self.cli, 'get', 200, json.dumps(data)):
+ self.assertListEqual(
+ self.cli.get_list_privileges('test'),
+ [{'database': 'db1', 'privilege': 'READ'},
+ {'database': 'db2', 'privilege': 'ALL PRIVILEGES'},
+ {'database': 'db3', 'privilege': 'NO PRIVILEGES'}]
+ )
+
+ @raises(Exception)
+ def test_get_list_privileges_fails(self):
+ """Test failed get list of privs for TestInfluxDBClient object."""
+ cli = InfluxDBClient('host', 8086, 'username', 'password')
+ with _mocked_session(cli, 'get', 401):
+ cli.get_list_privileges('test')
+
+ def test_get_list_continuous_queries(self):
+ """Test getting a list of continuous queries."""
+ data = {
+ "results": [
+ {
+ "statement_id": 0,
+ "series": [
+ {
+ "name": "testdb01",
+ "columns": ["name", "query"],
+ "values": [["testname01", "testquery01"],
+ ["testname02", "testquery02"]]
+ },
+ {
+ "name": "testdb02",
+ "columns": ["name", "query"],
+ "values": [["testname03", "testquery03"]]
+ },
+ {
+ "name": "testdb03",
+ "columns": ["name", "query"]
+ }
+ ]
+ }
+ ]
+ }
+
+ with _mocked_session(self.cli, 'get', 200, json.dumps(data)):
+ self.assertListEqual(
+ self.cli.get_list_continuous_queries(),
+ [
+ {
+ 'testdb01': [
+ {'name': 'testname01', 'query': 'testquery01'},
+ {'name': 'testname02', 'query': 'testquery02'}
+ ]
+ },
+ {
+ 'testdb02': [
+ {'name': 'testname03', 'query': 'testquery03'}
+ ]
+ },
+ {
+ 'testdb03': []
+ }
+ ]
+ )
+
+ @raises(Exception)
+ def test_get_list_continuous_queries_fails(self):
+ """Test failing to get a list of continuous queries."""
+ with _mocked_session(self.cli, 'get', 400):
+ self.cli.get_list_continuous_queries()
+
+ def test_create_continuous_query(self):
+ """Test continuous query creation."""
+ data = {"results": [{}]}
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/query",
+ text=json.dumps(data)
+ )
+ query = 'SELECT count("value") INTO "6_months"."events" FROM ' \
+ '"events" GROUP BY time(10m)'
+ self.cli.create_continuous_query('cq_name', query, 'db_name')
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'create continuous query "cq_name" on "db_name" begin select '
+ 'count("value") into "6_months"."events" from "events" group '
+ 'by time(10m) end'
+ )
+ self.cli.create_continuous_query('cq_name', query, 'db_name',
+ 'EVERY 10s FOR 2m')
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'create continuous query "cq_name" on "db_name" resample '
+ 'every 10s for 2m begin select count("value") into '
+ '"6_months"."events" from "events" group by time(10m) end'
+ )
+
+ @raises(Exception)
+ def test_create_continuous_query_fails(self):
+ """Test failing to create a continuous query."""
+ with _mocked_session(self.cli, 'get', 400):
+ self.cli.create_continuous_query('cq_name', 'select', 'db_name')
+
+ def test_drop_continuous_query(self):
+ """Test dropping a continuous query."""
+ data = {"results": [{}]}
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/query",
+ text=json.dumps(data)
+ )
+ self.cli.drop_continuous_query('cq_name', 'db_name')
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'drop continuous query "cq_name" on "db_name"'
+ )
+
+ @raises(Exception)
+ def test_drop_continuous_query_fails(self):
+ """Test failing to drop a continuous query."""
+ with _mocked_session(self.cli, 'get', 400):
+ self.cli.drop_continuous_query('cq_name', 'db_name')
+
+ def test_invalid_port_fails(self):
+ """Test invalid port fail for TestInfluxDBClient object."""
+ with self.assertRaises(ValueError):
+ InfluxDBClient('host', '80/redir', 'username', 'password')
+
+ def test_chunked_response(self):
+ """Test chunked response for TestInfluxDBClient object."""
+ example_response = \
+ u'{"results":[{"statement_id":0,"series":[{"columns":["key"],' \
+ '"values":[["cpu"],["memory"],["iops"],["network"]],"partial":' \
+ 'true}],"partial":true}]}\n{"results":[{"statement_id":0,' \
+ '"series":[{"columns":["key"],"values":[["qps"],["uptime"],' \
+ '["df"],["mount"]]}]}]}\n'
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/query",
+ text=example_response
+ )
+ response = self.cli.query('show series',
+ chunked=True, chunk_size=4)
+ res = list(response)
+            self.assertEqual(len(res), 2)
+            self.assertEqual(repr(res[0]), repr(ResultSet(
+                {'series': [{
+                    'columns': ['key'],
+                    'values': [['cpu'], ['memory'], ['iops'], ['network']]
+                }]})))
+            self.assertEqual(repr(res[1]), repr(ResultSet(
+                {'series': [{
+                    'columns': ['key'],
+                    'values': [['qps'], ['uptime'], ['df'], ['mount']]
+                }]})))
+
+ def test_auth_default(self):
+ """Test auth with default settings."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/ping",
+ status_code=204,
+ headers={'X-Influxdb-Version': '1.2.3'}
+ )
+
+ cli = InfluxDBClient()
+ cli.ping()
+
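+            # "cm9vdDpyb290" is base64("root:root"), the client defaults.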
+ self.assertEqual(m.last_request.headers["Authorization"],
+ "Basic cm9vdDpyb290")
+
+ def test_auth_username_password(self):
+ """Test auth with custom username and password."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/ping",
+ status_code=204,
+ headers={'X-Influxdb-Version': '1.2.3'}
+ )
+
+ cli = InfluxDBClient(username='my-username',
+ password='my-password')
+ cli.ping()
+
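+            # base64("my-username:my-password")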
+ self.assertEqual(m.last_request.headers["Authorization"],
+ "Basic bXktdXNlcm5hbWU6bXktcGFzc3dvcmQ=")
+
+ def test_auth_username_password_none(self):
+ """Test auth with not defined username or password."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/ping",
+ status_code=204,
+ headers={'X-Influxdb-Version': '1.2.3'}
+ )
+
+ cli = InfluxDBClient(username=None, password=None)
+ cli.ping()
+            self.assertNotIn('Authorization', m.last_request.headers)
+
+ cli = InfluxDBClient(username=None)
+ cli.ping()
+            self.assertNotIn('Authorization', m.last_request.headers)
+
+ cli = InfluxDBClient(password=None)
+ cli.ping()
+            self.assertNotIn('Authorization', m.last_request.headers)
+
+ def test_auth_token(self):
+ """Test auth with custom authorization header."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/ping",
+ status_code=204,
+ headers={'X-Influxdb-Version': '1.2.3'}
+ )
+
+ cli = InfluxDBClient(username=None, password=None,
+ headers={"Authorization": "my-token"})
+ cli.ping()
+ self.assertEqual(m.last_request.headers["Authorization"],
+ "my-token")
+
+ def test_custom_socket_options(self):
+ """Test custom socket options."""
+ test_socket_options = HTTPConnection.default_socket_options + \
+ [(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
+ (socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 60),
+ (socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 15)]
+
+ cli = InfluxDBClient(username=None, password=None,
+ socket_options=test_socket_options)
+
+        self.assertEqual(cli._session.adapters.get("http://").socket_options,
+                         test_socket_options)
+        self.assertEqual(cli._session.adapters.get("http://").poolmanager.
+                         connection_pool_kw.get("socket_options"),
+                         test_socket_options)
+
+ connection_pool = cli._session.adapters.get("http://").poolmanager \
+ .connection_from_url(
+ url="http://localhost:8086")
+ new_connection = connection_pool._new_conn()
+        self.assertEqual(new_connection.socket_options, test_socket_options)
+
+ def test_none_socket_options(self):
+ """Test default socket options."""
+ cli = InfluxDBClient(username=None, password=None)
+        self.assertEqual(cli._session.adapters.get("http://").socket_options,
+                         None)
+ connection_pool = cli._session.adapters.get("http://").poolmanager \
+ .connection_from_url(
+ url="http://localhost:8086")
+ new_connection = connection_pool._new_conn()
+        self.assertEqual(new_connection.socket_options,
+                         HTTPConnection.default_socket_options)
+
+
+class FakeClient(InfluxDBClient):
+ """Set up a fake client instance of InfluxDBClient."""
+
+ def __init__(self, *args, **kwargs):
+ """Initialize an instance of the FakeClient object."""
+ super(FakeClient, self).__init__(*args, **kwargs)
+
+ def query(self,
+ query,
+ params=None,
+ expected_response_code=200,
+ database=None):
+ """Query data from the FakeClient object."""
+ if query == 'Fail':
+ raise Exception("Fail")
+ elif query == 'Fail once' and self._host == 'host1':
+ raise Exception("Fail Once")
+        elif query == 'Fail twice' and self._host in ('host1', 'host2'):
+ raise Exception("Fail Twice")
+ else:
+ return "Success"
diff --git a/influxdb/tests/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py
new file mode 100644
index 00000000..87b8e0d8
--- /dev/null
+++ b/influxdb/tests/dataframe_client_test.py
@@ -0,0 +1,1348 @@
+# -*- coding: utf-8 -*-
+"""Unit tests for misc module."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from datetime import timedelta
+
+import json
+import unittest
+import warnings
+import requests_mock
+
+from nose.tools import raises
+from influxdb.tests import skip_if_pypy, using_pypy
+
+from .client_test import _mocked_session
+
+if not using_pypy:
+ import pandas as pd
+ from pandas.util.testing import assert_frame_equal
+ from influxdb import DataFrameClient
+ import numpy as np
+
+
+@skip_if_pypy
+class TestDataFrameClient(unittest.TestCase):
+ """Set up a test DataFrameClient object."""
+
+ def setUp(self):
+ """Instantiate a TestDataFrameClient object."""
+ # By default, raise exceptions on warnings
+ warnings.simplefilter('error', FutureWarning)
+
+ def test_write_points_from_dataframe(self):
+ """Test write points from df in TestDataFrameClient object."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
+ index=[now, now + timedelta(hours=1)],
+ columns=["column_one", "column_two",
+ "column_three"])
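+        # Line protocol: string fields are quoted, integers carry an "i"
+        # suffix, and timestamps default to nanosecond precision.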
+ expected = (
+ b"foo column_one=\"1\",column_two=1i,column_three=1.0 0\n"
+ b"foo column_one=\"2\",column_two=2i,column_three=2.0 "
+ b"3600000000000\n"
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+
+ cli = DataFrameClient(database='db')
+
+ cli.write_points(dataframe, 'foo')
+ self.assertEqual(m.last_request.body, expected)
+
+ cli.write_points(dataframe, 'foo', tags=None)
+ self.assertEqual(m.last_request.body, expected)
+
+ def test_dataframe_write_points_with_whitespace_measurement(self):
+ """write_points should escape white space in measurements."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
+ index=[now, now + timedelta(hours=1)],
+ columns=["column_one", "column_two",
+ "column_three"])
+ expected = (
+ b"meas\\ with\\ space "
+ b"column_one=\"1\",column_two=1i,column_three=1.0 0\n"
+ b"meas\\ with\\ space "
+ b"column_one=\"2\",column_two=2i,column_three=2.0 "
+ b"3600000000000\n"
+ )
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+ cli = DataFrameClient(database='db')
+ cli.write_points(dataframe, 'meas with space')
+ self.assertEqual(m.last_request.body, expected)
+
+ def test_dataframe_write_points_with_whitespace_in_column_names(self):
+ """write_points should escape white space in column names."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
+ index=[now, now + timedelta(hours=1)],
+ columns=["column one", "column two",
+ "column three"])
+ expected = (
+ b"foo column\\ one=\"1\",column\\ two=1i,column\\ three=1.0 0\n"
+ b"foo column\\ one=\"2\",column\\ two=2i,column\\ three=2.0 "
+ b"3600000000000\n"
+ )
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+ cli = DataFrameClient(database='db')
+ cli.write_points(dataframe, 'foo')
+ self.assertEqual(m.last_request.body, expected)
+
+ def test_write_points_from_dataframe_with_none(self):
+ """Test write points from df in TestDataFrameClient object."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.DataFrame(data=[["1", None, 1.0], ["2", 2.0, 2.0]],
+ index=[now, now + timedelta(hours=1)],
+ columns=["column_one", "column_two",
+ "column_three"])
+ expected = (
+ b"foo column_one=\"1\",column_three=1.0 0\n"
+ b"foo column_one=\"2\",column_two=2.0,column_three=2.0 "
+ b"3600000000000\n"
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+
+ cli = DataFrameClient(database='db')
+
+ cli.write_points(dataframe, 'foo')
+ self.assertEqual(m.last_request.body, expected)
+
+ cli.write_points(dataframe, 'foo', tags=None)
+ self.assertEqual(m.last_request.body, expected)
+
+ def test_write_points_from_dataframe_with_line_of_none(self):
+ """Test write points from df in TestDataFrameClient object."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.DataFrame(data=[[None, None, None], ["2", 2.0, 2.0]],
+ index=[now, now + timedelta(hours=1)],
+ columns=["column_one", "column_two",
+ "column_three"])
+ expected = (
+ b"foo column_one=\"2\",column_two=2.0,column_three=2.0 "
+ b"3600000000000\n"
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+
+ cli = DataFrameClient(database='db')
+
+ cli.write_points(dataframe, 'foo')
+ self.assertEqual(m.last_request.body, expected)
+
+ cli.write_points(dataframe, 'foo', tags=None)
+ self.assertEqual(m.last_request.body, expected)
+
+ def test_write_points_from_dataframe_with_all_none(self):
+ """Test write points from df in TestDataFrameClient object."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.DataFrame(data=[[None, None, None], [None, None, None]],
+ index=[now, now + timedelta(hours=1)],
+ columns=["column_one", "column_two",
+ "column_three"])
+ expected = (
+ b"\n"
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+
+ cli = DataFrameClient(database='db')
+
+ cli.write_points(dataframe, 'foo')
+ self.assertEqual(m.last_request.body, expected)
+
+ cli.write_points(dataframe, 'foo', tags=None)
+ self.assertEqual(m.last_request.body, expected)
+
+ def test_write_points_from_dataframe_in_batches(self):
+ """Test write points in batch from df in TestDataFrameClient object."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
+ index=[now, now + timedelta(hours=1)],
+ columns=["column_one", "column_two",
+ "column_three"])
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+
+ cli = DataFrameClient(database='db')
+ self.assertTrue(cli.write_points(dataframe, "foo", batch_size=1))
+
+ def test_write_points_from_dataframe_with_tag_columns(self):
+ """Test write points from df w/tag in TestDataFrameClient object."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.DataFrame(data=[['blue', 1, "1", 1, 1.0],
+ ['red', 0, "2", 2, 2.0]],
+ index=[now, now + timedelta(hours=1)],
+ columns=["tag_one", "tag_two", "column_one",
+ "column_two", "column_three"])
+ expected = (
+ b"foo,tag_one=blue,tag_two=1 "
+ b"column_one=\"1\",column_two=1i,column_three=1.0 "
+ b"0\n"
+ b"foo,tag_one=red,tag_two=0 "
+ b"column_one=\"2\",column_two=2i,column_three=2.0 "
+ b"3600000000000\n"
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+
+ cli = DataFrameClient(database='db')
+
+ cli.write_points(dataframe, 'foo',
+ tag_columns=['tag_one', 'tag_two'])
+ self.assertEqual(m.last_request.body, expected)
+
+ cli.write_points(dataframe, 'foo',
+ tag_columns=['tag_one', 'tag_two'], tags=None)
+ self.assertEqual(m.last_request.body, expected)
+
+ def test_write_points_from_dataframe_with_tag_cols_and_global_tags(self):
+ """Test write points from df w/tag + cols in TestDataFrameClient."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.DataFrame(data=[['blue', 1, "1", 1, 1.0],
+ ['red', 0, "2", 2, 2.0]],
+ index=[now, now + timedelta(hours=1)],
+ columns=["tag_one", "tag_two", "column_one",
+ "column_two", "column_three"])
+ expected = (
+ b"foo,global_tag=value,tag_one=blue,tag_two=1 "
+ b"column_one=\"1\",column_two=1i,column_three=1.0 "
+ b"0\n"
+ b"foo,global_tag=value,tag_one=red,tag_two=0 "
+ b"column_one=\"2\",column_two=2i,column_three=2.0 "
+ b"3600000000000\n"
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+
+ cli = DataFrameClient(database='db')
+
+ cli.write_points(dataframe, 'foo',
+ tag_columns=['tag_one', 'tag_two'],
+ tags={'global_tag': 'value'})
+ self.assertEqual(m.last_request.body, expected)
+
+ def test_write_points_from_dataframe_with_tag_cols_and_defaults(self):
+ """Test default write points from df w/tag in TestDataFrameClient."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.DataFrame(data=[['blue', 1, "1", 1, 1.0, 'hot'],
+ ['red', 0, "2", 2, 2.0, 'cold']],
+ index=[now, now + timedelta(hours=1)],
+ columns=["tag_one", "tag_two", "column_one",
+ "column_two", "column_three",
+ "tag_three"])
+ expected_tags_and_fields = (
+ b"foo,tag_one=blue "
+ b"column_one=\"1\",column_two=1i "
+ b"0\n"
+ b"foo,tag_one=red "
+ b"column_one=\"2\",column_two=2i "
+ b"3600000000000\n"
+ )
+
+ expected_tags_no_fields = (
+ b"foo,tag_one=blue,tag_two=1 "
+ b"column_one=\"1\",column_two=1i,column_three=1.0,"
+ b"tag_three=\"hot\" 0\n"
+ b"foo,tag_one=red,tag_two=0 "
+ b"column_one=\"2\",column_two=2i,column_three=2.0,"
+ b"tag_three=\"cold\" 3600000000000\n"
+ )
+
+ expected_fields_no_tags = (
+ b"foo,tag_one=blue,tag_three=hot,tag_two=1 "
+ b"column_one=\"1\",column_two=1i,column_three=1.0 "
+ b"0\n"
+ b"foo,tag_one=red,tag_three=cold,tag_two=0 "
+ b"column_one=\"2\",column_two=2i,column_three=2.0 "
+ b"3600000000000\n"
+ )
+
+ expected_no_tags_no_fields = (
+ b"foo "
+ b"tag_one=\"blue\",tag_two=1i,column_one=\"1\","
+ b"column_two=1i,column_three=1.0,tag_three=\"hot\" "
+ b"0\n"
+ b"foo "
+ b"tag_one=\"red\",tag_two=0i,column_one=\"2\","
+ b"column_two=2i,column_three=2.0,tag_three=\"cold\" "
+ b"3600000000000\n"
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+
+ cli = DataFrameClient(database='db')
+
+ cli.write_points(dataframe, 'foo',
+ field_columns=['column_one', 'column_two'],
+ tag_columns=['tag_one'])
+ self.assertEqual(m.last_request.body, expected_tags_and_fields)
+
+ cli.write_points(dataframe, 'foo',
+ tag_columns=['tag_one', 'tag_two'])
+ self.assertEqual(m.last_request.body, expected_tags_no_fields)
+
+ cli.write_points(dataframe, 'foo',
+ field_columns=['column_one', 'column_two',
+ 'column_three'])
+ self.assertEqual(m.last_request.body, expected_fields_no_tags)
+
+ cli.write_points(dataframe, 'foo')
+ self.assertEqual(m.last_request.body, expected_no_tags_no_fields)
+
+ def test_write_points_from_dataframe_with_tag_escaped(self):
+ """Test write points from df w/escaped tag in TestDataFrameClient."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.DataFrame(
+ data=[
+ ['blue orange', "1", 1, 'hot=cold'], # space, equal
+ ['red,green', "2", 2, r'cold\fire'], # comma, backslash
+ ['some', "2", 2, ''], # skip empty
+ ['some', "2", 2, None], # skip None
+ ['', "2", 2, None], # all tags empty
+ ],
+ index=pd.period_range(now, freq='H', periods=5),
+ columns=["tag_one", "column_one", "column_two", "tag_three"]
+ )
+
+ expected_escaped_tags = (
+ b"foo,tag_one=blue\\ orange,tag_three=hot\\=cold "
+ b"column_one=\"1\",column_two=1i "
+ b"0\n"
+ b"foo,tag_one=red\\,green,tag_three=cold\\\\fire "
+ b"column_one=\"2\",column_two=2i "
+ b"3600000000000\n"
+ b"foo,tag_one=some "
+ b"column_one=\"2\",column_two=2i "
+ b"7200000000000\n"
+ b"foo,tag_one=some "
+ b"column_one=\"2\",column_two=2i "
+ b"10800000000000\n"
+ b"foo "
+ b"column_one=\"2\",column_two=2i "
+ b"14400000000000\n"
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+ cli = DataFrameClient(database='db')
+ cli.write_points(dataframe, 'foo',
+ field_columns=['column_one', 'column_two'],
+ tag_columns=['tag_one', 'tag_three'])
+ self.assertEqual(m.last_request.body, expected_escaped_tags)
+
+ def test_write_points_from_dataframe_with_numeric_column_names(self):
+ """Test write points from df with numeric cols."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ # df with numeric column names
+ dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
+ index=[now, now + timedelta(hours=1)])
+
+ expected = (
+ b'foo,hello=there 0=\"1\",1=1i,2=1.0 0\n'
+ b'foo,hello=there 0=\"2\",1=2i,2=2.0 3600000000000\n'
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+
+ cli = DataFrameClient(database='db')
+ cli.write_points(dataframe, "foo", {"hello": "there"})
+
+ self.assertEqual(m.last_request.body, expected)
+
+ def test_write_points_from_dataframe_with_leading_none_column(self):
+ """write_points detect erroneous leading comma for null first field."""
+ dataframe = pd.DataFrame(
+ dict(
+ first=[1, None, None, 8, 9],
+ second=[2, None, None, None, 10],
+ third=[3, 4.1, None, None, 11],
+ first_tag=["one", None, None, "eight", None],
+ second_tag=["two", None, None, None, None],
+ third_tag=["three", "four", None, None, None],
+ comment=[
+ "All columns filled",
+ "First two of three empty",
+ "All empty",
+ "Last two of three empty",
+ "Empty tags with values",
+ ]
+ ),
+ index=pd.date_range(
+ start=pd.to_datetime('2018-01-01'),
+ freq='1D',
+ periods=5,
+ )
+ )
+ expected = (
+ b'foo,first_tag=one,second_tag=two,third_tag=three'
+ b' comment="All columns filled",first=1.0,second=2.0,third=3.0'
+ b' 1514764800000000000\n'
+ b'foo,third_tag=four'
+ b' comment="First two of three empty",third=4.1'
+ b' 1514851200000000000\n'
+ b'foo comment="All empty" 1514937600000000000\n'
+ b'foo,first_tag=eight'
+ b' comment="Last two of three empty",first=8.0'
+ b' 1515024000000000000\n'
+ b'foo'
+ b' comment="Empty tags with values",first=9.0,second=10.0'
+ b',third=11.0'
+ b' 1515110400000000000\n'
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+
+ cli = DataFrameClient(database='db')
+
+ colnames = [
+ "first_tag",
+ "second_tag",
+ "third_tag",
+ "comment",
+ "first",
+ "second",
+ "third"
+ ]
+ cli.write_points(dataframe.loc[:, colnames], 'foo',
+ tag_columns=[
+ "first_tag",
+ "second_tag",
+ "third_tag"])
+
+ self.assertEqual(m.last_request.body, expected)
+
+ def test_write_points_from_dataframe_with_numeric_precision(self):
+ """Test write points from df with numeric precision."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ # df with numeric column names
+ dataframe = pd.DataFrame(data=[["1", 1, 1.1111111111111],
+ ["2", 2, 2.2222222222222]],
+ index=[now, now + timedelta(hours=1)])
+
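+        # numpy releases up to 1.13.3 print floats with fewer digits, so
+        # the expected default-precision body is version-dependent.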
+ if np.lib.NumpyVersion(np.__version__) <= '1.13.3':
+ expected_default_precision = (
+ b'foo,hello=there 0=\"1\",1=1i,2=1.11111111111 0\n'
+ b'foo,hello=there 0=\"2\",1=2i,2=2.22222222222 3600000000000\n'
+ )
+ else:
+ expected_default_precision = (
+ b'foo,hello=there 0=\"1\",1=1i,2=1.1111111111111 0\n'
+ b'foo,hello=there 0=\"2\",1=2i,2=2.2222222222222 3600000000000\n' # noqa E501 line too long
+ )
+
+ expected_specified_precision = (
+ b'foo,hello=there 0=\"1\",1=1i,2=1.1111 0\n'
+ b'foo,hello=there 0=\"2\",1=2i,2=2.2222 3600000000000\n'
+ )
+
+ expected_full_precision = (
+ b'foo,hello=there 0=\"1\",1=1i,2=1.1111111111111 0\n'
+ b'foo,hello=there 0=\"2\",1=2i,2=2.2222222222222 3600000000000\n'
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+
+ cli = DataFrameClient(database='db')
+ cli.write_points(dataframe, "foo", {"hello": "there"})
+
+ self.assertEqual(m.last_request.body, expected_default_precision)
+
+ cli = DataFrameClient(database='db')
+ cli.write_points(dataframe, "foo", {"hello": "there"},
+ numeric_precision=4)
+
+ self.assertEqual(m.last_request.body, expected_specified_precision)
+
+ cli = DataFrameClient(database='db')
+ cli.write_points(dataframe, "foo", {"hello": "there"},
+ numeric_precision='full')
+
+ self.assertEqual(m.last_request.body, expected_full_precision)
+
+ def test_write_points_from_dataframe_with_period_index(self):
+ """Test write points from df with period index."""
+ dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
+ index=[pd.Period('1970-01-01'),
+ pd.Period('1970-01-02')],
+ columns=["column_one", "column_two",
+ "column_three"])
+
+ expected = (
+ b"foo column_one=\"1\",column_two=1i,column_three=1.0 0\n"
+ b"foo column_one=\"2\",column_two=2i,column_three=2.0 "
+ b"86400000000000\n"
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+
+ cli = DataFrameClient(database='db')
+ cli.write_points(dataframe, "foo")
+
+ self.assertEqual(m.last_request.body, expected)
+
+ def test_write_points_from_dataframe_with_time_precision(self):
+ """Test write points from df with time precision."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
+ index=[now, now + timedelta(hours=1)],
+ columns=["column_one", "column_two",
+ "column_three"])
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+
+ cli = DataFrameClient(database='db')
+ measurement = "foo"
+
+ cli.write_points(dataframe, measurement, time_precision='h')
+ self.assertEqual(m.last_request.qs['precision'], ['h'])
+ self.assertEqual(
+ b'foo column_one="1",column_two=1i,column_three=1.0 0\nfoo '
+ b'column_one="2",column_two=2i,column_three=2.0 1\n',
+ m.last_request.body,
+ )
+
+ cli.write_points(dataframe, measurement, time_precision='m')
+ self.assertEqual(m.last_request.qs['precision'], ['m'])
+ self.assertEqual(
+ b'foo column_one="1",column_two=1i,column_three=1.0 0\nfoo '
+ b'column_one="2",column_two=2i,column_three=2.0 60\n',
+ m.last_request.body,
+ )
+
+ cli.write_points(dataframe, measurement, time_precision='s')
+ self.assertEqual(m.last_request.qs['precision'], ['s'])
+ self.assertEqual(
+ b'foo column_one="1",column_two=1i,column_three=1.0 0\nfoo '
+ b'column_one="2",column_two=2i,column_three=2.0 3600\n',
+ m.last_request.body,
+ )
+
+ cli.write_points(dataframe, measurement, time_precision='ms')
+ self.assertEqual(m.last_request.qs['precision'], ['ms'])
+ self.assertEqual(
+ b'foo column_one="1",column_two=1i,column_three=1.0 0\nfoo '
+ b'column_one="2",column_two=2i,column_three=2.0 3600000\n',
+ m.last_request.body,
+ )
+
+ cli.write_points(dataframe, measurement, time_precision='u')
+ self.assertEqual(m.last_request.qs['precision'], ['u'])
+ self.assertEqual(
+ b'foo column_one="1",column_two=1i,column_three=1.0 0\nfoo '
+ b'column_one="2",column_two=2i,column_three=2.0 3600000000\n',
+ m.last_request.body,
+ )
+
+ cli.write_points(dataframe, measurement, time_precision='n')
+ self.assertEqual(m.last_request.qs['precision'], ['n'])
+ self.assertEqual(
+ b'foo column_one="1",column_two=1i,column_three=1.0 0\n'
+ b'foo column_one="2",column_two=2i,column_three=2.0 '
+ b'3600000000000\n',
+ m.last_request.body,
+ )
+
+ @raises(TypeError)
+ def test_write_points_from_dataframe_fails_without_time_index(self):
+ """Test failed write points from df without time index."""
+ dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
+ columns=["column_one", "column_two",
+ "column_three"])
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/db/db/series",
+ status_code=204)
+
+ cli = DataFrameClient(database='db')
+ cli.write_points(dataframe, "foo")
+
+ @raises(TypeError)
+ def test_write_points_from_dataframe_fails_with_series(self):
+ """Test failed write points from df with series."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.Series(data=[1.0, 2.0],
+ index=[now, now + timedelta(hours=1)])
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/db/db/series",
+ status_code=204)
+
+ cli = DataFrameClient(database='db')
+ cli.write_points(dataframe, "foo")
+
+ def test_create_database(self):
+ """Test create database for TestInfluxDBClient object."""
+ cli = DataFrameClient(database='db')
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text='{"results":[{}]}'
+ )
+ cli.create_database('new_db')
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'create database "new_db"'
+ )
+
+ def test_create_numeric_named_database(self):
+ """Test create db w/numeric name for TestInfluxDBClient object."""
+ cli = DataFrameClient(database='db')
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text='{"results":[{}]}'
+ )
+ cli.create_database('123')
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'create database "123"'
+ )
+
+ @raises(Exception)
+ def test_create_database_fails(self):
+ """Test create database fail for TestInfluxDBClient object."""
+ cli = DataFrameClient(database='db')
+ with _mocked_session(cli, 'post', 401):
+ cli.create_database('new_db')
+
+ def test_drop_database(self):
+ """Test drop database for TestInfluxDBClient object."""
+ cli = DataFrameClient(database='db')
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text='{"results":[{}]}'
+ )
+ cli.drop_database('new_db')
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'drop database "new_db"'
+ )
+
+ def test_drop_measurement(self):
+ """Test drop measurement for TestInfluxDBClient object."""
+ cli = DataFrameClient(database='db')
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text='{"results":[{}]}'
+ )
+ cli.drop_measurement('new_measurement')
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'drop measurement "new_measurement"'
+ )
+
+ def test_drop_numeric_named_database(self):
+ """Test drop numeric db for TestInfluxDBClient object."""
+ cli = DataFrameClient(database='db')
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text='{"results":[{}]}'
+ )
+ cli.drop_database('123')
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'drop database "123"'
+ )
+
+ @raises(Exception)
+ def test_get_list_database_fails(self):
+ """Test get list of dbs fail for TestInfluxDBClient object."""
+ cli = DataFrameClient('host', 8086, 'username', 'password')
+ with _mocked_session(cli, 'get', 401):
+ cli.get_list_database()
+
+ def test_get_list_measurements(self):
+ """Test get list of measurements for TestInfluxDBClient object."""
+ cli = DataFrameClient(database='db')
+ data = {
+ "results": [{
+ "series": [
+ {"name": "measurements",
+ "columns": ["name"],
+ "values": [["cpu"], ["disk"]
+ ]}]}
+ ]
+ }
+
+ with _mocked_session(cli, 'get', 200, json.dumps(data)):
+ self.assertListEqual(
+ cli.get_list_measurements(),
+ [{'name': 'cpu'}, {'name': 'disk'}]
+ )
+
+ def test_create_retention_policy_default(self):
+ """Test create default ret policy for TestInfluxDBClient object."""
+ cli = DataFrameClient(database='db')
+ example_response = '{"results":[{}]}'
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text=example_response
+ )
+ cli.create_retention_policy(
+ 'somename', '1d', 4, default=True, database='db'
+ )
+
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'create retention policy "somename" on '
+ '"db" duration 1d replication 4 shard duration 0s default'
+ )
+
+ def test_create_retention_policy(self):
+ """Test create retention policy for TestInfluxDBClient object."""
+ cli = DataFrameClient(database='db')
+ example_response = '{"results":[{}]}'
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text=example_response
+ )
+ cli.create_retention_policy(
+ 'somename', '1d', 4, database='db'
+ )
+
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'create retention policy "somename" on '
+ '"db" duration 1d replication 4 shard duration 0s'
+ )
+
+ def test_alter_retention_policy(self):
+ """Test alter retention policy for TestInfluxDBClient object."""
+ cli = DataFrameClient(database='db')
+ example_response = '{"results":[{}]}'
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text=example_response
+ )
+ # Test alter duration
+ cli.alter_retention_policy('somename', 'db',
+ duration='4d')
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'alter retention policy "somename" on "db" duration 4d'
+ )
+ # Test alter replication
+ cli.alter_retention_policy('somename', 'db',
+ replication=4)
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'alter retention policy "somename" on "db" replication 4'
+ )
+
+ # Test alter shard duration
+ cli.alter_retention_policy('somename', 'db',
+ shard_duration='1h')
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'alter retention policy "somename" on "db" shard duration 1h'
+ )
+
+ # Test alter default
+ cli.alter_retention_policy('somename', 'db',
+ default=True)
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'alter retention policy "somename" on "db" default'
+ )
+
+ @raises(Exception)
+ def test_alter_retention_policy_invalid(self):
+ """Test invalid alter ret policy for TestInfluxDBClient object."""
+ cli = DataFrameClient('host', 8086, 'username', 'password')
+ with _mocked_session(cli, 'get', 400):
+ cli.alter_retention_policy('somename', 'db')
+
+ def test_drop_retention_policy(self):
+ """Test drop retention policy for TestInfluxDBClient object."""
+ cli = DataFrameClient(database='db')
+ example_response = '{"results":[{}]}'
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text=example_response
+ )
+ cli.drop_retention_policy('somename', 'db')
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'drop retention policy "somename" on "db"'
+ )
+
+ @raises(Exception)
+ def test_drop_retention_policy_fails(self):
+ """Test failed drop ret policy for TestInfluxDBClient object."""
+ cli = DataFrameClient('host', 8086, 'username', 'password')
+ with _mocked_session(cli, 'delete', 401):
+ cli.drop_retention_policy('default', 'db')
+
+ def test_get_list_retention_policies(self):
+ """Test get retention policies for TestInfluxDBClient object."""
+ cli = DataFrameClient(database='db')
+ example_response = \
+ '{"results": [{"series": [{"values": [["fsfdsdf", "24h0m0s", 2]],'\
+ ' "columns": ["name", "duration", "replicaN"]}]}]}'
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/query",
+ text=example_response
+ )
+ self.assertListEqual(
+ cli.get_list_retention_policies("db"),
+ [{'duration': '24h0m0s',
+ 'name': 'fsfdsdf', 'replicaN': 2}]
+ )
+
+ def test_query_into_dataframe(self):
+ """Test query into df for TestDataFrameClient object."""
+ data = {
+ "results": [{
+ "series": [
+ {"measurement": "network",
+ "tags": {"direction": ""},
+ "columns": ["time", "value"],
+ "values": [["2009-11-10T23:00:00Z", 23422]]
+ },
+ {"measurement": "network",
+ "tags": {"direction": "in"},
+ "columns": ["time", "value"],
+ "values": [["2009-11-10T23:00:00Z", 23422],
+ ["2009-11-10T23:00:00Z", 23422],
+ ["2009-11-10T23:00:00Z", 23422]]
+ }
+ ]
+ }]
+ }
+
+ pd1 = pd.DataFrame(
+ [[23422]], columns=['value'],
+ index=pd.to_datetime(["2009-11-10T23:00:00Z"]))
+ if pd1.index.tzinfo is None:
+ pd1.index = pd1.index.tz_localize('UTC')
+ pd2 = pd.DataFrame(
+ [[23422], [23422], [23422]], columns=['value'],
+ index=pd.to_datetime(["2009-11-10T23:00:00Z",
+ "2009-11-10T23:00:00Z",
+ "2009-11-10T23:00:00Z"]))
+ if pd2.index.tzinfo is None:
+ pd2.index = pd2.index.tz_localize('UTC')
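+        # DataFrameClient keys each result frame by a (measurement,
+        # tag key/value pairs) tuple, as asserted below.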
+ expected = {
+ ('network', (('direction', ''),)): pd1,
+ ('network', (('direction', 'in'),)): pd2
+ }
+
+ cli = DataFrameClient('host', 8086, 'username', 'password', 'db')
+ with _mocked_session(cli, 'GET', 200, data):
+ result = cli.query('select value from network group by direction;')
+ for k in expected:
+ assert_frame_equal(expected[k], result[k])
+
+ def test_multiquery_into_dataframe(self):
+ """Test multiquery into df for TestDataFrameClient object."""
+ data = {
+ "results": [
+ {
+ "series": [
+ {
+ "name": "cpu_load_short",
+ "columns": ["time", "value"],
+ "values": [
+ ["2015-01-29T21:55:43.702900257Z", 0.55],
+ ["2015-01-29T21:55:43.702900257Z", 23422],
+ ["2015-06-11T20:46:02Z", 0.64]
+ ]
+ }
+ ]
+ }, {
+ "series": [
+ {
+ "name": "cpu_load_short",
+ "columns": ["time", "count"],
+ "values": [
+ ["1970-01-01T00:00:00Z", 3]
+ ]
+ }
+ ]
+ }
+ ]
+ }
+
+ pd1 = pd.DataFrame(
+ [[0.55], [23422.0], [0.64]], columns=['value'],
+ index=pd.to_datetime([
+ "2015-01-29 21:55:43.702900257+0000",
+ "2015-01-29 21:55:43.702900257+0000",
+ "2015-06-11 20:46:02+0000"]))
+ if pd1.index.tzinfo is None:
+ pd1.index = pd1.index.tz_localize('UTC')
+ pd2 = pd.DataFrame(
+ [[3]], columns=['count'],
+ index=pd.to_datetime(["1970-01-01 00:00:00+00:00"]))
+ if pd2.index.tzinfo is None:
+ pd2.index = pd2.index.tz_localize('UTC')
+ expected = [{'cpu_load_short': pd1}, {'cpu_load_short': pd2}]
+
+ cli = DataFrameClient('host', 8086, 'username', 'password', 'db')
+ iql = "SELECT value FROM cpu_load_short WHERE region=$region;"\
+ "SELECT count(value) FROM cpu_load_short WHERE region=$region"
+ bind_params = {'region': 'us-west'}
+ with _mocked_session(cli, 'GET', 200, data):
+ result = cli.query(iql, bind_params=bind_params)
+ for r, e in zip(result, expected):
+ for k in e:
+ assert_frame_equal(e[k], r[k])
+
+ def test_multiquery_into_dataframe_dropna(self):
+ """Test multiquery into df for TestDataFrameClient object."""
+ data = {
+ "results": [
+ {
+ "series": [
+ {
+ "name": "cpu_load_short",
+ "columns": ["time", "value", "value2", "value3"],
+ "values": [
+                            ["2015-01-29T21:55:43.702900257Z",
+                             0.55, 0.254, np.nan],
+                            ["2015-01-29T21:55:43.702900257Z",
+                             23422, 122878, np.nan],
+                            ["2015-06-11T20:46:02Z",
+                             0.64, 0.5434, np.nan]
+ ]
+ }
+ ]
+ }, {
+ "series": [
+ {
+ "name": "cpu_load_short",
+ "columns": ["time", "count"],
+ "values": [
+ ["1970-01-01T00:00:00Z", 3]
+ ]
+ }
+ ]
+ }
+ ]
+ }
+
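+        # value3 is NaN on every row, so dropna=True is expected to drop the
+        # whole column while keeping all three rows (see pd1_dropna below)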
+ pd1 = pd.DataFrame(
+            [[0.55, 0.254, np.nan],
+             [23422.0, 122878, np.nan],
+             [0.64, 0.5434, np.nan]],
+ columns=['value', 'value2', 'value3'],
+ index=pd.to_datetime([
+ "2015-01-29 21:55:43.702900257+0000",
+ "2015-01-29 21:55:43.702900257+0000",
+ "2015-06-11 20:46:02+0000"]))
+
+ if pd1.index.tzinfo is None:
+ pd1.index = pd1.index.tz_localize('UTC')
+
+ pd1_dropna = pd.DataFrame(
+ [[0.55, 0.254], [23422.0, 122878], [0.64, 0.5434]],
+ columns=['value', 'value2'],
+ index=pd.to_datetime([
+ "2015-01-29 21:55:43.702900257+0000",
+ "2015-01-29 21:55:43.702900257+0000",
+ "2015-06-11 20:46:02+0000"]))
+
+ if pd1_dropna.index.tzinfo is None:
+ pd1_dropna.index = pd1_dropna.index.tz_localize('UTC')
+
+ pd2 = pd.DataFrame(
+ [[3]], columns=['count'],
+ index=pd.to_datetime(["1970-01-01 00:00:00+00:00"]))
+
+ if pd2.index.tzinfo is None:
+ pd2.index = pd2.index.tz_localize('UTC')
+
+ expected_dropna_true = [
+ {'cpu_load_short': pd1_dropna},
+ {'cpu_load_short': pd2}]
+ expected_dropna_false = [
+ {'cpu_load_short': pd1},
+ {'cpu_load_short': pd2}]
+
+ cli = DataFrameClient('host', 8086, 'username', 'password', 'db')
+ iql = "SELECT value FROM cpu_load_short WHERE region=$region;" \
+ "SELECT count(value) FROM cpu_load_short WHERE region=$region"
+ bind_params = {'region': 'us-west'}
+
+ for dropna in [True, False]:
+ with _mocked_session(cli, 'GET', 200, data):
+ result = cli.query(iql, bind_params=bind_params, dropna=dropna)
+ expected = \
+ expected_dropna_true if dropna else expected_dropna_false
+ for r, e in zip(result, expected):
+ for k in e:
+ assert_frame_equal(e[k], r[k])
+
+ # test default value (dropna = True)
+ with _mocked_session(cli, 'GET', 200, data):
+ result = cli.query(iql, bind_params=bind_params)
+ for r, e in zip(result, expected_dropna_true):
+ for k in e:
+ assert_frame_equal(e[k], r[k])
+
+ def test_query_with_empty_result(self):
+ """Test query with empty results in TestDataFrameClient object."""
+ cli = DataFrameClient('host', 8086, 'username', 'password', 'db')
+ with _mocked_session(cli, 'GET', 200, {"results": [{}]}):
+ result = cli.query('select column_one from foo;')
+ self.assertEqual(result, {})
+
+ def test_get_list_database(self):
+ """Test get list of databases in TestDataFrameClient object."""
+ data = {'results': [
+ {'series': [
+ {'measurement': 'databases',
+ 'values': [
+ ['new_db_1'],
+ ['new_db_2']],
+ 'columns': ['name']}]}
+ ]}
+
+ cli = DataFrameClient('host', 8086, 'username', 'password', 'db')
+ with _mocked_session(cli, 'get', 200, json.dumps(data)):
+ self.assertListEqual(
+ cli.get_list_database(),
+ [{'name': 'new_db_1'}, {'name': 'new_db_2'}]
+ )
+
+ def test_datetime_to_epoch(self):
+ """Test convert datetime to epoch in TestDataFrameClient object."""
+ timestamp = pd.Timestamp('2013-01-01 00:00:00.000+00:00')
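+        # 2013-01-01T00:00:00Z is 1356998400 seconds after the Unix epoch;
+        # each precision below simply rescales that base value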
+ cli = DataFrameClient('host', 8086, 'username', 'password', 'db')
+
+ self.assertEqual(
+ cli._datetime_to_epoch(timestamp),
+ 1356998400.0
+ )
+ self.assertEqual(
+ cli._datetime_to_epoch(timestamp, time_precision='h'),
+ 1356998400.0 / 3600
+ )
+ self.assertEqual(
+ cli._datetime_to_epoch(timestamp, time_precision='m'),
+ 1356998400.0 / 60
+ )
+ self.assertEqual(
+ cli._datetime_to_epoch(timestamp, time_precision='s'),
+ 1356998400.0
+ )
+ self.assertEqual(
+ cli._datetime_to_epoch(timestamp, time_precision='ms'),
+ 1356998400000.0
+ )
+ self.assertEqual(
+ cli._datetime_to_epoch(timestamp, time_precision='u'),
+ 1356998400000000.0
+ )
+ self.assertEqual(
+ cli._datetime_to_epoch(timestamp, time_precision='n'),
+ 1356998400000000000.0
+ )
+
+ def test_dsn_constructor(self):
+ """Test data source name deconstructor in TestDataFrameClient."""
+ client = DataFrameClient.from_dsn('influxdb://localhost:8086')
+ self.assertIsInstance(client, DataFrameClient)
+ self.assertEqual('http://localhost:8086', client._baseurl)
+
+ def test_write_points_from_dataframe_with_nan_line(self):
+ """Test write points from dataframe with Nan lines."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.DataFrame(data=[["1", 1, np.inf], ["2", 2, np.nan]],
+ index=[now, now + timedelta(hours=1)],
+ columns=["column_one", "column_two",
+ "column_three"])
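+        # inf and NaN field values are expected to be dropped during
+        # serialization, so column_three never appears in the lines below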
+ expected = (
+ b"foo column_one=\"1\",column_two=1i 0\n"
+ b"foo column_one=\"2\",column_two=2i "
+ b"3600000000000\n"
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+
+ cli = DataFrameClient(database='db')
+
+ cli.write_points(dataframe, 'foo', protocol='line')
+ self.assertEqual(m.last_request.body, expected)
+
+ cli.write_points(dataframe, 'foo', tags=None, protocol='line')
+ self.assertEqual(m.last_request.body, expected)
+
+ def test_write_points_from_dataframe_with_nan_json(self):
+ """Test write points from json with NaN lines."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.DataFrame(data=[["1", 1, np.inf], ["2", 2, np.nan]],
+ index=[now, now + timedelta(hours=1)],
+ columns=["column_one", "column_two",
+ "column_three"])
+ expected = (
+ b"foo column_one=\"1\",column_two=1i 0\n"
+ b"foo column_one=\"2\",column_two=2i "
+ b"3600000000000\n"
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+
+ cli = DataFrameClient(database='db')
+
+ cli.write_points(dataframe, 'foo', protocol='json')
+ self.assertEqual(m.last_request.body, expected)
+
+ cli.write_points(dataframe, 'foo', tags=None, protocol='json')
+ self.assertEqual(m.last_request.body, expected)
+
+ def test_write_points_from_dataframe_with_tags_and_nan_line(self):
+ """Test write points from dataframe with NaN lines and tags."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.DataFrame(data=[['blue', 1, "1", 1, np.inf],
+ ['red', 0, "2", 2, np.nan]],
+ index=[now, now + timedelta(hours=1)],
+ columns=["tag_one", "tag_two", "column_one",
+ "column_two", "column_three"])
+ expected = (
+ b"foo,tag_one=blue,tag_two=1 "
+ b"column_one=\"1\",column_two=1i "
+ b"0\n"
+ b"foo,tag_one=red,tag_two=0 "
+ b"column_one=\"2\",column_two=2i "
+ b"3600000000000\n"
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+
+ cli = DataFrameClient(database='db')
+
+ cli.write_points(dataframe, 'foo', protocol='line',
+ tag_columns=['tag_one', 'tag_two'])
+ self.assertEqual(m.last_request.body, expected)
+
+ cli.write_points(dataframe, 'foo', tags=None, protocol='line',
+ tag_columns=['tag_one', 'tag_two'])
+ self.assertEqual(m.last_request.body, expected)
+
+ def test_write_points_from_dataframe_with_tags_and_nan_json(self):
+ """Test write points from json with NaN lines and tags."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.DataFrame(data=[['blue', 1, "1", 1, np.inf],
+ ['red', 0, "2", 2, np.nan]],
+ index=[now, now + timedelta(hours=1)],
+ columns=["tag_one", "tag_two", "column_one",
+ "column_two", "column_three"])
+ expected = (
+ b"foo,tag_one=blue,tag_two=1 "
+ b"column_one=\"1\",column_two=1i "
+ b"0\n"
+ b"foo,tag_one=red,tag_two=0 "
+ b"column_one=\"2\",column_two=2i "
+ b"3600000000000\n"
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+
+ cli = DataFrameClient(database='db')
+
+ cli.write_points(dataframe, 'foo', protocol='json',
+ tag_columns=['tag_one', 'tag_two'])
+ self.assertEqual(m.last_request.body, expected)
+
+ cli.write_points(dataframe, 'foo', tags=None, protocol='json',
+ tag_columns=['tag_one', 'tag_two'])
+ self.assertEqual(m.last_request.body, expected)
+
+ def test_query_custom_index(self):
+ """Test query with custom indexes."""
+ data = {
+ "results": [
+ {
+ "series": [
+ {
+ "name": "cpu_load_short",
+ "columns": ["time", "value", "host"],
+ "values": [
+ [1, 0.55, "local"],
+ [2, 23422, "local"],
+ [3, 0.64, "local"]
+ ]
+ }
+ ]
+ }
+ ]
+ }
+
+ cli = DataFrameClient('host', 8086, 'username', 'password', 'db')
+ iql = "SELECT value FROM cpu_load_short WHERE region=$region;" \
+ "SELECT count(value) FROM cpu_load_short WHERE region=$region"
+ bind_params = {'region': 'us-west'}
+ with _mocked_session(cli, 'GET', 200, data):
+ result = cli.query(iql, bind_params=bind_params,
+ data_frame_index=["time", "host"])
+
+ _data_frame = result['cpu_load_short']
+
+ self.assertListEqual(["time", "host"],
+ list(_data_frame.index.names))
+
+ def test_dataframe_nanosecond_precision(self):
+ """Test nanosecond precision."""
+ for_df_dict = {
+ "nanFloats": [1.1, float('nan'), 3.3, 4.4],
+ "onlyFloats": [1.1, 2.2, 3.3, 4.4],
+ "strings": ['one_one', 'two_two', 'three_three', 'four_four']
+ }
+ df = pd.DataFrame.from_dict(for_df_dict)
+ df['time'] = ['2019-10-04 06:27:19.850557111+00:00',
+ '2019-10-04 06:27:19.850557184+00:00',
+ '2019-10-04 06:27:42.251396864+00:00',
+ '2019-10-04 06:27:42.251396974+00:00']
+ df['time'] = pd.to_datetime(df['time'], unit='ns')
+ df = df.set_index('time')
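+        # the first two timestamps differ only below one microsecond, so the
+        # distinction survives only if nanosecond precision is preserved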
+
+ expected = (
+            b'foo nanFloats=1.1,onlyFloats=1.1,strings="one_one" 1570170439850557111\n'  # noqa: E501
+            b'foo onlyFloats=2.2,strings="two_two" 1570170439850557184\n'  # noqa: E501
+            b'foo nanFloats=3.3,onlyFloats=3.3,strings="three_three" 1570170462251396864\n'  # noqa: E501
+            b'foo nanFloats=4.4,onlyFloats=4.4,strings="four_four" 1570170462251396974\n'  # noqa: E501
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204
+ )
+
+ cli = DataFrameClient(database='db')
+ cli.write_points(df, 'foo', time_precision='n')
+
+ self.assertEqual(m.last_request.body, expected)
+
+ def test_dataframe_nanosecond_precision_one_microsecond(self):
+ """Test nanosecond precision within one microsecond."""
+ # 1 microsecond = 1000 nanoseconds
+ start = np.datetime64('2019-10-04T06:27:19.850557000')
+ end = np.datetime64('2019-10-04T06:27:19.850558000')
+
+ # generate timestamps with nanosecond precision
+ timestamps = np.arange(
+ start,
+ end + np.timedelta64(1, 'ns'),
+ np.timedelta64(1, 'ns')
+ )
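+        # the stop bound is one step past `end`, so `end` itself is
+        # included: 1001 timestamps in total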
+ # generate values
+ values = np.arange(0.0, len(timestamps))
+
+ df = pd.DataFrame({'value': values}, index=timestamps)
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204
+ )
+
+ cli = DataFrameClient(database='db')
+ cli.write_points(df, 'foo', time_precision='n')
+
+ lines = m.last_request.body.decode('utf-8').split('\n')
+ self.assertEqual(len(lines), 1002)
+
+ for index, line in enumerate(lines):
+ if index == 1001:
+ self.assertEqual(line, '')
+ continue
+ self.assertEqual(
+ line,
+ f"foo value={index}.0 157017043985055{7000 + index:04}"
+ )
diff --git a/influxdb/tests/helper_test.py b/influxdb/tests/helper_test.py
new file mode 100644
index 00000000..6737f921
--- /dev/null
+++ b/influxdb/tests/helper_test.py
@@ -0,0 +1,437 @@
+# -*- coding: utf-8 -*-
+"""Set of series helper functions for test."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from datetime import datetime, timedelta
+
+import unittest
+import warnings
+
+import mock
+from influxdb import SeriesHelper, InfluxDBClient
+from requests.exceptions import ConnectionError
+
+
+class TestSeriesHelper(unittest.TestCase):
+ """Define the SeriesHelper test object."""
+
+ @classmethod
+ def setUpClass(cls):
+ """Set up the TestSeriesHelper object."""
+ super(TestSeriesHelper, cls).setUpClass()
+
+ TestSeriesHelper.client = InfluxDBClient(
+ 'host',
+ 8086,
+ 'username',
+ 'password',
+ 'database'
+ )
+
+ class MySeriesHelper(SeriesHelper):
+ """Define a SeriesHelper object."""
+
+ class Meta:
+ """Define metadata for the SeriesHelper object."""
+
+ client = TestSeriesHelper.client
+ series_name = 'events.stats.{server_name}'
+ fields = ['some_stat']
+ tags = ['server_name', 'other_tag']
+ bulk_size = 5
+ autocommit = True
+
+ TestSeriesHelper.MySeriesHelper = MySeriesHelper
+
+ def setUp(self):
+ """Check that MySeriesHelper has empty datapoints."""
+ super(TestSeriesHelper, self).setUp()
+ self.assertEqual(
+ TestSeriesHelper.MySeriesHelper._json_body_(),
+ [],
+ 'Resetting helper in teardown did not empty datapoints.')
+
+ def tearDown(self):
+ """Deconstruct the TestSeriesHelper object."""
+ super(TestSeriesHelper, self).tearDown()
+ TestSeriesHelper.MySeriesHelper._reset_()
+ self.assertEqual(
+ TestSeriesHelper.MySeriesHelper._json_body_(),
+ [],
+ 'Resetting helper did not empty datapoints.')
+
+ def test_auto_commit(self):
+ """Test write_points called after valid number of events."""
+ class AutoCommitTest(SeriesHelper):
+ """Define a SeriesHelper instance to test autocommit."""
+
+ class Meta:
+ """Define metadata for AutoCommitTest."""
+
+ series_name = 'events.stats.{server_name}'
+ fields = ['some_stat']
+ tags = ['server_name', 'other_tag']
+ bulk_size = 5
+ client = InfluxDBClient()
+ autocommit = True
+
+ fake_write_points = mock.MagicMock()
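+        # the first point is queued before the mock is attached; four more
+        # bring the total to bulk_size (5), which should trigger the commit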
+ AutoCommitTest(server_name='us.east-1', some_stat=159, other_tag='gg')
+ AutoCommitTest._client.write_points = fake_write_points
+ AutoCommitTest(server_name='us.east-1', some_stat=158, other_tag='gg')
+ AutoCommitTest(server_name='us.east-1', some_stat=157, other_tag='gg')
+ AutoCommitTest(server_name='us.east-1', some_stat=156, other_tag='gg')
+ self.assertFalse(fake_write_points.called)
+ AutoCommitTest(server_name='us.east-1', some_stat=3443, other_tag='gg')
+ self.assertTrue(fake_write_points.called)
+
+ @mock.patch('influxdb.helper.SeriesHelper._current_timestamp')
+ def testSingleSeriesName(self, current_timestamp):
+ """Test JSON conversion when there is only one series name."""
+ current_timestamp.return_value = current_date = datetime.today()
+ TestSeriesHelper.MySeriesHelper(
+ server_name='us.east-1', other_tag='ello', some_stat=159)
+ TestSeriesHelper.MySeriesHelper(
+ server_name='us.east-1', other_tag='ello', some_stat=158)
+ TestSeriesHelper.MySeriesHelper(
+ server_name='us.east-1', other_tag='ello', some_stat=157)
+ TestSeriesHelper.MySeriesHelper(
+ server_name='us.east-1', other_tag='ello', some_stat=156)
+ expectation = [
+ {
+ "measurement": "events.stats.us.east-1",
+ "tags": {
+ "other_tag": "ello",
+ "server_name": "us.east-1"
+ },
+ "fields": {
+ "some_stat": 159
+ },
+ "time": current_date,
+ },
+ {
+ "measurement": "events.stats.us.east-1",
+ "tags": {
+ "other_tag": "ello",
+ "server_name": "us.east-1"
+ },
+ "fields": {
+ "some_stat": 158
+ },
+ "time": current_date,
+ },
+ {
+ "measurement": "events.stats.us.east-1",
+ "tags": {
+ "other_tag": "ello",
+ "server_name": "us.east-1"
+ },
+ "fields": {
+ "some_stat": 157
+ },
+ "time": current_date,
+ },
+ {
+ "measurement": "events.stats.us.east-1",
+ "tags": {
+ "other_tag": "ello",
+ "server_name": "us.east-1"
+ },
+ "fields": {
+ "some_stat": 156
+ },
+ "time": current_date,
+ }
+ ]
+
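+        # point ordering is not guaranteed, so the two membership checks
+        # below compare the lists as unordered collections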
+ rcvd = TestSeriesHelper.MySeriesHelper._json_body_()
+ self.assertTrue(all([el in expectation for el in rcvd]) and
+ all([el in rcvd for el in expectation]),
+ 'Invalid JSON body of time series returned from '
+ '_json_body_ for one series name: {0}.'.format(rcvd))
+
+ @mock.patch('influxdb.helper.SeriesHelper._current_timestamp')
+ def testSeveralSeriesNames(self, current_timestamp):
+ """Test JSON conversion when there are multiple series names."""
+ current_timestamp.return_value = current_date = datetime.today()
+ TestSeriesHelper.MySeriesHelper(
+ server_name='us.east-1', some_stat=159, other_tag='ello')
+ TestSeriesHelper.MySeriesHelper(
+ server_name='fr.paris-10', some_stat=158, other_tag='ello')
+ TestSeriesHelper.MySeriesHelper(
+ server_name='lu.lux', some_stat=157, other_tag='ello')
+ TestSeriesHelper.MySeriesHelper(
+ server_name='uk.london', some_stat=156, other_tag='ello')
+ expectation = [
+ {
+ 'fields': {
+ 'some_stat': 157
+ },
+ 'measurement': 'events.stats.lu.lux',
+ 'tags': {
+ 'other_tag': 'ello',
+ 'server_name': 'lu.lux'
+ },
+ "time": current_date,
+ },
+ {
+ 'fields': {
+ 'some_stat': 156
+ },
+ 'measurement': 'events.stats.uk.london',
+ 'tags': {
+ 'other_tag': 'ello',
+ 'server_name': 'uk.london'
+ },
+ "time": current_date,
+ },
+ {
+ 'fields': {
+ 'some_stat': 158
+ },
+ 'measurement': 'events.stats.fr.paris-10',
+ 'tags': {
+ 'other_tag': 'ello',
+ 'server_name': 'fr.paris-10'
+ },
+ "time": current_date,
+ },
+ {
+ 'fields': {
+ 'some_stat': 159
+ },
+ 'measurement': 'events.stats.us.east-1',
+ 'tags': {
+ 'other_tag': 'ello',
+ 'server_name': 'us.east-1'
+ },
+ "time": current_date,
+ }
+ ]
+
+ rcvd = TestSeriesHelper.MySeriesHelper._json_body_()
+ self.assertTrue(all([el in expectation for el in rcvd]) and
+ all([el in rcvd for el in expectation]),
+ 'Invalid JSON body of time series returned from '
+ '_json_body_ for several series names: {0}.'
+ .format(rcvd))
+
+ @mock.patch('influxdb.helper.SeriesHelper._current_timestamp')
+ def testSeriesWithoutTimeField(self, current_timestamp):
+ """Test that time is optional on a series without a time field."""
+ current_date = datetime.today()
+ yesterday = current_date - timedelta(days=1)
+ current_timestamp.return_value = yesterday
+ TestSeriesHelper.MySeriesHelper(
+ server_name='us.east-1', other_tag='ello',
+ some_stat=159, time=current_date
+ )
+ TestSeriesHelper.MySeriesHelper(
+ server_name='us.east-1', other_tag='ello',
+ some_stat=158,
+ )
+ point1, point2 = TestSeriesHelper.MySeriesHelper._json_body_()
+ self.assertTrue('time' in point1 and 'time' in point2)
+ self.assertEqual(point1['time'], current_date)
+ self.assertEqual(point2['time'], yesterday)
+
+ def testSeriesWithoutAllTags(self):
+ """Test that creating a data point without a tag throws an error."""
+ class MyTimeFieldSeriesHelper(SeriesHelper):
+
+ class Meta:
+ client = TestSeriesHelper.client
+ series_name = 'events.stats.{server_name}'
+ fields = ['some_stat', 'time']
+ tags = ['server_name', 'other_tag']
+ bulk_size = 5
+ autocommit = True
+
+ self.assertRaises(NameError, MyTimeFieldSeriesHelper,
+ **{"server_name": 'us.east-1',
+ "some_stat": 158})
+
+ @mock.patch('influxdb.helper.SeriesHelper._current_timestamp')
+ def testSeriesWithTimeField(self, current_timestamp):
+ """Test that time is optional on a series with a time field."""
+ current_date = datetime.today()
+ yesterday = current_date - timedelta(days=1)
+ current_timestamp.return_value = yesterday
+
+ class MyTimeFieldSeriesHelper(SeriesHelper):
+
+ class Meta:
+ client = TestSeriesHelper.client
+ series_name = 'events.stats.{server_name}'
+ fields = ['some_stat', 'time']
+ tags = ['server_name', 'other_tag']
+ bulk_size = 5
+ autocommit = True
+
+ MyTimeFieldSeriesHelper(
+ server_name='us.east-1', other_tag='ello',
+ some_stat=159, time=current_date
+ )
+ MyTimeFieldSeriesHelper(
+ server_name='us.east-1', other_tag='ello',
+ some_stat=158,
+ )
+ point1, point2 = MyTimeFieldSeriesHelper._json_body_()
+ self.assertTrue('time' in point1 and 'time' in point2)
+ self.assertEqual(point1['time'], current_date)
+ self.assertEqual(point2['time'], yesterday)
+
+ def testInvalidHelpers(self):
+ """Test errors in invalid helpers."""
+ class MissingMeta(SeriesHelper):
+ """Define instance of SeriesHelper for missing meta."""
+
+ pass
+
+ class MissingClient(SeriesHelper):
+ """Define SeriesHelper for missing client data."""
+
+ class Meta:
+ """Define metadat for MissingClient."""
+
+ series_name = 'events.stats.{server_name}'
+ fields = ['time', 'server_name']
+ autocommit = True
+
+ class MissingSeriesName(SeriesHelper):
+ """Define instance of SeriesHelper for missing series."""
+
+ class Meta:
+ """Define metadata for MissingSeriesName."""
+
+ fields = ['time', 'server_name']
+
+ class MissingFields(SeriesHelper):
+ """Define instance of SeriesHelper for missing fields."""
+
+ class Meta:
+ """Define metadata for MissingFields."""
+
+ series_name = 'events.stats.{server_name}'
+
+ class InvalidTimePrecision(SeriesHelper):
+ """Define instance of SeriesHelper for invalid time precision."""
+
+ class Meta:
+ """Define metadata for InvalidTimePrecision."""
+
+ series_name = 'events.stats.{server_name}'
+ time_precision = "ks"
+ fields = ['time', 'server_name']
+ autocommit = True
+
+ for cls in [MissingMeta, MissingClient, MissingFields,
+ MissingSeriesName, InvalidTimePrecision]:
+ self.assertRaises(
+ AttributeError, cls, **{'time': 159,
+ 'server_name': 'us.east-1'})
+
+ @unittest.skip("Fails on py32")
+ def testWarnBulkSizeZero(self):
+ """Test warning for an invalid bulk size."""
+ class WarnBulkSizeZero(SeriesHelper):
+
+ class Meta:
+ client = TestSeriesHelper.client
+ series_name = 'events.stats.{server_name}'
+ fields = ['time', 'server_name']
+ tags = []
+ bulk_size = 0
+ autocommit = True
+
+ with warnings.catch_warnings(record=True) as w:
+ warnings.simplefilter("always")
+ try:
+ WarnBulkSizeZero(time=159, server_name='us.east-1')
+ except ConnectionError:
+ # Server defined in the client is invalid, we're testing
+ # the warning only.
+ pass
+ self.assertEqual(len(w), 1,
+ '{0} call should have generated one warning.'
+ .format(WarnBulkSizeZero))
+ self.assertIn('forced to 1', str(w[-1].message),
+ 'Warning message did not contain "forced to 1".')
+
+ def testWarnBulkSizeNoEffect(self):
+ """Test warning for a set bulk size but autocommit False."""
+ class WarnBulkSizeNoEffect(SeriesHelper):
+ """Define SeriesHelper for warning on bulk size."""
+
+ class Meta:
+ """Define metadat for WarnBulkSizeNoEffect."""
+
+ series_name = 'events.stats.{server_name}'
+ fields = ['time', 'server_name']
+ bulk_size = 5
+ tags = []
+ autocommit = False
+
+ with warnings.catch_warnings(record=True) as w:
+ warnings.simplefilter("always")
+ WarnBulkSizeNoEffect(time=159, server_name='us.east-1')
+ self.assertEqual(len(w), 1,
+ '{0} call should have generated one warning.'
+ .format(WarnBulkSizeNoEffect))
+ self.assertIn('has no affect', str(w[-1].message),
+                          'Warning message did not contain "has no affect".')
+
+ def testSeriesWithRetentionPolicy(self):
+ """Test that the data is saved with the specified retention policy."""
+ my_policy = 'my_policy'
+
+ class RetentionPolicySeriesHelper(SeriesHelper):
+
+ class Meta:
+ client = InfluxDBClient()
+ series_name = 'events.stats.{server_name}'
+ fields = ['some_stat', 'time']
+ tags = ['server_name', 'other_tag']
+ bulk_size = 2
+ autocommit = True
+ retention_policy = my_policy
+
+ fake_write_points = mock.MagicMock()
+ RetentionPolicySeriesHelper(
+ server_name='us.east-1', some_stat=159, other_tag='gg')
+ RetentionPolicySeriesHelper._client.write_points = fake_write_points
+ RetentionPolicySeriesHelper(
+ server_name='us.east-1', some_stat=158, other_tag='aa')
+
+ kall = fake_write_points.call_args
+ args, kwargs = kall
+ self.assertTrue('retention_policy' in kwargs)
+ self.assertEqual(kwargs['retention_policy'], my_policy)
+
+ def testSeriesWithoutRetentionPolicy(self):
+ """Test that the data is saved without any retention policy."""
+ class NoRetentionPolicySeriesHelper(SeriesHelper):
+
+ class Meta:
+ client = InfluxDBClient()
+ series_name = 'events.stats.{server_name}'
+ fields = ['some_stat', 'time']
+ tags = ['server_name', 'other_tag']
+ bulk_size = 2
+ autocommit = True
+
+ fake_write_points = mock.MagicMock()
+ NoRetentionPolicySeriesHelper(
+ server_name='us.east-1', some_stat=159, other_tag='gg')
+ NoRetentionPolicySeriesHelper._client.write_points = fake_write_points
+ NoRetentionPolicySeriesHelper(
+ server_name='us.east-1', some_stat=158, other_tag='aa')
+
+ kall = fake_write_points.call_args
+ args, kwargs = kall
+ self.assertTrue('retention_policy' in kwargs)
+ self.assertEqual(kwargs['retention_policy'], None)
diff --git a/influxdb/tests/influxdb08/__init__.py b/influxdb/tests/influxdb08/__init__.py
new file mode 100644
index 00000000..0e79ed1c
--- /dev/null
+++ b/influxdb/tests/influxdb08/__init__.py
@@ -0,0 +1,2 @@
+# -*- coding: utf-8 -*-
+"""Define the influxdb08 test package."""
diff --git a/influxdb/tests/influxdb08/client_test.py b/influxdb/tests/influxdb08/client_test.py
new file mode 100644
index 00000000..39ab52d6
--- /dev/null
+++ b/influxdb/tests/influxdb08/client_test.py
@@ -0,0 +1,904 @@
+# -*- coding: utf-8 -*-
+"""Client unit tests."""
+
+import json
+import socket
+import sys
+import unittest
+import random
+import warnings
+
+import mock
+import requests
+import requests.exceptions
+import requests_mock
+
+from nose.tools import raises
+from mock import patch
+
+from influxdb.influxdb08 import InfluxDBClient
+from influxdb.influxdb08.client import session
+
+if sys.version < '3':
+ import codecs
+
+ def u(x):
+ """Test codec."""
+ return codecs.unicode_escape_decode(x)[0]
+else:
+ def u(x):
+ """Test codec."""
+ return x
+
+
+def _build_response_object(status_code=200, content=""):
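+    """Build a requests.Response with the given status code and content."""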
+ resp = requests.Response()
+ resp.status_code = status_code
+ resp._content = content.encode("utf8")
+ return resp
+
+
+def _mocked_session(method="GET", status_code=200, content=""):
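+    """Return a mock.patch object that stubs out session.request."""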
+ method = method.upper()
+
+ def request(*args, **kwargs):
+ """Define a request for the _mocked_session."""
+ c = content
+
+ # Check method
+ assert method == kwargs.get('method', 'GET')
+
+ if method == 'POST':
+ data = kwargs.get('data', None)
+
+ if data is not None:
+ # Data must be a string
+ assert isinstance(data, str)
+
+ # Data must be a JSON string
+ assert c == json.loads(data, strict=True)
+
+ c = data
+
+        # Either way, the content must be a JSON string (or empty string)
+ if not isinstance(c, str):
+ c = json.dumps(c)
+
+ return _build_response_object(status_code=status_code, content=c)
+
+ mocked = patch.object(
+ session,
+ 'request',
+ side_effect=request
+ )
+
+ return mocked
+
+
+class TestInfluxDBClient(unittest.TestCase):
+ """Define a TestInfluxDBClient object."""
+
+ def setUp(self):
+ """Set up a TestInfluxDBClient object."""
+ # By default, raise exceptions on warnings
+ warnings.simplefilter('error', FutureWarning)
+
+ self.dummy_points = [
+ {
+ "points": [
+ ["1", 1, 1.0],
+ ["2", 2, 2.0]
+ ],
+ "name": "foo",
+ "columns": ["column_one", "column_two", "column_three"]
+ }
+ ]
+
+ self.dsn_string = 'influxdb://uSr:pWd@host:1886/db'
+
+ def test_scheme(self):
+ """Test database scheme for TestInfluxDBClient object."""
+ cli = InfluxDBClient('host', 8086, 'username', 'password', 'database')
+ self.assertEqual(cli._baseurl, 'http://host:8086')
+
+ cli = InfluxDBClient(
+ 'host', 8086, 'username', 'password', 'database', ssl=True
+ )
+ self.assertEqual(cli._baseurl, 'https://host:8086')
+
+ def test_dsn(self):
+ """Test datasource name for TestInfluxDBClient object."""
+ cli = InfluxDBClient.from_dsn(self.dsn_string)
+ self.assertEqual('http://host:1886', cli._baseurl)
+ self.assertEqual('uSr', cli._username)
+ self.assertEqual('pWd', cli._password)
+ self.assertEqual('db', cli._database)
+ self.assertFalse(cli._use_udp)
+
+ cli = InfluxDBClient.from_dsn('udp+' + self.dsn_string)
+ self.assertTrue(cli._use_udp)
+
+ cli = InfluxDBClient.from_dsn('https+' + self.dsn_string)
+ self.assertEqual('https://host:1886', cli._baseurl)
+
+ cli = InfluxDBClient.from_dsn('https+' + self.dsn_string,
+ **{'ssl': False})
+ self.assertEqual('http://host:1886', cli._baseurl)
+
+ def test_switch_database(self):
+ """Test switch database for TestInfluxDBClient object."""
+ cli = InfluxDBClient('host', 8086, 'username', 'password', 'database')
+ cli.switch_database('another_database')
+ self.assertEqual(cli._database, 'another_database')
+
+ @raises(FutureWarning)
+ def test_switch_db_deprecated(self):
+ """Test deprecated switch database for TestInfluxDBClient object."""
+ cli = InfluxDBClient('host', 8086, 'username', 'password', 'database')
+ cli.switch_db('another_database')
+ self.assertEqual(cli._database, 'another_database')
+
+ def test_switch_user(self):
+ """Test switch user for TestInfluxDBClient object."""
+ cli = InfluxDBClient('host', 8086, 'username', 'password', 'database')
+ cli.switch_user('another_username', 'another_password')
+ self.assertEqual(cli._username, 'another_username')
+ self.assertEqual(cli._password, 'another_password')
+
+ def test_write(self):
+ """Test write to database for TestInfluxDBClient object."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/write"
+ )
+ cli = InfluxDBClient(database='db')
+ cli.write(
+ {"database": "mydb",
+ "retentionPolicy": "mypolicy",
+ "points": [{"name": "cpu_load_short",
+ "tags": {"host": "server01",
+ "region": "us-west"},
+ "timestamp": "2009-11-10T23:00:00Z",
+ "values": {"value": 0.64}}]}
+ )
+
+ self.assertEqual(
+ json.loads(m.last_request.body),
+ {"database": "mydb",
+ "retentionPolicy": "mypolicy",
+ "points": [{"name": "cpu_load_short",
+ "tags": {"host": "server01",
+ "region": "us-west"},
+ "timestamp": "2009-11-10T23:00:00Z",
+ "values": {"value": 0.64}}]}
+ )
+
+ def test_write_points(self):
+ """Test write points for TestInfluxDBClient object."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/db/db/series"
+ )
+
+ cli = InfluxDBClient(database='db')
+ cli.write_points(
+ self.dummy_points
+ )
+
+ self.assertListEqual(
+ json.loads(m.last_request.body),
+ self.dummy_points
+ )
+
+ def test_write_points_string(self):
+ """Test write string points for TestInfluxDBClient object."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/db/db/series"
+ )
+
+ cli = InfluxDBClient(database='db')
+ cli.write_points(
+ str(json.dumps(self.dummy_points))
+ )
+
+ self.assertListEqual(
+ json.loads(m.last_request.body),
+ self.dummy_points
+ )
+
+ def test_write_points_batch(self):
+ """Test write batch points for TestInfluxDBClient object."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/db/db/series")
+ cli = InfluxDBClient('localhost', 8086,
+ 'username', 'password', 'db')
+ cli.write_points(data=self.dummy_points, batch_size=2)
+ self.assertEqual(1, m.call_count)
+
+ def test_write_points_batch_invalid_size(self):
+ """Test write batch points invalid size for TestInfluxDBClient."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/db/db/series")
+ cli = InfluxDBClient('localhost', 8086,
+ 'username', 'password', 'db')
+ cli.write_points(data=self.dummy_points, batch_size=-2)
+ self.assertEqual(1, m.call_count)
+
+ def test_write_points_batch_multiple_series(self):
+ """Test write points batch multiple series."""
+ dummy_points = [
+ {"points": [["1", 1, 1.0], ["2", 2, 2.0], ["3", 3, 3.0],
+ ["4", 4, 4.0], ["5", 5, 5.0]],
+ "name": "foo",
+ "columns": ["val1", "val2", "val3"]},
+ {"points": [["1", 1, 1.0], ["2", 2, 2.0], ["3", 3, 3.0],
+ ["4", 4, 4.0], ["5", 5, 5.0], ["6", 6, 6.0],
+ ["7", 7, 7.0], ["8", 8, 8.0]],
+ "name": "bar",
+ "columns": ["val1", "val2", "val3"]},
+ ]
+ expected_last_body = [{'points': [['7', 7, 7.0], ['8', 8, 8.0]],
+ 'name': 'bar',
+ 'columns': ['val1', 'val2', 'val3']}]
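+        # with batch_size=3, "foo" (5 points) takes two requests and "bar"
+        # (8 points) takes three, so five requests are expected in total;
+        # the final request should carry the last two "bar" points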
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/db/db/series")
+ cli = InfluxDBClient('localhost', 8086,
+ 'username', 'password', 'db')
+ cli.write_points(data=dummy_points, batch_size=3)
+ self.assertEqual(m.call_count, 5)
+ self.assertEqual(expected_last_body, m.request_history[4].json())
+
+ def test_write_points_udp(self):
+ """Test write points UDP for TestInfluxDBClient object."""
+ s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+ port = random.randint(4000, 8000)
+ s.bind(('0.0.0.0', port))
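+        # listen on an arbitrary UDP port and point the client at the same
+        # port so the write below can be observed on this socket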
+
+ cli = InfluxDBClient(
+ 'localhost', 8086, 'root', 'root',
+ 'test', use_udp=True, udp_port=port
+ )
+ cli.write_points(self.dummy_points)
+
+ received_data, addr = s.recvfrom(1024)
+
+ self.assertEqual(self.dummy_points,
+ json.loads(received_data.decode(), strict=True))
+
+ def test_write_bad_precision_udp(self):
+ """Test write UDP w/bad precision."""
+ cli = InfluxDBClient(
+ 'localhost', 8086, 'root', 'root',
+ 'test', use_udp=True, udp_port=4444
+ )
+
+ with self.assertRaisesRegexp(
+ Exception,
+ "InfluxDB only supports seconds precision for udp writes"
+ ):
+ cli.write_points(
+ self.dummy_points,
+ time_precision='ms'
+ )
+
+ @raises(Exception)
+ def test_write_points_fails(self):
+ """Test failed write points for TestInfluxDBClient object."""
+ with _mocked_session('post', 500):
+ cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
+ cli.write_points([])
+
+ def test_write_points_with_precision(self):
+ """Test write points with precision."""
+ with _mocked_session('post', 200, self.dummy_points):
+ cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
+ self.assertTrue(cli.write_points(self.dummy_points))
+
+ def test_write_points_bad_precision(self):
+ """Test write points with bad precision."""
+ cli = InfluxDBClient()
+ with self.assertRaisesRegexp(
+ Exception,
+ "Invalid time precision is given. \(use 's', 'm', 'ms' or 'u'\)"
+ ):
+ cli.write_points(
+ self.dummy_points,
+ time_precision='g'
+ )
+
+ @raises(Exception)
+ def test_write_points_with_precision_fails(self):
+ """Test write points where precision fails."""
+ with _mocked_session('post', 500):
+ cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
+ cli.write_points_with_precision([])
+
+ def test_delete_points(self):
+ """Test delete points for TestInfluxDBClient object."""
+ with _mocked_session('delete', 204) as mocked:
+ cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
+ self.assertTrue(cli.delete_points("foo"))
+
+ self.assertEqual(len(mocked.call_args_list), 1)
+ args, kwds = mocked.call_args_list[0]
+
+ self.assertEqual(kwds['params'],
+ {'u': 'username', 'p': 'password'})
+ self.assertEqual(kwds['url'], 'http://host:8086/db/db/series/foo')
+
+ @raises(Exception)
+ def test_delete_points_with_wrong_name(self):
+ """Test delete points with wrong name."""
+ with _mocked_session('delete', 400):
+ cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
+ cli.delete_points("nonexist")
+
+ @raises(NotImplementedError)
+ def test_create_scheduled_delete(self):
+ """Test create scheduled deletes."""
+ cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
+ cli.create_scheduled_delete([])
+
+ @raises(NotImplementedError)
+ def test_get_list_scheduled_delete(self):
+ """Test get schedule list of deletes TestInfluxDBClient."""
+ cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
+ cli.get_list_scheduled_delete()
+
+ @raises(NotImplementedError)
+ def test_remove_scheduled_delete(self):
+ """Test remove scheduled delete TestInfluxDBClient."""
+ cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
+ cli.remove_scheduled_delete(1)
+
+ def test_query(self):
+ """Test query for TestInfluxDBClient object."""
+ data = [
+ {
+ "name": "foo",
+ "columns": ["time", "sequence_number", "column_one"],
+ "points": [
+ [1383876043, 16, "2"], [1383876043, 15, "1"],
+ [1383876035, 14, "2"], [1383876035, 13, "1"]
+ ]
+ }
+ ]
+ with _mocked_session('get', 200, data):
+ cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
+ result = cli.query('select column_one from foo;')
+ self.assertEqual(len(result[0]['points']), 4)
+
+ def test_query_chunked(self):
+ """Test chunked query for TestInfluxDBClient object."""
+ cli = InfluxDBClient(database='db')
+ example_object = {
+ 'points': [
+ [1415206250119, 40001, 667],
+ [1415206244555, 30001, 7],
+ [1415206228241, 20001, 788],
+ [1415206212980, 10001, 555],
+ [1415197271586, 10001, 23]
+ ],
+ 'name': 'foo',
+ 'columns': [
+ 'time',
+ 'sequence_number',
+ 'val'
+ ]
+ }
+ example_response = \
+ json.dumps(example_object) + json.dumps(example_object)
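+        # a chunked response is a stream of concatenated JSON documents;
+        # the client is expected to split it back into individual objects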
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/db/db/series",
+ text=example_response
+ )
+
+ self.assertListEqual(
+ cli.query('select * from foo', chunked=True),
+ [example_object, example_object]
+ )
+
+ def test_query_chunked_unicode(self):
+ """Test unicode chunked query for TestInfluxDBClient object."""
+ cli = InfluxDBClient(database='db')
+ example_object = {
+ 'points': [
+ [1415206212980, 10001, u('unicode-\xcf\x89')],
+ [1415197271586, 10001, u('more-unicode-\xcf\x90')]
+ ],
+ 'name': 'foo',
+ 'columns': [
+ 'time',
+ 'sequence_number',
+ 'val'
+ ]
+ }
+ example_response = \
+ json.dumps(example_object) + json.dumps(example_object)
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/db/db/series",
+ text=example_response
+ )
+
+ self.assertListEqual(
+ cli.query('select * from foo', chunked=True),
+ [example_object, example_object]
+ )
+
+ @raises(Exception)
+ def test_query_fail(self):
+ """Test failed query for TestInfluxDBClient."""
+ with _mocked_session('get', 401):
+ cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
+ cli.query('select column_one from foo;')
+
+ def test_query_bad_precision(self):
+ """Test query with bad precision for TestInfluxDBClient."""
+ cli = InfluxDBClient()
+ with self.assertRaisesRegexp(
+ Exception,
+ "Invalid time precision is given. \(use 's', 'm', 'ms' or 'u'\)"
+ ):
+ cli.query('select column_one from foo', time_precision='g')
+
+ def test_create_database(self):
+ """Test create database for TestInfluxDBClient."""
+ with _mocked_session('post', 201, {"name": "new_db"}):
+ cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
+ self.assertTrue(cli.create_database('new_db'))
+
+ @raises(Exception)
+ def test_create_database_fails(self):
+ """Test failed create database for TestInfluxDBClient."""
+ with _mocked_session('post', 401):
+ cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
+ cli.create_database('new_db')
+
+ def test_delete_database(self):
+ """Test delete database for TestInfluxDBClient."""
+ with _mocked_session('delete', 204):
+ cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
+ self.assertTrue(cli.delete_database('old_db'))
+
+ @raises(Exception)
+ def test_delete_database_fails(self):
+ """Test failed delete database for TestInfluxDBClient."""
+ with _mocked_session('delete', 401):
+ cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
+ cli.delete_database('old_db')
+
+ def test_get_list_database(self):
+ """Test get list of databases for TestInfluxDBClient."""
+ data = [
+ {"name": "a_db"}
+ ]
+ with _mocked_session('get', 200, data):
+ cli = InfluxDBClient('host', 8086, 'username', 'password')
+ self.assertEqual(len(cli.get_list_database()), 1)
+ self.assertEqual(cli.get_list_database()[0]['name'], 'a_db')
+
+ @raises(Exception)
+ def test_get_list_database_fails(self):
+ """Test failed get list of databases for TestInfluxDBClient."""
+ with _mocked_session('get', 401):
+ cli = InfluxDBClient('host', 8086, 'username', 'password')
+ cli.get_list_database()
+
+ @raises(FutureWarning)
+ def test_get_database_list_deprecated(self):
+ """Test deprecated get database list for TestInfluxDBClient."""
+ data = [
+ {"name": "a_db"}
+ ]
+ with _mocked_session('get', 200, data):
+ cli = InfluxDBClient('host', 8086, 'username', 'password')
+ self.assertEqual(len(cli.get_database_list()), 1)
+ self.assertEqual(cli.get_database_list()[0]['name'], 'a_db')
+
+ def test_delete_series(self):
+ """Test delete series for TestInfluxDBClient."""
+ with _mocked_session('delete', 204):
+ cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
+ cli.delete_series('old_series')
+
+ @raises(Exception)
+ def test_delete_series_fails(self):
+ """Test failed delete series for TestInfluxDBClient."""
+ with _mocked_session('delete', 401):
+ cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
+ cli.delete_series('old_series')
+
+ def test_get_series_list(self):
+ """Test get list of series for TestInfluxDBClient."""
+ cli = InfluxDBClient(database='db')
+
+ with requests_mock.Mocker() as m:
+ example_response = \
+ '[{"name":"list_series_result","columns":' \
+ '["time","name"],"points":[[0,"foo"],[0,"bar"]]}]'
+
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/db/db/series",
+ text=example_response
+ )
+
+ self.assertListEqual(
+ cli.get_list_series(),
+ ['foo', 'bar']
+ )
+
+ def test_get_continuous_queries(self):
+ """Test get continuous queries for TestInfluxDBClient."""
+ cli = InfluxDBClient(database='db')
+
+ with requests_mock.Mocker() as m:
+
+ # Tip: put this in a json linter!
+ example_response = '[ { "name": "continuous queries", "columns"' \
+ ': [ "time", "id", "query" ], "points": [ [ ' \
+ '0, 1, "select foo(bar,95) from \\"foo_bar' \
+ 's\\" group by time(5m) into response_times.' \
+ 'percentiles.5m.95" ], [ 0, 2, "select perce' \
+ 'ntile(value,95) from \\"response_times\\" g' \
+ 'roup by time(5m) into response_times.percen' \
+ 'tiles.5m.95" ] ] } ]'
+
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/db/db/series",
+ text=example_response
+ )
+
+ self.assertListEqual(
+ cli.get_list_continuous_queries(),
+ [
+ 'select foo(bar,95) from "foo_bars" group '
+ 'by time(5m) into response_times.percentiles.5m.95',
+
+ 'select percentile(value,95) from "response_times" group '
+ 'by time(5m) into response_times.percentiles.5m.95'
+ ]
+ )
+
+ def test_get_list_cluster_admins(self):
+ """Test get list of cluster admins, not implemented."""
+ pass
+
+ def test_add_cluster_admin(self):
+ """Test add cluster admin for TestInfluxDBClient."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/cluster_admins"
+ )
+
+ cli = InfluxDBClient(database='db')
+ cli.add_cluster_admin(
+ new_username='paul',
+ new_password='laup'
+ )
+
+ self.assertDictEqual(
+ json.loads(m.last_request.body),
+ {
+ 'name': 'paul',
+ 'password': 'laup'
+ }
+ )
+
+ def test_update_cluster_admin_password(self):
+ """Test update cluster admin pass for TestInfluxDBClient."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/cluster_admins/paul"
+ )
+
+ cli = InfluxDBClient(database='db')
+ cli.update_cluster_admin_password(
+ username='paul',
+ new_password='laup'
+ )
+
+ self.assertDictEqual(
+ json.loads(m.last_request.body),
+ {'password': 'laup'}
+ )
+
+ def test_delete_cluster_admin(self):
+ """Test delete cluster admin for TestInfluxDBClient."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.DELETE,
+ "http://localhost:8086/cluster_admins/paul",
+ status_code=200,
+ )
+
+ cli = InfluxDBClient(database='db')
+ cli.delete_cluster_admin(username='paul')
+
+ self.assertIsNone(m.last_request.body)
+
+ def test_set_database_admin(self):
+ """Test set database admin for TestInfluxDBClient."""
+ pass
+
+ def test_unset_database_admin(self):
+ """Test unset database admin for TestInfluxDBClient."""
+ pass
+
+ def test_alter_database_admin(self):
+ """Test alter database admin for TestInfluxDBClient."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/db/db/users/paul"
+ )
+
+ cli = InfluxDBClient(database='db')
+ cli.alter_database_admin(
+ username='paul',
+ is_admin=False
+ )
+
+ self.assertDictEqual(
+ json.loads(m.last_request.body),
+ {
+ 'admin': False
+ }
+ )
+
+ @raises(NotImplementedError)
+ def test_get_list_database_admins(self):
+ """Test get list of database admins for TestInfluxDBClient."""
+ cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
+ cli.get_list_database_admins()
+
+ @raises(NotImplementedError)
+ def test_add_database_admin(self):
+ """Test add database admins for TestInfluxDBClient."""
+ cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
+ cli.add_database_admin('admin', 'admin_secret_password')
+
+ @raises(NotImplementedError)
+ def test_update_database_admin_password(self):
+ """Test update database admin pass for TestInfluxDBClient."""
+ cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
+ cli.update_database_admin_password('admin', 'admin_secret_password')
+
+ @raises(NotImplementedError)
+ def test_delete_database_admin(self):
+ """Test delete database admin for TestInfluxDBClient."""
+ cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
+ cli.delete_database_admin('admin')
+
+ def test_get_database_users(self):
+ """Test get database users for TestInfluxDBClient."""
+ cli = InfluxDBClient('localhost', 8086, 'username', 'password', 'db')
+
+ example_response = \
+ '[{"name":"paul","isAdmin":false,"writeTo":".*","readFrom":".*"},'\
+ '{"name":"bobby","isAdmin":false,"writeTo":".*","readFrom":".*"}]'
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/db/db/users",
+ text=example_response
+ )
+ users = cli.get_database_users()
+
+ self.assertEqual(json.loads(example_response), users)
+
+ def test_add_database_user(self):
+ """Test add database user for TestInfluxDBClient."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/db/db/users"
+ )
+ cli = InfluxDBClient(database='db')
+ cli.add_database_user(
+ new_username='paul',
+ new_password='laup',
+ permissions=('.*', '.*')
+ )
+
+ self.assertDictEqual(
+ json.loads(m.last_request.body),
+ {
+ 'writeTo': '.*',
+ 'password': 'laup',
+ 'readFrom': '.*',
+ 'name': 'paul'
+ }
+ )
+
+ def test_add_database_user_bad_permissions(self):
+ """Test add database user with bad perms for TestInfluxDBClient."""
+ cli = InfluxDBClient()
+
+ with self.assertRaisesRegexp(
+ Exception,
+ "'permissions' must be \(readFrom, writeTo\) tuple"
+ ):
+ cli.add_database_user(
+ new_password='paul',
+ new_username='paul',
+ permissions=('hello', 'hello', 'hello')
+ )
+
+ def test_alter_database_user_password(self):
+ """Test alter database user pass for TestInfluxDBClient."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/db/db/users/paul"
+ )
+
+ cli = InfluxDBClient(database='db')
+ cli.alter_database_user(
+ username='paul',
+ password='n3wp4ss!'
+ )
+
+ self.assertDictEqual(
+ json.loads(m.last_request.body),
+ {
+ 'password': 'n3wp4ss!'
+ }
+ )
+
+ def test_alter_database_user_permissions(self):
+ """Test alter database user perms for TestInfluxDBClient."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/db/db/users/paul"
+ )
+
+ cli = InfluxDBClient(database='db')
+ cli.alter_database_user(
+ username='paul',
+ permissions=('^$', '.*')
+ )
+
+ self.assertDictEqual(
+ json.loads(m.last_request.body),
+ {
+ 'readFrom': '^$',
+ 'writeTo': '.*'
+ }
+ )
+
+ def test_alter_database_user_password_and_permissions(self):
+ """Test alter database user pass and perms for TestInfluxDBClient."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/db/db/users/paul"
+ )
+
+ cli = InfluxDBClient(database='db')
+ cli.alter_database_user(
+ username='paul',
+ password='n3wp4ss!',
+ permissions=('^$', '.*')
+ )
+
+ self.assertDictEqual(
+ json.loads(m.last_request.body),
+ {
+ 'password': 'n3wp4ss!',
+ 'readFrom': '^$',
+ 'writeTo': '.*'
+ }
+ )
+
+ def test_update_database_user_password_current_user(self):
+ """Test update database user pass for TestInfluxDBClient."""
+ cli = InfluxDBClient(
+ username='root',
+ password='hello',
+ database='database'
+ )
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/db/database/users/root"
+ )
+
+ cli.update_database_user_password(
+ username='root',
+ new_password='bye'
+ )
+
+ self.assertEqual(cli._password, 'bye')
+
+ def test_delete_database_user(self):
+ """Test delete database user for TestInfluxDBClient."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.DELETE,
+ "http://localhost:8086/db/db/users/paul"
+ )
+
+ cli = InfluxDBClient(database='db')
+ cli.delete_database_user(username='paul')
+
+ self.assertIsNone(m.last_request.body)
+
+ @raises(NotImplementedError)
+ def test_update_permission(self):
+ """Test update permission for TestInfluxDBClient."""
+ cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
+ cli.update_permission('admin', [])
+
+ @mock.patch('requests.Session.request')
+ def test_request_retry(self, mock_request):
+ """Test that two connection errors will be handled."""
+ class CustomMock(object):
+ """Define CustomMock object."""
+
+ def __init__(self):
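+                """Initialize the object."""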
+ self.i = 0
+
+ def connection_error(self, *args, **kwargs):
+ """Test connection error in CustomMock."""
+ self.i += 1
+
+ if self.i < 3:
+ raise requests.exceptions.ConnectionError
+ else:
+ r = requests.Response()
+ r.status_code = 200
+ return r
+
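+        # the stub raises ConnectionError twice and then succeeds; the
+        # client's internal retry loop should absorb both failures silently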
+ mock_request.side_effect = CustomMock().connection_error
+
+ cli = InfluxDBClient(database='db')
+ cli.write_points(
+ self.dummy_points
+ )
+
+ @mock.patch('requests.Session.request')
+ def test_request_retry_raises(self, mock_request):
+ """Test that three connection errors will not be handled."""
+ class CustomMock(object):
+ """Define CustomMock object."""
+
+ def __init__(self):
+ """Initialize the object."""
+ self.i = 0
+
+ def connection_error(self, *args, **kwargs):
+ """Test the connection error for CustomMock."""
+ self.i += 1
+
+ if self.i < 4:
+ raise requests.exceptions.ConnectionError
+ else:
+ r = requests.Response()
+ r.status_code = 200
+ return r
+
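+        # three consecutive failures exhaust the retry budget, so the
+        # ConnectionError is expected to propagate to the caller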
+ mock_request.side_effect = CustomMock().connection_error
+
+ cli = InfluxDBClient(database='db')
+
+ with self.assertRaises(requests.exceptions.ConnectionError):
+ cli.write_points(self.dummy_points)
diff --git a/influxdb/tests/influxdb08/dataframe_client_test.py b/influxdb/tests/influxdb08/dataframe_client_test.py
new file mode 100644
index 00000000..0a766af0
--- /dev/null
+++ b/influxdb/tests/influxdb08/dataframe_client_test.py
@@ -0,0 +1,331 @@
+# -*- coding: utf-8 -*-
+"""Unit tests for misc module."""
+
+from datetime import timedelta
+
+import copy
+import json
+import unittest
+import warnings
+
+import requests_mock
+
+from nose.tools import raises
+
+from influxdb.tests import skip_if_pypy, using_pypy
+
+from .client_test import _mocked_session
+
+if not using_pypy:
+ import pandas as pd
+ from pandas.util.testing import assert_frame_equal
+ from influxdb.influxdb08 import DataFrameClient
+
+
+@skip_if_pypy
+class TestDataFrameClient(unittest.TestCase):
+ """Define the DataFramClient test object."""
+
+ def setUp(self):
+ """Set up an instance of TestDataFrameClient object."""
+ # By default, raise exceptions on warnings
+ warnings.simplefilter('error', FutureWarning)
+
+ def test_write_points_from_dataframe(self):
+ """Test write points from dataframe."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
+ index=[now, now + timedelta(hours=1)],
+ columns=["column_one", "column_two",
+ "column_three"])
+ points = [
+ {
+ "points": [
+ ["1", 1, 1.0, 0],
+ ["2", 2, 2.0, 3600]
+ ],
+ "name": "foo",
+ "columns": ["column_one", "column_two", "column_three", "time"]
+ }
+ ]
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/db/db/series")
+
+ cli = DataFrameClient(database='db')
+ cli.write_points({"foo": dataframe})
+
+ self.assertListEqual(json.loads(m.last_request.body), points)
+
+ def test_write_points_from_dataframe_with_float_nan(self):
+ """Test write points from dataframe with NaN float."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.DataFrame(data=[[1, float("NaN"), 1.0], [2, 2, 2.0]],
+ index=[now, now + timedelta(hours=1)],
+ columns=["column_one", "column_two",
+ "column_three"])
+ points = [
+ {
+ "points": [
+ [1, None, 1.0, 0],
+ [2, 2, 2.0, 3600]
+ ],
+ "name": "foo",
+ "columns": ["column_one", "column_two", "column_three", "time"]
+ }
+ ]
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/db/db/series")
+
+ cli = DataFrameClient(database='db')
+ cli.write_points({"foo": dataframe})
+
+ self.assertListEqual(json.loads(m.last_request.body), points)
+
+ def test_write_points_from_dataframe_in_batches(self):
+ """Test write points from dataframe in batches."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
+ index=[now, now + timedelta(hours=1)],
+ columns=["column_one", "column_two",
+ "column_three"])
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/db/db/series")
+
+ cli = DataFrameClient(database='db')
+ self.assertTrue(cli.write_points({"foo": dataframe}, batch_size=1))
+
+ def test_write_points_from_dataframe_with_numeric_column_names(self):
+ """Test write points from dataframe with numeric columns."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ # df with numeric column names
+ dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
+ index=[now, now + timedelta(hours=1)])
+ points = [
+ {
+ "points": [
+ ["1", 1, 1.0, 0],
+ ["2", 2, 2.0, 3600]
+ ],
+ "name": "foo",
+ "columns": ['0', '1', '2', "time"]
+ }
+ ]
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/db/db/series")
+
+ cli = DataFrameClient(database='db')
+ cli.write_points({"foo": dataframe})
+
+ self.assertListEqual(json.loads(m.last_request.body), points)
+
+ def test_write_points_from_dataframe_with_period_index(self):
+ """Test write points from dataframe with period index."""
+ dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
+ index=[pd.Period('1970-01-01'),
+ pd.Period('1970-01-02')],
+ columns=["column_one", "column_two",
+ "column_three"])
+ points = [
+ {
+ "points": [
+ ["1", 1, 1.0, 0],
+ ["2", 2, 2.0, 86400]
+ ],
+ "name": "foo",
+ "columns": ["column_one", "column_two", "column_three", "time"]
+ }
+ ]
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/db/db/series")
+
+ cli = DataFrameClient(database='db')
+ cli.write_points({"foo": dataframe})
+
+ self.assertListEqual(json.loads(m.last_request.body), points)
+
+ def test_write_points_from_dataframe_with_time_precision(self):
+ """Test write points from dataframe with time precision."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
+ index=[now, now + timedelta(hours=1)],
+ columns=["column_one", "column_two",
+ "column_three"])
+ points = [
+ {
+ "points": [
+ ["1", 1, 1.0, 0],
+ ["2", 2, 2.0, 3600]
+ ],
+ "name": "foo",
+ "columns": ["column_one", "column_two", "column_three", "time"]
+ }
+ ]
+
+ points_ms = copy.deepcopy(points)
+ points_ms[0]["points"][1][-1] = 3600 * 1000
+
+ points_us = copy.deepcopy(points)
+ points_us[0]["points"][1][-1] = 3600 * 1000000
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/db/db/series")
+
+ cli = DataFrameClient(database='db')
+
+ cli.write_points({"foo": dataframe}, time_precision='s')
+ self.assertListEqual(json.loads(m.last_request.body), points)
+
+ cli.write_points({"foo": dataframe}, time_precision='m')
+ self.assertListEqual(json.loads(m.last_request.body), points_ms)
+
+ cli.write_points({"foo": dataframe}, time_precision='u')
+ self.assertListEqual(json.loads(m.last_request.body), points_us)
+
+ @raises(TypeError)
+ def test_write_points_from_dataframe_fails_without_time_index(self):
+ """Test write points from dataframe that fails without time index."""
+ dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
+ columns=["column_one", "column_two",
+ "column_three"])
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/db/db/series")
+
+ cli = DataFrameClient(database='db')
+ cli.write_points({"foo": dataframe})
+
+ @raises(TypeError)
+ def test_write_points_from_dataframe_fails_with_series(self):
+ """Test failed write points from dataframe with series."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.Series(data=[1.0, 2.0],
+ index=[now, now + timedelta(hours=1)])
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/db/db/series")
+
+ cli = DataFrameClient(database='db')
+ cli.write_points({"foo": dataframe})
+
+ def test_query_into_dataframe(self):
+ """Test query into a dataframe."""
+ data = [
+ {
+ "name": "foo",
+ "columns": ["time", "sequence_number", "column_one"],
+ "points": [
+ [3600, 16, 2], [3600, 15, 1],
+ [0, 14, 2], [0, 13, 1]
+ ]
+ }
+ ]
+ # dataframe sorted ascending by time first, then sequence_number
+ dataframe = pd.DataFrame(data=[[13, 1], [14, 2], [15, 1], [16, 2]],
+ index=pd.to_datetime([0, 0,
+ 3600, 3600],
+ unit='s', utc=True),
+ columns=['sequence_number', 'column_one'])
+ with _mocked_session('get', 200, data):
+ cli = DataFrameClient('host', 8086, 'username', 'password', 'db')
+ result = cli.query('select column_one from foo;')
+ assert_frame_equal(dataframe, result)
+
+ def test_query_multiple_time_series(self):
+ """Test query for multiple time series."""
+ data = [
+ {
+ "name": "series1",
+ "columns": ["time", "mean", "min", "max", "stddev"],
+ "points": [[0, 323048, 323048, 323048, 0]]
+ },
+ {
+ "name": "series2",
+ "columns": ["time", "mean", "min", "max", "stddev"],
+ "points": [[0, -2.8233, -2.8503, -2.7832, 0.0173]]
+ },
+ {
+ "name": "series3",
+ "columns": ["time", "mean", "min", "max", "stddev"],
+ "points": [[0, -0.01220, -0.01220, -0.01220, 0]]
+ }
+ ]
+ dataframes = {
+ 'series1': pd.DataFrame(data=[[323048, 323048, 323048, 0]],
+ index=pd.to_datetime([0], unit='s',
+ utc=True),
+ columns=['mean', 'min', 'max', 'stddev']),
+ 'series2': pd.DataFrame(data=[[-2.8233, -2.8503, -2.7832, 0.0173]],
+ index=pd.to_datetime([0], unit='s',
+ utc=True),
+ columns=['mean', 'min', 'max', 'stddev']),
+ 'series3': pd.DataFrame(data=[[-0.01220, -0.01220, -0.01220, 0]],
+ index=pd.to_datetime([0], unit='s',
+ utc=True),
+ columns=['mean', 'min', 'max', 'stddev'])
+ }
+ with _mocked_session('get', 200, data):
+ cli = DataFrameClient('host', 8086, 'username', 'password', 'db')
+ result = cli.query("""select mean(value), min(value), max(value),
+ stddev(value) from series1, series2, series3""")
+ self.assertEqual(dataframes.keys(), result.keys())
+ for key in dataframes.keys():
+ assert_frame_equal(dataframes[key], result[key])
+
+ def test_query_with_empty_result(self):
+ """Test query with empty results."""
+ with _mocked_session('get', 200, []):
+ cli = DataFrameClient('host', 8086, 'username', 'password', 'db')
+ result = cli.query('select column_one from foo;')
+ self.assertEqual(result, [])
+
+ def test_list_series(self):
+ """Test list of series for dataframe object."""
+ response = [
+ {
+ 'columns': ['time', 'name'],
+ 'name': 'list_series_result',
+ 'points': [[0, 'seriesA'], [0, 'seriesB']]
+ }
+ ]
+ with _mocked_session('get', 200, response):
+ cli = DataFrameClient('host', 8086, 'username', 'password', 'db')
+ series_list = cli.get_list_series()
+ self.assertEqual(series_list, ['seriesA', 'seriesB'])
+
+ def test_datetime_to_epoch(self):
+ """Test convert datetime to epoch."""
+ timestamp = pd.Timestamp('2013-01-01 00:00:00.000+00:00')
+ cli = DataFrameClient('host', 8086, 'username', 'password', 'db')
+
+ self.assertEqual(
+ cli._datetime_to_epoch(timestamp),
+ 1356998400.0
+ )
+ self.assertEqual(
+ cli._datetime_to_epoch(timestamp, time_precision='s'),
+ 1356998400.0
+ )
+ self.assertEqual(
+ cli._datetime_to_epoch(timestamp, time_precision='m'),
+ 1356998400000.0
+ )
+ self.assertEqual(
+ cli._datetime_to_epoch(timestamp, time_precision='ms'),
+ 1356998400000.0
+ )
+ self.assertEqual(
+ cli._datetime_to_epoch(timestamp, time_precision='u'),
+ 1356998400000000.0
+ )
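+
+ # A sketch of the precision mapping exercised above (values are taken
+ # from this test): for the epoch 1356998400 seconds,
+ #   time_precision='s'        -> 1356998400
+ #   time_precision='m' / 'ms' -> 1356998400 * 1000     (milliseconds)
+ #   time_precision='u'        -> 1356998400 * 1000000  (microseconds)
+ # Note that 'm' denotes milliseconds here, not minutes.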
diff --git a/influxdb/tests/influxdb08/helper_test.py b/influxdb/tests/influxdb08/helper_test.py
new file mode 100644
index 00000000..2e305f3f
--- /dev/null
+++ b/influxdb/tests/influxdb08/helper_test.py
@@ -0,0 +1,228 @@
+# -*- coding: utf-8 -*-
+"""Define set of helper functions for the dataframe."""
+
+import unittest
+import warnings
+
+import mock
+from influxdb.influxdb08 import SeriesHelper, InfluxDBClient
+from requests.exceptions import ConnectionError
+
+
+class TestSeriesHelper(unittest.TestCase):
+ """Define the SeriesHelper for test."""
+
+ @classmethod
+ def setUpClass(cls):
+ """Set up an instance of the TestSerisHelper object."""
+ super(TestSeriesHelper, cls).setUpClass()
+
+ TestSeriesHelper.client = InfluxDBClient(
+ 'host',
+ 8086,
+ 'username',
+ 'password',
+ 'database'
+ )
+
+ class MySeriesHelper(SeriesHelper):
+ """Define a subset SeriesHelper instance."""
+
+ class Meta:
+ """Define metadata for the TestSeriesHelper object."""
+
+ client = TestSeriesHelper.client
+ series_name = 'events.stats.{server_name}'
+ fields = ['time', 'server_name']
+ bulk_size = 5
+ autocommit = True
+
+ TestSeriesHelper.MySeriesHelper = MySeriesHelper
+
+ def test_auto_commit(self):
+ """Test that write_points called after the right number of events."""
+ class AutoCommitTest(SeriesHelper):
+ """Define an instance of SeriesHelper for AutoCommit test."""
+
+ class Meta:
+ """Define metadata AutoCommitTest object."""
+
+ series_name = 'events.stats.{server_name}'
+ fields = ['time', 'server_name']
+ bulk_size = 5
+ client = InfluxDBClient()
+ autocommit = True
+
+ fake_write_points = mock.MagicMock()
+ AutoCommitTest(server_name='us.east-1', time=159)
+ AutoCommitTest._client.write_points = fake_write_points
+ AutoCommitTest(server_name='us.east-1', time=158)
+ AutoCommitTest(server_name='us.east-1', time=157)
+ AutoCommitTest(server_name='us.east-1', time=156)
+ self.assertFalse(fake_write_points.called)
+ AutoCommitTest(server_name='us.east-1', time=3443)
+ self.assertTrue(fake_write_points.called)
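+
+ # In short (an illustrative reading of the assertions above): each
+ # instantiation queues one point, and once bulk_size (5) points have
+ # accumulated the helper flushes them with a single
+ # client.write_points(...) call and empties its queue.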
+
+ def testSingleSeriesName(self):
+ """Test JSON conversion when there is only one series name."""
+ TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=159)
+ TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=158)
+ TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=157)
+ TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=156)
+ expectation = [{'points': [[159, 'us.east-1'],
+ [158, 'us.east-1'],
+ [157, 'us.east-1'],
+ [156, 'us.east-1']],
+ 'name': 'events.stats.us.east-1',
+ 'columns': ['time', 'server_name']}]
+
+ rcvd = TestSeriesHelper.MySeriesHelper._json_body_()
+ self.assertTrue(all([el in expectation for el in rcvd]) and
+ all([el in rcvd for el in expectation]),
+ 'Invalid JSON body of time series returned from '
+ '_json_body_ for one series name: {0}.'.format(rcvd))
+ TestSeriesHelper.MySeriesHelper._reset_()
+ self.assertEqual(
+ TestSeriesHelper.MySeriesHelper._json_body_(),
+ [],
+ 'Resetting helper did not empty datapoints.')
+
+ def testSeveralSeriesNames(self):
+ """Test JSON conversion when there is only one series name."""
+ TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=159)
+ TestSeriesHelper.MySeriesHelper(server_name='fr.paris-10', time=158)
+ TestSeriesHelper.MySeriesHelper(server_name='lu.lux', time=157)
+ TestSeriesHelper.MySeriesHelper(server_name='uk.london', time=156)
+ expectation = [{'points': [[157, 'lu.lux']],
+ 'name': 'events.stats.lu.lux',
+ 'columns': ['time', 'server_name']},
+ {'points': [[156, 'uk.london']],
+ 'name': 'events.stats.uk.london',
+ 'columns': ['time', 'server_name']},
+ {'points': [[158, 'fr.paris-10']],
+ 'name': 'events.stats.fr.paris-10',
+ 'columns': ['time', 'server_name']},
+ {'points': [[159, 'us.east-1']],
+ 'name': 'events.stats.us.east-1',
+ 'columns': ['time', 'server_name']}]
+
+ rcvd = TestSeriesHelper.MySeriesHelper._json_body_()
+ self.assertTrue(all([el in expectation for el in rcvd]) and
+ all([el in rcvd for el in expectation]),
+ 'Invalid JSON body of time series returned from '
+ '_json_body_ for several series names: {0}.'
+ .format(rcvd))
+ TestSeriesHelper.MySeriesHelper._reset_()
+ self.assertEqual(
+ TestSeriesHelper.MySeriesHelper._json_body_(),
+ [],
+ 'Resetting helper did not empty datapoints.')
+
+ def testInvalidHelpers(self):
+ """Test errors in invalid helpers."""
+ class MissingMeta(SeriesHelper):
+ """Define SeriesHelper object for MissingMeta test."""
+
+ pass
+
+ class MissingClient(SeriesHelper):
+ """Define SeriesHelper object for MissingClient test."""
+
+ class Meta:
+ """Define metadata for MissingClient object."""
+
+ series_name = 'events.stats.{server_name}'
+ fields = ['time', 'server_name']
+ autocommit = True
+
+ class MissingSeriesName(SeriesHelper):
+ """Define SeriesHelper object for MissingSeries test."""
+
+ class Meta:
+ """Define metadata for MissingSeriesName object."""
+
+ fields = ['time', 'server_name']
+
+ class MissingFields(SeriesHelper):
+ """Define SeriesHelper for MissingFields test."""
+
+ class Meta:
+ """Define metadata for MissingFields object."""
+
+ series_name = 'events.stats.{server_name}'
+
+ for cls in [MissingMeta, MissingClient, MissingFields,
+ MissingSeriesName]:
+ self.assertRaises(
+ AttributeError, cls, **{'time': 159,
+ 'server_name': 'us.east-1'})
+
+ def testWarnBulkSizeZero(self):
+ """Test warning for an invalid bulk size."""
+ class WarnBulkSizeZero(SeriesHelper):
+ """Define SeriesHelper for WarnBulkSizeZero test."""
+
+ class Meta:
+ """Define metadata for WarnBulkSizeZero object."""
+
+ client = TestSeriesHelper.client
+ series_name = 'events.stats.{server_name}'
+ fields = ['time', 'server_name']
+ bulk_size = 0
+ autocommit = True
+
+ with warnings.catch_warnings(record=True) as rec_warnings:
+ warnings.simplefilter("always")
+ # The server configured in the client is invalid; we are only
+ # testing the warning here.
+ with self.assertRaises(ConnectionError):
+ WarnBulkSizeZero(time=159, server_name='us.east-1')
+
+ self.assertGreaterEqual(
+ len(rec_warnings), 1,
+ '{0} call should have generated at least one warning. '
+ 'Actual generated warnings: {1}'.format(
+ WarnBulkSizeZero, '\n'.join(map(str, rec_warnings))))
+
+ expected_msg = (
+ 'Definition of bulk_size in WarnBulkSizeZero forced to 1, '
+ 'was less than 1.')
+
+ self.assertIn(expected_msg, list(w.message.args[0]
+ for w in rec_warnings),
+ 'Warning message did not contain "forced to 1".')
+
+ def testWarnBulkSizeNoEffect(self):
+ """Test warning for a set bulk size but autocommit False."""
+ class WarnBulkSizeNoEffect(SeriesHelper):
+ """Define SeriesHelper for WarnBulkSizeNoEffect object."""
+
+ class Meta:
+ """Define metadata for WarnBulkSizeNoEffect object."""
+
+ series_name = 'events.stats.{server_name}'
+ fields = ['time', 'server_name']
+ bulk_size = 5
+ autocommit = False
+
+ with warnings.catch_warnings(record=True) as rec_warnings:
+ warnings.simplefilter("always")
+ WarnBulkSizeNoEffect(time=159, server_name='us.east-1')
+
+ self.assertGreaterEqual(
+ len(rec_warnings), 1,
+ '{0} call should have generated at least one warning. '
+ 'Actual generated warnings: {1}'.format(
+ WarnBulkSizeNoEffect, '\n'.join(map(str, rec_warnings))))
+
+ expected_msg = (
+ 'Definition of bulk_size in WarnBulkSizeNoEffect has no affect '
+ 'because autocommit is false.')
+
+ self.assertIn(expected_msg, list(w.message.args[0]
+ for w in rec_warnings),
+ 'Warning message did not contain the expected message.')
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/influxdb/tests/misc.py b/influxdb/tests/misc.py
new file mode 100644
index 00000000..324d13c4
--- /dev/null
+++ b/influxdb/tests/misc.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+"""Define the misc handler for InfluxDBClient test."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+import socket
+
+
+def get_free_ports(num_ports, ip='127.0.0.1'):
+ """Determine free ports on provided interface.
+
+ Get `num_ports` free/available ports on the interface linked to the `ip`
+ :param int num_ports: The number of free ports to get
+ :param str ip: The ip on which the ports have to be taken
+ :return: a set of ports number
+ """
+ sock_ports = []
+ ports = set()
+ try:
+ for _ in range(num_ports):
+ sock = socket.socket()
+ cur = [sock, -1]
+ # append the socket itself, so that it will also be closed
+ # (no leaked resource) in the finally clause below.
+ sock_ports.append(cur)
+ sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+ sock.bind((ip, 0))
+ cur[1] = sock.getsockname()[1]
+ finally:
+ for sock, port in sock_ports:
+ sock.close()
+ ports.add(port)
+ assert num_ports == len(ports)
+ return ports
+
+
+def is_port_open(port, ip='127.0.0.1'):
+ """Check if given TCP port is open for connection."""
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ try:
+ result = sock.connect_ex((ip, port))
+ if not result:
+ sock.shutdown(socket.SHUT_RDWR)
+ return result == 0
+ finally:
+ sock.close()
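+
+
+# Hypothetical usage sketch (not part of the test suite): reserve two
+# free ports for a server instance and check that one is not yet bound.
+#
+#     http_port, udp_port = get_free_ports(2)
+#     assert not is_port_open(http_port)
+#
+# Note the ports are only guaranteed free at return time; another
+# process may still grab one before the server binds it.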
diff --git a/influxdb/tests/resultset_test.py b/influxdb/tests/resultset_test.py
new file mode 100644
index 00000000..83faa4dd
--- /dev/null
+++ b/influxdb/tests/resultset_test.py
@@ -0,0 +1,202 @@
+# -*- coding: utf-8 -*-
+"""Define the resultset test package."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+import unittest
+
+from influxdb.exceptions import InfluxDBClientError
+from influxdb.resultset import ResultSet
+
+
+class TestResultSet(unittest.TestCase):
+ """Define the ResultSet test object."""
+
+ def setUp(self):
+ """Set up an instance of TestResultSet."""
+ self.query_response = {
+ "results": [
+ {"series": [{"name": "cpu_load_short",
+ "columns": ["time", "value", "host", "region"],
+ "values": [
+ ["2015-01-29T21:51:28.968422294Z",
+ 0.64,
+ "server01",
+ "us-west"],
+ ["2015-01-29T21:51:28.968422294Z",
+ 0.65,
+ "server02",
+ "us-west"],
+ ]},
+ {"name": "other_series",
+ "columns": ["time", "value", "host", "region"],
+ "values": [
+ ["2015-01-29T21:51:28.968422294Z",
+ 0.66,
+ "server01",
+ "us-west"],
+ ]}]}
+ ]
+ }
+
+ self.rs = ResultSet(self.query_response['results'][0])
+
+ def test_filter_by_name(self):
+ """Test filtering by name in TestResultSet object."""
+ expected = [
+ {'value': 0.64,
+ 'time': '2015-01-29T21:51:28.968422294Z',
+ 'host': 'server01',
+ 'region': 'us-west'},
+ {'value': 0.65,
+ 'time': '2015-01-29T21:51:28.968422294Z',
+ 'host': 'server02',
+ 'region': 'us-west'},
+ ]
+
+ self.assertEqual(expected, list(self.rs['cpu_load_short']))
+ self.assertEqual(expected,
+ list(self.rs.get_points(
+ measurement='cpu_load_short')))
+
+ def test_filter_by_tags(self):
+ """Test filter by tags in TestResultSet object."""
+ expected = [
+ {'value': 0.64,
+ 'time': '2015-01-29T21:51:28.968422294Z',
+ 'host': 'server01',
+ 'region': 'us-west'},
+ {'value': 0.66,
+ 'time': '2015-01-29T21:51:28.968422294Z',
+ 'host': 'server01',
+ 'region': 'us-west'},
+ ]
+
+ self.assertEqual(
+ expected,
+ list(self.rs[{"host": "server01"}])
+ )
+
+ self.assertEqual(
+ expected,
+ list(self.rs.get_points(tags={'host': 'server01'}))
+ )
+
+ def test_filter_by_name_and_tags(self):
+ """Test filter by name and tags in TestResultSet object."""
+ self.assertEqual(
+ list(self.rs[('cpu_load_short', {"host": "server01"})]),
+ [{'value': 0.64,
+ 'time': '2015-01-29T21:51:28.968422294Z',
+ 'host': 'server01',
+ 'region': 'us-west'}]
+ )
+
+ self.assertEqual(
+ list(self.rs[('cpu_load_short', {"region": "us-west"})]),
+ [
+ {'value': 0.64,
+ 'time': '2015-01-29T21:51:28.968422294Z',
+ 'host': 'server01',
+ 'region': 'us-west'},
+ {'value': 0.65,
+ 'time': '2015-01-29T21:51:28.968422294Z',
+ 'host': 'server02',
+ 'region': 'us-west'},
+ ]
+ )
+
+ def test_keys(self):
+ """Test keys in TestResultSet object."""
+ self.assertEqual(
+ self.rs.keys(),
+ [
+ ('cpu_load_short', None),
+ ('other_series', None),
+ ]
+ )
+
+ def test_len(self):
+ """Test length in TestResultSet object."""
+ self.assertEqual(
+ len(self.rs),
+ 2
+ )
+
+ def test_items(self):
+ """Test items in TestResultSet object."""
+ items = list(self.rs.items())
+ items_lists = [(item[0], list(item[1])) for item in items]
+
+ self.assertEqual(
+ items_lists,
+ [
+ (
+ ('cpu_load_short', None),
+ [
+ {'time': '2015-01-29T21:51:28.968422294Z',
+ 'value': 0.64,
+ 'host': 'server01',
+ 'region': 'us-west'},
+ {'time': '2015-01-29T21:51:28.968422294Z',
+ 'value': 0.65,
+ 'host': 'server02',
+ 'region': 'us-west'}]),
+ (
+ ('other_series', None),
+ [
+ {'time': '2015-01-29T21:51:28.968422294Z',
+ 'value': 0.66,
+ 'host': 'server01',
+ 'region': 'us-west'}])]
+ )
+
+ def test_point_from_cols_vals(self):
+ """Test points from columns in TestResultSet object."""
+ cols = ['col1', 'col2']
+ vals = [1, '2']
+
+ point = ResultSet.point_from_cols_vals(cols, vals)
+ self.assertDictEqual(
+ point,
+ {'col1': 1, 'col2': '2'}
+ )
+
+ def test_system_query(self):
+ """Test system query capabilities in TestResultSet object."""
+ rs = ResultSet(
+ {'series': [
+ {'values': [['another', '48h0m0s', 3, False],
+ ['default', '0', 1, False],
+ ['somename', '24h0m0s', 4, True]],
+ 'columns': ['name', 'duration',
+ 'replicaN', 'default']}]}
+ )
+
+ self.assertEqual(
+ rs.keys(),
+ [('results', None)]
+ )
+
+ self.assertEqual(
+ list(rs['results']),
+ [
+ {'duration': '48h0m0s', 'default': False, 'replicaN': 3,
+ 'name': 'another'},
+ {'duration': '0', 'default': False, 'replicaN': 1,
+ 'name': 'default'},
+ {'duration': '24h0m0s', 'default': True, 'replicaN': 4,
+ 'name': 'somename'}
+ ]
+ )
+
+ def test_resultset_error(self):
+ """Test returning error in TestResultSet object."""
+ with self.assertRaises(InfluxDBClientError):
+ ResultSet({
+ "series": [],
+ "error": "Big error, many problems."
+ })
diff --git a/influxdb/tests/server_tests/__init__.py b/influxdb/tests/server_tests/__init__.py
new file mode 100644
index 00000000..ce149ab4
--- /dev/null
+++ b/influxdb/tests/server_tests/__init__.py
@@ -0,0 +1 @@
+"""Define the server tests package."""
diff --git a/influxdb/tests/server_tests/base.py b/influxdb/tests/server_tests/base.py
new file mode 100644
index 00000000..45a9ec80
--- /dev/null
+++ b/influxdb/tests/server_tests/base.py
@@ -0,0 +1,138 @@
+# -*- coding: utf-8 -*-
+"""Define the base module for server test."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+import sys
+
+from influxdb.tests import using_pypy
+from influxdb.tests.server_tests.influxdb_instance import InfluxDbInstance
+
+from influxdb.client import InfluxDBClient
+
+if not using_pypy:
+ from influxdb.dataframe_client import DataFrameClient
+
+
+def _setup_influxdb_server(inst):
+ inst.influxd_inst = InfluxDbInstance(
+ inst.influxdb_template_conf,
+ udp_enabled=getattr(inst, 'influxdb_udp_enabled', False),
+ )
+
+ inst.cli = InfluxDBClient('localhost',
+ inst.influxd_inst.http_port,
+ 'root',
+ '',
+ database='db')
+ if not using_pypy:
+ inst.cliDF = DataFrameClient('localhost',
+ inst.influxd_inst.http_port,
+ 'root',
+ '',
+ database='db')
+
+
+def _setup_gzip_client(inst):
+ inst.cli = InfluxDBClient('localhost',
+ inst.influxd_inst.http_port,
+ 'root',
+ '',
+ database='db',
+ gzip=True)
+
+
+def _teardown_influxdb_server(inst):
+ remove_tree = sys.exc_info() == (None, None, None)
+ inst.influxd_inst.close(remove_tree=remove_tree)
+
+
+class SingleTestCaseWithServerMixin(object):
+ """Define the single testcase with server mixin.
+
+ A mixin for unittest.TestCase to start an influxdb server instance
+ in a temporary directory **for each test function/case**
+ """
+
+ # 'influxdb_template_conf' attribute must be set
+ # on the TestCase class or instance.
+
+ @classmethod
+ def setUp(cls):
+ """Set up an instance of the SingleTestCaseWithServerMixin."""
+ _setup_influxdb_server(cls)
+
+ @classmethod
+ def tearDown(cls):
+ """Tear down an instance of the SingleTestCaseWithServerMixin."""
+ _teardown_influxdb_server(cls)
+
+
+class ManyTestCasesWithServerMixin(object):
+ """Define the many testcase with server mixin.
+
+ Same as the SingleTestCaseWithServerMixin but this mixin creates
+ a single server instance for the whole class. Also pre-creates a fresh
+ database: 'db'.
+ """
+
+ # 'influxdb_template_conf' attribute must be set on the class itself!
+
+ @classmethod
+ def setUpClass(cls):
+ """Set up an instance of the ManyTestCasesWithServerMixin."""
+ _setup_influxdb_server(cls)
+
+ def setUp(self):
+ """Set up an instance of the ManyTestCasesWithServerMixin."""
+ self.cli.create_database('db')
+
+ @classmethod
+ def tearDownClass(cls):
+ """Deconstruct an instance of ManyTestCasesWithServerMixin."""
+ _teardown_influxdb_server(cls)
+
+ def tearDown(self):
+ """Deconstruct an instance of ManyTestCasesWithServerMixin."""
+ self.cli.drop_database('db')
+
+
+class SingleTestCaseWithServerGzipMixin(object):
+ """Define the single testcase with server with gzip client mixin.
+
+ Same as the SingleTestCaseWithServerMixin but the InfluxDBClient has
+ gzip=True.
+ """
+
+ @classmethod
+ def setUp(cls):
+ """Set up an instance of the SingleTestCaseWithServerGzipMixin."""
+ _setup_influxdb_server(cls)
+ _setup_gzip_client(cls)
+
+ @classmethod
+ def tearDown(cls):
+ """Tear down an instance of the SingleTestCaseWithServerMixin."""
+ _teardown_influxdb_server(cls)
+
+
+class ManyTestCasesWithServerGzipMixin(object):
+ """Define the many testcase with server with gzip client mixin.
+
+ Same as the ManyTestCasesWithServerMixin but the InfluxDBClient has
+ gzip=True.
+ """
+
+ @classmethod
+ def setUpClass(cls):
+ """Set up an instance of the ManyTestCasesWithServerGzipMixin."""
+ _setup_influxdb_server(cls)
+ _setup_gzip_client(cls)
+
+ @classmethod
+ def tearDown(cls):
+ """Tear down an instance of the SingleTestCaseWithServerMixin."""
+ _teardown_influxdb_server(cls)
diff --git a/influxdb/tests/server_tests/client_test_with_server.py b/influxdb/tests/server_tests/client_test_with_server.py
new file mode 100644
index 00000000..a0263243
--- /dev/null
+++ b/influxdb/tests/server_tests/client_test_with_server.py
@@ -0,0 +1,966 @@
+# -*- coding: utf-8 -*-
+"""Unit tests for checking the InfluxDB server.
+
+The good/expected interaction between:
+
++ the python client (obviously)
++ and a *real* server instance running.
+
+This basically duplicates what's in client_test.py
+but without mocking around every call.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from functools import partial
+import os
+import time
+import unittest
+import warnings
+
+from influxdb import InfluxDBClient
+from influxdb.exceptions import InfluxDBClientError
+
+from influxdb.tests import skip_if_pypy, using_pypy, skip_server_tests
+from influxdb.tests.server_tests.base import ManyTestCasesWithServerMixin
+from influxdb.tests.server_tests.base import SingleTestCaseWithServerMixin
+from influxdb.tests.server_tests.base import ManyTestCasesWithServerGzipMixin
+from influxdb.tests.server_tests.base import SingleTestCaseWithServerGzipMixin
+
+# By default, raise exceptions on warnings
+warnings.simplefilter('error', FutureWarning)
+
+if not using_pypy:
+ import pandas as pd
+ from pandas.util.testing import assert_frame_equal
+
+
+THIS_DIR = os.path.abspath(os.path.dirname(__file__))
+
+
+def point(series_name, timestamp=None, tags=None, **fields):
+ """Define what a point looks like."""
+ res = {'measurement': series_name}
+
+ if timestamp:
+ res['time'] = timestamp
+
+ if tags:
+ res['tags'] = tags
+
+ res['fields'] = fields
+ return res
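+
+
+# For example (illustrative):
+# point('cpu_load_short', tags={'host': 'server01'}, value=0.64)
+# returns {'measurement': 'cpu_load_short',
+#          'tags': {'host': 'server01'}, 'fields': {'value': 0.64}}.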
+
+
+dummy_point = [ # some dummy points
+ {
+ "measurement": "cpu_load_short",
+ "tags": {
+ "host": "server01",
+ "region": "us-west"
+ },
+ "time": "2009-11-10T23:00:00Z",
+ "fields": {
+ "value": 0.64
+ }
+ }
+]
+
+dummy_points = [ # some dummy points
+ dummy_point[0],
+ {
+ "measurement": "memory",
+ "tags": {
+ "host": "server01",
+ "region": "us-west"
+ },
+ "time": "2009-11-10T23:01:35Z",
+ "fields": {
+ "value": 33.0
+ }
+ }
+]
+
+if not using_pypy:
+ dummy_point_df = {
+ "measurement": "cpu_load_short",
+ "tags": {"host": "server01",
+ "region": "us-west"},
+ "dataframe": pd.DataFrame(
+ [[0.64]], columns=['value'],
+ index=pd.to_datetime(["2009-11-10T23:00:00Z"]))
+ }
+ dummy_points_df = [{
+ "measurement": "cpu_load_short",
+ "tags": {"host": "server01", "region": "us-west"},
+ "dataframe": pd.DataFrame(
+ [[0.64]], columns=['value'],
+ index=pd.to_datetime(["2009-11-10T23:00:00Z"])),
+ }, {
+ "measurement": "memory",
+ "tags": {"host": "server01", "region": "us-west"},
+ "dataframe": pd.DataFrame(
+ [[33]], columns=['value'],
+ index=pd.to_datetime(["2009-11-10T23:01:35Z"])
+ )
+ }]
+
+
+dummy_point_without_timestamp = [
+ {
+ "measurement": "cpu_load_short",
+ "tags": {
+ "host": "server02",
+ "region": "us-west"
+ },
+ "fields": {
+ "value": 0.64
+ }
+ }
+]
+
+
+@skip_server_tests
+class SimpleTests(SingleTestCaseWithServerMixin, unittest.TestCase):
+ """Define the class of simple tests."""
+
+ influxdb_template_conf = os.path.join(THIS_DIR, 'influxdb.conf.template')
+
+ def test_fresh_server_no_db(self):
+ """Test a fresh server without database."""
+ self.assertEqual([], self.cli.get_list_database())
+
+ def test_create_database(self):
+ """Test create a database."""
+ self.assertIsNone(self.cli.create_database('new_db_1'))
+ self.assertIsNone(self.cli.create_database('new_db_2'))
+ self.assertEqual(
+ self.cli.get_list_database(),
+ [{'name': 'new_db_1'}, {'name': 'new_db_2'}]
+ )
+
+ def test_drop_database(self):
+ """Test drop a database."""
+ self.test_create_database()
+ self.assertIsNone(self.cli.drop_database('new_db_1'))
+ self.assertEqual([{'name': 'new_db_2'}], self.cli.get_list_database())
+
+ def test_query_fail(self):
+ """Test that a query failed."""
+ with self.assertRaises(InfluxDBClientError) as ctx:
+ self.cli.query('select column_one from foo')
+ self.assertIn('database not found: db',
+ ctx.exception.content)
+
+ def test_query_fail_ignore_errors(self):
+ """Test query failed but ignore errors."""
+ result = self.cli.query('select column_one from foo',
+ raise_errors=False)
+ self.assertEqual(result.error, 'database not found: db')
+
+ def test_create_user(self):
+ """Test create user."""
+ self.cli.create_user('test_user', 'secret_password')
+ rsp = list(self.cli.query("SHOW USERS")['results'])
+ self.assertIn({'user': 'test_user', 'admin': False},
+ rsp)
+
+ def test_create_user_admin(self):
+ """Test create admin user."""
+ self.cli.create_user('test_user', 'secret_password', True)
+ rsp = list(self.cli.query("SHOW USERS")['results'])
+ self.assertIn({'user': 'test_user', 'admin': True},
+ rsp)
+
+ def test_create_user_blank_password(self):
+ """Test create user with a blank pass."""
+ self.cli.create_user('test_user', '')
+ rsp = list(self.cli.query("SHOW USERS")['results'])
+ self.assertIn({'user': 'test_user', 'admin': False},
+ rsp)
+
+ def test_get_list_users_empty(self):
+ """Test get list of users, but empty."""
+ rsp = self.cli.get_list_users()
+ self.assertEqual([], rsp)
+
+ def test_get_list_users(self):
+ """Test get list of users."""
+ self.cli.query("CREATE USER test WITH PASSWORD 'test'")
+ rsp = self.cli.get_list_users()
+
+ self.assertEqual(
+ [{'user': 'test', 'admin': False}],
+ rsp
+ )
+
+ def test_create_user_blank_username(self):
+ """Test create blank username."""
+ with self.assertRaises(InfluxDBClientError) as ctx:
+ self.cli.create_user('', 'secret_password')
+ self.assertIn('username required',
+ ctx.exception.content)
+ rsp = list(self.cli.query("SHOW USERS")['results'])
+ self.assertEqual(rsp, [])
+
+ def test_drop_user(self):
+ """Test drop a user."""
+ self.cli.query("CREATE USER test WITH PASSWORD 'test'")
+ self.cli.drop_user('test')
+ users = list(self.cli.query("SHOW USERS")['results'])
+ self.assertEqual(users, [])
+
+ def test_drop_user_nonexisting(self):
+ """Test dropping a nonexistent user."""
+ with self.assertRaises(InfluxDBClientError) as ctx:
+ self.cli.drop_user('test')
+ self.assertIn('user not found',
+ ctx.exception.content)
+
+ @unittest.skip("Broken as of 0.9.0")
+ def test_revoke_admin_privileges(self):
+ """Test revoking admin privs, deprecated as of v0.9.0."""
+ self.cli.create_user('test', 'test', admin=True)
+ self.assertEqual([{'user': 'test', 'admin': True}],
+ self.cli.get_list_users())
+ self.cli.revoke_admin_privileges('test')
+ self.assertEqual([{'user': 'test', 'admin': False}],
+ self.cli.get_list_users())
+
+ def test_grant_privilege(self):
+ """Test grant privs to user."""
+ self.cli.create_user('test', 'test')
+ self.cli.create_database('testdb')
+ self.cli.grant_privilege('all', 'testdb', 'test')
+ # TODO: when supported by InfluxDB, check if privileges are granted
+
+ def test_grant_privilege_invalid(self):
+ """Test grant invalid privs to user."""
+ self.cli.create_user('test', 'test')
+ self.cli.create_database('testdb')
+ with self.assertRaises(InfluxDBClientError) as ctx:
+ self.cli.grant_privilege('', 'testdb', 'test')
+ self.assertEqual(400, ctx.exception.code)
+ self.assertIn('{"error":"error parsing query: ',
+ ctx.exception.content)
+
+ def test_revoke_privilege(self):
+ """Test revoke privs from user."""
+ self.cli.create_user('test', 'test')
+ self.cli.create_database('testdb')
+ self.cli.revoke_privilege('all', 'testdb', 'test')
+ # TODO: when supported by InfluxDB, check if privileges are revoked
+
+ def test_revoke_privilege_invalid(self):
+ """Test revoke invalid privs from user."""
+ self.cli.create_user('test', 'test')
+ self.cli.create_database('testdb')
+ with self.assertRaises(InfluxDBClientError) as ctx:
+ self.cli.revoke_privilege('', 'testdb', 'test')
+ self.assertEqual(400, ctx.exception.code)
+ self.assertIn('{"error":"error parsing query: ',
+ ctx.exception.content)
+
+ def test_invalid_port_fails(self):
+ """Test invalid port access fails."""
+ with self.assertRaises(ValueError):
+ InfluxDBClient('host', '80/redir', 'username', 'password')
+
+
+@skip_server_tests
+class CommonTests(ManyTestCasesWithServerMixin, unittest.TestCase):
+ """Define a class to handle common tests for the server."""
+
+ influxdb_template_conf = os.path.join(THIS_DIR, 'influxdb.conf.template')
+
+ def test_write(self):
+ """Test write to the server."""
+ self.assertIs(True, self.cli.write(
+ {'points': dummy_point},
+ params={'db': 'db'},
+ ))
+
+ def test_write_check_read(self):
+ """Test write and check read of data to server."""
+ self.test_write()
+ time.sleep(1)
+ rsp = self.cli.query('SELECT * FROM cpu_load_short', database='db')
+ self.assertListEqual([{'value': 0.64, 'time': '2009-11-10T23:00:00Z',
+ "host": "server01", "region": "us-west"}],
+ list(rsp.get_points()))
+
+ def test_write_points(self):
+ """Test writing points to the server."""
+ self.assertIs(True, self.cli.write_points(dummy_point))
+
+ @skip_if_pypy
+ def test_write_points_DF(self):
+ """Test writing points with dataframe."""
+ self.assertIs(
+ True,
+ self.cliDF.write_points(
+ dummy_point_df['dataframe'],
+ dummy_point_df['measurement'],
+ dummy_point_df['tags']
+ )
+ )
+
+ def test_write_points_check_read(self):
+ """Test writing points and check read back."""
+ self.test_write_points()
+ time.sleep(1) # same as test_write_check_read()
+ rsp = self.cli.query('SELECT * FROM cpu_load_short')
+
+ self.assertEqual(
+ list(rsp),
+ [[
+ {'value': 0.64,
+ 'time': '2009-11-10T23:00:00Z',
+ "host": "server01",
+ "region": "us-west"}
+ ]]
+ )
+
+ rsp2 = list(rsp.get_points())
+ self.assertEqual(len(rsp2), 1)
+ pt = rsp2[0]
+
+ self.assertEqual(
+ pt,
+ {'time': '2009-11-10T23:00:00Z',
+ 'value': 0.64,
+ "host": "server01",
+ "region": "us-west"}
+ )
+
+ @unittest.skip("Broken as of 0.9.0")
+ def test_write_points_check_read_DF(self):
+ """Test write points and check back with dataframe."""
+ self.test_write_points_DF()
+ time.sleep(1) # same as test_write_check_read()
+
+ rsp = self.cliDF.query('SELECT * FROM cpu_load_short')
+ assert_frame_equal(
+ rsp['cpu_load_short'],
+ dummy_point_df['dataframe']
+ )
+
+ # Query with Tags
+ rsp = self.cliDF.query(
+ "SELECT * FROM cpu_load_short GROUP BY *")
+ assert_frame_equal(
+ rsp[('cpu_load_short',
+ (('host', 'server01'), ('region', 'us-west')))],
+ dummy_point_df['dataframe']
+ )
+
+ def test_write_multiple_points_different_series(self):
+ """Test write multiple points to different series."""
+ self.assertIs(True, self.cli.write_points(dummy_points))
+ time.sleep(1)
+ rsp = self.cli.query('SELECT * FROM cpu_load_short')
+ lrsp = list(rsp)
+
+ self.assertEqual(
+ [[
+ {'value': 0.64,
+ 'time': '2009-11-10T23:00:00Z',
+ "host": "server01",
+ "region": "us-west"}
+ ]],
+ lrsp
+ )
+
+ rsp = list(self.cli.query('SELECT * FROM memory'))
+
+ self.assertEqual(
+ rsp,
+ [[
+ {'value': 33,
+ 'time': '2009-11-10T23:01:35Z',
+ "host": "server01",
+ "region": "us-west"}
+ ]]
+ )
+
+ def test_select_into_as_post(self):
+ """Test SELECT INTO is POSTed."""
+ self.assertIs(True, self.cli.write_points(dummy_points))
+ time.sleep(1)
+ rsp = self.cli.query('SELECT * INTO "newmeas" FROM "memory"')
+ rsp = self.cli.query('SELECT * FROM "newmeas"')
+ lrsp = list(rsp)
+
+ self.assertEqual(
+ lrsp,
+ [[
+ {'value': 33,
+ 'time': '2009-11-10T23:01:35Z',
+ "host": "server01",
+ "region": "us-west"}
+ ]]
+ )
+
+ @unittest.skip("Broken as of 0.9.0")
+ def test_write_multiple_points_different_series_DF(self):
+ """Test write multiple points using dataframe to different series."""
+ for i in range(2):
+ self.assertIs(
+ True, self.cliDF.write_points(
+ dummy_points_df[i]['dataframe'],
+ dummy_points_df[i]['measurement'],
+ dummy_points_df[i]['tags']))
+ time.sleep(1)
+ rsp = self.cliDF.query('SELECT * FROM cpu_load_short')
+
+ assert_frame_equal(
+ rsp['cpu_load_short'],
+ dummy_points_df[0]['dataframe']
+ )
+
+ rsp = self.cliDF.query('SELECT * FROM memory')
+ assert_frame_equal(
+ rsp['memory'],
+ dummy_points_df[1]['dataframe']
+ )
+
+ def test_write_points_batch(self):
+ """Test writing points in a batch."""
+ dummy_points = [
+ {"measurement": "cpu_usage", "tags": {"unit": "percent"},
+ "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.34}},
+ {"measurement": "network", "tags": {"direction": "in"},
+ "time": "2009-11-10T23:00:00Z", "fields": {"value": 123.00}},
+ {"measurement": "network", "tags": {"direction": "out"},
+ "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.00}}
+ ]
+ self.cli.write_points(points=dummy_points,
+ tags={"host": "server01",
+ "region": "us-west"},
+ batch_size=2)
+ time.sleep(5)
+ net_in = self.cli.query("SELECT value FROM network "
+ "WHERE direction=$dir",
+ bind_params={'dir': 'in'}
+ ).raw
+ net_out = self.cli.query("SELECT value FROM network "
+ "WHERE direction='out'").raw
+ cpu = self.cli.query("SELECT value FROM cpu_usage").raw
+ self.assertIn(123, net_in['series'][0]['values'][0])
+ self.assertIn(12, net_out['series'][0]['values'][0])
+ self.assertIn(12.34, cpu['series'][0]['values'][0])
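+
+ # NB: with bind_params the query is sent with the $dir placeholder and
+ # the parameter values as a separate JSON object, so the server does
+ # the substitution instead of the client interpolating strings.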
+
+ def test_write_points_batch_generator(self):
+ """Test writing points in a batch from a generator."""
+ dummy_points = [
+ {"measurement": "cpu_usage", "tags": {"unit": "percent"},
+ "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.34}},
+ {"measurement": "network", "tags": {"direction": "in"},
+ "time": "2009-11-10T23:00:00Z", "fields": {"value": 123.00}},
+ {"measurement": "network", "tags": {"direction": "out"},
+ "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.00}}
+ ]
+ dummy_points_generator = (point for point in dummy_points)
+ self.cli.write_points(points=dummy_points_generator,
+ tags={"host": "server01",
+ "region": "us-west"},
+ batch_size=2)
+ time.sleep(5)
+ net_in = self.cli.query("SELECT value FROM network "
+ "WHERE direction=$dir",
+ bind_params={'dir': 'in'}
+ ).raw
+ net_out = self.cli.query("SELECT value FROM network "
+ "WHERE direction='out'").raw
+ cpu = self.cli.query("SELECT value FROM cpu_usage").raw
+ self.assertIn(123, net_in['series'][0]['values'][0])
+ self.assertIn(12, net_out['series'][0]['values'][0])
+ self.assertIn(12.34, cpu['series'][0]['values'][0])
+
+ def test_query(self):
+ """Test querying data back from server."""
+ self.assertIs(True, self.cli.write_points(dummy_point))
+
+ @unittest.skip('Not implemented for 0.9')
+ def test_query_chunked(self):
+ """Test query for chunked response from server."""
+ cli = InfluxDBClient(database='db')
+ example_object = {
+ 'points': [
+ [1415206250119, 40001, 667],
+ [1415206244555, 30001, 7],
+ [1415206228241, 20001, 788],
+ [1415206212980, 10001, 555],
+ [1415197271586, 10001, 23]
+ ],
+ 'name': 'foo',
+ 'columns': [
+ 'time',
+ 'sequence_number',
+ 'val'
+ ]
+ }
+ del cli
+ del example_object
+ # TODO ?
+
+ def test_delete_series_invalid(self):
+ """Test delete invalid series."""
+ with self.assertRaises(InfluxDBClientError):
+ self.cli.delete_series()
+
+ def test_default_retention_policy(self):
+ """Test add default retention policy."""
+ rsp = self.cli.get_list_retention_policies()
+ self.assertEqual(
+ [
+ {'name': 'autogen',
+ 'duration': '0s',
+ 'replicaN': 1,
+ 'shardGroupDuration': u'168h0m0s',
+ 'default': True}
+ ],
+ rsp
+ )
+
+ def test_create_retention_policy_default(self):
+ """Test create a new default retention policy."""
+ self.cli.create_retention_policy('somename', '1d', 1, default=True)
+ self.cli.create_retention_policy('another', '2d', 1, default=False)
+ rsp = self.cli.get_list_retention_policies()
+
+ self.assertEqual(
+ [
+ {'duration': '0s',
+ 'default': False,
+ 'replicaN': 1,
+ 'shardGroupDuration': u'168h0m0s',
+ 'name': 'autogen'},
+ {'duration': '24h0m0s',
+ 'default': True,
+ 'replicaN': 1,
+ 'shardGroupDuration': u'1h0m0s',
+ 'name': 'somename'},
+ {'duration': '48h0m0s',
+ 'default': False,
+ 'replicaN': 1,
+ 'shardGroupDuration': u'24h0m0s',
+ 'name': 'another'}
+ ],
+ rsp
+ )
+
+ def test_create_retention_policy(self):
+ """Test creating a new retention policy, not default."""
+ self.cli.create_retention_policy('somename', '1d', 1)
+ # NB: creating a retention policy without specifying
+ # shard group duration
+ # leads to a shard group duration of 1 hour
+ rsp = self.cli.get_list_retention_policies()
+ self.assertEqual(
+ [
+ {'duration': '0s',
+ 'default': True,
+ 'replicaN': 1,
+ 'shardGroupDuration': u'168h0m0s',
+ 'name': 'autogen'},
+ {'duration': '24h0m0s',
+ 'default': False,
+ 'replicaN': 1,
+ 'shardGroupDuration': u'1h0m0s',
+ 'name': 'somename'}
+ ],
+ rsp
+ )
+
+ self.cli.drop_retention_policy('somename', 'db')
+ # recreate the RP
+ self.cli.create_retention_policy('somename', '1w', 1,
+ shard_duration='1h')
+
+ rsp = self.cli.get_list_retention_policies()
+ self.assertEqual(
+ [
+ {'duration': '0s',
+ 'default': True,
+ 'replicaN': 1,
+ 'shardGroupDuration': u'168h0m0s',
+ 'name': 'autogen'},
+ {'duration': '168h0m0s',
+ 'default': False,
+ 'replicaN': 1,
+ 'shardGroupDuration': u'1h0m0s',
+ 'name': 'somename'}
+ ],
+ rsp
+ )
+
+ self.cli.drop_retention_policy('somename', 'db')
+ # recreate the RP
+ self.cli.create_retention_policy('somename', '1w', 1)
+
+ rsp = self.cli.get_list_retention_policies()
+ self.assertEqual(
+ [
+ {'duration': '0s',
+ 'default': True,
+ 'replicaN': 1,
+ 'shardGroupDuration': u'168h0m0s',
+ 'name': 'autogen'},
+ {'duration': '168h0m0s',
+ 'default': False,
+ 'replicaN': 1,
+ 'shardGroupDuration': u'24h0m0s',
+ 'name': 'somename'}
+ ],
+ rsp
+ )
+
+ def test_alter_retention_policy(self):
+ """Test alter a retention policy, not default."""
+ self.cli.create_retention_policy('somename', '1d', 1)
+
+ # Test alter duration
+ self.cli.alter_retention_policy('somename', 'db',
+ duration='4d',
+ shard_duration='2h')
+ # NB: the shard group duration changes here only because
+ # it was passed explicitly
+ rsp = self.cli.get_list_retention_policies()
+ self.assertEqual(
+ [
+ {'duration': '0s',
+ 'default': True,
+ 'replicaN': 1,
+ 'shardGroupDuration': u'168h0m0s',
+ 'name': 'autogen'},
+ {'duration': '96h0m0s',
+ 'default': False,
+ 'replicaN': 1,
+ 'shardGroupDuration': u'2h0m0s',
+ 'name': 'somename'}
+ ],
+ rsp
+ )
+
+ # Test alter replication
+ self.cli.alter_retention_policy('somename', 'db',
+ replication=4)
+
+ # NB: altering retention policy doesn't change shard group duration
+ rsp = self.cli.get_list_retention_policies()
+ self.assertEqual(
+ [
+ {'duration': '0s',
+ 'default': True,
+ 'replicaN': 1,
+ 'shardGroupDuration': u'168h0m0s',
+ 'name': 'autogen'},
+ {'duration': '96h0m0s',
+ 'default': False,
+ 'replicaN': 4,
+ 'shardGroupDuration': u'2h0m0s',
+ 'name': 'somename'}
+ ],
+ rsp
+ )
+
+ # Test alter default
+ self.cli.alter_retention_policy('somename', 'db',
+ default=True)
+ # NB: altering retention policy doesn't change shard group duration
+ rsp = self.cli.get_list_retention_policies()
+ self.assertEqual(
+ [
+ {'duration': '0s',
+ 'default': False,
+ 'replicaN': 1,
+ 'shardGroupDuration': u'168h0m0s',
+ 'name': 'autogen'},
+ {'duration': '96h0m0s',
+ 'default': True,
+ 'replicaN': 4,
+ 'shardGroupDuration': u'2h0m0s',
+ 'name': 'somename'}
+ ],
+ rsp
+ )
+
+ # Test alter shard_duration
+ self.cli.alter_retention_policy('somename', 'db',
+ shard_duration='4h')
+
+ rsp = self.cli.get_list_retention_policies()
+ self.assertEqual(
+ [
+ {'duration': '0s',
+ 'default': False,
+ 'replicaN': 1,
+ 'shardGroupDuration': u'168h0m0s',
+ 'name': 'autogen'},
+ {'duration': '96h0m0s',
+ 'default': True,
+ 'replicaN': 4,
+ 'shardGroupDuration': u'4h0m0s',
+ 'name': 'somename'}
+ ],
+ rsp
+ )
+
+ def test_alter_retention_policy_invalid(self):
+ """Test invalid alter retention policy."""
+ self.cli.create_retention_policy('somename', '1d', 1)
+ with self.assertRaises(InfluxDBClientError) as ctx:
+ self.cli.alter_retention_policy('somename', 'db')
+ self.assertEqual(400, ctx.exception.code)
+ self.assertIn('{"error":"error parsing query: ',
+ ctx.exception.content)
+ rsp = self.cli.get_list_retention_policies()
+ self.assertEqual(
+ [
+ {'duration': '0s',
+ 'default': True,
+ 'replicaN': 1,
+ 'shardGroupDuration': u'168h0m0s',
+ 'name': 'autogen'},
+ {'duration': '24h0m0s',
+ 'default': False,
+ 'replicaN': 1,
+ 'shardGroupDuration': u'1h0m0s',
+ 'name': 'somename'}
+ ],
+ rsp
+ )
+
+ def test_drop_retention_policy(self):
+ """Test drop a retention policy."""
+ self.cli.create_retention_policy('somename', '1d', 1)
+
+ # Test drop retention
+ self.cli.drop_retention_policy('somename', 'db')
+ rsp = self.cli.get_list_retention_policies()
+ self.assertEqual(
+ [
+ {'duration': '0s',
+ 'default': True,
+ 'replicaN': 1,
+ 'shardGroupDuration': u'168h0m0s',
+ 'name': 'autogen'}
+ ],
+ rsp
+ )
+
+ def test_create_continuous_query(self):
+ """Test continuous query creation."""
+ self.cli.create_retention_policy('some_rp', '1d', 1)
+ query = 'select count("value") into "some_rp"."events" from ' \
+ '"events" group by time(10m)'
+ self.cli.create_continuous_query('test_cq', query, 'db')
+ cqs = self.cli.get_list_continuous_queries()
+ expected_cqs = [
+ {
+ 'db': [
+ {
+ 'name': 'test_cq',
+ 'query': 'CREATE CONTINUOUS QUERY test_cq ON db '
+ 'BEGIN SELECT count(value) INTO '
+ 'db.some_rp.events FROM db.autogen.events '
+ 'GROUP BY time(10m) END'
+ }
+ ]
+ }
+ ]
+ self.assertEqual(cqs, expected_cqs)
+
+ def test_drop_continuous_query(self):
+ """Test continuous query drop."""
+ self.test_create_continuous_query()
+ self.cli.drop_continuous_query('test_cq', 'db')
+ cqs = self.cli.get_list_continuous_queries()
+ expected_cqs = [{'db': []}]
+ self.assertEqual(cqs, expected_cqs)
+
+ def test_issue_143(self):
+ """Test for PR#143 from repo."""
+ pt = partial(point, 'a_series_name', timestamp='2015-03-30T16:16:37Z')
+ pts = [
+ pt(value=15),
+ pt(tags={'tag_1': 'value1'}, value=5),
+ pt(tags={'tag_1': 'value2'}, value=10),
+ ]
+ self.cli.write_points(pts)
+ time.sleep(1)
+ rsp = list(self.cli.query('SELECT * FROM a_series_name \
+GROUP BY tag_1').get_points())
+
+ self.assertEqual(
+ [
+ {'time': '2015-03-30T16:16:37Z', 'value': 15},
+ {'time': '2015-03-30T16:16:37Z', 'value': 5},
+ {'time': '2015-03-30T16:16:37Z', 'value': 10}
+ ],
+ rsp
+ )
+
+ # a slightly more complex one with 2 tags values:
+ pt = partial(point, 'series2', timestamp='2015-03-30T16:16:37Z')
+ pts = [
+ pt(tags={'tag1': 'value1', 'tag2': 'v1'}, value=0),
+ pt(tags={'tag1': 'value1', 'tag2': 'v2'}, value=5),
+ pt(tags={'tag1': 'value2', 'tag2': 'v1'}, value=10),
+ ]
+ self.cli.write_points(pts)
+ time.sleep(1)
+ rsp = self.cli.query('SELECT * FROM series2 GROUP BY tag1,tag2')
+
+ self.assertEqual(
+ [
+ {'value': 0, 'time': '2015-03-30T16:16:37Z'},
+ {'value': 5, 'time': '2015-03-30T16:16:37Z'},
+ {'value': 10, 'time': '2015-03-30T16:16:37Z'}
+ ],
+ list(rsp['series2'])
+ )
+
+ all_tag2_equal_v1 = list(rsp.get_points(tags={'tag2': 'v1'}))
+
+ self.assertEqual(
+ [{'value': 0, 'time': '2015-03-30T16:16:37Z'},
+ {'value': 10, 'time': '2015-03-30T16:16:37Z'}],
+ all_tag2_equal_v1,
+ )
+
+ def test_query_multiple_series(self):
+ """Test query for multiple series."""
+ pt = partial(point, 'series1', timestamp='2015-03-30T16:16:37Z')
+ pts = [
+ pt(tags={'tag1': 'value1', 'tag2': 'v1'}, value=0),
+ ]
+ self.cli.write_points(pts)
+
+ pt = partial(point, 'series2', timestamp='1970-03-30T16:16:37Z')
+ pts = [
+ pt(tags={'tag1': 'value1', 'tag2': 'v1'},
+ value=0, data1=33, data2="bla"),
+ ]
+ self.cli.write_points(pts)
+
+ def test_get_list_series(self):
+ """Test get a list of series from the database."""
+ dummy_points = [
+ {
+ "measurement": "cpu_load_short",
+ "tags": {
+ "host": "server01",
+ "region": "us-west"
+ },
+ "time": "2009-11-10T23:00:00.123456Z",
+ "fields": {
+ "value": 0.64
+ }
+ }
+ ]
+
+ dummy_points_2 = [
+ {
+ "measurement": "memory_usage",
+ "tags": {
+ "host": "server02",
+ "region": "us-east"
+ },
+ "time": "2009-11-10T23:00:00.123456Z",
+ "fields": {
+ "value": 80
+ }
+ }
+ ]
+
+ self.cli.write_points(dummy_points)
+ self.cli.write_points(dummy_points_2)
+
+ self.assertEqual(
+ self.cli.get_list_series(),
+ ['cpu_load_short,host=server01,region=us-west',
+ 'memory_usage,host=server02,region=us-east']
+ )
+
+ self.assertEqual(
+ self.cli.get_list_series(measurement='memory_usage'),
+ ['memory_usage,host=server02,region=us-east']
+ )
+
+ self.assertEqual(
+ self.cli.get_list_series(tags={'host': 'server02'}),
+ ['memory_usage,host=server02,region=us-east'])
+
+ self.assertEqual(
+ self.cli.get_list_series(
+ measurement='cpu_load_short', tags={'host': 'server02'}),
+ [])
+
+
+@skip_server_tests
+class UdpTests(ManyTestCasesWithServerMixin, unittest.TestCase):
+ """Define a class to test UDP series."""
+
+ influxdb_udp_enabled = True
+ influxdb_template_conf = os.path.join(THIS_DIR,
+ 'influxdb.conf.template')
+
+ def test_write_points_udp(self):
+ """Test write points UDP."""
+ cli = InfluxDBClient(
+ 'localhost',
+ self.influxd_inst.http_port,
+ 'root',
+ '',
+ database='db',
+ use_udp=True,
+ udp_port=self.influxd_inst.udp_port
+ )
+ cli.write_points(dummy_point)
+
+ # The points are not immediately available after write_points.
+ # This is to be expected because we are using UDP (no response!).
+ # So we have to wait some time:
+ time.sleep(3) # 3 sec seems to be a good choice.
+ rsp = self.cli.query('SELECT * FROM cpu_load_short')
+
+ self.assertEqual(
+ # this is dummy_point:
+ [
+ {'value': 0.64,
+ 'time': '2009-11-10T23:00:00Z',
+ "host": "server01",
+ "region": "us-west"}
+ ],
+ list(rsp['cpu_load_short'])
+ )
+
+
+# Run the tests again, but with gzip enabled this time
+@skip_server_tests
+class GzipSimpleTests(SimpleTests, SingleTestCaseWithServerGzipMixin):
+ """Repeat the simple tests with InfluxDBClient where gzip=True."""
+
+ pass
+
+
+@skip_server_tests
+class GzipCommonTests(CommonTests, ManyTestCasesWithServerGzipMixin):
+ """Repeat the common tests with InfluxDBClient where gzip=True."""
+
+ pass
+
+
+@skip_server_tests
+class GzipUdpTests(UdpTests, ManyTestCasesWithServerGzipMixin):
+ """Repeat the UDP tests with InfluxDBClient where gzip=True."""
+
+ pass
diff --git a/influxdb/tests/server_tests/influxdb.conf.template b/influxdb/tests/server_tests/influxdb.conf.template
new file mode 100644
index 00000000..efcff78a
--- /dev/null
+++ b/influxdb/tests/server_tests/influxdb.conf.template
@@ -0,0 +1,29 @@
+bind-address = ":{global_port}"
+
+[meta]
+ dir = "{meta_dir}"
+ hostname = "localhost"
+ bind-address = ":{meta_port}"
+
+[data]
+ dir = "{data_dir}"
+ wal-dir = "{wal_dir}"
+ index-version = "tsi1"
+
+[http]
+ enabled = true
+ bind-address = ":{http_port}"
+ auth-enabled = false
+
+[[udp]]
+ enabled = {udp_enabled}
+ bind-address = ":{udp_port}"
+ database = "db"
+ batch-size = 0
+ batch-timeout = "0"
+
+[monitor]
+ store-enabled = false
+
+[hinted-handoff]
+ dir = "{handoff_dir}"
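+
+# NB: the {placeholder} fields above are filled in via str.format() by
+# the test harness (see influxdb_instance.py), which substitutes
+# temporary directories and freshly allocated free ports per instance.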
diff --git a/influxdb/tests/server_tests/influxdb_instance.py b/influxdb/tests/server_tests/influxdb_instance.py
new file mode 100644
index 00000000..2dd823ff
--- /dev/null
+++ b/influxdb/tests/server_tests/influxdb_instance.py
@@ -0,0 +1,198 @@
+# -*- coding: utf-8 -*-
+"""Define the test module for an influxdb instance."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+import datetime
+import distutils.spawn
+import os
+import tempfile
+import shutil
+import subprocess
+import sys
+import time
+import unittest
+
+from influxdb.tests.misc import is_port_open, get_free_ports
+
+# hack in check_output if it's not defined, like for python 2.6
+if "check_output" not in dir(subprocess):
+ def f(*popenargs, **kwargs):
+ """Check for output."""
+ if 'stdout' in kwargs:
+ raise ValueError(
+ 'stdout argument not allowed, it will be overridden.'
+ )
+ process = subprocess.Popen(stdout=subprocess.PIPE,
+ *popenargs,
+ **kwargs)
+ output, unused_err = process.communicate()
+ retcode = process.poll()
+ if retcode:
+ cmd = kwargs.get("args")
+ if cmd is None:
+ cmd = popenargs[0]
+ raise subprocess.CalledProcessError(retcode, cmd)
+ return output
+ subprocess.check_output = f
+
+
+class InfluxDbInstance(object):
+ """Define an instance of InfluxDB.
+
+ A class to launch a fresh influxdb server instance
+ in a temporary place, using a config file template.
+ """
+
+ def __init__(self, conf_template, udp_enabled=False):
+ """Initialize an instance of InfluxDbInstance."""
+ if os.environ.get("INFLUXDB_PYTHON_SKIP_SERVER_TESTS", None) == 'True':
+ raise unittest.SkipTest(
+ "Skipping server test (INFLUXDB_PYTHON_SKIP_SERVER_TESTS)"
+ )
+
+ self.influxd_path = self.find_influxd_path()
+
+ errors = 0
+ while True:
+ try:
+ self._start_server(conf_template, udp_enabled)
+ break
+ # Happens when the ports are already in use.
+ except RuntimeError as e:
+ errors += 1
+ if errors > 2:
+ raise e
+
+ def _start_server(self, conf_template, udp_enabled):
+ # create a temporary dir to store all needed files
+ # for the influxdb server instance :
+ self.temp_dir_base = tempfile.mkdtemp()
+
+ # "temp_dir_base" will be used for conf file and logs,
+ # while "temp_dir_influxdb" is for the databases files/dirs :
+ tempdir = self.temp_dir_influxdb = tempfile.mkdtemp(
+ dir=self.temp_dir_base)
+
+ # find a couple free ports :
+ free_ports = get_free_ports(4)
+ ports = {}
+ for service in 'http', 'global', 'meta', 'udp':
+ ports[service + '_port'] = free_ports.pop()
+ if not udp_enabled:
+ ports['udp_port'] = -1
+
+ conf_data = dict(
+ meta_dir=os.path.join(tempdir, 'meta'),
+ data_dir=os.path.join(tempdir, 'data'),
+ wal_dir=os.path.join(tempdir, 'wal'),
+ cluster_dir=os.path.join(tempdir, 'state'),
+ handoff_dir=os.path.join(tempdir, 'handoff'),
+ logs_file=os.path.join(self.temp_dir_base, 'logs.txt'),
+ udp_enabled='true' if udp_enabled else 'false',
+ )
+ conf_data.update(ports)
+ self.__dict__.update(conf_data)
+
+ conf_file = os.path.join(self.temp_dir_base, 'influxdb.conf')
+ with open(conf_file, "w") as fh:
+ with open(conf_template) as fh_template:
+ fh.write(fh_template.read().format(**conf_data))
+
+ # now start the server instance:
+ self.proc = subprocess.Popen(
+ [self.influxd_path, '-config', conf_file],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE
+ )
+
+ print(
+ "%s > Started influxdb bin in %r with ports %s and %s.." % (
+ datetime.datetime.now(),
+ self.temp_dir_base,
+ self.global_port,
+ self.http_port
+ )
+ )
+
+ # wait for it to listen on its http and global ports:
+ # usually a fresh instance is ready in less than 1 sec ..
+ timeout = time.time() + 10 # so 10 secs should be enough,
+ # otherwise either your system load is high,
+ # or you are running a 286 @ 1 MHz?
+ try:
+ while time.time() < timeout:
+ if (is_port_open(self.http_port) and
+ is_port_open(self.global_port)):
+ # it's hard to check if a UDP port is open..
+ if udp_enabled:
+ # so let's just sleep 0.5 sec in this case
+ # to be sure that the server has open the port
+ time.sleep(0.5)
+ break
+ time.sleep(0.5)
+ if self.proc.poll() is not None:
+ raise RuntimeError('influxdb prematurely exited')
+ else:
+ self.proc.terminate()
+ self.proc.wait()
+ raise RuntimeError('Timeout waiting for influxdb to listen'
+ ' on its ports (%s)' % ports)
+ except RuntimeError as err:
+ data = self.get_logs_and_output()
+ data['reason'] = str(err)
+ data['now'] = datetime.datetime.now()
+ raise RuntimeError("%(now)s > %(reason)s. RC=%(rc)s\n"
+ "stdout=%(out)s\nstderr=%(err)s\nlogs=%(logs)r"
+ % data)
+
+ def find_influxd_path(self):
+ """Find the path for InfluxDB."""
+ influxdb_bin_path = os.environ.get(
+ 'INFLUXDB_PYTHON_INFLUXD_PATH',
+ None
+ )
+
+ if influxdb_bin_path is None:
+ influxdb_bin_path = distutils.spawn.find_executable('influxd')
+ if not influxdb_bin_path:
+ try:
+ influxdb_bin_path = subprocess.check_output(
+ ['which', 'influxd']
+ ).strip()
+ except subprocess.CalledProcessError:
+ # fall back on:
+ influxdb_bin_path = '/opt/influxdb/influxd'
+
+ if not os.path.isfile(influxdb_bin_path):
+ raise unittest.SkipTest("Could not find influxd binary")
+
+ version = subprocess.check_output([influxdb_bin_path, 'version'])
+ print("InfluxDB version: %s" % version, file=sys.stderr)
+
+ return influxdb_bin_path
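+
+ # Hypothetical usage: point the tests at a specific binary by exporting
+ # e.g. INFLUXDB_PYTHON_INFLUXD_PATH=/usr/local/bin/influxd before
+ # running the server tests; otherwise PATH is searched for 'influxd'.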
+
+ def get_logs_and_output(self):
+ """Query for logs and output."""
+ proc = self.proc
+ try:
+ with open(self.logs_file) as fh:
+ logs = fh.read()
+ except IOError as err:
+ logs = "Couldn't read logs: %s" % err
+ return {
+ 'rc': proc.returncode,
+ 'out': proc.stdout.read(),
+ 'err': proc.stderr.read(),
+ 'logs': logs
+ }
+
+ def close(self, remove_tree=True):
+ """Close an instance of InfluxDB."""
+ self.proc.terminate()
+ self.proc.wait()
+ if remove_tree:
+ shutil.rmtree(self.temp_dir_base)
diff --git a/influxdb/tests/test_line_protocol.py b/influxdb/tests/test_line_protocol.py
new file mode 100644
index 00000000..5b344990
--- /dev/null
+++ b/influxdb/tests/test_line_protocol.py
@@ -0,0 +1,205 @@
+# -*- coding: utf-8 -*-
+"""Define the line protocol test module."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+import unittest
+
+from datetime import datetime
+from decimal import Decimal
+
+from pytz import UTC, timezone
+from influxdb import line_protocol
+
+
+class TestLineProtocol(unittest.TestCase):
+ """Define the LineProtocol test object."""
+
+ def test_make_lines(self):
+ """Test make new lines in TestLineProtocol object."""
+ data = {
+ "tags": {
+ "empty_tag": "",
+ "none_tag": None,
+ "backslash_tag": "C:\\",
+ "integer_tag": 2,
+ "string_tag": "hello"
+ },
+ "points": [
+ {
+ "measurement": "test",
+ "fields": {
+ "string_val": "hello!",
+ "int_val": 1,
+ "float_val": 1.1,
+ "none_field": None,
+ "bool_val": True,
+ }
+ }
+ ]
+ }
+
+ self.assertEqual(
+ line_protocol.make_lines(data),
+ 'test,backslash_tag=C:\\\\,integer_tag=2,string_tag=hello '
+ 'bool_val=True,float_val=1.1,int_val=1i,string_val="hello!"\n'
+ )
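The expected line above encodes several line-protocol rules: tags and fields are serialized in sorted key order, empty and None tags are dropped, integer fields get an 'i' suffix, and string fields are double-quoted. The tag ordering, for instance, is plain lexicographic sorting:

    tags = {"string_tag": "hello", "integer_tag": 2, "backslash_tag": "C:\\"}
    assert sorted(tags) == ['backslash_tag', 'integer_tag', 'string_tag']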
+
+ def test_timezone(self):
+ """Test timezone in TestLineProtocol object."""
+ dt = datetime(2009, 11, 10, 23, 0, 0, 123456)
+ utc = UTC.localize(dt)
+ berlin = timezone('Europe/Berlin').localize(dt)
+ eastern = berlin.astimezone(timezone('US/Eastern'))
+ data = {
+ "points": [
+ {"measurement": "A", "fields": {"val": 1},
+ "time": 0},
+ {"measurement": "A", "fields": {"val": 1},
+ "time": "2009-11-10T23:00:00.123456Z"},
+ {"measurement": "A", "fields": {"val": 1}, "time": dt},
+ {"measurement": "A", "fields": {"val": 1}, "time": utc},
+ {"measurement": "A", "fields": {"val": 1}, "time": berlin},
+ {"measurement": "A", "fields": {"val": 1}, "time": eastern},
+ ]
+ }
+ self.assertEqual(
+ line_protocol.make_lines(data),
+ '\n'.join([
+ 'A val=1i 0',
+ 'A val=1i 1257894000123456000',
+ 'A val=1i 1257894000123456000',
+ 'A val=1i 1257894000123456000',
+ 'A val=1i 1257890400123456000',
+ 'A val=1i 1257890400123456000',
+ ]) + '\n'
+ )
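The expected integers above can be derived by hand: make_lines emits epoch timestamps at nanosecond precision, so 2009-11-10T23:00:00.123456 UTC becomes 1257894000 seconds * 10**9 plus 123456 microseconds * 10**3, and the Berlin wall-clock time (UTC+1) lands exactly 3600 * 10**9 ns earlier. A quick standard-library check of the UTC case:

    import calendar
    from datetime import datetime

    dt = datetime(2009, 11, 10, 23, 0, 0, 123456)
    seconds = calendar.timegm(dt.timetuple())  # 1257894000
    nanos = seconds * 10**9 + dt.microsecond * 10**3
    assert nanos == 1257894000123456000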
+
+ def test_string_val_newline(self):
+ """Test string value with newline in TestLineProtocol object."""
+ data = {
+ "points": [
+ {
+ "measurement": "m1",
+ "fields": {
+ "multi_line": "line1\nline1\nline3"
+ }
+ }
+ ]
+ }
+
+ self.assertEqual(
+ line_protocol.make_lines(data),
+ 'm1 multi_line="line1\\nline1\\nline3"\n'
+ )
+
+ def test_make_lines_unicode(self):
+ """Test make unicode lines in TestLineProtocol object."""
+ data = {
+ "tags": {
+ "unicode_tag": "\'Привет!\'" # Hello! in Russian
+ },
+ "points": [
+ {
+ "measurement": "test",
+ "fields": {
+ "unicode_val": "Привет!", # Hello! in Russian
+ }
+ }
+ ]
+ }
+
+ self.assertEqual(
+ line_protocol.make_lines(data),
+ 'test,unicode_tag=\'Привет!\' unicode_val="Привет!"\n'
+ )
+
+ def test_make_lines_empty_field_string(self):
+ """Test make lines with an empty string field."""
+ data = {
+ "points": [
+ {
+ "measurement": "test",
+ "fields": {
+ "string": "",
+ }
+ }
+ ]
+ }
+
+ self.assertEqual(
+ line_protocol.make_lines(data),
+ 'test string=""\n'
+ )
+
+ def test_tag_value_newline(self):
+ """Test make lines with tag value contains newline."""
+ data = {
+ "tags": {
+ "t1": "line1\nline2"
+ },
+ "points": [
+ {
+ "measurement": "test",
+ "fields": {
+ "val": "hello"
+ }
+ }
+ ]
+ }
+
+ self.assertEqual(
+ line_protocol.make_lines(data),
+ 'test,t1=line1\\nline2 val="hello"\n'
+ )
+
+ def test_quote_ident(self):
+ """Test quote indentation in TestLineProtocol object."""
+ self.assertEqual(
+ line_protocol.quote_ident(r"""\foo ' bar " Örf"""),
+ r'''"\\foo ' bar \" Örf"'''
+ )
+
+ def test_quote_literal(self):
+ """Test quote literal in TestLineProtocol object."""
+ self.assertEqual(
+ line_protocol.quote_literal(r"""\foo ' bar " Örf"""),
+ r"""'\\foo \' bar " Örf'"""
+ )
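The two quoting helpers exercised above differ mainly in the delimiter they escape: quote_ident double-quotes identifiers, quote_literal single-quotes literals, and both double any backslashes. A sketch consistent with these test expectations (the upstream implementations may differ in detail):

    def quote_ident(value):
        # Double-quote an identifier, escaping backslashes, double
        # quotes and newlines, per test_quote_ident above.
        return '"{0}"'.format(value.replace('\\', '\\\\')
                                   .replace('"', '\\"')
                                   .replace('\n', '\\n'))

    def quote_literal(value):
        # Single-quote a literal, escaping backslashes and single
        # quotes, per test_quote_literal above.
        return "'{0}'".format(value.replace('\\', '\\\\')
                                   .replace("'", "\\'"))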
+
+ def test_float_with_long_decimal_fraction(self):
+ """Ensure precision is preserved when casting floats into strings."""
+ data = {
+ "points": [
+ {
+ "measurement": "test",
+ "fields": {
+ "float_val": 1.0000000000000009,
+ }
+ }
+ ]
+ }
+ self.assertEqual(
+ line_protocol.make_lines(data),
+ 'test float_val=1.0000000000000009\n'
+ )
+
+ def test_float_with_long_decimal_fraction_as_type_decimal(self):
+ """Ensure precision is preserved when casting Decimal into strings."""
+ data = {
+ "points": [
+ {
+ "measurement": "test",
+ "fields": {
+ "float_val": Decimal(0.8289445733333332),
+ }
+ }
+ ]
+ }
+ self.assertEqual(
+ line_protocol.make_lines(data),
+ 'test float_val=0.8289445733333332\n'
+ )
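One subtlety in the last test: Decimal(0.8289445733333332) builds the Decimal from a Python float, so it stores the float's exact binary value rather than the sixteen digits written in the source; the test still passes because the rendered form matches the original literal. When exact decimal digits matter, constructing from a string avoids the detour through binary floating point:

    from decimal import Decimal

    from_float = Decimal(0.8289445733333332)  # exact binary expansion
    from_str = Decimal('0.8289445733333332')  # exactly the digits written
    assert from_float != from_str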
diff --git a/mypy.ini b/mypy.ini
new file mode 100644
index 00000000..308aa62d
--- /dev/null
+++ b/mypy.ini
@@ -0,0 +1,8 @@
+[mypy]
+ignore_missing_imports = True
+warn_unused_ignores = True
+warn_unused_configs = True
+warn_redundant_casts = True
+warn_no_return = True
+no_implicit_optional = True
+strict_equality = True
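As an illustration of what this configuration buys: strict_equality makes mypy reject comparisons between non-overlapping types, and no_implicit_optional requires Optional to be spelled out for None defaults. A hypothetical snippet that would fail type checking under this config:

    from typing import Optional

    def is_default_port(port: int) -> bool:
        # strict_equality: comparing int with str is flagged as an error.
        return port == "8086"

    def connect(host: Optional[str] = None) -> None:
        # no_implicit_optional: the annotation must say Optional[str];
        # a bare "host: str = None" would be rejected.
        pass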
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 00000000..1b68d94e
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,3 @@
+[build-system]
+requires = ["setuptools>=42", "wheel"]
+build-backend = "setuptools.build_meta"
\ No newline at end of file
diff --git a/release.sh b/release.sh
old mode 100644
new mode 100755
index d0337897..d94e16fd
--- a/release.sh
+++ b/release.sh
@@ -1,3 +1,4 @@
#!/usr/bin/env bash
-python setup.py sdist upload
+python setup.py sdist bdist_wheel
+twine upload dist/*
diff --git a/requirements.txt b/requirements.txt
index c7d0ccea..a3df3154 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,2 +1,5 @@
-six
-requests
+python-dateutil>=2.6.0
+pytz>=2016.10
+requests>=2.17.0
+six>=1.10.0
+msgpack>=0.5.0
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 00000000..217d437b
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,5 @@
+[bdist_rpm]
+requires=python-dateutil
+
+[wheel]
+universal = 1
diff --git a/setup.py b/setup.py
index edc09303..8ac7d1a7 100755
--- a/setup.py
+++ b/setup.py
@@ -1,5 +1,6 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
+"""Define the setup options."""
try:
import distribute_setup
@@ -16,24 +17,48 @@
import re
-with open(os.path.join(os.path.dirname(__file__),
- 'influxdb',
- '__init__.py')) as f:
- version = re.search("__version__ = '([^']+)'", f.read()).group(1)
+with open(os.path.join(os.path.dirname(__file__),
+ 'influxdb', '__init__.py')) as f:
+ version = re.search("__version__ = '([^']+)'", f.read()).group(1)
with open('requirements.txt', 'r') as f:
requires = [x.strip() for x in f if x.strip()]
with open('test-requirements.txt', 'r') as f:
test_requires = [x.strip() for x in f if x.strip()]
+with open('README.rst', 'r') as f:
+ readme = f.read()
+
+
setup(
name='influxdb',
version=version,
- description="influxdb client",
+ description="InfluxDB client",
+ long_description=readme,
+ url='https://github.com/influxdb/influxdb-python',
+ license='MIT License',
packages=find_packages(exclude=['tests']),
test_suite='tests',
tests_require=test_requires,
install_requires=requires,
extras_require={'test': test_requires},
+ classifiers=[
+ 'Development Status :: 3 - Alpha',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: MIT License',
+ 'Operating System :: OS Independent',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
+ 'Topic :: Software Development :: Libraries',
+ 'Topic :: Software Development :: Libraries :: Python Modules',
+ ],
)
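The version string is scraped out of influxdb/__init__.py with a regular expression rather than imported, which keeps setup.py from importing the package (and its runtime dependencies) at build time. A small illustration of the pattern, using a hypothetical version value:

    import re

    init_source = "__version__ = '5.0.0'\n"  # hypothetical file contents
    version = re.search("__version__ = '([^']+)'", init_source).group(1)
    assert version == '5.0.0'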
diff --git a/test-requirements.txt b/test-requirements.txt
index fac3bb36..9b31f5f1 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -1,3 +1,4 @@
-requests
nose
+nose-cov
mock
+requests-mock
diff --git a/test.sh b/test.sh
new file mode 100755
index 00000000..96d619fa
--- /dev/null
+++ b/test.sh
@@ -0,0 +1,8 @@
+#!/usr/bin/env bash
+
+if ! which tox; then
+ echo "Please install tox using `pip install tox`"
+ exit 1
+fi
+
+tox
diff --git a/tests/__init__.py b/tests/__init__.py
deleted file mode 100644
index 40a96afc..00000000
--- a/tests/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-# -*- coding: utf-8 -*-
diff --git a/tests/influxdb/__init__.py b/tests/influxdb/__init__.py
deleted file mode 100644
index 40a96afc..00000000
--- a/tests/influxdb/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-# -*- coding: utf-8 -*-
diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py
deleted file mode 100644
index cfc5fb61..00000000
--- a/tests/influxdb/client_test.py
+++ /dev/null
@@ -1,227 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-unit tests
-"""
-import requests
-from nose.tools import raises
-from mock import patch
-
-from influxdb import InfluxDBClient
-from influxdb.client import session
-
-
-def _build_response_object(status_code=200, content=""):
- resp = requests.Response()
- resp.status_code = status_code
- resp._content = content
- return resp
-
-
-class TestInfluxDBClient(object):
- def test_switch_db(self):
- cli = InfluxDBClient('host', 8086, 'username', 'password', 'database')
- cli.switch_db('another_database')
- assert cli._database == 'another_database'
-
- def test_switch_user(self):
- cli = InfluxDBClient('host', 8086, 'username', 'password', 'database')
- cli.switch_user('another_username', 'another_password')
- assert cli._username == 'another_username'
- assert cli._password == 'another_password'
-
- def test_write_points(self):
- data = [
- {
- "points": [
- ["1", 1, 1.0],
- ["2", 2, 2.0]
- ],
- "name": "foo",
- "columns": ["column_one", "column_two", "column_three"]
- }
- ]
-
- with patch.object(session, 'post') as mocked_post:
- mocked_post.return_value = _build_response_object(status_code=200)
- cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
- assert cli.write_points(data) is True
-
- @raises(Exception)
- def test_write_points_fails(self):
- with patch.object(session, 'post') as mocked_post:
- mocked_post.return_value = _build_response_object(status_code=500)
- cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
- cli.write_points([])
-
- def test_write_points_with_precision(self):
- data = [
- {
- "points": [
- ["1", 1, 1.0],
- ["2", 2, 2.0]
- ],
- "name": "foo",
- "columns": ["column_one", "column_two", "column_three"]
- }
- ]
-
- with patch.object(session, 'post') as mocked_post:
- mocked_post.return_value = _build_response_object(status_code=200)
- cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
- assert cli.write_points_with_precision(data) is True
-
- @raises(Exception)
- def test_write_points_with_precision_fails(self):
- with patch.object(session, 'post') as mocked_post:
- mocked_post.return_value = _build_response_object(status_code=500)
- cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
- cli.write_points_with_precision([])
-
- @raises(NotImplementedError)
- def test_delete_points(self):
- cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
- cli.delete_points([])
-
- @raises(NotImplementedError)
- def test_create_scheduled_delete(self):
- cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
- cli.create_scheduled_delete([])
-
- @raises(NotImplementedError)
- def test_get_list_scheduled_delete(self):
- cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
- cli.get_list_scheduled_delete()
-
- @raises(NotImplementedError)
- def test_remove_scheduled_delete(self):
- cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
- cli.remove_scheduled_delete(1)
-
- def test_query(self):
- expected = ('[{"name":"foo",'
- '"columns":["time","sequence_number","column_one"],'
- '"points":[[1383876043,16,"2"],[1383876043,15,"1"],'
- '[1383876035,14,"2"],[1383876035,13,"1"]]}]')
- with patch.object(session, 'get') as mocked_get:
- mocked_get.return_value = _build_response_object(
- status_code=200,
- content=expected)
- cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
- result = cli.query('select column_one from foo;')
- assert len(result[0]['points']) == 4
-
- @raises(Exception)
- def test_query_fail(self):
- with patch.object(session, 'get') as mocked_get:
- mocked_get.return_value = _build_response_object(status_code=401)
- cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
- cli.query('select column_one from foo;')
-
- def test_create_database(self):
- with patch.object(session, 'post') as mocked_post:
- mocked_post.return_value = _build_response_object(status_code=201)
- cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
- assert cli.create_database('new_db') is True
-
- @raises(Exception)
- def test_creata_database_fails(self):
- with patch.object(session, 'post') as mocked_post:
- mocked_post.return_value = _build_response_object(status_code=401)
- cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
- cli.create_database('new_db')
-
- def test_delete_database(self):
- with patch.object(session, 'delete') as mocked_post:
- mocked_post.return_value = _build_response_object(status_code=204)
- cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
- assert cli.delete_database('old_db') is True
-
- @raises(Exception)
- def test_delete_database_fails(self):
- with patch.object(session, 'delete') as mocked_post:
- mocked_post.return_value = _build_response_object(status_code=401)
- cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
- cli.delete_database('old_db')
-
- def test_get_database_list(self):
- with patch.object(session, 'get') as mocked_get:
- mocked_get.return_value = _build_response_object(
- status_code=200, content='[{"name": "a_db"}]')
- cli = InfluxDBClient('host', 8086, 'username', 'password')
- assert len(cli.get_database_list()) == 1
- assert cli.get_database_list()[0]['name'] == 'a_db'
-
- @raises(Exception)
- def test_get_database_list_fails(self):
- with patch.object(session, 'get') as mocked_get:
- mocked_get.return_value = _build_response_object(status_code=401)
- cli = InfluxDBClient('host', 8086, 'username', 'password')
- cli.get_database_list()
-
- def test_delete_series(self):
- with patch.object(session, 'delete') as mocked_delete:
- mocked_delete.return_value = _build_response_object(status_code=204)
- cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
- cli.delete_series('old_series')
-
- @raises(Exception)
- def test_delete_series_fails(self):
- with patch.object(session, 'delete') as mocked_delete:
- mocked_delete.return_value = _build_response_object(status_code=401)
- cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
- cli.delete_series('old_series')
-
- def test_get_list_cluster_admins(self):
- pass
-
- def test_add_cluster_admin(self):
- pass
-
- def test_update_cluster_admin_password(self):
- pass
-
- def test_delete_cluster_admin(self):
- pass
-
- def test_set_database_admin(self):
- pass
-
- def test_unset_database_admin(self):
- pass
-
- @raises(NotImplementedError)
- def test_get_list_database_admins(self):
- cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
- cli.get_list_database_admins()
-
- @raises(NotImplementedError)
- def test_add_database_admin(self):
- cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
- cli.add_database_admin('admin', 'admin_secret_password')
-
- @raises(NotImplementedError)
- def test_update_database_admin_password(self):
- cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
- cli.update_database_admin_password('admin', 'admin_secret_password')
-
- @raises(NotImplementedError)
- def test_delete_database_admin(self):
- cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
- cli.delete_database_admin('admin')
-
- def test_get_database_user(self):
- pass
-
- def test_add_database_user(self):
- pass
-
- def test_update_database_user_password(self):
- pass
-
- def test_delete_database_user(self):
- pass
-
- @raises(NotImplementedError)
- def test_update_permission(self):
- cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
- cli.update_permission('admin', [])
diff --git a/tox.ini b/tox.ini
index f5128281..a1005abb 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,14 +1,59 @@
[tox]
-envlist = py33, py27, flake8
+envlist = py27, py35, py36, py37, pypy, pypy3, flake8, pep257, coverage, docs, mypy
[testenv]
-commands =
- pip install -r test-requirements.txt
- nosetests
+passenv = INFLUXDB_PYTHON_INFLUXD_PATH
+setenv = INFLUXDB_PYTHON_SKIP_SERVER_TESTS=False
+deps = -r{toxinidir}/requirements.txt
+ -r{toxinidir}/test-requirements.txt
+ py27: pandas==0.21.1
+ py27: numpy==1.13.3
+ py35: pandas==0.22.0
+ py35: numpy==1.14.6
+ py36: pandas==0.23.4
+ py36: numpy==1.15.4
+ py37: pandas>=0.24.2
+ py37: numpy>=1.16.2
+# Only install pandas with non-pypy interpreters;
+# testing all combinations would be too expensive.
+commands = nosetests -v --with-doctest {posargs}
[testenv:flake8]
deps =
flake8
pep8-naming
-
commands = flake8 influxdb
+
+[testenv:pep257]
+deps = pydocstyle
+commands = pydocstyle --count -ve examples influxdb
+
+[testenv:coverage]
+deps = -r{toxinidir}/requirements.txt
+ -r{toxinidir}/test-requirements.txt
+ pandas==0.24.2
+ coverage
+ numpy
+commands = nosetests -v --with-coverage --cover-html --cover-package=influxdb
+
+[testenv:docs]
+deps = -r{toxinidir}/requirements.txt
+ pandas>=0.24.2
+ numpy>=1.16.2
+ Sphinx>=1.8.5
+ sphinx_rtd_theme
+commands = sphinx-build -b html docs/source docs/build
+
+[testenv:mypy]
+deps = -r{toxinidir}/test-requirements.txt
+ mypy==0.720
+commands = mypy --config-file mypy.ini -p influxdb
+
+[flake8]
+ignore = W503,W504,W605,N802,F821,E402
+# W503: Line break occurred before a binary operator
+# W504: Line break occurred after a binary operator
+# W605: invalid escape sequence
+# N802: nosetests' setUp function is not snake_case
+# F821: false positive in influxdb/dataframe_client.py
+# E402: module level import not at top of file