Skip to content

Commit ff86c4f

Browse files
aviau
authored and committed
Merge pull request influxdata#180 from kespindler/ks-08-docs
improve docs for 0.8
2 parents dee737d + 85f5efd commit ff86c4f

13 files changed

+202
-186
lines changed

.travis.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@ env:
1212
install:
1313
- sudo pip install tox
1414
- sudo pip install coveralls
15-
- wget http://get.influxdb.org/influxdb_0.9.0-rc30_amd64.deb && sudo dpkg -i influxdb_0.9.0-rc30_amd64.deb
15+
- wget http://get.influxdb.org/influxdb_0.9.0_amd64.deb && sudo dpkg -i influxdb_0.9.0_amd64.deb
1616
script:
1717
- travis_wait 30 tox -e $TOX_ENV
1818
after_success:

README.rst

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -104,7 +104,7 @@ Here's a basic example (for more see the examples directory)::
104104

105105
>>> json_body = [
106106
{
107-
"name": "cpu_load_short",
107+
"measurement": "cpu_load_short",
108108
"tags": {
109109
"host": "server01",
110110
"region": "us-west"

influxdb/_dataframe_client.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -137,7 +137,7 @@ def _convert_dataframe_to_json(self, dataframe, measurement, tags=None):
137137
dataframe = dataframe.astype('object')
138138

139139
points = [
140-
{'name': measurement,
140+
{'measurement': measurement,
141141
'tags': tags if tags else {},
142142
'fields': rec,
143143
'time': ts.isoformat()

influxdb/helper.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -137,7 +137,7 @@ def _json_body_(cls):
137137
for series_name, data in six.iteritems(cls._datapoints):
138138
for point in data:
139139
json_point = {
140-
"name": series_name,
140+
"measurement": series_name,
141141
"fields": {},
142142
"tags": {},
143143
}

influxdb/influxdb08/client.py

Lines changed: 17 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -272,9 +272,23 @@ def write(self, data):
272272

273273
def write_points(self, data, time_precision='s', *args, **kwargs):
274274
"""
275-
Write to multiple time series names.
276-
277-
:param data: A list of dicts.
275+
Write to multiple time series names. An example data blob is:
276+
277+
data = [
278+
{
279+
"points": [
280+
[
281+
12
282+
]
283+
],
284+
"name": "cpu_load_short",
285+
"columns": [
286+
"value"
287+
]
288+
}
289+
]
290+
291+
:param data: A list of dicts in InfluxDB 0.8.x data format.
278292
:param time_precision: [Optional, default 's'] Either 's', 'm', 'ms'
279293
or 'u'.
280294
:param batch_size: [Optional] Value to write the points in batches

influxdb/resultset.py

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -50,7 +50,7 @@ def __getitem__(self, key):
5050
raise TypeError('serie_name must be an str or None')
5151

5252
for serie in self._get_series():
53-
serie_name = serie.get('name', 'results')
53+
serie_name = serie.get('measurement', serie.get('name', 'results'))
5454
if serie_name is None:
5555
# this is a "system" query or a query which
5656
# doesn't return a name attribute.
@@ -119,7 +119,9 @@ def keys(self):
119119
keys = []
120120
for serie in self._get_series():
121121
keys.append(
122-
(serie.get('name', 'results'), serie.get('tags', None))
122+
(serie.get('measurement',
123+
serie.get('name', 'results')),
124+
serie.get('tags', None))
123125
)
124126
return keys
125127

@@ -129,7 +131,9 @@ def items(self):
129131
"""
130132
items = []
131133
for serie in self._get_series():
132-
serie_key = (serie.get('name', 'results'), serie.get('tags', None))
134+
serie_key = (serie.get('measurement',
135+
serie.get('name', 'results')),
136+
serie.get('tags', None))
133137
items.append(
134138
(serie_key, self[serie_key])
135139
)

tests/influxdb/chunked_json_test.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -16,10 +16,10 @@ def test_load(self):
1616
Tests reading a sequence of JSON values from a string
1717
"""
1818
example_response = \
19-
'{"results": [{"series": [{"name": "sdfsdfsdf", ' \
19+
'{"results": [{"series": [{"measurement": "sdfsdfsdf", ' \
2020
'"columns": ["time", "value"], "values": ' \
2121
'[["2009-11-10T23:00:00Z", 0.64]]}]}, {"series": ' \
22-
'[{"name": "cpu_load_short", "columns": ["time", "value"], ' \
22+
'[{"measurement": "cpu_load_short", "columns": ["time", "value"], ' \
2323
'"values": [["2009-11-10T23:00:00Z", 0.64]]}]}]}'
2424

2525
res = list(chunked_json.loads(example_response))
@@ -32,12 +32,12 @@ def test_load(self):
3232
'results': [
3333
{'series': [{
3434
'values': [['2009-11-10T23:00:00Z', 0.64]],
35-
'name': 'sdfsdfsdf',
35+
'measurement': 'sdfsdfsdf',
3636
'columns':
3737
['time', 'value']}]},
3838
{'series': [{
3939
'values': [['2009-11-10T23:00:00Z', 0.64]],
40-
'name': 'cpu_load_short',
40+
'measurement': 'cpu_load_short',
4141
'columns': ['time', 'value']}]}
4242
]
4343
}

tests/influxdb/client_test.py

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -83,7 +83,7 @@ def setUp(self):
8383
self.cli = InfluxDBClient('localhost', 8086, 'username', 'password')
8484
self.dummy_points = [
8585
{
86-
"name": "cpu_load_short",
86+
"measurement": "cpu_load_short",
8787
"tags": {
8888
"host": "server01",
8989
"region": "us-west"
@@ -146,7 +146,7 @@ def test_write(self):
146146
cli.write(
147147
{"database": "mydb",
148148
"retentionPolicy": "mypolicy",
149-
"points": [{"name": "cpu_load_short",
149+
"points": [{"measurement": "cpu_load_short",
150150
"tags": {"host": "server01",
151151
"region": "us-west"},
152152
"timestamp": "2009-11-10T23:00:00Z",
@@ -157,7 +157,7 @@ def test_write(self):
157157
json.loads(m.last_request.body),
158158
{"database": "mydb",
159159
"retentionPolicy": "mypolicy",
160-
"points": [{"name": "cpu_load_short",
160+
"points": [{"measurement": "cpu_load_short",
161161
"tags": {"host": "server01",
162162
"region": "us-west"},
163163
"timestamp": "2009-11-10T23:00:00Z",
@@ -211,17 +211,17 @@ def test_write_points_toplevel_attributes(self):
211211

212212
def test_write_points_batch(self):
213213
dummy_points = [
214-
{"name": "cpu_usage", "tags": {"unit": "percent"},
214+
{"measurement": "cpu_usage", "tags": {"unit": "percent"},
215215
"timestamp": "2009-11-10T23:00:00Z", "fields": {"value": 12.34}},
216-
{"name": "network", "tags": {"direction": "in"},
216+
{"measurement": "network", "tags": {"direction": "in"},
217217
"timestamp": "2009-11-10T23:00:00Z", "fields": {"value": 123.00}},
218-
{"name": "network", "tags": {"direction": "out"},
218+
{"measurement": "network", "tags": {"direction": "out"},
219219
"timestamp": "2009-11-10T23:00:00Z", "fields": {"value": 12.00}}
220220
]
221221
expected_last_body = {"tags": {"host": "server01",
222222
"region": "us-west"},
223223
"database": "db",
224-
"points": [{"name": "network",
224+
"points": [{"measurement": "network",
225225
"tags": {"direction": "out"},
226226
"timestamp": "2009-11-10T23:00:00Z",
227227
"fields": {"value": 12.00}}]}
@@ -322,10 +322,10 @@ def test_write_points_with_precision_fails(self):
322322

323323
def test_query(self):
324324
example_response = \
325-
'{"results": [{"series": [{"name": "sdfsdfsdf", ' \
325+
'{"results": [{"series": [{"measurement": "sdfsdfsdf", ' \
326326
'"columns": ["time", "value"], "values": ' \
327327
'[["2009-11-10T23:00:00Z", 0.64]]}]}, {"series": ' \
328-
'[{"name": "cpu_load_short", "columns": ["time", "value"], ' \
328+
'[{"measurement": "cpu_load_short", "columns": ["time", "value"], ' \
329329
'"values": [["2009-11-10T23:00:00Z", 0.64]]}]}]}'
330330

331331
with requests_mock.Mocker() as m:
@@ -352,7 +352,7 @@ def test_query_chunked(self):
352352
[1415206212980, 10001, 555],
353353
[1415197271586, 10001, 23]
354354
],
355-
'name': 'foo',
355+
'measurement': 'foo',
356356
'columns': [
357357
'time',
358358
'sequence_number',

0 commit comments

Comments
 (0)
pFad - Phonifier reborn

Pfad - The Proxy pFad of © 2024 Garber Painting. All rights reserved.

Note: This service is not intended for secure transactions such as banking, social media, email, or purchasing. Use at your own risk. We assume no liability whatsoever for broken pages.


Alternative Proxies:

Alternative Proxy

pFad Proxy

pFad v3 Proxy

pFad v4 Proxy