From c92bb4e4a5684611dc65b53c5791fae4bce5d94b Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 20 Feb 2015 11:37:12 -0500 Subject: [PATCH 001/536] Moving towards v0.9.0: removed deprecated endpoints, fixed write_points, and more... --- influxdb/client.py | 615 ++++------------------------------ tests/influxdb/client_test.py | 403 +++++----------------- 2 files changed, 151 insertions(+), 867 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 6fcae994..4e4ffcbf 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -18,7 +18,7 @@ class InfluxDBClientError(Exception): - "Raised when an error occurs in the request" + """Raised when an error occurs in the request""" def __init__(self, content, code): super(InfluxDBClientError, self).__init__( "{0}: {1}".format(code, content)) @@ -99,7 +99,13 @@ def __init__(self, 'Content-type': 'application/json', 'Accept': 'text/plain'} - # Change member variables + @staticmethod + def _get_values_from_query_response(response): + """Returns a list of values from a query response""" + values = [ + value[0] for value in response['results'][0]['rows'][0]['values'] + ] + return values def switch_database(self, database): """ @@ -112,18 +118,6 @@ def switch_database(self, database): """ self._database = database - def switch_db(self, database): - """ - DEPRECATED. Change client database. - - """ - warnings.warn( - "switch_db is deprecated, and will be removed " - "in future versions. Please use " - "``InfluxDBClient.switch_database(database)`` instead.", - FutureWarning) - return self.switch_database(database) - def switch_user(self, username, password): """ switch_user() @@ -173,173 +167,98 @@ def request(self, url, method='GET', params=None, data=None, else: raise InfluxDBClientError(response.content, response.status_code) - def write(self, data): - """ Provided as convenience for influxdb v0.9.0, this may change. 
""" + def write(self, data, params=None, expected_response_code=200): + """ Write to influxdb """ self.request( url="write", method='POST', - params=None, + params=params, data=data, - expected_response_code=200 + expected_response_code=expected_response_code ) return True - # Writing Data - # - # Assuming you have a database named foo_production you can write data - # by doing a POST to /db/foo_production/series?u=some_user&p=some_password - # with a JSON body of points. - - def write_points(self, data, time_precision='s', *args, **kwargs): + def query(self, + query, + params={}, + expected_response_code=200, + database=None): """ - Write to multiple time series names. + Query data - :param data: A list of dicts. - :param time_precision: [Optional, default 's'] Either 's', 'm', 'ms' - or 'u'. - :param batch_size: [Optional] Value to write the points in batches - instead of all at one time. Useful for when doing data dumps from - one database to another or when doing a massive write operation - :type batch_size: int + :param params: Additional parameters to be passed to requests. + :param database: Database to query, default to None. + :param expected_response_code: Expected response code. Defaults to 200. """ - def list_chunks(l, n): - """ Yield successive n-sized chunks from l. - """ - for i in xrange(0, len(l), n): - yield l[i:i + n] - - batch_size = kwargs.get('batch_size') - if batch_size: - for item in data: - name = item.get('name') - columns = item.get('columns') - point_list = item.get('points') - - for batch in list_chunks(point_list, batch_size): - item = [{ - "points": batch, - "name": name, - "columns": columns - }] - self._write_points( - data=item, - time_precision=time_precision) - - return True - - return self._write_points(data=data, time_precision=time_precision) - - def write_points_with_precision(self, data, time_precision='s'): - """ - DEPRECATED. 
Write to multiple time series names + params['q'] = query + if database: + params['db'] = database + + response = self.request( + url="query", + method='GET', + params=params, + data=None, + expected_response_code=expected_response_code + ) + return response.json() + def write_points(self, + points, + time_precision=None, + database=None, + retention_policy=None, + *args, + **kwargs): """ - warnings.warn( - "write_points_with_precision is deprecated, and will be removed " - "in future versions. Please use " - "``InfluxDBClient.write_points(time_precision='..')`` instead.", - FutureWarning) - return self._write_points(data=data, time_precision=time_precision) - - def _write_points(self, data, time_precision): - if time_precision not in ['s', 'm', 'ms', 'u']: + Write to multiple time series names. + + :param points: A list of dicts. + :param time_precision: [Optional, default None] Either 's', 'm', 'ms' + or 'u'. + :param database The database to write the points to. Defaults to + the client's current db. + :param retention_policy The retention policy for the points. + """ + #TODO: re-implement chunks. + return self._write_points(points=points, + time_precision=time_precision, + database=database, + retention_policy=retention_policy) + + def _write_points(self, points, time_precision, database, retention_policy): + if time_precision not in ['n', 'u', 'ms', 's', 'm', 'h', None]: raise Exception( - "Invalid time precision is given. (use 's', 'm', 'ms' or 'u')") + "Invalid time precision is given. 
(use 'n', 'u', 'ms', 's', 'm' or 'h')") - if self.use_udp and time_precision != 's': + if self.use_udp and (time_precision != 's' or None): raise Exception( "InfluxDB only supports seconds precision for udp writes" ) - url = "db/{0}/series".format(self._database) - - params = { - 'time_precision': time_precision + data = { + 'points': points } + if time_precision: + data['precision'] = time_precision + + if retention_policy: + data['retentionPolicy'] = retention_policy + + data['database'] = database or self._database + if self.use_udp: self.send_packet(data) else: - self.request( - url=url, - method='POST', - params=params, + self.write( data=data, expected_response_code=200 ) return True - # One Time Deletes - - def delete_points(self, name): - """ - Delete an entire series - """ - url = "db/{0}/series/{1}".format(self._database, name) - - self.request( - url=url, - method='DELETE', - expected_response_code=204 - ) - - return True - - # Regularly Scheduled Deletes - - def create_scheduled_delete(self, json_body): - """ - TODO: Create scheduled delete - - 2013-11-08: This endpoint has not been implemented yet in ver0.0.8, - but it is documented in http://influxdb.org/docs/api/http.html. - See also: src/api/http/api.go:l57 - """ - raise NotImplementedError() - - # get list of deletes - # curl http://localhost:8086/db/site_dev/scheduled_deletes - # - # remove a regularly scheduled delete - # curl -X DELETE http://localhost:8086/db/site_dev/scheduled_deletes/:id - - def get_list_scheduled_delete(self): - """ - TODO: Get list of scheduled deletes - - 2013-11-08: This endpoint has not been implemented yet in ver0.0.8, - but it is documented in http://influxdb.org/docs/api/http.html. 
- See also: src/api/http/api.go:l57 - """ - raise NotImplementedError() - - def remove_scheduled_delete(self, delete_id): - """ - TODO: Remove scheduled delete - - 2013-11-08: This endpoint has not been implemented yet in ver0.0.8, - but it is documented in http://influxdb.org/docs/api/http.html. - See also: src/api/http/api.go:l57 - """ - raise NotImplementedError() - - def query(self, query, time_precision='s', chunked=False): - """ - Quering data - - :param time_precision: [Optional, default 's'] Either 's', 'm', 'ms' - or 'u'. - :param chunked: [Optional, default=False] True if the data shall be - retrieved in chunks, False otherwise. - """ - return self._query(query, time_precision=time_precision, - chunked=chunked) - - # Querying Data - # - # GET db/:name/series. It takes five parameters def _query(self, query, time_precision='s', chunked=False): if time_precision not in ['s', 'm', 'ms', 'u']: raise Exception( @@ -371,404 +290,14 @@ def _query(self, query, time_precision='s', chunked=False): else: return response.json() - # Creating and Dropping Databases - # - # ### create a database - # curl -X POST http://localhost:8086/db -d '{"name": "site_development"}' - # - # ### drop a database - # curl -X DELETE http://localhost:8086/db/site_development - - def create_database(self, database): - """ - create_database() - - Create a database on the InfluxDB server. - - :param database: the name of the database to create - :type database: string - :rtype: boolean - """ - url = "db" - - data = {'name': database} - - self.request( - url=url, - method='POST', - data=data, - expected_response_code=201 - ) - - return True - - def delete_database(self, database): - """ - delete_database() - - Drop a database on the InfluxDB server. 
- - :param database: the name of the database to delete - :type database: string - :rtype: boolean - """ - url = "db/{0}".format(database) - - self.request( - url=url, - method='DELETE', - expected_response_code=204 - ) - - return True - - # ### get list of databases - # curl -X GET http://localhost:8086/db - def get_list_database(self): """ Get the list of databases """ - url = "db" - - response = self.request( - url=url, - method='GET', - expected_response_code=200 - ) - - return response.json() - - def get_database_list(self): - """ - DEPRECATED. Get the list of databases - - """ - warnings.warn( - "get_database_list is deprecated, and will be removed " - "in future versions. Please use " - "``InfluxDBClient.get_list_database`` instead.", - FutureWarning) - return self.get_list_database() - - def delete_series(self, series): - """ - delete_series() - - Drop a series on the InfluxDB server. - - :param series: the name of the series to delete - :type series: string - :rtype: boolean - """ - url = "db/{0}/series/{1}".format( - self._database, - series + return self._get_values_from_query_response( + self.query("SHOW DATABASES") ) - self.request( - url=url, - method='DELETE', - expected_response_code=204 - ) - - return True - - def get_list_series(self): - """ - Get a list of all time series in a database - """ - - response = self._query('list series') - - series_list = [] - for series in response[0]['points']: - series_list.append(series[1]) - - return series_list - - def get_list_continuous_queries(self): - """ - Get a list of continuous queries - """ - - response = self._query('list continuous queries') - queries_list = [] - for query in response[0]['points']: - queries_list.append(query[2]) - - return queries_list - - # Security - # get list of cluster admins - # curl http://localhost:8086/cluster_admins?u=root&p=root - - # add cluster admin - # curl -X POST http://localhost:8086/cluster_admins?u=root&p=root \ - # -d '{"name": "paul", "password": "i write teh 
docz"}' - - # update cluster admin password - # curl -X POST http://localhost:8086/cluster_admins/paul?u=root&p=root \ - # -d '{"password": "new pass"}' - - # delete cluster admin - # curl -X DELETE http://localhost:8086/cluster_admins/paul?u=root&p=root - - # Database admins, with a database name of site_dev - # get list of database admins - # curl http://localhost:8086/db/site_dev/admins?u=root&p=root - - # add database admin - # curl -X POST http://localhost:8086/db/site_dev/admins?u=root&p=root \ - # -d '{"name": "paul", "password": "i write teh docz"}' - - # update database admin password - # curl -X POST http://localhost:8086/db/site_dev/admins/paul?u=root&p=root\ - # -d '{"password": "new pass"}' - - # delete database admin - # curl -X DELETE \ - # http://localhost:8086/db/site_dev/admins/paul?u=root&p=root - - def get_list_cluster_admins(self): - """ - Get list of cluster admins - """ - response = self.request( - url="cluster_admins", - method='GET', - expected_response_code=200 - ) - - return response.json() - - def add_cluster_admin(self, new_username, new_password): - """ - Add cluster admin - """ - data = { - 'name': new_username, - 'password': new_password - } - - self.request( - url="cluster_admins", - method='POST', - data=data, - expected_response_code=200 - ) - - return True - - def update_cluster_admin_password(self, username, new_password): - """ - Update cluster admin password - """ - url = "cluster_admins/{0}".format(username) - - data = { - 'password': new_password - } - - self.request( - url=url, - method='POST', - data=data, - expected_response_code=200 - ) - - return True - - def delete_cluster_admin(self, username): - """ - Delete cluster admin - """ - url = "cluster_admins/{0}".format(username) - - self.request( - url=url, - method='DELETE', - expected_response_code=200 - ) - - return True - - def set_database_admin(self, username): - """ - Set user as database admin - """ - return self.alter_database_admin(username, True) - - def 
unset_database_admin(self, username): - """ - Unset user as database admin - """ - return self.alter_database_admin(username, False) - - def alter_database_admin(self, username, is_admin): - url = "db/{0}/users/{1}".format(self._database, username) - - data = {'admin': is_admin} - - self.request( - url=url, - method='POST', - data=data, - expected_response_code=200 - ) - - return True - - def get_list_database_admins(self): - """ - TODO: Get list of database admins - - 2013-11-08: This endpoint has not been implemented yet in ver0.0.8, - but it is documented in http://influxdb.org/docs/api/http.html. - See also: src/api/http/api.go:l57 - """ - raise NotImplementedError() - - def add_database_admin(self, new_username, new_password): - """ - TODO: Add cluster admin - - 2013-11-08: This endpoint has not been implemented yet in ver0.0.8, - but it is documented in http://influxdb.org/docs/api/http.html. - See also: src/api/http/api.go:l57 - """ - raise NotImplementedError() - - def update_database_admin_password(self, username, new_password): - """ - TODO: Update database admin password - - 2013-11-08: This endpoint has not been implemented yet in ver0.0.8, - but it is documented in http://influxdb.org/docs/api/http.html. - See also: src/api/http/api.go:l57 - """ - raise NotImplementedError() - - def delete_database_admin(self, username): - """ - TODO: Delete database admin - - 2013-11-08: This endpoint has not been implemented yet in ver0.0.8, - but it is documented in http://influxdb.org/docs/api/http.html. 
- See also: src/api/http/api.go:l57 - """ - raise NotImplementedError() - - ### - # Limiting User Access - - # Database users - # get list of database users - # curl http://localhost:8086/db/site_dev/users?u=root&p=root - - # add database user - # curl -X POST http://localhost:8086/db/site_dev/users?u=root&p=root \ - # -d '{"name": "paul", "password": "i write teh docz"}' - - # update database user password - # curl -X POST http://localhost:8086/db/site_dev/users/paul?u=root&p=root \ - # -d '{"password": "new pass"}' - - # delete database user - # curl -X DELETE http://localhost:8086/db/site_dev/users/paul?u=root&p=root - - def get_database_users(self): - """ - Get list of database users - """ - url = "db/{0}/users".format(self._database) - - response = self.request( - url=url, - method='GET', - expected_response_code=200 - ) - - return response.json() - - def add_database_user(self, new_username, new_password, permissions=None): - """ - Add database user - - :param permissions: A ``(readFrom, writeTo)`` tuple - """ - url = "db/{0}/users".format(self._database) - - data = { - 'name': new_username, - 'password': new_password - } - - if permissions: - try: - data['readFrom'], data['writeTo'] = permissions - except (ValueError, TypeError): - raise TypeError( - "'permissions' must be (readFrom, writeTo) tuple" - ) - - self.request( - url=url, - method='POST', - data=data, - expected_response_code=200 - ) - - return True - - def update_database_user_password(self, username, new_password): - """ - Update password - """ - url = "db/{0}/users/{1}".format(self._database, username) - - data = { - 'password': new_password - } - - self.request( - url=url, - method='POST', - data=data, - expected_response_code=200 - ) - - if username == self._username: - self._password = new_password - - return True - - def delete_database_user(self, username): - """ - Delete database user - """ - url = "db/{0}/users/{1}".format(self._database, username) - - self.request( - url=url, - 
method='DELETE', - expected_response_code=200 - ) - - return True - - # update the user by POSTing to db/site_dev/users/paul - - def update_permission(self, username, json_body): - """ - TODO: Update read/write permission - - 2013-11-08: This endpoint has not been implemented yet in ver0.0.8, - but it is documented in http://influxdb.org/docs/api/http.html. - See also: src/api/http/api.go:l57 - """ - raise NotImplementedError() - def send_packet(self, packet): data = json.dumps(packet) byte = data.encode('utf-8') diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index 8cec9657..7b6bb2d2 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -90,12 +90,6 @@ def test_switch_database(self): cli.switch_database('another_database') assert cli._database == 'another_database' - @raises(FutureWarning) - def test_switch_db_deprecated(self): - cli = InfluxDBClient('host', 8086, 'username', 'password', 'database') - cli.switch_db('another_database') - assert cli._database == 'another_database' - def test_switch_user(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'database') cli.switch_user('another_username', 'another_password') @@ -134,19 +128,36 @@ def test_write_points(self): with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, - "http://localhost:8086/db/db/series" + "http://localhost:8086/write" ) cli = InfluxDBClient(database='db') cli.write_points( - self.dummy_points - ) - - self.assertListEqual( - json.loads(m.last_request.body), - self.dummy_points + self.dummy_points, ) + self.assertDictEqual( + {u'points': + [ + { + u'points': [ + [u'1', 1, 1.0], + [u'2', 2, 2.0] + ], + u'name': u'foo', + u'columns': [ + u'column_one', + u'column_two', + u'column_three' + ] + } + ], + u'database': u'db', + }, + json.loads(m.last_request.body) + ) + + @unittest.skip('Not implemented for 0.9') def test_write_points_string(self): with requests_mock.Mocker() as m: m.register_uri( @@ -164,6 +175,7 
@@ def test_write_points_string(self): self.dummy_points ) + @unittest.skip('Not implemented for 0.9') def test_write_points_batch(self): with _mocked_session('post', 200, self.dummy_points): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') @@ -172,6 +184,7 @@ def test_write_points_batch(self): batch_size=2 ) is True + @unittest.skip('Not implemented for 0.9') def test_write_points_udp(self): s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) s.bind(('0.0.0.0', 4444)) @@ -187,6 +200,7 @@ def test_write_points_udp(self): assert self.dummy_points == \ json.loads(received_data.decode(), strict=True) + @unittest.skip('Not implemented for 0.9') def test_write_bad_precision_udp(self): cli = InfluxDBClient( 'localhost', 8086, 'root', 'root', @@ -209,15 +223,45 @@ def test_write_points_fails(self): cli.write_points([]) def test_write_points_with_precision(self): - with _mocked_session('post', 200, self.dummy_points): - cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') - assert cli.write_points(self.dummy_points) is True + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.POST, + "http://localhost:8086/write" + ) + + cli = InfluxDBClient(database='db') + cli.write_points( + self.dummy_points, + time_precision='n' + ) + + self.assertDictEqual( + {u'points': + [ + { + u'points': [ + [u'1', 1, 1.0], + [u'2', 2, 2.0] + ], + u'name': u'foo', + u'columns': [ + u'column_one', + u'column_two', + u'column_three' + ] + } + ], + u'database': u'db', + u'precision': u'n', + }, + json.loads(m.last_request.body) + ) def test_write_points_bad_precision(self): cli = InfluxDBClient() with self.assertRaisesRegexp( Exception, - "Invalid time precision is given. \(use 's', 'm', 'ms' or 'u'\)" + "Invalid time precision is given. 
\(use 'n', 'u', 'ms', 's', 'm' or 'h'\)" ): cli.write_points( self.dummy_points, @@ -230,38 +274,6 @@ def test_write_points_with_precision_fails(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') cli.write_points_with_precision([]) - def test_delete_points(self): - with _mocked_session('delete', 204) as mocked: - cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') - assert cli.delete_points("foo") is True - - assert len(mocked.call_args_list) == 1 - args, kwds = mocked.call_args_list[0] - - assert kwds['params'] == {'u': 'username', 'p': 'password'} - assert kwds['url'] == 'http://host:8086/db/db/series/foo' - - @raises(Exception) - def test_delete_points_with_wrong_name(self): - with _mocked_session('delete', 400): - cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') - cli.delete_points("nonexist") - - @raises(NotImplementedError) - def test_create_scheduled_delete(self): - cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') - cli.create_scheduled_delete([]) - - @raises(NotImplementedError) - def test_get_list_scheduled_delete(self): - cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') - cli.get_list_scheduled_delete() - - @raises(NotImplementedError) - def test_remove_scheduled_delete(self): - cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') - cli.remove_scheduled_delete(1) - def test_query(self): data = [ { @@ -278,6 +290,7 @@ def test_query(self): result = cli.query('select column_one from foo;') assert len(result[0]['points']) == 4 + @unittest.skip('Not implemented for 0.9') def test_query_chunked(self): cli = InfluxDBClient(database='db') example_object = { @@ -316,14 +329,7 @@ def test_query_fail(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') cli.query('select column_one from foo;') - def test_query_bad_precision(self): - cli = InfluxDBClient() - with self.assertRaisesRegexp( - Exception, - "Invalid time precision is given. 
\(use 's', 'm', 'ms' or 'u'\)" - ): - cli.query('select column_one from foo', time_precision='g') - + @unittest.skip('Not implemented for 0.9') def test_create_database(self): with _mocked_session('post', 201, {"name": "new_db"}): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') @@ -335,6 +341,7 @@ def test_create_database_fails(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') cli.create_database('new_db') + @unittest.skip('Not implemented for 0.9') def test_delete_database(self): with _mocked_session('delete', 204): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') @@ -347,13 +354,21 @@ def test_delete_database_fails(self): cli.delete_database('old_db') def test_get_list_database(self): - data = [ - {"name": "a_db"} - ] - with _mocked_session('get', 200, data): + data = { + "results": + [ + {"rows": [ + {"columns": ["name"], + "values":[["mydb"], ["myotherdb"]]}]} + ] + } + + with _mocked_session('get', 200, json.dumps(data)): cli = InfluxDBClient('host', 8086, 'username', 'password') - assert len(cli.get_list_database()) == 1 - assert cli.get_list_database()[0]['name'] == 'a_db' + self.assertListEqual( + cli.get_list_database(), + [u'mydb', u'myotherdb'] + ) @raises(Exception) def test_get_list_database_fails(self): @@ -361,27 +376,7 @@ def test_get_list_database_fails(self): cli = InfluxDBClient('host', 8086, 'username', 'password') cli.get_list_database() - @raises(FutureWarning) - def test_get_database_list_deprecated(self): - data = [ - {"name": "a_db"} - ] - with _mocked_session('get', 200, data): - cli = InfluxDBClient('host', 8086, 'username', 'password') - assert len(cli.get_database_list()) == 1 - assert cli.get_database_list()[0]['name'] == 'a_db' - - def test_delete_series(self): - with _mocked_session('delete', 204): - cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') - cli.delete_series('old_series') - - @raises(Exception) - def test_delete_series_fails(self): - with 
_mocked_session('delete', 401): - cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') - cli.delete_series('old_series') - + @unittest.skip('Not implemented for 0.9') def test_get_series_list(self): cli = InfluxDBClient(database='db') @@ -400,243 +395,3 @@ def test_get_series_list(self): cli.get_list_series(), ['foo', 'bar'] ) - - def test_get_continuous_queries(self): - cli = InfluxDBClient(database='db') - - with requests_mock.Mocker() as m: - - # Tip: put this in a json linter! - example_response = '[ { "name": "continuous queries", "columns"' \ - ': [ "time", "id", "query" ], "points": [ [ ' \ - '0, 1, "select foo(bar,95) from \\"foo_bar' \ - 's\\" group by time(5m) into response_times.' \ - 'percentiles.5m.95" ], [ 0, 2, "select perce' \ - 'ntile(value,95) from \\"response_times\\" g' \ - 'roup by time(5m) into response_times.percen' \ - 'tiles.5m.95" ] ] } ]' - - m.register_uri( - requests_mock.GET, - "http://localhost:8086/db/db/series", - text=example_response - ) - - self.assertListEqual( - cli.get_list_continuous_queries(), - [ - 'select foo(bar,95) from "foo_bars" group ' - 'by time(5m) into response_times.percentiles.5m.95', - - 'select percentile(value,95) from "response_times" group ' - 'by time(5m) into response_times.percentiles.5m.95' - ] - ) - - def test_get_list_cluster_admins(self): - pass - - def test_add_cluster_admin(self): - with requests_mock.Mocker() as m: - m.register_uri( - requests_mock.POST, - "http://localhost:8086/cluster_admins" - ) - - cli = InfluxDBClient(database='db') - cli.add_cluster_admin( - new_username='paul', - new_password='laup' - ) - - self.assertDictEqual( - json.loads(m.last_request.body), - { - 'name': 'paul', - 'password': 'laup' - } - ) - - def test_update_cluster_admin_password(self): - with requests_mock.Mocker() as m: - m.register_uri( - requests_mock.POST, - "http://localhost:8086/cluster_admins/paul" - ) - - cli = InfluxDBClient(database='db') - cli.update_cluster_admin_password( - username='paul', 
- new_password='laup' - ) - - self.assertDictEqual( - json.loads(m.last_request.body), - {'password': 'laup'} - ) - - def test_delete_cluster_admin(self): - with requests_mock.Mocker() as m: - m.register_uri( - requests_mock.DELETE, - "http://localhost:8086/cluster_admins/paul", - status_code=200, - ) - - cli = InfluxDBClient(database='db') - cli.delete_cluster_admin(username='paul') - - self.assertIsNone(m.last_request.body) - - def test_set_database_admin(self): - pass - - def test_unset_database_admin(self): - pass - - def test_alter_database_admin(self): - with requests_mock.Mocker() as m: - m.register_uri( - requests_mock.POST, - "http://localhost:8086/db/db/users/paul" - ) - - cli = InfluxDBClient(database='db') - cli.alter_database_admin( - username='paul', - is_admin=False - ) - - self.assertDictEqual( - json.loads(m.last_request.body), - { - 'admin': False - } - ) - - @raises(NotImplementedError) - def test_get_list_database_admins(self): - cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') - cli.get_list_database_admins() - - @raises(NotImplementedError) - def test_add_database_admin(self): - cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') - cli.add_database_admin('admin', 'admin_secret_password') - - @raises(NotImplementedError) - def test_update_database_admin_password(self): - cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') - cli.update_database_admin_password('admin', 'admin_secret_password') - - @raises(NotImplementedError) - def test_delete_database_admin(self): - cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') - cli.delete_database_admin('admin') - - def test_get_database_users(self): - cli = InfluxDBClient('localhost', 8086, 'username', 'password', 'db') - - example_response = \ - '[{"name":"paul","isAdmin":false,"writeTo":".*","readFrom":".*"},'\ - '{"name":"bobby","isAdmin":false,"writeTo":".*","readFrom":".*"}]' - - with requests_mock.Mocker() as m: - m.register_uri( - 
requests_mock.GET, - "http://localhost:8086/db/db/users", - text=example_response - ) - users = cli.get_database_users() - - self.assertEqual(json.loads(example_response), users) - - def test_add_database_user(self): - with requests_mock.Mocker() as m: - m.register_uri( - requests_mock.POST, - "http://localhost:8086/db/db/users" - ) - cli = InfluxDBClient(database='db') - cli.add_database_user( - new_username='paul', - new_password='laup', - permissions=('.*', '.*') - ) - - self.assertDictEqual( - json.loads(m.last_request.body), - { - 'writeTo': '.*', - 'password': 'laup', - 'readFrom': '.*', - 'name': 'paul' - } - ) - - def test_add_database_user_bad_permissions(self): - cli = InfluxDBClient() - - with self.assertRaisesRegexp( - Exception, - "'permissions' must be \(readFrom, writeTo\) tuple" - ): - cli.add_database_user( - new_password='paul', - new_username='paul', - permissions=('hello', 'hello', 'hello') - ) - - def test_update_database_user_password(self): - with requests_mock.Mocker() as m: - m.register_uri( - requests_mock.POST, - "http://localhost:8086/db/db/users/paul" - ) - - cli = InfluxDBClient(database='db') - cli.update_database_user_password( - username='paul', - new_password='laup' - ) - - self.assertDictEqual( - json.loads(m.last_request.body), - {'password': 'laup'} - ) - - def test_update_database_user_password_current_user(self): - cli = InfluxDBClient( - username='root', - password='hello', - database='database' - ) - with requests_mock.Mocker() as m: - m.register_uri( - requests_mock.POST, - "http://localhost:8086/db/database/users/root" - ) - - cli.update_database_user_password( - username='root', - new_password='bye' - ) - - self.assertEqual(cli._password, 'bye') - - def test_delete_database_user(self): - with requests_mock.Mocker() as m: - m.register_uri( - requests_mock.DELETE, - "http://localhost:8086/db/db/users/paul" - ) - - cli = InfluxDBClient(database='db') - cli.delete_database_user(username='paul') - - 
self.assertIsNone(m.last_request.body) - - @raises(NotImplementedError) - def test_update_permission(self): - cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') - cli.update_permission('admin', []) From ada3dad24abb4794d0037431025fb0eafc15aa94 Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 20 Feb 2015 12:42:43 -0500 Subject: [PATCH 002/536] Flake8 fixes + new time precisions --- influxdb/client.py | 14 +++++--- tests/influxdb/client_test.py | 65 ++++++++++++----------------------- 2 files changed, 31 insertions(+), 48 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 4e4ffcbf..1dccb316 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -5,7 +5,6 @@ import json import socket import requests -import warnings from influxdb import chunked_json @@ -221,18 +220,23 @@ def write_points(self, the client's current db. :param retention_policy The retention policy for the points. """ - #TODO: re-implement chunks. + # TODO: re-implement chunks. return self._write_points(points=points, time_precision=time_precision, database=database, retention_policy=retention_policy) - def _write_points(self, points, time_precision, database, retention_policy): + def _write_points(self, + points, + time_precision, + database, + retention_policy): if time_precision not in ['n', 'u', 'ms', 's', 'm', 'h', None]: raise Exception( - "Invalid time precision is given. (use 'n', 'u', 'ms', 's', 'm' or 'h')") + "Invalid time precision is given. 
" + "(use 'n', 'u', 'ms', 's', 'm' or 'h')") - if self.use_udp and (time_precision != 's' or None): + if self.use_udp and time_precision and time_precision != 's': raise Exception( "InfluxDB only supports seconds precision for udp writes" ) diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index 7b6bb2d2..b7dbd813 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -67,12 +67,15 @@ def setUp(self): self.dummy_points = [ { - "points": [ - ["1", 1, 1.0], - ["2", 2, 2.0] - ], - "name": "foo", - "columns": ["column_one", "column_two", "column_three"] + "name": "cpu_load_short", + "tags": { + "host": "server01", + "region": "us-west" + }, + "timestamp": "2009-11-10T23:00:00Z", + "values": { + "value": 0.64 + } } ] @@ -135,25 +138,11 @@ def test_write_points(self): cli.write_points( self.dummy_points, ) - self.assertDictEqual( - {u'points': - [ - { - u'points': [ - [u'1', 1, 1.0], - [u'2', 2, 2.0] - ], - u'name': u'foo', - u'columns': [ - u'column_one', - u'column_two', - u'column_three' - ] - } - ], - u'database': u'db', - }, + { + "database": "db", + "points": self.dummy_points, + }, json.loads(m.last_request.body) ) @@ -184,7 +173,6 @@ def test_write_points_batch(self): batch_size=2 ) is True - @unittest.skip('Not implemented for 0.9') def test_write_points_udp(self): s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) s.bind(('0.0.0.0', 4444)) @@ -197,10 +185,14 @@ def test_write_points_udp(self): received_data, addr = s.recvfrom(1024) - assert self.dummy_points == \ + self.assertDictEqual( + { + "points": self.dummy_points, + "database": "test" + }, json.loads(received_data.decode(), strict=True) + ) - @unittest.skip('Not implemented for 0.9') def test_write_bad_precision_udp(self): cli = InfluxDBClient( 'localhost', 8086, 'root', 'root', @@ -236,21 +228,7 @@ def test_write_points_with_precision(self): ) self.assertDictEqual( - {u'points': - [ - { - u'points': [ - [u'1', 1, 1.0], - [u'2', 2, 2.0] - ], - 
u'name': u'foo', - u'columns': [ - u'column_one', - u'column_two', - u'column_three' - ] - } - ], + {u'points': self.dummy_points, u'database': u'db', u'precision': u'n', }, @@ -261,7 +239,8 @@ def test_write_points_bad_precision(self): cli = InfluxDBClient() with self.assertRaisesRegexp( Exception, - "Invalid time precision is given. \(use 'n', 'u', 'ms', 's', 'm' or 'h'\)" + "Invalid time precision is given. " + "\(use 'n', 'u', 'ms', 's', 'm' or 'h'\)" ): cli.write_points( self.dummy_points, From e180304fb8a1c84b0e6587e0aed94cf563663fa8 Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 20 Feb 2015 12:47:58 -0500 Subject: [PATCH 003/536] Disabled dataframe client tests --- tests/influxdb/dataframe_client_test.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/influxdb/dataframe_client_test.py b/tests/influxdb/dataframe_client_test.py index 9d82f345..f4bc3c5f 100644 --- a/tests/influxdb/dataframe_client_test.py +++ b/tests/influxdb/dataframe_client_test.py @@ -19,6 +19,7 @@ from influxdb import DataFrameClient +@unittest.skip('Not updated for 0.9') @skipIfPYpy class TestDataFrameClient(unittest.TestCase): From e7460826935f43863b17e972431bdfe78a1a1563 Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 20 Feb 2015 13:53:44 -0500 Subject: [PATCH 004/536] Added Create/Drop database methods --- influxdb/client.py | 12 +++++++++++ tests/influxdb/client_test.py | 38 +++++++++++++++++++++++++---------- 2 files changed, 39 insertions(+), 11 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 1dccb316..06e681b4 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -302,6 +302,18 @@ def get_list_database(self): self.query("SHOW DATABASES") ) + def create_database(self, dbname): + """ + Create a new database + """ + self.query("CREATE DATABASE %s" % dbname) + + def drop_database(self, dbname): + """ + Create a new database + """ + self.query("DROP DATABASE %s" % dbname) + def send_packet(self, packet): data = json.dumps(packet) byte = 
data.encode('utf-8') diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index b7dbd813..0fd31871 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -308,11 +308,19 @@ def test_query_fail(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') cli.query('select column_one from foo;') - @unittest.skip('Not implemented for 0.9') def test_create_database(self): - with _mocked_session('post', 201, {"name": "new_db"}): - cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') - assert cli.create_database('new_db') is True + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.GET, + "http://localhost:8086/query", + text='{"results":[{}]}' + ) + cli = InfluxDBClient('localhost', 8086, 'username', 'password', 'db') + cli.create_database('new_db') + self.assertEqual( + m.last_request.qs['q'][0], + 'create database new_db' + ) @raises(Exception) def test_create_database_fails(self): @@ -320,17 +328,25 @@ def test_create_database_fails(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') cli.create_database('new_db') - @unittest.skip('Not implemented for 0.9') - def test_delete_database(self): - with _mocked_session('delete', 204): - cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') - assert cli.delete_database('old_db') is True + def test_drop_database(self): + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.GET, + "http://localhost:8086/query", + text='{"results":[{}]}' + ) + cli = InfluxDBClient('localhost', 8086, 'username', 'password', 'db') + cli.drop_database('new_db') + self.assertEqual( + m.last_request.qs['q'][0], + 'drop database new_db' + ) @raises(Exception) - def test_delete_database_fails(self): + def test_drop_database_fails(self): with _mocked_session('delete', 401): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') - cli.delete_database('old_db') + cli.drop_database('old_db') def 
test_get_list_database(self): data = { From a82b0912a2c2dfb4210700b7bdef844d330cc407 Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 20 Feb 2015 14:44:05 -0500 Subject: [PATCH 005/536] Added get_list_retention_policies --- influxdb/client.py | 47 ++++++++++++++++++++++++++++------- tests/influxdb/client_test.py | 32 ++++++++++++++++-------- 2 files changed, 60 insertions(+), 19 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 06e681b4..1ae5417c 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -99,12 +99,22 @@ def __init__(self, 'Accept': 'text/plain'} @staticmethod - def _get_values_from_query_response(response): - """Returns a list of values from a query response""" - values = [ - value[0] for value in response['results'][0]['rows'][0]['values'] - ] - return values + def get_items_from_query_response(response): + """Returns a list of items from a query response""" + items = [] + if 'results' in response.keys(): + for result in response['results']: + if 'rows' in result.keys(): + for row in result['rows']: + if 'columns' in row.keys() and 'values' in row.keys(): + for value in row['values']: + item = {} + current_col = 0 + for field in value: + item[row['columns'][current_col]] = field + current_col += 1 + items.append(item) + return items def switch_database(self, database): """ @@ -298,9 +308,12 @@ def get_list_database(self): """ Get the list of databases """ - return self._get_values_from_query_response( - self.query("SHOW DATABASES") - ) + return [ + db['name'] for db in + self.get_items_from_query_response( + self.query("SHOW DATABASES") + ) + ] def create_database(self, dbname): """ @@ -314,6 +327,22 @@ def drop_database(self, dbname): """ self.query("DROP DATABASE %s" % dbname) + def get_list_retention_policies(self, database=None): + """ + Get the list of retention policies + """ + return self.get_items_from_query_response( + self.query("SHOW RETENTION POLICIES %s" % database or self._database) + ) + + def 
get_list_users(self): + """ + Get the list of users + """ + return self.get_items_from_query_response( + self.query("SHOW USERS") + ) + def send_packet(self, packet): data = json.dumps(packet) byte = data.encode('utf-8') diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index 0fd31871..b1451a57 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -65,6 +65,7 @@ def setUp(self): # By default, raise exceptions on warnings warnings.simplefilter('error', FutureWarning) + self.cli = InfluxDBClient('localhost', 8086, 'username', 'password') self.dummy_points = [ { "name": "cpu_load_short", @@ -305,8 +306,7 @@ def test_query_chunked(self): @raises(Exception) def test_query_fail(self): with _mocked_session('get', 401): - cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') - cli.query('select column_one from foo;') + self.cli.query('select column_one from foo;') def test_create_database(self): with requests_mock.Mocker() as m: @@ -315,8 +315,7 @@ def test_create_database(self): "http://localhost:8086/query", text='{"results":[{}]}' ) - cli = InfluxDBClient('localhost', 8086, 'username', 'password', 'db') - cli.create_database('new_db') + self.cli.create_database('new_db') self.assertEqual( m.last_request.qs['q'][0], 'create database new_db' @@ -325,8 +324,7 @@ def test_create_database(self): @raises(Exception) def test_create_database_fails(self): with _mocked_session('post', 401): - cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') - cli.create_database('new_db') + self.cli.create_database('new_db') def test_drop_database(self): with requests_mock.Mocker() as m: @@ -335,8 +333,7 @@ def test_drop_database(self): "http://localhost:8086/query", text='{"results":[{}]}' ) - cli = InfluxDBClient('localhost', 8086, 'username', 'password', 'db') - cli.drop_database('new_db') + self.cli.drop_database('new_db') self.assertEqual( m.last_request.qs['q'][0], 'drop database new_db' @@ -359,9 +356,8 @@ def 
test_get_list_database(self): } with _mocked_session('get', 200, json.dumps(data)): - cli = InfluxDBClient('host', 8086, 'username', 'password') self.assertListEqual( - cli.get_list_database(), + self.cli.get_list_database(), [u'mydb', u'myotherdb'] ) @@ -390,3 +386,19 @@ def test_get_series_list(self): cli.get_list_series(), ['foo', 'bar'] ) + + def test_get_list_retention_policies(self): + example_response = \ + u'{"results": [{"rows": [{"values": [["fsfdsdf", "24h0m0s", 2]],' \ + u' "columns": ["name", "duration", "replicaN"]}]}]}' + + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.GET, + "http://localhost:8086/query", + text=example_response + ) + self.assertListEqual( + self.cli.get_list_retention_policies(), + [{u'duration': u'24h0m0s', u'name': u'fsfdsdf', u'replicaN': 2}] + ) From 7d60a3f4d19c1d0f57f99730e2374ff5c40b3a43 Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 20 Feb 2015 17:02:56 -0500 Subject: [PATCH 006/536] Implemented get_list_series --- influxdb/client.py | 47 ++++++++++++++-------- tests/influxdb/client_test.py | 74 ++++++++++++++--------------------- 2 files changed, 61 insertions(+), 60 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 1ae5417c..e56f3a7e 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -99,13 +99,19 @@ def __init__(self, 'Accept': 'text/plain'} @staticmethod - def get_items_from_query_response(response): + def format_query_response(response): """Returns a list of items from a query response""" - items = [] + + series = {} if 'results' in response.keys(): for result in response['results']: if 'rows' in result.keys(): for row in result['rows']: + items = [] + if 'name' in row.keys(): + series[row['name']] = items + else: + series = items # Special case for system queries. 
if 'columns' in row.keys() and 'values' in row.keys(): for value in row['values']: item = {} @@ -114,7 +120,7 @@ def get_items_from_query_response(response): item[row['columns'][current_col]] = field current_col += 1 items.append(item) - return items + return series def switch_database(self, database): """ @@ -191,13 +197,15 @@ def query(self, query, params={}, expected_response_code=200, - database=None): + database=None, + raw=False): """ Query data :param params: Additional parameters to be passed to requests. :param database: Database to query, default to None. :param expected_response_code: Expected response code. Defaults to 200. + :param raw: Wether or not to return the raw influxdb response. """ params['q'] = query @@ -211,7 +219,11 @@ def query(self, data=None, expected_response_code=expected_response_code ) - return response.json() + + if raw: + return response.json() + else: + return self.format_query_response(response.json()) def write_points(self, points, @@ -308,12 +320,7 @@ def get_list_database(self): """ Get the list of databases """ - return [ - db['name'] for db in - self.get_items_from_query_response( - self.query("SHOW DATABASES") - ) - ] + return [db['name'] for db in self.query("SHOW DATABASES")] def create_database(self, dbname): """ @@ -331,17 +338,25 @@ def get_list_retention_policies(self, database=None): """ Get the list of retention policies """ - return self.get_items_from_query_response( - self.query("SHOW RETENTION POLICIES %s" % database or self._database) + return self.query( + "SHOW RETENTION POLICIES %s" % database or self._database ) + def get_list_series(self, database=None): + """ + Get the list of series + """ + return self.query("SHOW SERIES", database=database or self._database) + def get_list_users(self): """ Get the list of users """ - return self.get_items_from_query_response( - self.query("SHOW USERS") - ) + return self.query("SHOW USERS") + + def delete_series(self, name, database=None): + database = database or 
self._database + self.query('DROP SERIES \"%s\"' % name, database=database) def send_packet(self, packet): data = json.dumps(packet) diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index b1451a57..6d2f4f77 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -147,24 +147,6 @@ def test_write_points(self): json.loads(m.last_request.body) ) - @unittest.skip('Not implemented for 0.9') - def test_write_points_string(self): - with requests_mock.Mocker() as m: - m.register_uri( - requests_mock.POST, - "http://localhost:8086/db/db/series" - ) - - cli = InfluxDBClient(database='db') - cli.write_points( - str(json.dumps(self.dummy_points)) - ) - - self.assertListEqual( - json.loads(m.last_request.body), - self.dummy_points - ) - @unittest.skip('Not implemented for 0.9') def test_write_points_batch(self): with _mocked_session('post', 200, self.dummy_points): @@ -255,20 +237,26 @@ def test_write_points_with_precision_fails(self): cli.write_points_with_precision([]) def test_query(self): - data = [ - { - "name": "foo", - "columns": ["time", "sequence_number", "column_one"], - "points": [ - [1383876043, 16, "2"], [1383876043, 15, "1"], - [1383876035, 14, "2"], [1383876035, 13, "1"] - ] - } - ] - with _mocked_session('get', 200, data): - cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') - result = cli.query('select column_one from foo;') - assert len(result[0]['points']) == 4 + example_response = \ + '{"results": [{"rows": [{"name": "sdfsdfsdf", ' \ + '"columns": ["time", "value"], "values": ' \ + '[["2009-11-10T23:00:00Z", 0.64]]}]}, {"rows": ' \ + '[{"name": "cpu_load_short", "columns": ["time", "value"], ' \ + '"values": [["2009-11-10T23:00:00Z", 0.64]]}]}]}' + + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.GET, + "http://localhost:8086/query", + text=example_response + ) + self.assertDictEqual( + self.cli.query('select * from foo'), + {u'cpu_load_short': + [{u'value': 0.64, u'time': 
u'2009-11-10T23:00:00Z'}], + u'sdfsdfsdf': + [{u'value': 0.64, u'time': u'2009-11-10T23:00:00Z'}]} + ) @unittest.skip('Not implemented for 0.9') def test_query_chunked(self): @@ -367,24 +355,21 @@ def test_get_list_database_fails(self): cli = InfluxDBClient('host', 8086, 'username', 'password') cli.get_list_database() - @unittest.skip('Not implemented for 0.9') - def test_get_series_list(self): - cli = InfluxDBClient(database='db') + def test_get_list_series(self): + example_response = \ + u'{"results": [{"rows": [{"values": [["fsfdsdf", "24h0m0s", 2]],' \ + u' "columns": ["name", "duration", "replicaN"]}]}]}' with requests_mock.Mocker() as m: - example_response = \ - '[{"name":"list_series_result","columns":' \ - '["time","name"],"points":[[0,"foo"],[0,"bar"]]}]' - m.register_uri( requests_mock.GET, - "http://localhost:8086/db/db/series", + "http://localhost:8086/query", text=example_response ) - self.assertListEqual( - cli.get_list_series(), - ['foo', 'bar'] + self.cli.get_list_series(), + [{u'duration': u'24h0m0s', + u'name': u'fsfdsdf', u'replicaN': 2}] ) def test_get_list_retention_policies(self): @@ -400,5 +385,6 @@ def test_get_list_retention_policies(self): ) self.assertListEqual( self.cli.get_list_retention_policies(), - [{u'duration': u'24h0m0s', u'name': u'fsfdsdf', u'replicaN': 2}] + [{u'duration': u'24h0m0s', + u'name': u'fsfdsdf', u'replicaN': 2}] ) From 8bb255100fd7a7724b9a18741e9020c50c564645 Mon Sep 17 00:00:00 2001 From: aviau Date: Sat, 28 Feb 2015 13:50:20 -0500 Subject: [PATCH 007/536] Updated tutorial.py for 0.9.0 --- examples/tutorial.py | 40 +++++++++++++++------------------------- 1 file changed, 15 insertions(+), 25 deletions(-) diff --git a/examples/tutorial.py b/examples/tutorial.py index 442d2242..9b0050cb 100644 --- a/examples/tutorial.py +++ b/examples/tutorial.py @@ -9,36 +9,26 @@ def main(host='localhost', port=8086): dbname = 'example' dbuser = 'smly' dbuser_password = 'my_secret_password' - query = 'select column_one from foo;' - 
json_body = [{ - "points": [ - ["1", 1, 1.0], - ["2", 2, 2.0] - ], - "name": "foo", - "columns": ["column_one", "column_two", "column_three"] - }] + query = 'select value from cpu_load_short;' + json_body = [ + { + "name": "cpu_load_short", + "tags": { + "host": "server01", + "region": "us-west" + }, + "timestamp": "2009-11-10T23:00:00Z", + "fields": { + "value": 0.64 + } + } + ] client = InfluxDBClient(host, port, user, password, dbname) print("Create database: " + dbname) client.create_database(dbname) - dbusers = client.get_database_users() - print("Get list of database users: {0}".format(dbusers)) - - print("Add database user: " + dbuser) - client.add_database_user(dbuser, dbuser_password) - - print("Make user a database admin") - client.set_database_admin(dbuser) - - print("Remove admin privilege from user") - client.unset_database_admin(dbuser) - - dbusers = client.get_database_users() - print("Get list of database users again: {0}".format(dbusers)) - print("Switch user: " + dbuser) client.switch_user(dbuser, dbuser_password) @@ -54,7 +44,7 @@ def main(host='localhost', port=8086): client.switch_user(user, password) print("Delete database: " + dbname) - client.delete_database(dbname) + client.drop_database(dbname) def parse_args(): From ab3e3bff0ad72da893c3ba025e6de8cd8e1f797d Mon Sep 17 00:00:00 2001 From: aviau Date: Sat, 28 Feb 2015 17:21:30 -0500 Subject: [PATCH 008/536] SeriesHelper: Added tags support --- examples/tutorial_serieshelper.py | 16 ++-- influxdb/helper.py | 27 ++++-- tests/influxdb/helper_test.py | 144 +++++++++++++++++++++++------- 3 files changed, 138 insertions(+), 49 deletions(-) diff --git a/examples/tutorial_serieshelper.py b/examples/tutorial_serieshelper.py index 62d7101e..ed3f155a 100644 --- a/examples/tutorial_serieshelper.py +++ b/examples/tutorial_serieshelper.py @@ -8,20 +8,22 @@ class Meta: client = InfluxDBClient() # The client should be an instance of InfluxDBClient. 
series_name = 'events.stats.{server_name}' - # The series name must be a string. Add dependent field names in curly brackets. - fields = ['time', 'server_name'] + # The series name must be a string. Add dependent fields/tags in curly brackets. + fields = ['some_stat'] # Defines all the fields in this time series. + tags = ['server_name'] + # Defines all the tags for the series. bulk_size = 5 # Defines the number of data points to store prior to writing on the wire. # The following will create *five* (immutable) data points. # Since bulk_size is set to 5, upon the fifth construction call, *all* data # points will be written on the wire via MySeriesHelper.Meta.client. -MySeriesHelper(server_name='us.east-1', time=159) -MySeriesHelper(server_name='us.east-1', time=158) -MySeriesHelper(server_name='us.east-1', time=157) -MySeriesHelper(server_name='us.east-1', time=156) -MySeriesHelper(server_name='us.east-1', time=155) +MySeriesHelper(server_name='us.east-1', some_stat=159) +MySeriesHelper(server_name='us.east-1', some_stat=158) +MySeriesHelper(server_name='us.east-1', some_stat=157) +MySeriesHelper(server_name='us.east-1', some_stat=156) +MySeriesHelper(server_name='us.east-1', some_stat=155) # To manually submit data points which are not yet written, call commit: MySeriesHelper.commit() diff --git a/influxdb/helper.py b/influxdb/helper.py index b2f8f8bb..7e86578b 100644 --- a/influxdb/helper.py +++ b/influxdb/helper.py @@ -54,7 +54,7 @@ def __new__(cls, *args, **kwargs): 'Missing Meta class in {}.'.format( cls.__name__)) - for attr in ['series_name', 'fields']: + for attr in ['series_name', 'fields', 'tags']: try: setattr(cls, '_' + attr, getattr(_meta, attr)) except AttributeError: @@ -87,7 +87,7 @@ def __new__(cls, *args, **kwargs): ' autocommit is false.'.format(cls.__name__)) cls._datapoints = defaultdict(list) - cls._type = namedtuple(cls.__name__, cls._fields) + cls._type = namedtuple(cls.__name__, cls._fields + cls._tags) return super(SeriesHelper, 
cls).__new__(cls) @@ -100,10 +100,10 @@ def __init__(self, **kw): """ cls = self.__class__ - if sorted(cls._fields) != sorted(kw.keys()): + if sorted(cls._fields + cls._tags) != sorted(kw.keys()): raise NameError( 'Expected {0}, got {1}.'.format( - cls._fields, + sorted(cls._fields + cls._tags), kw.keys())) cls._datapoints[cls._series_name.format(**kw)].append(cls._type(**kw)) @@ -135,11 +135,20 @@ def _json_body_(cls): """ json = [] for series_name, data in six.iteritems(cls._datapoints): - json.append({'name': series_name, - 'columns': cls._fields, - 'points': [[point.__dict__[k] for k in cls._fields] - for point in data] - }) + for point in data: + json_point = { + "name": series_name, + "fields": {}, + "tags": {}, + } + + for field in cls._fields: + json_point['fields'][field] = point.__dict__[field] + + for tag in cls._tags: + json_point['tags'][tag] = point.__dict__[tag] + + json.append(json_point) return json @classmethod diff --git a/tests/influxdb/helper_test.py b/tests/influxdb/helper_test.py index e0bcff5d..d5f1d02e 100644 --- a/tests/influxdb/helper_test.py +++ b/tests/influxdb/helper_test.py @@ -27,7 +27,8 @@ class MySeriesHelper(SeriesHelper): class Meta: client = TestSeriesHelper.client series_name = 'events.stats.{server_name}' - fields = ['time', 'server_name'] + fields = ['some_stat'] + tags = ['server_name', 'other_tag'] bulk_size = 5 autocommit = True @@ -41,35 +42,76 @@ class AutoCommitTest(SeriesHelper): class Meta: series_name = 'events.stats.{server_name}' - fields = ['time', 'server_name'] + fields = ['some_stat'] + tags = ['server_name', 'other_tag'] bulk_size = 5 client = InfluxDBClient() autocommit = True fake_write_points = mock.MagicMock() - AutoCommitTest(server_name='us.east-1', time=159) + AutoCommitTest(server_name='us.east-1', some_stat=159, other_tag='gg') AutoCommitTest._client.write_points = fake_write_points - AutoCommitTest(server_name='us.east-1', time=158) - AutoCommitTest(server_name='us.east-1', time=157) - 
AutoCommitTest(server_name='us.east-1', time=156) + AutoCommitTest(server_name='us.east-1', some_stat=158, other_tag='gg') + AutoCommitTest(server_name='us.east-1', some_stat=157, other_tag='gg') + AutoCommitTest(server_name='us.east-1', some_stat=156, other_tag='gg') self.assertFalse(fake_write_points.called) - AutoCommitTest(server_name='us.east-1', time=3443) + AutoCommitTest(server_name='us.east-1', some_stat=3443, other_tag='gg') self.assertTrue(fake_write_points.called) def testSingleSeriesName(self): """ Tests JSON conversion when there is only one series name. """ - TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=159) - TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=158) - TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=157) - TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=156) - expectation = [{'points': [[159, 'us.east-1'], - [158, 'us.east-1'], - [157, 'us.east-1'], - [156, 'us.east-1']], - 'name': 'events.stats.us.east-1', - 'columns': ['time', 'server_name']}] + TestSeriesHelper.MySeriesHelper( + server_name='us.east-1', other_tag='ello', some_stat=159) + TestSeriesHelper.MySeriesHelper( + server_name='us.east-1', other_tag='ello', some_stat=158) + TestSeriesHelper.MySeriesHelper( + server_name='us.east-1', other_tag='ello', some_stat=157) + TestSeriesHelper.MySeriesHelper( + server_name='us.east-1', other_tag='ello', some_stat=156) + expectation = [ + { + "name": "events.stats.us.east-1", + "tags": { + "other_tag": "ello", + "server_name": "us.east-1" + }, + "fields": { + "some_stat": 159 + }, + }, + { + "name": "events.stats.us.east-1", + "tags": { + "other_tag": "ello", + "server_name": "us.east-1" + }, + "fields": { + "some_stat": 158 + }, + }, + { + "name": "events.stats.us.east-1", + "tags": { + "other_tag": "ello", + "server_name": "us.east-1" + }, + "fields": { + "some_stat": 157 + }, + }, + { + "name": "events.stats.us.east-1", + "tags": { + "other_tag": "ello", + "server_name": 
"us.east-1" + }, + "fields": { + "some_stat": 156 + }, + } + ] rcvd = TestSeriesHelper.MySeriesHelper._json_body_() self.assertTrue(all([el in expectation for el in rcvd]) and @@ -86,22 +128,56 @@ def testSeveralSeriesNames(self): ''' Tests JSON conversion when there is only one series name. ''' - TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=159) - TestSeriesHelper.MySeriesHelper(server_name='fr.paris-10', time=158) - TestSeriesHelper.MySeriesHelper(server_name='lu.lux', time=157) - TestSeriesHelper.MySeriesHelper(server_name='uk.london', time=156) - expectation = [{'points': [[157, 'lu.lux']], - 'name': 'events.stats.lu.lux', - 'columns': ['time', 'server_name']}, - {'points': [[156, 'uk.london']], - 'name': 'events.stats.uk.london', - 'columns': ['time', 'server_name']}, - {'points': [[158, 'fr.paris-10']], - 'name': 'events.stats.fr.paris-10', - 'columns': ['time', 'server_name']}, - {'points': [[159, 'us.east-1']], - 'name': 'events.stats.us.east-1', - 'columns': ['time', 'server_name']}] + TestSeriesHelper.MySeriesHelper( + server_name='us.east-1', some_stat=159, other_tag='ello') + TestSeriesHelper.MySeriesHelper( + server_name='fr.paris-10', some_stat=158, other_tag='ello') + TestSeriesHelper.MySeriesHelper( + server_name='lu.lux', some_stat=157, other_tag='ello') + TestSeriesHelper.MySeriesHelper( + server_name='uk.london', some_stat=156, other_tag='ello') + expectation = [ + { + 'fields': { + 'some_stat': 157 + }, + 'name': 'events.stats.lu.lux', + 'tags': { + 'other_tag': 'ello', + 'server_name': 'lu.lux' + } + }, + { + 'fields': { + 'some_stat': 156 + }, + 'name': 'events.stats.uk.london', + 'tags': { + 'other_tag': 'ello', + 'server_name': 'uk.london' + } + }, + { + 'fields': { + 'some_stat': 158 + }, + 'name': 'events.stats.fr.paris-10', + 'tags': { + 'other_tag': 'ello', + 'server_name': 'fr.paris-10' + } + }, + { + 'fields': { + 'some_stat': 159 + }, + 'name': 'events.stats.us.east-1', + 'tags': { + 'other_tag': 'ello', + 
'server_name': 'us.east-1' + } + } + ] rcvd = TestSeriesHelper.MySeriesHelper._json_body_() self.assertTrue(all([el in expectation for el in rcvd]) and @@ -155,6 +231,7 @@ class Meta: client = TestSeriesHelper.client series_name = 'events.stats.{server_name}' fields = ['time', 'server_name'] + tags = [] bulk_size = 0 autocommit = True @@ -182,6 +259,7 @@ class Meta: series_name = 'events.stats.{server_name}' fields = ['time', 'server_name'] bulk_size = 5 + tags = [] autocommit = False with warnings.catch_warnings(record=True) as w: From 907d471fd2a1a11c0a96b3be0f2db610beb27d99 Mon Sep 17 00:00:00 2001 From: Alexandre Viau Date: Sat, 28 Feb 2015 17:35:22 -0500 Subject: [PATCH 009/536] Bumped to 1.0.0 (this is not a release commit) This version will be released only when its tagged. --- influxdb/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/__init__.py b/influxdb/__init__.py index 274366e6..782a2005 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -11,4 +11,4 @@ ] -__version__ = '0.3.0' +__version__ = '1.0.0' From 61f635a2173c1272ec91f9f45c1e8afef59f248f Mon Sep 17 00:00:00 2001 From: Alexandre Viau Date: Sat, 28 Feb 2015 17:45:21 -0500 Subject: [PATCH 010/536] README: updated write_points example --- README.rst | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/README.rst b/README.rst index 1804f40e..8911bcc6 100644 --- a/README.rst +++ b/README.rst @@ -100,14 +100,19 @@ Here's a basic example (for more see the examples directory):: >>> from influxdb import InfluxDBClient - >>> json_body = [{ - "points": [ - ["1", 1, 1.0], - ["2", 2, 2.0] - ], - "name": "foo", - "columns": ["column_one", "column_two", "column_three"] - }] + >>> json_body = [ + { + "name": "cpu_load_short", + "tags": { + "host": "server01", + "region": "us-west" + }, + "timestamp": "2009-11-10T23:00:00Z", + "fields": { + "value": 0.64 + } + } + ] >>> client = InfluxDBClient('localhost', 8086, 'root', 'root', 
'example') From 2d02e59206e6edb450128891fd5155c067f98a76 Mon Sep 17 00:00:00 2001 From: aviau Date: Sun, 1 Mar 2015 21:21:28 -0500 Subject: [PATCH 011/536] Added create_retention_policy --- influxdb/client.py | 20 +++++++++++++++++++ tests/influxdb/client_test.py | 36 +++++++++++++++++++++++++++++++++++ 2 files changed, 56 insertions(+) diff --git a/influxdb/client.py b/influxdb/client.py index e56f3a7e..8dbfac05 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -334,6 +334,26 @@ def drop_database(self, dbname): """ self.query("DROP DATABASE %s" % dbname) + def create_retention_policy(self, name, duration, replication, database=None, default=False): + """ + Create a retention policy + + :param duration: The duration. Ex: '1d' + :param replication: The replication. + :param database: The database. Defaults to current database + :param default: (bool) Wether or not to set the policy as default + """ + + query_string = \ + "CREATE RETENTION POLICY %s ON %s " \ + "DURATION %s REPLICATION %s" % \ + (name, database or self._database, duration, replication) + + if default is True: + query_string += " DEFAULT" + + self.query(query_string) + def get_list_retention_policies(self, database=None): """ Get the list of retention policies diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index 6d2f4f77..ce5104ae 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -372,6 +372,42 @@ def test_get_list_series(self): u'name': u'fsfdsdf', u'replicaN': 2}] ) + def test_create_retention_policy_default(self): + example_response = u'{"results":[{}]}' + + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.GET, + "http://localhost:8086/query", + text=example_response + ) + self.cli.create_retention_policy( + 'somename', '1d', 4, default=True, database='db' + ) + + self.assertEqual( + m.last_request.qs['q'][0], + 'create retention policy somename on db duration 1d replication 4 default' + ) + + def 
test_create_retention_policy(self): + example_response = u'{"results":[{}]}' + + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.GET, + "http://localhost:8086/query", + text=example_response + ) + self.cli.create_retention_policy( + 'somename', '1d', 4, database='db' + ) + + self.assertEqual( + m.last_request.qs['q'][0], + 'create retention policy somename on db duration 1d replication 4' + ) + def test_get_list_retention_policies(self): example_response = \ u'{"results": [{"rows": [{"values": [["fsfdsdf", "24h0m0s", 2]],' \ From 6a3bc5c4c394571a773bfc4bb255c9294bb5e51a Mon Sep 17 00:00:00 2001 From: aviau Date: Sun, 1 Mar 2015 21:24:12 -0500 Subject: [PATCH 012/536] python3.2: Dict keys are not unicode --- tests/influxdb/client_test.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index ce5104ae..aae4dffc 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -211,9 +211,9 @@ def test_write_points_with_precision(self): ) self.assertDictEqual( - {u'points': self.dummy_points, - u'database': u'db', - u'precision': u'n', + {'points': self.dummy_points, + 'database': u'db', + 'precision': u'n', }, json.loads(m.last_request.body) ) From 5cdbc27c7e25db4668ca86922fbb7ac923d310c3 Mon Sep 17 00:00:00 2001 From: aviau Date: Sun, 1 Mar 2015 21:38:08 -0500 Subject: [PATCH 013/536] Tutorial.py: Added create_retention_policy example --- examples/tutorial.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/examples/tutorial.py b/examples/tutorial.py index 9b0050cb..cb813862 100644 --- a/examples/tutorial.py +++ b/examples/tutorial.py @@ -29,6 +29,9 @@ def main(host='localhost', port=8086): print("Create database: " + dbname) client.create_database(dbname) + print("Create a retention policy") + client.create_retention_policy('awesome_policy', '3d', 3, default=True) + print("Switch user: " + dbuser) client.switch_user(dbuser, dbuser_password) 
From 696a6f6a95ca01d4f9fe69d15955de4ca22fe147 Mon Sep 17 00:00:00 2001 From: aviau Date: Sun, 1 Mar 2015 21:40:34 -0500 Subject: [PATCH 014/536] Flake8 fixes --- influxdb/client.py | 4 +++- tests/influxdb/client_test.py | 6 ++++-- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 8dbfac05..25a8a38b 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -334,7 +334,9 @@ def drop_database(self, dbname): """ self.query("DROP DATABASE %s" % dbname) - def create_retention_policy(self, name, duration, replication, database=None, default=False): + def create_retention_policy( + self, name, duration, + replication, database=None, default=False): """ Create a retention policy diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index aae4dffc..0cb76d66 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -387,7 +387,8 @@ def test_create_retention_policy_default(self): self.assertEqual( m.last_request.qs['q'][0], - 'create retention policy somename on db duration 1d replication 4 default' + 'create retention policy somename on ' + 'db duration 1d replication 4 default' ) def test_create_retention_policy(self): @@ -405,7 +406,8 @@ def test_create_retention_policy(self): self.assertEqual( m.last_request.qs['q'][0], - 'create retention policy somename on db duration 1d replication 4' + 'create retention policy somename on ' + 'db duration 1d replication 4' ) def test_get_list_retention_policies(self): From c4bbbb0de6c12e51e93e1b52ea93e23331d76961 Mon Sep 17 00:00:00 2001 From: aviau Date: Sun, 1 Mar 2015 21:49:23 -0500 Subject: [PATCH 015/536] python3.2: Dont use unicode in dict --- tests/influxdb/client_test.py | 34 +++++++++++++++++----------------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index 0cb76d66..1559f10e 100644 --- a/tests/influxdb/client_test.py +++ 
b/tests/influxdb/client_test.py @@ -212,8 +212,8 @@ def test_write_points_with_precision(self): self.assertDictEqual( {'points': self.dummy_points, - 'database': u'db', - 'precision': u'n', + 'database': 'db', + 'precision': 'n', }, json.loads(m.last_request.body) ) @@ -252,10 +252,10 @@ def test_query(self): ) self.assertDictEqual( self.cli.query('select * from foo'), - {u'cpu_load_short': - [{u'value': 0.64, u'time': u'2009-11-10T23:00:00Z'}], - u'sdfsdfsdf': - [{u'value': 0.64, u'time': u'2009-11-10T23:00:00Z'}]} + {'cpu_load_short': + [{'value': 0.64, 'time': '2009-11-10T23:00:00Z'}], + 'sdfsdfsdf': + [{'value': 0.64, 'time': '2009-11-10T23:00:00Z'}]} ) @unittest.skip('Not implemented for 0.9') @@ -346,7 +346,7 @@ def test_get_list_database(self): with _mocked_session('get', 200, json.dumps(data)): self.assertListEqual( self.cli.get_list_database(), - [u'mydb', u'myotherdb'] + ['mydb', 'myotherdb'] ) @raises(Exception) @@ -357,8 +357,8 @@ def test_get_list_database_fails(self): def test_get_list_series(self): example_response = \ - u'{"results": [{"rows": [{"values": [["fsfdsdf", "24h0m0s", 2]],' \ - u' "columns": ["name", "duration", "replicaN"]}]}]}' + '{"results": [{"rows": [{"values": [["fsfdsdf", "24h0m0s", 2]],' \ + ' "columns": ["name", "duration", "replicaN"]}]}]}' with requests_mock.Mocker() as m: m.register_uri( @@ -368,12 +368,12 @@ def test_get_list_series(self): ) self.assertListEqual( self.cli.get_list_series(), - [{u'duration': u'24h0m0s', - u'name': u'fsfdsdf', u'replicaN': 2}] + [{'duration': '24h0m0s', + 'name': 'fsfdsdf', 'replicaN': 2}] ) def test_create_retention_policy_default(self): - example_response = u'{"results":[{}]}' + example_response = '{"results":[{}]}' with requests_mock.Mocker() as m: m.register_uri( @@ -392,7 +392,7 @@ def test_create_retention_policy_default(self): ) def test_create_retention_policy(self): - example_response = u'{"results":[{}]}' + example_response = '{"results":[{}]}' with requests_mock.Mocker() as m: 
m.register_uri( @@ -412,8 +412,8 @@ def test_create_retention_policy(self): def test_get_list_retention_policies(self): example_response = \ - u'{"results": [{"rows": [{"values": [["fsfdsdf", "24h0m0s", 2]],' \ - u' "columns": ["name", "duration", "replicaN"]}]}]}' + '{"results": [{"rows": [{"values": [["fsfdsdf", "24h0m0s", 2]],' \ + ' "columns": ["name", "duration", "replicaN"]}]}]}' with requests_mock.Mocker() as m: m.register_uri( @@ -423,6 +423,6 @@ def test_get_list_retention_policies(self): ) self.assertListEqual( self.cli.get_list_retention_policies(), - [{u'duration': u'24h0m0s', - u'name': u'fsfdsdf', u'replicaN': 2}] + [{'duration': '24h0m0s', + 'name': 'fsfdsdf', 'replicaN': 2}] ) From 4c6fb415be6eb3461a80b2d179e174e9666a88de Mon Sep 17 00:00:00 2001 From: Federico Capoano Date: Thu, 5 Mar 2015 17:56:18 +0100 Subject: [PATCH 016/536] Updated format_query_response to reflect influxdb 0.9 response 'rows' was renamed in 'series', related to #109 --- influxdb/client.py | 6 ++---- tests/influxdb/client_test.py | 10 +++++----- 2 files changed, 7 insertions(+), 9 deletions(-) mode change 100644 => 100755 influxdb/client.py mode change 100644 => 100755 tests/influxdb/client_test.py diff --git a/influxdb/client.py b/influxdb/client.py old mode 100644 new mode 100755 index 25a8a38b..46d4f4d4 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -26,7 +26,6 @@ def __init__(self, content, code): class InfluxDBClient(object): - """ The ``InfluxDBClient`` object holds information necessary to connect to InfluxDB. Requests can be made to InfluxDB directly through the client. 
@@ -101,12 +100,11 @@ def __init__(self, @staticmethod def format_query_response(response): """Returns a list of items from a query response""" - series = {} if 'results' in response.keys(): for result in response['results']: - if 'rows' in result.keys(): - for row in result['rows']: + if 'series' in result.keys(): + for row in result['series']: items = [] if 'name' in row.keys(): series[row['name']] = items diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py old mode 100644 new mode 100755 index 1559f10e..ce1377f5 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -238,9 +238,9 @@ def test_write_points_with_precision_fails(self): def test_query(self): example_response = \ - '{"results": [{"rows": [{"name": "sdfsdfsdf", ' \ + '{"results": [{"series": [{"name": "sdfsdfsdf", ' \ '"columns": ["time", "value"], "values": ' \ - '[["2009-11-10T23:00:00Z", 0.64]]}]}, {"rows": ' \ + '[["2009-11-10T23:00:00Z", 0.64]]}]}, {"series": ' \ '[{"name": "cpu_load_short", "columns": ["time", "value"], ' \ '"values": [["2009-11-10T23:00:00Z", 0.64]]}]}]}' @@ -337,7 +337,7 @@ def test_get_list_database(self): data = { "results": [ - {"rows": [ + {"series": [ {"columns": ["name"], "values":[["mydb"], ["myotherdb"]]}]} ] @@ -357,7 +357,7 @@ def test_get_list_database_fails(self): def test_get_list_series(self): example_response = \ - '{"results": [{"rows": [{"values": [["fsfdsdf", "24h0m0s", 2]],' \ + '{"results": [{"series": [{"values": [["fsfdsdf", "24h0m0s", 2]],'\ ' "columns": ["name", "duration", "replicaN"]}]}]}' with requests_mock.Mocker() as m: @@ -412,7 +412,7 @@ def test_create_retention_policy(self): def test_get_list_retention_policies(self): example_response = \ - '{"results": [{"rows": [{"values": [["fsfdsdf", "24h0m0s", 2]],' \ + '{"results": [{"series": [{"values": [["fsfdsdf", "24h0m0s", 2]],'\ ' "columns": ["name", "duration", "replicaN"]}]}]}' with requests_mock.Mocker() as m: From 
c3ca445fa877271f0a2070e6897f0a27c83fd883 Mon Sep 17 00:00:00 2001 From: areski Date: Sat, 7 Mar 2015 01:00:56 +0100 Subject: [PATCH 017/536] Fix example server_data to support v0.9 This includes fixes for the private method '_query' from client.py to work properly with influxdb v0.9 API --- examples/tutorial.py | 2 +- examples/tutorial_server_data.py | 49 +++++++++++++++++-------- influxdb/client.py | 61 ++++++++++++++++---------------- influxdb/dataframe_client.py | 6 ++-- 4 files changed, 69 insertions(+), 49 deletions(-) diff --git a/examples/tutorial.py b/examples/tutorial.py index cb813862..fa1ebe9f 100644 --- a/examples/tutorial.py +++ b/examples/tutorial.py @@ -46,7 +46,7 @@ def main(host='localhost', port=8086): print("Switch user: " + user) client.switch_user(user, password) - print("Delete database: " + dbname) + print("Drop database: " + dbname) client.drop_database(dbname) diff --git a/examples/tutorial_server_data.py b/examples/tutorial_server_data.py index 2e9bba15..aa518f95 100644 --- a/examples/tutorial_server_data.py +++ b/examples/tutorial_server_data.py @@ -1,8 +1,10 @@ import argparse from influxdb import InfluxDBClient +from influxdb.client import InfluxDBClientError import datetime import random +import time USER = 'root' @@ -17,34 +19,53 @@ def main(host='localhost', port=8086, nb_day=15): total_minutes = 1440 * nb_day total_records = int(total_minutes / timeinterval_min) now = datetime.datetime.today() - cpu_series = [{ - 'name': "server_data.cpu_idle", - 'columns': ["time", "value", "hostName"], - 'points': [] - }] + metric = "server_data.cpu_idle" + series = [] for i in range(0, total_records): past_date = now - datetime.timedelta(minutes=i * timeinterval_min) value = random.randint(0, 200) hostName = "server-%d" % random.randint(1, 5) - pointValues = [int(past_date.strftime('%s')), value, hostName] - cpu_series[0]['points'].append(pointValues) + # pointValues = [int(past_date.strftime('%s')), value, hostName] + pointValues = { + 
"timestamp": int(past_date.strftime('%s')), + "name": metric, + 'fields': { + 'value': value, + }, + 'tags': { + "hostName": hostName, + }, + } + series.append(pointValues) + print series client = InfluxDBClient(host, port, USER, PASSWORD, DBNAME) print("Create database: " + DBNAME) - client.create_database(DBNAME) + try: + client.create_database(DBNAME) + except InfluxDBClientError: + # Drop and create + client.drop_database(DBNAME) + client.create_database(DBNAME) + + print("Create a retention policy") + retention_policy = 'awesome_policy' + client.create_retention_policy(retention_policy, '3d', 3, default=True) print("Write points #: {0}".format(total_records)) - client.write_points(cpu_series) + client.write_points(series, retention_policy=retention_policy) + + time.sleep(2) - query = 'SELECT MEAN(value) FROM server_data.cpu_idle GROUP BY time(30m) WHERE time > now() - 1d;' - print("Queying data: " + query) - result = client.query(query) + query = "SELECT MEAN(value) FROM %s WHERE time > now() - 10d GROUP BY time(500m)" % (metric) + result = client.query(query, database=DBNAME, raw=False) + print (result) print("Result: {0}".format(result)) - print("Delete database: " + DBNAME) - client.delete_database(DBNAME) + print("Drop database: " + DBNAME) + client.drop_database(DBNAME) def parse_args(): diff --git a/influxdb/client.py b/influxdb/client.py index 46d4f4d4..d5aa1e8a 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -223,6 +223,36 @@ def query(self, else: return self.format_query_response(response.json()) + def _query(self, query, time_precision='s', chunked=False, + expected_response_code=200): + if time_precision not in ['s', 'm', 'ms', 'u']: + raise Exception( + "Invalid time precision is given. 
(use 's', 'm', 'ms' or 'u')") + + if chunked is True: + chunked_param = 'true' + else: + chunked_param = 'false' + + params = { + 'q': query, + 'time_precision': time_precision, + 'chunked': chunked_param + } + + response = self.request( + url="query", + method='GET', + params=params, + data=None, + expected_response_code=expected_response_code + ) + + if chunked: + return list(chunked_json.loads(response.content.decode())) + else: + return response.json() + def write_points(self, points, time_precision=None, @@ -283,37 +313,6 @@ def _write_points(self, return True - def _query(self, query, time_precision='s', chunked=False): - if time_precision not in ['s', 'm', 'ms', 'u']: - raise Exception( - "Invalid time precision is given. (use 's', 'm', 'ms' or 'u')") - - if chunked is True: - chunked_param = 'true' - else: - chunked_param = 'false' - - # Build the URL of the serie to query - url = "db/{0}/series".format(self._database) - - params = { - 'q': query, - 'time_precision': time_precision, - 'chunked': chunked_param - } - - response = self.request( - url=url, - method='GET', - params=params, - expected_response_code=200 - ) - - if chunked: - return list(chunked_json.loads(response.content.decode())) - else: - return response.json() - def get_list_database(self): """ Get the list of databases diff --git a/influxdb/dataframe_client.py b/influxdb/dataframe_client.py index 850e9ea5..d2685627 100644 --- a/influxdb/dataframe_client.py +++ b/influxdb/dataframe_client.py @@ -85,11 +85,11 @@ def query(self, query, time_precision='s', chunked=False): retrieved in chunks, False otherwise. 
""" - result = InfluxDBClient.query(self, query=query, + result = InfluxDBClient._query(self, query=query, time_precision=time_precision, chunked=chunked) - if len(result) > 0: - return self._to_dataframe(result[0], time_precision) + if len(result['results'][0]) > 0: + return self._to_dataframe(result['results'][0], time_precision) else: return result From e0e6edc80a3c4ab3d292b4b545a23c4296752df2 Mon Sep 17 00:00:00 2001 From: areski Date: Sat, 7 Mar 2015 20:17:35 +0100 Subject: [PATCH 018/536] PEP8 fix E128 on dataframe_client --- influxdb/dataframe_client.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/influxdb/dataframe_client.py b/influxdb/dataframe_client.py index d2685627..d483e133 100644 --- a/influxdb/dataframe_client.py +++ b/influxdb/dataframe_client.py @@ -85,9 +85,10 @@ def query(self, query, time_precision='s', chunked=False): retrieved in chunks, False otherwise. """ - result = InfluxDBClient._query(self, query=query, - time_precision=time_precision, - chunked=chunked) + result = InfluxDBClient._query(self, + query=query, + time_precision=time_precision, + chunked=chunked) if len(result['results'][0]) > 0: return self._to_dataframe(result['results'][0], time_precision) else: From 549d13eb54fce7a38fa98e0a35b4427e74c10d99 Mon Sep 17 00:00:00 2001 From: areski Date: Mon, 9 Mar 2015 13:58:21 +0100 Subject: [PATCH 019/536] Improve test coverage on chunked_json --- test-requirements.txt | 3 ++- tests/influxdb/chunked_json_test.py | 36 +++++++++++++++++++++++++++++ tests/influxdb/client_test.py | 6 ++--- 3 files changed, 41 insertions(+), 4 deletions(-) create mode 100644 tests/influxdb/chunked_json_test.py diff --git a/test-requirements.txt b/test-requirements.txt index 4b80095c..cbc6add3 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,3 +1,4 @@ nose +nose-cov mock -requests-mock +requests-mock \ No newline at end of file diff --git a/tests/influxdb/chunked_json_test.py b/tests/influxdb/chunked_json_test.py 
new file mode 100644 index 00000000..c4f731b7 --- /dev/null +++ b/tests/influxdb/chunked_json_test.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- + +import unittest + +from influxdb import chunked_json + + +class TestChunkJson(unittest.TestCase): + + @classmethod + def setUpClass(cls): + super(TestChunkJson, cls).setUpClass() + + def test_load(self): + """ + Tests reading a sequence of JSON values from a string + """ + example_response = \ + '{"results": [{"series": [{"name": "sdfsdfsdf", ' \ + '"columns": ["time", "value"], "values": ' \ + '[["2009-11-10T23:00:00Z", 0.64]]}]}, {"series": ' \ + '[{"name": "cpu_load_short", "columns": ["time", "value"], ' \ + '"values": [["2009-11-10T23:00:00Z", 0.64]]}]}]}' + + res = list(chunked_json.loads(example_response)) + # import ipdb; ipdb.set_trace() + + # self.assertTrue(res) + self.assertListEqual( + [ + { + 'results': [{'series': [{'values': [['2009-11-10T23:00:00Z', 0.64]], 'name': 'sdfsdfsdf', 'columns': ['time', 'value']}]}, {'series': [{'values': [['2009-11-10T23:00:00Z', 0.64]], 'name': 'cpu_load_short', 'columns': ['time', 'value']}]}] + } + ], + res + ) diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index ce1377f5..3c75a616 100755 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -74,7 +74,7 @@ def setUp(self): "region": "us-west" }, "timestamp": "2009-11-10T23:00:00Z", - "values": { + "fields": { "value": 0.64 } } @@ -114,7 +114,7 @@ def test_write(self): "tags": {"host": "server01", "region": "us-west"}, "timestamp": "2009-11-10T23:00:00Z", - "values": {"value": 0.64}}]} + "fields": {"value": 0.64}}]} ) self.assertEqual( @@ -125,7 +125,7 @@ def test_write(self): "tags": {"host": "server01", "region": "us-west"}, "timestamp": "2009-11-10T23:00:00Z", - "values": {"value": 0.64}}]} + "fields": {"value": 0.64}}]} ) def test_write_points(self): From 653bc1fc72cde666373f039af99ad91076c1249f Mon Sep 17 00:00:00 2001 From: areski Date: Mon, 9 Mar 2015 20:30:43 +0100 
Subject: [PATCH 020/536] fix handle error when trying to import pandas --- examples/tutorial_pandas.py | 2 +- influxdb/dataframe_client.py | 13 +++++++------ 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/examples/tutorial_pandas.py b/examples/tutorial_pandas.py index 45cfeb23..6f483e55 100644 --- a/examples/tutorial_pandas.py +++ b/examples/tutorial_pandas.py @@ -20,7 +20,7 @@ def main(host='localhost', port=8086): client.create_database(dbname) print("Write DataFrame") - client.write_points({'demo':df}) + client.write_points({'demo': df}) print("Read DataFrame") client.query("select * from demo") diff --git a/influxdb/dataframe_client.py b/influxdb/dataframe_client.py index 850e9ea5..16068dbc 100644 --- a/influxdb/dataframe_client.py +++ b/influxdb/dataframe_client.py @@ -7,6 +7,11 @@ from .client import InfluxDBClient +try: + import pandas as pd +except ImportError: + pd = None + class DataFrameClient(InfluxDBClient): """ @@ -17,13 +22,9 @@ class DataFrameClient(InfluxDBClient): def __init__(self, *args, **kwargs): super(DataFrameClient, self).__init__(*args, **kwargs) - try: - global pd - import pandas as pd - except ImportError as ex: + if not pd: raise ImportError( - 'DataFrameClient requires Pandas, "{ex}" problem importing' - .format(ex=str(ex)) + 'DataFrameClient requires Pandas' ) self.EPOCH = pd.Timestamp('1970-01-01 00:00:00.000+00:00') From bfd08785f8c0ce2d4f415ccc59ccd4786e09fc62 Mon Sep 17 00:00:00 2001 From: areski Date: Mon, 9 Mar 2015 23:28:57 +0100 Subject: [PATCH 021/536] PEP8 fix E501 on chunked_json_test --- tests/influxdb/chunked_json_test.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/tests/influxdb/chunked_json_test.py b/tests/influxdb/chunked_json_test.py index c4f731b7..1225a109 100644 --- a/tests/influxdb/chunked_json_test.py +++ b/tests/influxdb/chunked_json_test.py @@ -29,7 +29,17 @@ def test_load(self): self.assertListEqual( [ { - 'results': [{'series': [{'values': 
[['2009-11-10T23:00:00Z', 0.64]], 'name': 'sdfsdfsdf', 'columns': ['time', 'value']}]}, {'series': [{'values': [['2009-11-10T23:00:00Z', 0.64]], 'name': 'cpu_load_short', 'columns': ['time', 'value']}]}] + 'results': [ + {'series': [{ + 'values': [['2009-11-10T23:00:00Z', 0.64]], + 'name': 'sdfsdfsdf', + 'columns': + ['time', 'value']}]}, + {'series': [{ + 'values': [['2009-11-10T23:00:00Z', 0.64]], + 'name': 'cpu_load_short', + 'columns': ['time', 'value']}]} + ] } ], res From e722a4bdb1003d5d2fe3857f00e66d9654bd8680 Mon Sep 17 00:00:00 2001 From: areski Date: Tue, 10 Mar 2015 01:00:13 +0100 Subject: [PATCH 022/536] remove _query method in dataframe_client and client --- influxdb/client.py | 50 +++++++++++++++--------------------- influxdb/dataframe_client.py | 8 +++--- 2 files changed, 25 insertions(+), 33 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index d5aa1e8a..ba5a1622 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -195,50 +195,39 @@ def query(self, query, params={}, expected_response_code=200, + time_precision='s', database=None, - raw=False): + raw=False, + chunked=False): """ Query data :param params: Additional parameters to be passed to requests. + :param time_precision: [Optional, default 's'] Either 's', 'm', 'ms' + or 'u'. :param database: Database to query, default to None. :param expected_response_code: Expected response code. Defaults to 200. :param raw: Wether or not to return the raw influxdb response. + :param chunked: [Optional, default=False] True if the data shall be + retrieved in chunks, False otherwise. + """ + if time_precision not in ['s', 'm', 'ms', 'u']: + raise Exception( + "Invalid time precision is given. 
(use 's', 'm', 'ms' or 'u')") + params['q'] = query if database: params['db'] = database - response = self.request( - url="query", - method='GET', - params=params, - data=None, - expected_response_code=expected_response_code - ) - - if raw: - return response.json() - else: - return self.format_query_response(response.json()) - - def _query(self, query, time_precision='s', chunked=False, - expected_response_code=200): - if time_precision not in ['s', 'm', 'ms', 'u']: - raise Exception( - "Invalid time precision is given. (use 's', 'm', 'ms' or 'u')") + if time_precision: + params['time_precision'] = time_precision if chunked is True: - chunked_param = 'true' + params['chunked'] = 'true' else: - chunked_param = 'false' - - params = { - 'q': query, - 'time_precision': time_precision, - 'chunked': chunked_param - } + params['chunked'] = 'false' response = self.request( url="query", @@ -248,10 +237,12 @@ def _query(self, query, time_precision='s', chunked=False, expected_response_code=expected_response_code ) - if chunked: + if raw and not chunked: + return response.json() + elif not raw and chunked: return list(chunked_json.loads(response.content.decode())) else: - return response.json() + return self.format_query_response(response.json()) def write_points(self, points, @@ -306,6 +297,7 @@ def _write_points(self, if self.use_udp: self.send_packet(data) else: + import ipdb; ipdb.set_trace() self.write( data=data, expected_response_code=200 diff --git a/influxdb/dataframe_client.py b/influxdb/dataframe_client.py index d483e133..8b91e613 100644 --- a/influxdb/dataframe_client.py +++ b/influxdb/dataframe_client.py @@ -85,10 +85,10 @@ def query(self, query, time_precision='s', chunked=False): retrieved in chunks, False otherwise. 
""" - result = InfluxDBClient._query(self, - query=query, - time_precision=time_precision, - chunked=chunked) + result = InfluxDBClient.query(self, + query=query, + time_precision=time_precision, + chunked=chunked) if len(result['results'][0]) > 0: return self._to_dataframe(result['results'][0], time_precision) else: From 4654015213f6c13481e8b40ec863c994486e8d42 Mon Sep 17 00:00:00 2001 From: areski Date: Tue, 10 Mar 2015 09:23:18 +0100 Subject: [PATCH 023/536] remove debugging --- influxdb/client.py | 1 - 1 file changed, 1 deletion(-) diff --git a/influxdb/client.py b/influxdb/client.py index ba5a1622..da409ffb 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -297,7 +297,6 @@ def _write_points(self, if self.use_udp: self.send_packet(data) else: - import ipdb; ipdb.set_trace() self.write( data=data, expected_response_code=200 From 4a320d0024647e7341ce99c5893ac240d47623b4 Mon Sep 17 00:00:00 2001 From: areski Date: Tue, 10 Mar 2015 12:03:38 +0100 Subject: [PATCH 024/536] Add TODO location in Readme --- README.rst | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/README.rst b/README.rst index 8911bcc6..d24c1dab 100644 --- a/README.rst +++ b/README.rst @@ -154,6 +154,12 @@ problems or submit contributions. .. 
_Issues: https://github.com/influxdb/influxdb-python/issues +TODO +==== + +The TODO/Roadmap can be found in Github bug tracker: https://github.com/influxdb/influxdb-python/issues/109 + + Source code =========== From cdbf1bf4d7f53b1e70db53360fb18fb694cb0761 Mon Sep 17 00:00:00 2001 From: areski Date: Tue, 10 Mar 2015 13:58:24 +0100 Subject: [PATCH 025/536] fix clarity in tutorial Series Helper --- examples/tutorial_serieshelper.py | 42 +++++++++++++++++++++++-------- 1 file changed, 31 insertions(+), 11 deletions(-) diff --git a/examples/tutorial_serieshelper.py b/examples/tutorial_serieshelper.py index ed3f155a..d7bd27c9 100644 --- a/examples/tutorial_serieshelper.py +++ b/examples/tutorial_serieshelper.py @@ -1,29 +1,49 @@ +""" +Tutorial/Example how to use the class helper `SeriesHelper` +""" + from influxdb import InfluxDBClient from influxdb import SeriesHelper +# InfluxDB connections settings +host = 'localhost' +port = 8086 +user = 'root' +password = 'root' +dbname = 'mydb' + +myclient = InfluxDBClient(host, port, user, password, dbname) + +# Uncomment the following code if the database is not yet created +# myclient.create_database(dbname) +# myclient.create_retention_policy('awesome_policy', '3d', 3, default=True) + class MySeriesHelper(SeriesHelper): + # Meta class stores time series helper configuration. class Meta: - # Meta class stores time series helper configuration. - client = InfluxDBClient() # The client should be an instance of InfluxDBClient. - series_name = 'events.stats.{server_name}' + client = myclient # The series name must be a string. Add dependent fields/tags in curly brackets. - fields = ['some_stat'] + series_name = 'events.stats.{server_name}' # Defines all the fields in this time series. - tags = ['server_name'] + fields = ['some_stat', 'other_stat'] # Defines all the tags for the series. - bulk_size = 5 + tags = ['server_name'] # Defines the number of data points to store prior to writing on the wire. 
+ bulk_size = 5 + # autocommit must be set to True when using bulk_size + autocommit = True + # The following will create *five* (immutable) data points. # Since bulk_size is set to 5, upon the fifth construction call, *all* data # points will be written on the wire via MySeriesHelper.Meta.client. -MySeriesHelper(server_name='us.east-1', some_stat=159) -MySeriesHelper(server_name='us.east-1', some_stat=158) -MySeriesHelper(server_name='us.east-1', some_stat=157) -MySeriesHelper(server_name='us.east-1', some_stat=156) -MySeriesHelper(server_name='us.east-1', some_stat=155) +MySeriesHelper(server_name='us.east-1', some_stat=159, other_stat=10) +MySeriesHelper(server_name='us.east-1', some_stat=158, other_stat=20) +MySeriesHelper(server_name='us.east-1', some_stat=157, other_stat=30) +MySeriesHelper(server_name='us.east-1', some_stat=156, other_stat=40) +MySeriesHelper(server_name='us.east-1', some_stat=155, other_stat=50) # To manually submit data points which are not yet written, call commit: MySeriesHelper.commit() From d1b8edf6c864b6da93f56b3b54d29d9479e8d601 Mon Sep 17 00:00:00 2001 From: aviau Date: Tue, 10 Mar 2015 17:03:00 -0400 Subject: [PATCH 026/536] Removed chunked and time_precision parameters --- influxdb/client.py | 26 ++------------------------ 1 file changed, 2 insertions(+), 24 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index da409ffb..64a3ae4d 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -6,7 +6,6 @@ import socket import requests -from influxdb import chunked_json try: xrange @@ -195,40 +194,21 @@ def query(self, query, params={}, expected_response_code=200, - time_precision='s', database=None, - raw=False, - chunked=False): + raw=False): """ Query data :param params: Additional parameters to be passed to requests. - :param time_precision: [Optional, default 's'] Either 's', 'm', 'ms' - or 'u'. :param database: Database to query, default to None. :param expected_response_code: Expected response code. 
Defaults to 200. :param raw: Wether or not to return the raw influxdb response. - :param chunked: [Optional, default=False] True if the data shall be - retrieved in chunks, False otherwise. - """ - if time_precision not in ['s', 'm', 'ms', 'u']: - raise Exception( - "Invalid time precision is given. (use 's', 'm', 'ms' or 'u')") - params['q'] = query if database: params['db'] = database - if time_precision: - params['time_precision'] = time_precision - - if chunked is True: - params['chunked'] = 'true' - else: - params['chunked'] = 'false' - response = self.request( url="query", method='GET', @@ -237,10 +217,8 @@ def query(self, expected_response_code=expected_response_code ) - if raw and not chunked: + if raw: return response.json() - elif not raw and chunked: - return list(chunked_json.loads(response.content.decode())) else: return self.format_query_response(response.json()) From 2fcf3fa7a929dbc84faee87d889042edaeaf5485 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tim=20Tr=C3=B6ndle?= Date: Sat, 14 Mar 2015 11:45:46 +0100 Subject: [PATCH 027/536] Test covering multiple time series --- tests/influxdb/dataframe_client_test.py | 40 +++++++++++++++++++++++++ 1 file changed, 40 insertions(+) diff --git a/tests/influxdb/dataframe_client_test.py b/tests/influxdb/dataframe_client_test.py index 469cb1d7..4e4149c2 100644 --- a/tests/influxdb/dataframe_client_test.py +++ b/tests/influxdb/dataframe_client_test.py @@ -203,6 +203,46 @@ def test_query_into_dataframe(self): result = cli.query('select column_one from foo;') assert_frame_equal(dataframe, result) + def test_query_multiple_time_series(self): + data = [ + { + "name": "series1", + "columns": ["time", "mean", "min", "max", "stddev"], + "points": [[0, 323048, 323048, 323048, 0]] + }, + { + "name": "series2", + "columns": ["time", "mean", "min", "max", "stddev"], + "points": [[0, -2.8233, -2.8503, -2.7832, 0.0173]] + }, + { + "name": "series3", + "columns": ["time", "mean", "min", "max", "stddev"], + "points": [[0, -0.01220, 
-0.01220, -0.01220, 0]] + } + ] + dataframes = { + 'series1': pd.DataFrame(data=[[323048, 323048, 323048, 0]], + index=pd.to_datetime([0], unit='s', + utc=True), + columns=['mean', 'min', 'max', 'stddev']), + 'series2': pd.DataFrame(data=[[-2.8233, -2.8503, -2.7832, 0.0173]], + index=pd.to_datetime([0], unit='s', + utc=True), + columns=['mean', 'min', 'max', 'stddev']), + 'series3': pd.DataFrame(data=[[-0.01220, -0.01220, -0.01220, 0]], + index=pd.to_datetime([0], unit='s', + utc=True), + columns=['mean', 'min', 'max', 'stddev']) + } + with _mocked_session('get', 200, data): + cli = DataFrameClient('host', 8086, 'username', 'password', 'db') + result = cli.query("""select mean(value), min(value), max(value), + stddev(value) from series1, series2, series3""") + assert dataframes.keys() == result.keys() + for key in dataframes.keys(): + assert_frame_equal(dataframes[key], result[key]) + def test_query_with_empty_result(self): with _mocked_session('get', 200, []): cli = DataFrameClient('host', 8086, 'username', 'password', 'db') From d7013fe0a16b3e2e06dd6d09a86e98a3fa51734a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tim=20Tr=C3=B6ndle?= Date: Sat, 14 Mar 2015 12:02:53 +0100 Subject: [PATCH 028/536] Multiple time series support --- influxdb/dataframe_client.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/influxdb/dataframe_client.py b/influxdb/dataframe_client.py index 850e9ea5..641b60c1 100644 --- a/influxdb/dataframe_client.py +++ b/influxdb/dataframe_client.py @@ -77,7 +77,10 @@ def write_points_with_precision(self, data, time_precision='s'): def query(self, query, time_precision='s', chunked=False): """ - Quering data into a DataFrame. + Quering data into DataFrames. + + Returns a DataFrame for a single time series and a map for multiple + time series with the time series as value and its name as key. :param time_precision: [Optional, default 's'] Either 's', 'm', 'ms' or 'u'. 
@@ -88,10 +91,14 @@ def query(self, query, time_precision='s', chunked=False): result = InfluxDBClient.query(self, query=query, time_precision=time_precision, chunked=chunked) - if len(result) > 0: + if len(result) == 0: + return result + elif len(result) == 1: return self._to_dataframe(result[0], time_precision) else: - return result + return {time_series['name']: self._to_dataframe(time_series, + time_precision) + for time_series in result} def _to_dataframe(self, json_result, time_precision): dataframe = pd.DataFrame(data=json_result['points'], From 7b5b4305e69beffe29a083056fe6146a1cdb0977 Mon Sep 17 00:00:00 2001 From: aviau Date: Sat, 14 Mar 2015 17:25:02 -0400 Subject: [PATCH 029/536] Now catch requests connection errors Conflicts: influxdb/client.py tests/influxdb/client_test.py --- influxdb/client.py | 29 +++++++++++++------- tests/influxdb/client_test.py | 50 +++++++++++++++++++++++++++++++++++ 2 files changed, 70 insertions(+), 9 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 64a3ae4d..8f4fa6e3 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -5,6 +5,7 @@ import json import socket import requests +import requests.exceptions try: @@ -164,15 +165,25 @@ def request(self, url, method='GET', params=None, data=None, if data is not None and not isinstance(data, str): data = json.dumps(data) - response = session.request( - method=method, - url=url, - params=params, - data=data, - headers=self._headers, - verify=self._verify_ssl, - timeout=self._timeout - ) + # Try to send the request a maximum of three times. (see #103) + # TODO (aviau): Make this configurable. 
+ for i in range(0, 3): + try: + response = session.request( + method=method, + url=url, + params=params, + data=data, + headers=self._headers, + verify=self._verify_ssl, + timeout=self._timeout + ) + break + except requests.exceptions.ConnectionError as e: + if i < 2: + continue + else: + raise e if response.status_code == expected_response_code: return response diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index 3c75a616..be5705c8 100755 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -4,12 +4,14 @@ """ import json import requests +import requests.exceptions import socket import unittest import requests_mock from nose.tools import raises from mock import patch import warnings +import mock from influxdb import InfluxDBClient from influxdb.client import session @@ -426,3 +428,51 @@ def test_get_list_retention_policies(self): [{'duration': '24h0m0s', 'name': 'fsfdsdf', 'replicaN': 2}] ) + + @mock.patch('requests.Session.request') + def test_request_retry(self, mock_request): + """Tests that two connection errors will be handled""" + + class CustomMock(object): + i = 0 + + def connection_error(self, *args, **kwargs): + self.i += 1 + + if self.i < 3: + raise requests.exceptions.ConnectionError + else: + r = requests.Response() + r.status_code = 200 + return r + + mock_request.side_effect = CustomMock().connection_error + + cli = InfluxDBClient(database='db') + cli.write_points( + self.dummy_points + ) + + @mock.patch('requests.Session.request') + def test_request_retry(self, mock_request): + """Tests that three connection errors will not be handled""" + + class CustomMock(object): + i = 0 + + def connection_error(self, *args, **kwargs): + self.i += 1 + + if self.i < 4: + raise requests.exceptions.ConnectionError + else: + r = requests.Response() + r.status_code = 200 + return r + + mock_request.side_effect = CustomMock().connection_error + + cli = InfluxDBClient(database='db') + + with 
self.assertRaises(requests.exceptions.ConnectionError): + cli.write_points(self.dummy_points) From 438aacbd32aec266e9517c1e233088a13f0dc521 Mon Sep 17 00:00:00 2001 From: aviau Date: Tue, 17 Mar 2015 19:59:15 -0400 Subject: [PATCH 030/536] Include database in all queries (Closes #127) --- influxdb/client.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 8f4fa6e3..1997d2be 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -217,8 +217,7 @@ def query(self, """ params['q'] = query - if database: - params['db'] = database + params['db'] = database or self._database response = self.request( url="query", From e0a351fc3f0f3af12b508b8c2d7a70723ac04d1d Mon Sep 17 00:00:00 2001 From: aviau Date: Tue, 17 Mar 2015 20:05:36 -0400 Subject: [PATCH 031/536] Fixed tests with same name --- tests/influxdb/client_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index be5705c8..7b73e63f 100755 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -454,7 +454,7 @@ def connection_error(self, *args, **kwargs): ) @mock.patch('requests.Session.request') - def test_request_retry(self, mock_request): + def test_request_retry_raises(self, mock_request): """Tests that three connection errors will not be handled""" class CustomMock(object): From 1e890a44ba7d843a140788967295691be0006301 Mon Sep 17 00:00:00 2001 From: aviau Date: Tue, 17 Mar 2015 23:08:31 -0400 Subject: [PATCH 032/536] Bumped to 0.4.0 --- influxdb/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/__init__.py b/influxdb/__init__.py index adb789b2..26dbae8a 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -11,4 +11,4 @@ ] -__version__ = '0.3.1' +__version__ = '0.4.0' From 5472d413c805821c3385c8644933d2f4dcbf81cf Mon Sep 17 00:00:00 2001 From: aviau Date: Sat, 14 Mar 2015 17:25:02 -0400 Subject: [PATCH 
033/536] Now catch requests connection errors --- influxdb/client.py | 29 +++++++++++++------- tests/influxdb/client_test.py | 50 +++++++++++++++++++++++++++++++++++ 2 files changed, 70 insertions(+), 9 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 6fcae994..f17921b7 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -5,6 +5,7 @@ import json import socket import requests +import requests.exceptions import warnings from influxdb import chunked_json @@ -158,15 +159,25 @@ def request(self, url, method='GET', params=None, data=None, if data is not None and not isinstance(data, str): data = json.dumps(data) - response = session.request( - method=method, - url=url, - params=params, - data=data, - headers=self._headers, - verify=self._verify_ssl, - timeout=self._timeout - ) + # Try to send the request a maximum of three times. (see #103) + # TODO (aviau): Make this configurable. + for i in range(0, 3): + try: + response = session.request( + method=method, + url=url, + params=params, + data=data, + headers=self._headers, + verify=self._verify_ssl, + timeout=self._timeout + ) + break + except requests.exceptions.ConnectionError as e: + if i < 2: + continue + else: + raise e if response.status_code == expected_response_code: return response diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index 8cec9657..071fc287 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -4,12 +4,14 @@ """ import json import requests +import requests.exceptions import socket import unittest import requests_mock from nose.tools import raises from mock import patch import warnings +import mock from influxdb import InfluxDBClient from influxdb.client import session @@ -640,3 +642,51 @@ def test_delete_database_user(self): def test_update_permission(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') cli.update_permission('admin', []) + + @mock.patch('requests.Session.request') + def 
test_request_retry(self, mock_request): + """Tests that two connection errors will be handled""" + + class CustomMock(object): + i = 0 + + def connection_error(self, *args, **kwargs): + self.i += 1 + + if self.i < 3: + raise requests.exceptions.ConnectionError + else: + r = requests.Response() + r.status_code = 200 + return r + + mock_request.side_effect = CustomMock().connection_error + + cli = InfluxDBClient(database='db') + cli.write_points( + self.dummy_points + ) + + @mock.patch('requests.Session.request') + def test_request_retry_raises(self, mock_request): + """Tests that three connection errors will not be handled""" + + class CustomMock(object): + i = 0 + + def connection_error(self, *args, **kwargs): + self.i += 1 + + if self.i < 4: + raise requests.exceptions.ConnectionError + else: + r = requests.Response() + r.status_code = 200 + return r + + mock_request.side_effect = CustomMock().connection_error + + cli = InfluxDBClient(database='db') + + with self.assertRaises(requests.exceptions.ConnectionError): + cli.write_points(self.dummy_points) From 038af774ed8ab8c951dacdc588a9d22ba8c7a70e Mon Sep 17 00:00:00 2001 From: aviau Date: Wed, 18 Mar 2015 15:33:58 -0400 Subject: [PATCH 034/536] Bumped to 0.4.1 --- influxdb/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/__init__.py b/influxdb/__init__.py index 26dbae8a..6685a915 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -11,4 +11,4 @@ ] -__version__ = '0.4.0' +__version__ = '0.4.1' From 2fbd6e039fcb6cbae496944234a5c3d64922b25b Mon Sep 17 00:00:00 2001 From: aviau Date: Thu, 19 Mar 2015 11:37:44 -0400 Subject: [PATCH 035/536] README: Now merged 0.9.0 branch --- README.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/README.rst b/README.rst index d24c1dab..bf307dbb 100644 --- a/README.rst +++ b/README.rst @@ -36,14 +36,14 @@ InfluxDB is an open-source distributed time series database, find more about Inf .. 
_installation: -InfluxDB v0.9.0 -=============== +InfluxDB < 0.9.0 +================ -InfluxDB v0.9.0 brings many changes to the influxDB api. v0.9.0 users may use the ``0.9.0_support`` branch. Keep in mind that this is a development branch and may break. When v0.9.0 is released, we will merge the ``0.9.0_support`` branch to master and push it to pypi. +This library only supports InfluxDB>=0.9.0. Users of previous versions of InfluxDB may use the influxdb_0.8 branch. You may install it from pip with the following command:: - $ pip install https://github.com/influxdb/influxdb-python/archive/0.9.0_support.zip + $ pip install https://github.com/influxdb/influxdb-python/archive/influxdb_0.8.zip Installation ============ From 3e6db3ef19b3890a1bc45ea815ed75361478b2a4 Mon Sep 17 00:00:00 2001 From: aviau Date: Thu, 19 Mar 2015 12:17:33 -0400 Subject: [PATCH 036/536] Bring back 0.8.x support --- README.rst | 10 +- influxdb/influxdb08/__init__.py | 11 + influxdb/influxdb08/chunked_json.py | 21 + influxdb/influxdb08/client.py | 786 ++++++++++++++++++ influxdb/influxdb08/dataframe_client.py | 146 ++++ influxdb/influxdb08/helper.py | 150 ++++ tests/influxdb/influxdb08/__init__.py | 1 + tests/influxdb/influxdb08/client_test.py | 692 +++++++++++++++ .../influxdb08/dataframe_client_test.py | 288 +++++++ tests/influxdb/influxdb08/helper_test.py | 194 +++++ 10 files changed, 2292 insertions(+), 7 deletions(-) create mode 100644 influxdb/influxdb08/__init__.py create mode 100644 influxdb/influxdb08/chunked_json.py create mode 100644 influxdb/influxdb08/client.py create mode 100644 influxdb/influxdb08/dataframe_client.py create mode 100644 influxdb/influxdb08/helper.py create mode 100644 tests/influxdb/influxdb08/__init__.py create mode 100644 tests/influxdb/influxdb08/client_test.py create mode 100644 tests/influxdb/influxdb08/dataframe_client_test.py create mode 100644 tests/influxdb/influxdb08/helper_test.py diff --git a/README.rst b/README.rst index bf307dbb..5afd83c5 100644 --- 
a/README.rst +++ b/README.rst @@ -36,14 +36,10 @@ InfluxDB is an open-source distributed time series database, find more about Inf .. _installation: -InfluxDB < 0.9.0 -================ +InfluxDB v0.8.X users +===================== -This library only supports InfluxDB>=0.9.0. Users of previous versions of InfluxDB may use the influxdb_0.8 branch. - -You may install it from pip with the following command:: - - $ pip install https://github.com/influxdb/influxdb-python/archive/influxdb_0.8.zip +Influxdb >=0.9.0 brings many breaking changes to the API. InfluxDB 0.8.X users may use the legacy client by using ```from influxdb.influxdb08 import InfluxDBClient``` instead. Installation ============ diff --git a/influxdb/influxdb08/__init__.py b/influxdb/influxdb08/__init__.py new file mode 100644 index 00000000..6ba218e3 --- /dev/null +++ b/influxdb/influxdb08/__init__.py @@ -0,0 +1,11 @@ +# -*- coding: utf-8 -*- +from .client import InfluxDBClient +from .dataframe_client import DataFrameClient +from .helper import SeriesHelper + + +__all__ = [ + 'InfluxDBClient', + 'DataFrameClient', + 'SeriesHelper', +] diff --git a/influxdb/influxdb08/chunked_json.py b/influxdb/influxdb08/chunked_json.py new file mode 100644 index 00000000..50d304f1 --- /dev/null +++ b/influxdb/influxdb08/chunked_json.py @@ -0,0 +1,21 @@ +# -*- coding: utf-8 -*- + +# +# Author: Adrian Sampson +# Source: https://gist.github.com/sampsyo/920215 +# + +import json + +_decoder = json.JSONDecoder() + + +def loads(s): + """A generator reading a sequence of JSON values from a string.""" + while s: + s = s.strip() + obj, pos = _decoder.raw_decode(s) + if not pos: + raise ValueError('no JSON object found at %i' % pos) + yield obj + s = s[pos:] diff --git a/influxdb/influxdb08/client.py b/influxdb/influxdb08/client.py new file mode 100644 index 00000000..f17921b7 --- /dev/null +++ b/influxdb/influxdb08/client.py @@ -0,0 +1,786 @@ +# -*- coding: utf-8 -*- +""" +Python client for InfluxDB +""" +import json +import 
socket +import requests +import requests.exceptions +import warnings + +from influxdb import chunked_json + +try: + xrange +except NameError: + xrange = range + +session = requests.Session() + + +class InfluxDBClientError(Exception): + "Raised when an error occurs in the request" + def __init__(self, content, code): + super(InfluxDBClientError, self).__init__( + "{0}: {1}".format(code, content)) + self.content = content + self.code = code + + +class InfluxDBClient(object): + + """ + The ``InfluxDBClient`` object holds information necessary to connect + to InfluxDB. Requests can be made to InfluxDB directly through the client. + + :param host: hostname to connect to InfluxDB, defaults to 'localhost' + :type host: string + :param port: port to connect to InfluxDB, defaults to 'localhost' + :type port: int + :param username: user to connect, defaults to 'root' + :type username: string + :param password: password of the user, defaults to 'root' + :type password: string + :param database: database name to connect to, defaults is None + :type database: string + :param ssl: use https instead of http to connect to InfluxDB, defaults is + False + :type ssl: boolean + :param verify_ssl: verify SSL certificates for HTTPS requests, defaults is + False + :type verify_ssl: boolean + :param timeout: number of seconds Requests will wait for your client to + establish a connection, defaults to None + :type timeout: int + :param use_udp: use UDP to connect to InfluxDB, defaults is False + :type use_udp: int + :param udp_port: UDP port to connect to InfluxDB, defaults is 4444 + :type udp_port: int + """ + + def __init__(self, + host='localhost', + port=8086, + username='root', + password='root', + database=None, + ssl=False, + verify_ssl=False, + timeout=None, + use_udp=False, + udp_port=4444): + """ + Construct a new InfluxDBClient object. 
+ """ + self._host = host + self._port = port + self._username = username + self._password = password + self._database = database + self._timeout = timeout + + self._verify_ssl = verify_ssl + + self.use_udp = use_udp + self.udp_port = udp_port + if use_udp: + self.udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + + self._scheme = "http" + + if ssl is True: + self._scheme = "https" + + self._baseurl = "{0}://{1}:{2}".format( + self._scheme, + self._host, + self._port) + + self._headers = { + 'Content-type': 'application/json', + 'Accept': 'text/plain'} + + # Change member variables + + def switch_database(self, database): + """ + switch_database() + + Change client database. + + :param database: the new database name to switch to + :type database: string + """ + self._database = database + + def switch_db(self, database): + """ + DEPRECATED. Change client database. + + """ + warnings.warn( + "switch_db is deprecated, and will be removed " + "in future versions. Please use " + "``InfluxDBClient.switch_database(database)`` instead.", + FutureWarning) + return self.switch_database(database) + + def switch_user(self, username, password): + """ + switch_user() + + Change client username. + + :param username: the new username to switch to + :type username: string + :param password: the new password to switch to + :type password: string + """ + self._username = username + self._password = password + + def request(self, url, method='GET', params=None, data=None, + expected_response_code=200): + """ + Make a http request to API + """ + url = "{0}/{1}".format(self._baseurl, url) + + if params is None: + params = {} + + auth = { + 'u': self._username, + 'p': self._password + } + + params.update(auth) + + if data is not None and not isinstance(data, str): + data = json.dumps(data) + + # Try to send the request a maximum of three times. (see #103) + # TODO (aviau): Make this configurable. 
+ for i in range(0, 3): + try: + response = session.request( + method=method, + url=url, + params=params, + data=data, + headers=self._headers, + verify=self._verify_ssl, + timeout=self._timeout + ) + break + except requests.exceptions.ConnectionError as e: + if i < 2: + continue + else: + raise e + + if response.status_code == expected_response_code: + return response + else: + raise InfluxDBClientError(response.content, response.status_code) + + def write(self, data): + """ Provided as convenience for influxdb v0.9.0, this may change. """ + self.request( + url="write", + method='POST', + params=None, + data=data, + expected_response_code=200 + ) + return True + + # Writing Data + # + # Assuming you have a database named foo_production you can write data + # by doing a POST to /db/foo_production/series?u=some_user&p=some_password + # with a JSON body of points. + + def write_points(self, data, time_precision='s', *args, **kwargs): + """ + Write to multiple time series names. + + :param data: A list of dicts. + :param time_precision: [Optional, default 's'] Either 's', 'm', 'ms' + or 'u'. + :param batch_size: [Optional] Value to write the points in batches + instead of all at one time. Useful for when doing data dumps from + one database to another or when doing a massive write operation + :type batch_size: int + """ + + def list_chunks(l, n): + """ Yield successive n-sized chunks from l. 
+ """ + for i in xrange(0, len(l), n): + yield l[i:i + n] + + batch_size = kwargs.get('batch_size') + if batch_size: + for item in data: + name = item.get('name') + columns = item.get('columns') + point_list = item.get('points') + + for batch in list_chunks(point_list, batch_size): + item = [{ + "points": batch, + "name": name, + "columns": columns + }] + self._write_points( + data=item, + time_precision=time_precision) + + return True + + return self._write_points(data=data, time_precision=time_precision) + + def write_points_with_precision(self, data, time_precision='s'): + """ + DEPRECATED. Write to multiple time series names + + """ + warnings.warn( + "write_points_with_precision is deprecated, and will be removed " + "in future versions. Please use " + "``InfluxDBClient.write_points(time_precision='..')`` instead.", + FutureWarning) + return self._write_points(data=data, time_precision=time_precision) + + def _write_points(self, data, time_precision): + if time_precision not in ['s', 'm', 'ms', 'u']: + raise Exception( + "Invalid time precision is given. 
(use 's', 'm', 'ms' or 'u')") + + if self.use_udp and time_precision != 's': + raise Exception( + "InfluxDB only supports seconds precision for udp writes" + ) + + url = "db/{0}/series".format(self._database) + + params = { + 'time_precision': time_precision + } + + if self.use_udp: + self.send_packet(data) + else: + self.request( + url=url, + method='POST', + params=params, + data=data, + expected_response_code=200 + ) + + return True + + # One Time Deletes + + def delete_points(self, name): + """ + Delete an entire series + """ + url = "db/{0}/series/{1}".format(self._database, name) + + self.request( + url=url, + method='DELETE', + expected_response_code=204 + ) + + return True + + # Regularly Scheduled Deletes + + def create_scheduled_delete(self, json_body): + """ + TODO: Create scheduled delete + + 2013-11-08: This endpoint has not been implemented yet in ver0.0.8, + but it is documented in http://influxdb.org/docs/api/http.html. + See also: src/api/http/api.go:l57 + """ + raise NotImplementedError() + + # get list of deletes + # curl http://localhost:8086/db/site_dev/scheduled_deletes + # + # remove a regularly scheduled delete + # curl -X DELETE http://localhost:8086/db/site_dev/scheduled_deletes/:id + + def get_list_scheduled_delete(self): + """ + TODO: Get list of scheduled deletes + + 2013-11-08: This endpoint has not been implemented yet in ver0.0.8, + but it is documented in http://influxdb.org/docs/api/http.html. + See also: src/api/http/api.go:l57 + """ + raise NotImplementedError() + + def remove_scheduled_delete(self, delete_id): + """ + TODO: Remove scheduled delete + + 2013-11-08: This endpoint has not been implemented yet in ver0.0.8, + but it is documented in http://influxdb.org/docs/api/http.html. + See also: src/api/http/api.go:l57 + """ + raise NotImplementedError() + + def query(self, query, time_precision='s', chunked=False): + """ + Quering data + + :param time_precision: [Optional, default 's'] Either 's', 'm', 'ms' + or 'u'. 
+ :param chunked: [Optional, default=False] True if the data shall be + retrieved in chunks, False otherwise. + """ + return self._query(query, time_precision=time_precision, + chunked=chunked) + + # Querying Data + # + # GET db/:name/series. It takes five parameters + def _query(self, query, time_precision='s', chunked=False): + if time_precision not in ['s', 'm', 'ms', 'u']: + raise Exception( + "Invalid time precision is given. (use 's', 'm', 'ms' or 'u')") + + if chunked is True: + chunked_param = 'true' + else: + chunked_param = 'false' + + # Build the URL of the serie to query + url = "db/{0}/series".format(self._database) + + params = { + 'q': query, + 'time_precision': time_precision, + 'chunked': chunked_param + } + + response = self.request( + url=url, + method='GET', + params=params, + expected_response_code=200 + ) + + if chunked: + return list(chunked_json.loads(response.content.decode())) + else: + return response.json() + + # Creating and Dropping Databases + # + # ### create a database + # curl -X POST http://localhost:8086/db -d '{"name": "site_development"}' + # + # ### drop a database + # curl -X DELETE http://localhost:8086/db/site_development + + def create_database(self, database): + """ + create_database() + + Create a database on the InfluxDB server. + + :param database: the name of the database to create + :type database: string + :rtype: boolean + """ + url = "db" + + data = {'name': database} + + self.request( + url=url, + method='POST', + data=data, + expected_response_code=201 + ) + + return True + + def delete_database(self, database): + """ + delete_database() + + Drop a database on the InfluxDB server. 
+ + :param database: the name of the database to delete + :type database: string + :rtype: boolean + """ + url = "db/{0}".format(database) + + self.request( + url=url, + method='DELETE', + expected_response_code=204 + ) + + return True + + # ### get list of databases + # curl -X GET http://localhost:8086/db + + def get_list_database(self): + """ + Get the list of databases + """ + url = "db" + + response = self.request( + url=url, + method='GET', + expected_response_code=200 + ) + + return response.json() + + def get_database_list(self): + """ + DEPRECATED. Get the list of databases + + """ + warnings.warn( + "get_database_list is deprecated, and will be removed " + "in future versions. Please use " + "``InfluxDBClient.get_list_database`` instead.", + FutureWarning) + return self.get_list_database() + + def delete_series(self, series): + """ + delete_series() + + Drop a series on the InfluxDB server. + + :param series: the name of the series to delete + :type series: string + :rtype: boolean + """ + url = "db/{0}/series/{1}".format( + self._database, + series + ) + + self.request( + url=url, + method='DELETE', + expected_response_code=204 + ) + + return True + + def get_list_series(self): + """ + Get a list of all time series in a database + """ + + response = self._query('list series') + + series_list = [] + for series in response[0]['points']: + series_list.append(series[1]) + + return series_list + + def get_list_continuous_queries(self): + """ + Get a list of continuous queries + """ + + response = self._query('list continuous queries') + queries_list = [] + for query in response[0]['points']: + queries_list.append(query[2]) + + return queries_list + + # Security + # get list of cluster admins + # curl http://localhost:8086/cluster_admins?u=root&p=root + + # add cluster admin + # curl -X POST http://localhost:8086/cluster_admins?u=root&p=root \ + # -d '{"name": "paul", "password": "i write teh docz"}' + + # update cluster admin password + # curl -X POST 
http://localhost:8086/cluster_admins/paul?u=root&p=root \ + # -d '{"password": "new pass"}' + + # delete cluster admin + # curl -X DELETE http://localhost:8086/cluster_admins/paul?u=root&p=root + + # Database admins, with a database name of site_dev + # get list of database admins + # curl http://localhost:8086/db/site_dev/admins?u=root&p=root + + # add database admin + # curl -X POST http://localhost:8086/db/site_dev/admins?u=root&p=root \ + # -d '{"name": "paul", "password": "i write teh docz"}' + + # update database admin password + # curl -X POST http://localhost:8086/db/site_dev/admins/paul?u=root&p=root\ + # -d '{"password": "new pass"}' + + # delete database admin + # curl -X DELETE \ + # http://localhost:8086/db/site_dev/admins/paul?u=root&p=root + + def get_list_cluster_admins(self): + """ + Get list of cluster admins + """ + response = self.request( + url="cluster_admins", + method='GET', + expected_response_code=200 + ) + + return response.json() + + def add_cluster_admin(self, new_username, new_password): + """ + Add cluster admin + """ + data = { + 'name': new_username, + 'password': new_password + } + + self.request( + url="cluster_admins", + method='POST', + data=data, + expected_response_code=200 + ) + + return True + + def update_cluster_admin_password(self, username, new_password): + """ + Update cluster admin password + """ + url = "cluster_admins/{0}".format(username) + + data = { + 'password': new_password + } + + self.request( + url=url, + method='POST', + data=data, + expected_response_code=200 + ) + + return True + + def delete_cluster_admin(self, username): + """ + Delete cluster admin + """ + url = "cluster_admins/{0}".format(username) + + self.request( + url=url, + method='DELETE', + expected_response_code=200 + ) + + return True + + def set_database_admin(self, username): + """ + Set user as database admin + """ + return self.alter_database_admin(username, True) + + def unset_database_admin(self, username): + """ + Unset user as database 
admin + """ + return self.alter_database_admin(username, False) + + def alter_database_admin(self, username, is_admin): + url = "db/{0}/users/{1}".format(self._database, username) + + data = {'admin': is_admin} + + self.request( + url=url, + method='POST', + data=data, + expected_response_code=200 + ) + + return True + + def get_list_database_admins(self): + """ + TODO: Get list of database admins + + 2013-11-08: This endpoint has not been implemented yet in ver0.0.8, + but it is documented in http://influxdb.org/docs/api/http.html. + See also: src/api/http/api.go:l57 + """ + raise NotImplementedError() + + def add_database_admin(self, new_username, new_password): + """ + TODO: Add cluster admin + + 2013-11-08: This endpoint has not been implemented yet in ver0.0.8, + but it is documented in http://influxdb.org/docs/api/http.html. + See also: src/api/http/api.go:l57 + """ + raise NotImplementedError() + + def update_database_admin_password(self, username, new_password): + """ + TODO: Update database admin password + + 2013-11-08: This endpoint has not been implemented yet in ver0.0.8, + but it is documented in http://influxdb.org/docs/api/http.html. + See also: src/api/http/api.go:l57 + """ + raise NotImplementedError() + + def delete_database_admin(self, username): + """ + TODO: Delete database admin + + 2013-11-08: This endpoint has not been implemented yet in ver0.0.8, + but it is documented in http://influxdb.org/docs/api/http.html. 
+ See also: src/api/http/api.go:l57 + """ + raise NotImplementedError() + + ### + # Limiting User Access + + # Database users + # get list of database users + # curl http://localhost:8086/db/site_dev/users?u=root&p=root + + # add database user + # curl -X POST http://localhost:8086/db/site_dev/users?u=root&p=root \ + # -d '{"name": "paul", "password": "i write teh docz"}' + + # update database user password + # curl -X POST http://localhost:8086/db/site_dev/users/paul?u=root&p=root \ + # -d '{"password": "new pass"}' + + # delete database user + # curl -X DELETE http://localhost:8086/db/site_dev/users/paul?u=root&p=root + + def get_database_users(self): + """ + Get list of database users + """ + url = "db/{0}/users".format(self._database) + + response = self.request( + url=url, + method='GET', + expected_response_code=200 + ) + + return response.json() + + def add_database_user(self, new_username, new_password, permissions=None): + """ + Add database user + + :param permissions: A ``(readFrom, writeTo)`` tuple + """ + url = "db/{0}/users".format(self._database) + + data = { + 'name': new_username, + 'password': new_password + } + + if permissions: + try: + data['readFrom'], data['writeTo'] = permissions + except (ValueError, TypeError): + raise TypeError( + "'permissions' must be (readFrom, writeTo) tuple" + ) + + self.request( + url=url, + method='POST', + data=data, + expected_response_code=200 + ) + + return True + + def update_database_user_password(self, username, new_password): + """ + Update password + """ + url = "db/{0}/users/{1}".format(self._database, username) + + data = { + 'password': new_password + } + + self.request( + url=url, + method='POST', + data=data, + expected_response_code=200 + ) + + if username == self._username: + self._password = new_password + + return True + + def delete_database_user(self, username): + """ + Delete database user + """ + url = "db/{0}/users/{1}".format(self._database, username) + + self.request( + url=url, + 
method='DELETE', + expected_response_code=200 + ) + + return True + + # update the user by POSTing to db/site_dev/users/paul + + def update_permission(self, username, json_body): + """ + TODO: Update read/write permission + + 2013-11-08: This endpoint has not been implemented yet in ver0.0.8, + but it is documented in http://influxdb.org/docs/api/http.html. + See also: src/api/http/api.go:l57 + """ + raise NotImplementedError() + + def send_packet(self, packet): + data = json.dumps(packet) + byte = data.encode('utf-8') + self.udp_socket.sendto(byte, (self._host, self.udp_port)) diff --git a/influxdb/influxdb08/dataframe_client.py b/influxdb/influxdb08/dataframe_client.py new file mode 100644 index 00000000..641b60c1 --- /dev/null +++ b/influxdb/influxdb08/dataframe_client.py @@ -0,0 +1,146 @@ +# -*- coding: utf-8 -*- +""" +DataFrame client for InfluxDB +""" +import math +import warnings + +from .client import InfluxDBClient + + +class DataFrameClient(InfluxDBClient): + """ + The ``DataFrameClient`` object holds information necessary to connect + to InfluxDB. Requests can be made to InfluxDB directly through the client. + The client reads and writes from pandas DataFrames. + """ + + def __init__(self, *args, **kwargs): + super(DataFrameClient, self).__init__(*args, **kwargs) + try: + global pd + import pandas as pd + except ImportError as ex: + raise ImportError( + 'DataFrameClient requires Pandas, "{ex}" problem importing' + .format(ex=str(ex)) + ) + + self.EPOCH = pd.Timestamp('1970-01-01 00:00:00.000+00:00') + + def write_points(self, data, *args, **kwargs): + """ + Write to multiple time series names. + + :param data: A dictionary mapping series names to pandas DataFrames + :param time_precision: [Optional, default 's'] Either 's', 'm', 'ms' + or 'u'. + :param batch_size: [Optional] Value to write the points in batches + instead of all at one time. 
Useful for when doing data dumps from + one database to another or when doing a massive write operation + :type batch_size: int + """ + + batch_size = kwargs.get('batch_size') + time_precision = kwargs.get('time_precision', 's') + if batch_size: + kwargs.pop('batch_size') # don't hand over to InfluxDBClient + for key, data_frame in data.items(): + number_batches = int(math.ceil( + len(data_frame) / float(batch_size))) + for batch in range(number_batches): + start_index = batch * batch_size + end_index = (batch + 1) * batch_size + data = [self._convert_dataframe_to_json( + name=key, + dataframe=data_frame.ix[start_index:end_index].copy(), + time_precision=time_precision)] + InfluxDBClient.write_points(self, data, *args, **kwargs) + return True + else: + data = [self._convert_dataframe_to_json( + name=key, dataframe=dataframe, time_precision=time_precision) + for key, dataframe in data.items()] + return InfluxDBClient.write_points(self, data, *args, **kwargs) + + def write_points_with_precision(self, data, time_precision='s'): + """ + DEPRECATED. Write to multiple time series names + + """ + warnings.warn( + "write_points_with_precision is deprecated, and will be removed " + "in future versions. Please use " + "``DataFrameClient.write_points(time_precision='..')`` instead.", + FutureWarning) + return self.write_points(data, time_precision='s') + + def query(self, query, time_precision='s', chunked=False): + """ + Quering data into DataFrames. + + Returns a DataFrame for a single time series and a map for multiple + time series with the time series as value and its name as key. + + :param time_precision: [Optional, default 's'] Either 's', 'm', 'ms' + or 'u'. + :param chunked: [Optional, default=False] True if the data shall be + retrieved in chunks, False otherwise. 
+ + """ + result = InfluxDBClient.query(self, query=query, + time_precision=time_precision, + chunked=chunked) + if len(result) == 0: + return result + elif len(result) == 1: + return self._to_dataframe(result[0], time_precision) + else: + return {time_series['name']: self._to_dataframe(time_series, + time_precision) + for time_series in result} + + def _to_dataframe(self, json_result, time_precision): + dataframe = pd.DataFrame(data=json_result['points'], + columns=json_result['columns']) + if 'sequence_number' in dataframe.keys(): + dataframe.sort(['time', 'sequence_number'], inplace=True) + else: + dataframe.sort(['time'], inplace=True) + pandas_time_unit = time_precision + if time_precision == 'm': + pandas_time_unit = 'ms' + elif time_precision == 'u': + pandas_time_unit = 'us' + dataframe.index = pd.to_datetime(list(dataframe['time']), + unit=pandas_time_unit, + utc=True) + del dataframe['time'] + return dataframe + + def _convert_dataframe_to_json(self, dataframe, name, time_precision='s'): + if not isinstance(dataframe, pd.DataFrame): + raise TypeError('Must be DataFrame, but type was: {}.' 
+ .format(type(dataframe))) + if not (isinstance(dataframe.index, pd.tseries.period.PeriodIndex) or + isinstance(dataframe.index, pd.tseries.index.DatetimeIndex)): + raise TypeError('Must be DataFrame with DatetimeIndex or \ + PeriodIndex.') + dataframe.index = dataframe.index.to_datetime() + if dataframe.index.tzinfo is None: + dataframe.index = dataframe.index.tz_localize('UTC') + dataframe['time'] = [self._datetime_to_epoch(dt, time_precision) + for dt in dataframe.index] + data = {'name': name, + 'columns': [str(column) for column in dataframe.columns], + 'points': list([list(x) for x in dataframe.values])} + return data + + def _datetime_to_epoch(self, datetime, time_precision='s'): + seconds = (datetime - self.EPOCH).total_seconds() + if time_precision == 's': + return seconds + elif time_precision == 'm' or time_precision == 'ms': + return seconds * 1000 + elif time_precision == 'u': + return seconds * 1000000 diff --git a/influxdb/influxdb08/helper.py b/influxdb/influxdb08/helper.py new file mode 100644 index 00000000..b2f8f8bb --- /dev/null +++ b/influxdb/influxdb08/helper.py @@ -0,0 +1,150 @@ +# -*- coding: utf-8 -*- +""" +Helper class for InfluxDB +""" +from collections import namedtuple, defaultdict +from warnings import warn + +import six + + +class SeriesHelper(object): + + """ + Subclassing this helper eases writing data points in bulk. + All data points are immutable, insuring they do not get overwritten. + Each subclass can write to its own database. + The time series names can also be based on one or more defined fields. + + Annotated example:: + + class MySeriesHelper(SeriesHelper): + class Meta: + # Meta class stores time series helper configuration. + series_name = 'events.stats.{server_name}' + # Series name must be a string, curly brackets for dynamic use. + fields = ['time', 'server_name'] + # Defines all the fields in this time series. + ### Following attributes are optional. 
### + client = TestSeriesHelper.client + # Client should be an instance of InfluxDBClient. + :warning: Only used if autocommit is True. + bulk_size = 5 + # Defines the number of data points to write simultaneously. + # Only applicable if autocommit is True. + autocommit = True + # If True and no bulk_size, then will set bulk_size to 1. + + """ + __initialized__ = False + + def __new__(cls, *args, **kwargs): + """ + Initializes class attributes for subsequent constructor calls. + + :note: *args and **kwargs are not explicitly used in this function, + but needed for Python 2 compatibility. + """ + if not cls.__initialized__: + cls.__initialized__ = True + try: + _meta = getattr(cls, 'Meta') + except AttributeError: + raise AttributeError( + 'Missing Meta class in {}.'.format( + cls.__name__)) + + for attr in ['series_name', 'fields']: + try: + setattr(cls, '_' + attr, getattr(_meta, attr)) + except AttributeError: + raise AttributeError( + 'Missing {} in {} Meta class.'.format( + attr, + cls.__name__)) + + cls._autocommit = getattr(_meta, 'autocommit', False) + + cls._client = getattr(_meta, 'client', None) + if cls._autocommit and not cls._client: + raise AttributeError( + 'In {}, autocommit is set to True, but no client is set.' + .format(cls.__name__)) + + try: + cls._bulk_size = getattr(_meta, 'bulk_size') + if cls._bulk_size < 1 and cls._autocommit: + warn( + 'Definition of bulk_size in {} forced to 1, ' + 'was less than 1.'.format(cls.__name__)) + cls._bulk_size = 1 + except AttributeError: + cls._bulk_size = -1 + else: + if not cls._autocommit: + warn( + 'Definition of bulk_size in {} has no affect because' + ' autocommit is false.'.format(cls.__name__)) + + cls._datapoints = defaultdict(list) + cls._type = namedtuple(cls.__name__, cls._fields) + + return super(SeriesHelper, cls).__new__(cls) + + def __init__(self, **kw): + """ + Constructor call creates a new data point. All fields must be present. 
+ + :note: Data points written when `bulk_size` is reached per Helper. + :warning: Data points are *immutable* (`namedtuples`). + """ + cls = self.__class__ + + if sorted(cls._fields) != sorted(kw.keys()): + raise NameError( + 'Expected {0}, got {1}.'.format( + cls._fields, + kw.keys())) + + cls._datapoints[cls._series_name.format(**kw)].append(cls._type(**kw)) + + if cls._autocommit and \ + sum(len(series) for series in cls._datapoints.values()) \ + >= cls._bulk_size: + cls.commit() + + @classmethod + def commit(cls, client=None): + """ + Commit everything from datapoints via the client. + + :param client: InfluxDBClient instance for writing points to InfluxDB. + :attention: any provided client will supersede the class client. + :return: result of client.write_points. + """ + if not client: + client = cls._client + rtn = client.write_points(cls._json_body_()) + cls._reset_() + return rtn + + @classmethod + def _json_body_(cls): + """ + :return: JSON body of these datapoints. + """ + json = [] + for series_name, data in six.iteritems(cls._datapoints): + json.append({'name': series_name, + 'columns': cls._fields, + 'points': [[point.__dict__[k] for k in cls._fields] + for point in data] + }) + return json + + @classmethod + def _reset_(cls): + """ + Reset data storage. 
+ """ + cls._datapoints = defaultdict(list) diff --git a/tests/influxdb/influxdb08/__init__.py b/tests/influxdb/influxdb08/__init__.py new file mode 100644 index 00000000..40a96afc --- /dev/null +++ b/tests/influxdb/influxdb08/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/tests/influxdb/influxdb08/client_test.py b/tests/influxdb/influxdb08/client_test.py new file mode 100644 index 00000000..f5631388 --- /dev/null +++ b/tests/influxdb/influxdb08/client_test.py @@ -0,0 +1,692 @@ +# -*- coding: utf-8 -*- +""" +unit tests +""" +import json +import requests +import requests.exceptions +import socket +import unittest +import requests_mock +from nose.tools import raises +from mock import patch +import warnings +import mock + +from influxdb.influxdb08 import InfluxDBClient +from influxdb.influxdb08.client import session + + +def _build_response_object(status_code=200, content=""): + resp = requests.Response() + resp.status_code = status_code + resp._content = content.encode("utf8") + return resp + + +def _mocked_session(method="GET", status_code=200, content=""): + + method = method.upper() + + def request(*args, **kwargs): + c = content + + # Check method + assert method == kwargs.get('method', 'GET') + + if method == 'POST': + data = kwargs.get('data', None) + + if data is not None: + # Data must be a string + assert isinstance(data, str) + + # Data must be a JSON string + assert c == json.loads(data, strict=True) + + c = data + + # Anyway, Content must be a JSON string (or empty string) + if not isinstance(c, str): + c = json.dumps(c) + + return _build_response_object(status_code=status_code, content=c) + + mocked = patch.object( + session, + 'request', + side_effect=request + ) + + return mocked + + +class TestInfluxDBClient(unittest.TestCase): + + def setUp(self): + # By default, raise exceptions on warnings + warnings.simplefilter('error', FutureWarning) + + self.dummy_points = [ + { + "points": [ + ["1", 1, 1.0], + ["2", 2, 2.0] + ], + "name": 
"foo", + "columns": ["column_one", "column_two", "column_three"] + } + ] + + def test_scheme(self): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'database') + assert cli._baseurl == 'http://host:8086' + + cli = InfluxDBClient( + 'host', 8086, 'username', 'password', 'database', ssl=True + ) + assert cli._baseurl == 'https://host:8086' + + def test_switch_database(self): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'database') + cli.switch_database('another_database') + assert cli._database == 'another_database' + + @raises(FutureWarning) + def test_switch_db_deprecated(self): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'database') + cli.switch_db('another_database') + assert cli._database == 'another_database' + + def test_switch_user(self): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'database') + cli.switch_user('another_username', 'another_password') + assert cli._username == 'another_username' + assert cli._password == 'another_password' + + def test_write(self): + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.POST, + "http://localhost:8086/write" + ) + cli = InfluxDBClient(database='db') + cli.write( + {"database": "mydb", + "retentionPolicy": "mypolicy", + "points": [{"name": "cpu_load_short", + "tags": {"host": "server01", + "region": "us-west"}, + "timestamp": "2009-11-10T23:00:00Z", + "values": {"value": 0.64}}]} + ) + + self.assertEqual( + json.loads(m.last_request.body), + {"database": "mydb", + "retentionPolicy": "mypolicy", + "points": [{"name": "cpu_load_short", + "tags": {"host": "server01", + "region": "us-west"}, + "timestamp": "2009-11-10T23:00:00Z", + "values": {"value": 0.64}}]} + ) + + def test_write_points(self): + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.POST, + "http://localhost:8086/db/db/series" + ) + + cli = InfluxDBClient(database='db') + cli.write_points( + self.dummy_points + ) + + self.assertListEqual( + 
json.loads(m.last_request.body), + self.dummy_points + ) + + def test_write_points_string(self): + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.POST, + "http://localhost:8086/db/db/series" + ) + + cli = InfluxDBClient(database='db') + cli.write_points( + str(json.dumps(self.dummy_points)) + ) + + self.assertListEqual( + json.loads(m.last_request.body), + self.dummy_points + ) + + def test_write_points_batch(self): + with _mocked_session('post', 200, self.dummy_points): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + assert cli.write_points( + data=self.dummy_points, + batch_size=2 + ) is True + + def test_write_points_udp(self): + s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + s.bind(('0.0.0.0', 4444)) + + cli = InfluxDBClient( + 'localhost', 8086, 'root', 'root', + 'test', use_udp=True, udp_port=4444 + ) + cli.write_points(self.dummy_points) + + received_data, addr = s.recvfrom(1024) + + assert self.dummy_points == \ + json.loads(received_data.decode(), strict=True) + + def test_write_bad_precision_udp(self): + cli = InfluxDBClient( + 'localhost', 8086, 'root', 'root', + 'test', use_udp=True, udp_port=4444 + ) + + with self.assertRaisesRegexp( + Exception, + "InfluxDB only supports seconds precision for udp writes" + ): + cli.write_points( + self.dummy_points, + time_precision='ms' + ) + + @raises(Exception) + def test_write_points_fails(self): + with _mocked_session('post', 500): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + cli.write_points([]) + + def test_write_points_with_precision(self): + with _mocked_session('post', 200, self.dummy_points): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + assert cli.write_points(self.dummy_points) is True + + def test_write_points_bad_precision(self): + cli = InfluxDBClient() + with self.assertRaisesRegexp( + Exception, + "Invalid time precision is given. 
\(use 's', 'm', 'ms' or 'u'\)" + ): + cli.write_points( + self.dummy_points, + time_precision='g' + ) + + @raises(Exception) + def test_write_points_with_precision_fails(self): + with _mocked_session('post', 500): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + cli.write_points_with_precision([]) + + def test_delete_points(self): + with _mocked_session('delete', 204) as mocked: + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + assert cli.delete_points("foo") is True + + assert len(mocked.call_args_list) == 1 + args, kwds = mocked.call_args_list[0] + + assert kwds['params'] == {'u': 'username', 'p': 'password'} + assert kwds['url'] == 'http://host:8086/db/db/series/foo' + + @raises(Exception) + def test_delete_points_with_wrong_name(self): + with _mocked_session('delete', 400): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + cli.delete_points("nonexist") + + @raises(NotImplementedError) + def test_create_scheduled_delete(self): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + cli.create_scheduled_delete([]) + + @raises(NotImplementedError) + def test_get_list_scheduled_delete(self): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + cli.get_list_scheduled_delete() + + @raises(NotImplementedError) + def test_remove_scheduled_delete(self): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + cli.remove_scheduled_delete(1) + + def test_query(self): + data = [ + { + "name": "foo", + "columns": ["time", "sequence_number", "column_one"], + "points": [ + [1383876043, 16, "2"], [1383876043, 15, "1"], + [1383876035, 14, "2"], [1383876035, 13, "1"] + ] + } + ] + with _mocked_session('get', 200, data): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + result = cli.query('select column_one from foo;') + assert len(result[0]['points']) == 4 + + def test_query_chunked(self): + cli = InfluxDBClient(database='db') + example_object = { + 'points': [ + 
[1415206250119, 40001, 667], + [1415206244555, 30001, 7], + [1415206228241, 20001, 788], + [1415206212980, 10001, 555], + [1415197271586, 10001, 23] + ], + 'name': 'foo', + 'columns': [ + 'time', + 'sequence_number', + 'val' + ] + } + example_response = \ + json.dumps(example_object) + json.dumps(example_object) + + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.GET, + "http://localhost:8086/db/db/series", + text=example_response + ) + + self.assertListEqual( + cli.query('select * from foo', chunked=True), + [example_object, example_object] + ) + + @raises(Exception) + def test_query_fail(self): + with _mocked_session('get', 401): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + cli.query('select column_one from foo;') + + def test_query_bad_precision(self): + cli = InfluxDBClient() + with self.assertRaisesRegexp( + Exception, + "Invalid time precision is given. \(use 's', 'm', 'ms' or 'u'\)" + ): + cli.query('select column_one from foo', time_precision='g') + + def test_create_database(self): + with _mocked_session('post', 201, {"name": "new_db"}): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + assert cli.create_database('new_db') is True + + @raises(Exception) + def test_create_database_fails(self): + with _mocked_session('post', 401): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + cli.create_database('new_db') + + def test_delete_database(self): + with _mocked_session('delete', 204): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + assert cli.delete_database('old_db') is True + + @raises(Exception) + def test_delete_database_fails(self): + with _mocked_session('delete', 401): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + cli.delete_database('old_db') + + def test_get_list_database(self): + data = [ + {"name": "a_db"} + ] + with _mocked_session('get', 200, data): + cli = InfluxDBClient('host', 8086, 'username', 'password') + assert 
len(cli.get_list_database()) == 1 + assert cli.get_list_database()[0]['name'] == 'a_db' + + @raises(Exception) + def test_get_list_database_fails(self): + with _mocked_session('get', 401): + cli = InfluxDBClient('host', 8086, 'username', 'password') + cli.get_list_database() + + @raises(FutureWarning) + def test_get_database_list_deprecated(self): + data = [ + {"name": "a_db"} + ] + with _mocked_session('get', 200, data): + cli = InfluxDBClient('host', 8086, 'username', 'password') + assert len(cli.get_database_list()) == 1 + assert cli.get_database_list()[0]['name'] == 'a_db' + + def test_delete_series(self): + with _mocked_session('delete', 204): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + cli.delete_series('old_series') + + @raises(Exception) + def test_delete_series_fails(self): + with _mocked_session('delete', 401): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + cli.delete_series('old_series') + + def test_get_series_list(self): + cli = InfluxDBClient(database='db') + + with requests_mock.Mocker() as m: + example_response = \ + '[{"name":"list_series_result","columns":' \ + '["time","name"],"points":[[0,"foo"],[0,"bar"]]}]' + + m.register_uri( + requests_mock.GET, + "http://localhost:8086/db/db/series", + text=example_response + ) + + self.assertListEqual( + cli.get_list_series(), + ['foo', 'bar'] + ) + + def test_get_continuous_queries(self): + cli = InfluxDBClient(database='db') + + with requests_mock.Mocker() as m: + + # Tip: put this in a json linter! + example_response = '[ { "name": "continuous queries", "columns"' \ + ': [ "time", "id", "query" ], "points": [ [ ' \ + '0, 1, "select foo(bar,95) from \\"foo_bar' \ + 's\\" group by time(5m) into response_times.' 
\ + 'percentiles.5m.95" ], [ 0, 2, "select perce' \ + 'ntile(value,95) from \\"response_times\\" g' \ + 'roup by time(5m) into response_times.percen' \ + 'tiles.5m.95" ] ] } ]' + + m.register_uri( + requests_mock.GET, + "http://localhost:8086/db/db/series", + text=example_response + ) + + self.assertListEqual( + cli.get_list_continuous_queries(), + [ + 'select foo(bar,95) from "foo_bars" group ' + 'by time(5m) into response_times.percentiles.5m.95', + + 'select percentile(value,95) from "response_times" group ' + 'by time(5m) into response_times.percentiles.5m.95' + ] + ) + + def test_get_list_cluster_admins(self): + pass + + def test_add_cluster_admin(self): + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.POST, + "http://localhost:8086/cluster_admins" + ) + + cli = InfluxDBClient(database='db') + cli.add_cluster_admin( + new_username='paul', + new_password='laup' + ) + + self.assertDictEqual( + json.loads(m.last_request.body), + { + 'name': 'paul', + 'password': 'laup' + } + ) + + def test_update_cluster_admin_password(self): + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.POST, + "http://localhost:8086/cluster_admins/paul" + ) + + cli = InfluxDBClient(database='db') + cli.update_cluster_admin_password( + username='paul', + new_password='laup' + ) + + self.assertDictEqual( + json.loads(m.last_request.body), + {'password': 'laup'} + ) + + def test_delete_cluster_admin(self): + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.DELETE, + "http://localhost:8086/cluster_admins/paul", + status_code=200, + ) + + cli = InfluxDBClient(database='db') + cli.delete_cluster_admin(username='paul') + + self.assertIsNone(m.last_request.body) + + def test_set_database_admin(self): + pass + + def test_unset_database_admin(self): + pass + + def test_alter_database_admin(self): + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.POST, + "http://localhost:8086/db/db/users/paul" + ) + + cli = 
InfluxDBClient(database='db') + cli.alter_database_admin( + username='paul', + is_admin=False + ) + + self.assertDictEqual( + json.loads(m.last_request.body), + { + 'admin': False + } + ) + + @raises(NotImplementedError) + def test_get_list_database_admins(self): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + cli.get_list_database_admins() + + @raises(NotImplementedError) + def test_add_database_admin(self): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + cli.add_database_admin('admin', 'admin_secret_password') + + @raises(NotImplementedError) + def test_update_database_admin_password(self): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + cli.update_database_admin_password('admin', 'admin_secret_password') + + @raises(NotImplementedError) + def test_delete_database_admin(self): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + cli.delete_database_admin('admin') + + def test_get_database_users(self): + cli = InfluxDBClient('localhost', 8086, 'username', 'password', 'db') + + example_response = \ + '[{"name":"paul","isAdmin":false,"writeTo":".*","readFrom":".*"},'\ + '{"name":"bobby","isAdmin":false,"writeTo":".*","readFrom":".*"}]' + + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.GET, + "http://localhost:8086/db/db/users", + text=example_response + ) + users = cli.get_database_users() + + self.assertEqual(json.loads(example_response), users) + + def test_add_database_user(self): + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.POST, + "http://localhost:8086/db/db/users" + ) + cli = InfluxDBClient(database='db') + cli.add_database_user( + new_username='paul', + new_password='laup', + permissions=('.*', '.*') + ) + + self.assertDictEqual( + json.loads(m.last_request.body), + { + 'writeTo': '.*', + 'password': 'laup', + 'readFrom': '.*', + 'name': 'paul' + } + ) + + def test_add_database_user_bad_permissions(self): + cli = InfluxDBClient() + + 
with self.assertRaisesRegexp( + Exception, + "'permissions' must be \(readFrom, writeTo\) tuple" + ): + cli.add_database_user( + new_password='paul', + new_username='paul', + permissions=('hello', 'hello', 'hello') + ) + + def test_update_database_user_password(self): + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.POST, + "http://localhost:8086/db/db/users/paul" + ) + + cli = InfluxDBClient(database='db') + cli.update_database_user_password( + username='paul', + new_password='laup' + ) + + self.assertDictEqual( + json.loads(m.last_request.body), + {'password': 'laup'} + ) + + def test_update_database_user_password_current_user(self): + cli = InfluxDBClient( + username='root', + password='hello', + database='database' + ) + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.POST, + "http://localhost:8086/db/database/users/root" + ) + + cli.update_database_user_password( + username='root', + new_password='bye' + ) + + self.assertEqual(cli._password, 'bye') + + def test_delete_database_user(self): + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.DELETE, + "http://localhost:8086/db/db/users/paul" + ) + + cli = InfluxDBClient(database='db') + cli.delete_database_user(username='paul') + + self.assertIsNone(m.last_request.body) + + @raises(NotImplementedError) + def test_update_permission(self): + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + cli.update_permission('admin', []) + + @mock.patch('requests.Session.request') + def test_request_retry(self, mock_request): + """Tests that two connection errors will be handled""" + + class CustomMock(object): + i = 0 + + def connection_error(self, *args, **kwargs): + self.i += 1 + + if self.i < 3: + raise requests.exceptions.ConnectionError + else: + r = requests.Response() + r.status_code = 200 + return r + + mock_request.side_effect = CustomMock().connection_error + + cli = InfluxDBClient(database='db') + cli.write_points( + self.dummy_points + ) + 
+ @mock.patch('requests.Session.request') + def test_request_retry_raises(self, mock_request): + """Tests that three connection errors will not be handled""" + + class CustomMock(object): + i = 0 + + def connection_error(self, *args, **kwargs): + self.i += 1 + + if self.i < 4: + raise requests.exceptions.ConnectionError + else: + r = requests.Response() + r.status_code = 200 + return r + + mock_request.side_effect = CustomMock().connection_error + + cli = InfluxDBClient(database='db') + + with self.assertRaises(requests.exceptions.ConnectionError): + cli.write_points(self.dummy_points) diff --git a/tests/influxdb/influxdb08/dataframe_client_test.py b/tests/influxdb/influxdb08/dataframe_client_test.py new file mode 100644 index 00000000..9fc54b9e --- /dev/null +++ b/tests/influxdb/influxdb08/dataframe_client_test.py @@ -0,0 +1,288 @@ +# -*- coding: utf-8 -*- +""" +unit tests for misc module +""" +from .client_test import _mocked_session + +import unittest +import json +import requests_mock +from nose.tools import raises +from datetime import timedelta +from tests import skipIfPYpy, using_pypy +import copy +import warnings + +if not using_pypy: + import pandas as pd + from pandas.util.testing import assert_frame_equal + from influxdb.influxdb08 import DataFrameClient + + +@skipIfPYpy +class TestDataFrameClient(unittest.TestCase): + + def setUp(self): + # By default, raise exceptions on warnings + warnings.simplefilter('error', FutureWarning) + + def test_write_points_from_dataframe(self): + now = pd.Timestamp('1970-01-01 00:00+00:00') + dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], + index=[now, now + timedelta(hours=1)], + columns=["column_one", "column_two", + "column_three"]) + points = [ + { + "points": [ + ["1", 1, 1.0, 0], + ["2", 2, 2.0, 3600] + ], + "name": "foo", + "columns": ["column_one", "column_two", "column_three", "time"] + } + ] + + with requests_mock.Mocker() as m: + m.register_uri(requests_mock.POST, + 
"http://localhost:8086/db/db/series") + + cli = DataFrameClient(database='db') + cli.write_points({"foo": dataframe}) + + self.assertListEqual(json.loads(m.last_request.body), points) + + def test_write_points_from_dataframe_in_batches(self): + now = pd.Timestamp('1970-01-01 00:00+00:00') + dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], + index=[now, now + timedelta(hours=1)], + columns=["column_one", "column_two", + "column_three"]) + with requests_mock.Mocker() as m: + m.register_uri(requests_mock.POST, + "http://localhost:8086/db/db/series") + + cli = DataFrameClient(database='db') + assert cli.write_points({"foo": dataframe}, + batch_size=1) is True + + def test_write_points_from_dataframe_with_numeric_column_names(self): + now = pd.Timestamp('1970-01-01 00:00+00:00') + # df with numeric column names + dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], + index=[now, now + timedelta(hours=1)]) + points = [ + { + "points": [ + ["1", 1, 1.0, 0], + ["2", 2, 2.0, 3600] + ], + "name": "foo", + "columns": ['0', '1', '2', "time"] + } + ] + + with requests_mock.Mocker() as m: + m.register_uri(requests_mock.POST, + "http://localhost:8086/db/db/series") + + cli = DataFrameClient(database='db') + cli.write_points({"foo": dataframe}) + + self.assertListEqual(json.loads(m.last_request.body), points) + + def test_write_points_from_dataframe_with_period_index(self): + dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], + index=[pd.Period('1970-01-01'), + pd.Period('1970-01-02')], + columns=["column_one", "column_two", + "column_three"]) + points = [ + { + "points": [ + ["1", 1, 1.0, 0], + ["2", 2, 2.0, 86400] + ], + "name": "foo", + "columns": ["column_one", "column_two", "column_three", "time"] + } + ] + + with requests_mock.Mocker() as m: + m.register_uri(requests_mock.POST, + "http://localhost:8086/db/db/series") + + cli = DataFrameClient(database='db') + cli.write_points({"foo": dataframe}) + + 
self.assertListEqual(json.loads(m.last_request.body), points) + + def test_write_points_from_dataframe_with_time_precision(self): + now = pd.Timestamp('1970-01-01 00:00+00:00') + dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], + index=[now, now + timedelta(hours=1)], + columns=["column_one", "column_two", + "column_three"]) + points = [ + { + "points": [ + ["1", 1, 1.0, 0], + ["2", 2, 2.0, 3600] + ], + "name": "foo", + "columns": ["column_one", "column_two", "column_three", "time"] + } + ] + + points_ms = copy.deepcopy(points) + points_ms[0]["points"][1][-1] = 3600 * 1000 + + points_us = copy.deepcopy(points) + points_us[0]["points"][1][-1] = 3600 * 1000000 + + with requests_mock.Mocker() as m: + m.register_uri(requests_mock.POST, + "http://localhost:8086/db/db/series") + + cli = DataFrameClient(database='db') + + cli.write_points({"foo": dataframe}, time_precision='s') + self.assertListEqual(json.loads(m.last_request.body), points) + + cli.write_points({"foo": dataframe}, time_precision='m') + self.assertListEqual(json.loads(m.last_request.body), points_ms) + + cli.write_points({"foo": dataframe}, time_precision='u') + self.assertListEqual(json.loads(m.last_request.body), points_us) + + @raises(TypeError) + def test_write_points_from_dataframe_fails_without_time_index(self): + dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], + columns=["column_one", "column_two", + "column_three"]) + + with requests_mock.Mocker() as m: + m.register_uri(requests_mock.POST, + "http://localhost:8086/db/db/series") + + cli = DataFrameClient(database='db') + cli.write_points({"foo": dataframe}) + + @raises(TypeError) + def test_write_points_from_dataframe_fails_with_series(self): + now = pd.Timestamp('1970-01-01 00:00+00:00') + dataframe = pd.Series(data=[1.0, 2.0], + index=[now, now + timedelta(hours=1)]) + + with requests_mock.Mocker() as m: + m.register_uri(requests_mock.POST, + "http://localhost:8086/db/db/series") + + cli = DataFrameClient(database='db') 
+ cli.write_points({"foo": dataframe}) + + def test_query_into_dataframe(self): + data = [ + { + "name": "foo", + "columns": ["time", "sequence_number", "column_one"], + "points": [ + [3600, 16, 2], [3600, 15, 1], + [0, 14, 2], [0, 13, 1] + ] + } + ] + # dataframe sorted ascending by time first, then sequence_number + dataframe = pd.DataFrame(data=[[13, 1], [14, 2], [15, 1], [16, 2]], + index=pd.to_datetime([0, 0, + 3600, 3600], + unit='s', utc=True), + columns=['sequence_number', 'column_one']) + with _mocked_session('get', 200, data): + cli = DataFrameClient('host', 8086, 'username', 'password', 'db') + result = cli.query('select column_one from foo;') + assert_frame_equal(dataframe, result) + + def test_query_multiple_time_series(self): + data = [ + { + "name": "series1", + "columns": ["time", "mean", "min", "max", "stddev"], + "points": [[0, 323048, 323048, 323048, 0]] + }, + { + "name": "series2", + "columns": ["time", "mean", "min", "max", "stddev"], + "points": [[0, -2.8233, -2.8503, -2.7832, 0.0173]] + }, + { + "name": "series3", + "columns": ["time", "mean", "min", "max", "stddev"], + "points": [[0, -0.01220, -0.01220, -0.01220, 0]] + } + ] + dataframes = { + 'series1': pd.DataFrame(data=[[323048, 323048, 323048, 0]], + index=pd.to_datetime([0], unit='s', + utc=True), + columns=['mean', 'min', 'max', 'stddev']), + 'series2': pd.DataFrame(data=[[-2.8233, -2.8503, -2.7832, 0.0173]], + index=pd.to_datetime([0], unit='s', + utc=True), + columns=['mean', 'min', 'max', 'stddev']), + 'series3': pd.DataFrame(data=[[-0.01220, -0.01220, -0.01220, 0]], + index=pd.to_datetime([0], unit='s', + utc=True), + columns=['mean', 'min', 'max', 'stddev']) + } + with _mocked_session('get', 200, data): + cli = DataFrameClient('host', 8086, 'username', 'password', 'db') + result = cli.query("""select mean(value), min(value), max(value), + stddev(value) from series1, series2, series3""") + assert dataframes.keys() == result.keys() + for key in dataframes.keys(): + 
assert_frame_equal(dataframes[key], result[key]) + + def test_query_with_empty_result(self): + with _mocked_session('get', 200, []): + cli = DataFrameClient('host', 8086, 'username', 'password', 'db') + result = cli.query('select column_one from foo;') + assert result == [] + + def test_list_series(self): + response = [ + { + 'columns': ['time', 'name'], + 'name': 'list_series_result', + 'points': [[0, 'seriesA'], [0, 'seriesB']] + } + ] + with _mocked_session('get', 200, response): + cli = DataFrameClient('host', 8086, 'username', 'password', 'db') + series_list = cli.get_list_series() + assert series_list == ['seriesA', 'seriesB'] + + def test_datetime_to_epoch(self): + timestamp = pd.Timestamp('2013-01-01 00:00:00.000+00:00') + cli = DataFrameClient('host', 8086, 'username', 'password', 'db') + + self.assertEqual( + cli._datetime_to_epoch(timestamp), + 1356998400.0 + ) + self.assertEqual( + cli._datetime_to_epoch(timestamp, time_precision='s'), + 1356998400.0 + ) + self.assertEqual( + cli._datetime_to_epoch(timestamp, time_precision='m'), + 1356998400000.0 + ) + self.assertEqual( + cli._datetime_to_epoch(timestamp, time_precision='ms'), + 1356998400000.0 + ) + self.assertEqual( + cli._datetime_to_epoch(timestamp, time_precision='u'), + 1356998400000000.0 + ) diff --git a/tests/influxdb/influxdb08/helper_test.py b/tests/influxdb/influxdb08/helper_test.py new file mode 100644 index 00000000..3f546a99 --- /dev/null +++ b/tests/influxdb/influxdb08/helper_test.py @@ -0,0 +1,194 @@ +# -*- coding: utf-8 -*- + +import unittest +import warnings + +import mock +from influxdb.influxdb08 import SeriesHelper, InfluxDBClient +from requests.exceptions import ConnectionError + + +class TestSeriesHelper(unittest.TestCase): + + @classmethod + def setUpClass(cls): + super(TestSeriesHelper, cls).setUpClass() + + TestSeriesHelper.client = InfluxDBClient( + 'host', + 8086, + 'username', + 'password', + 'database' + ) + + class MySeriesHelper(SeriesHelper): + + class Meta: + client = 
TestSeriesHelper.client + series_name = 'events.stats.{server_name}' + fields = ['time', 'server_name'] + bulk_size = 5 + autocommit = True + + TestSeriesHelper.MySeriesHelper = MySeriesHelper + + def test_auto_commit(self): + """ + Tests that write_points is called after the right number of events + """ + class AutoCommitTest(SeriesHelper): + + class Meta: + series_name = 'events.stats.{server_name}' + fields = ['time', 'server_name'] + bulk_size = 5 + client = InfluxDBClient() + autocommit = True + + fake_write_points = mock.MagicMock() + AutoCommitTest(server_name='us.east-1', time=159) + AutoCommitTest._client.write_points = fake_write_points + AutoCommitTest(server_name='us.east-1', time=158) + AutoCommitTest(server_name='us.east-1', time=157) + AutoCommitTest(server_name='us.east-1', time=156) + self.assertFalse(fake_write_points.called) + AutoCommitTest(server_name='us.east-1', time=3443) + self.assertTrue(fake_write_points.called) + + def testSingleSeriesName(self): + """ + Tests JSON conversion when there is only one series name. 
+ """
+ TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=159)
+ TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=158)
+ TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=157)
+ TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=156)
+ expectation = [{'points': [[159, 'us.east-1'],
+ [158, 'us.east-1'],
+ [157, 'us.east-1'],
+ [156, 'us.east-1']],
+ 'name': 'events.stats.us.east-1',
+ 'columns': ['time', 'server_name']}]
+
+ rcvd = TestSeriesHelper.MySeriesHelper._json_body_()
+ self.assertTrue(all([el in expectation for el in rcvd]) and
+ all([el in rcvd for el in expectation]),
+ 'Invalid JSON body of time series returned from '
+ '_json_body_ for one series name: {}.'.format(rcvd))
+ TestSeriesHelper.MySeriesHelper._reset_()
+ self.assertEqual(
+ TestSeriesHelper.MySeriesHelper._json_body_(),
+ [],
+ 'Resetting helper did not empty datapoints.')
+
+ def testSeveralSeriesNames(self):
+ '''
+ Tests JSON conversion when there are several series names. 
+ ''' + TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=159) + TestSeriesHelper.MySeriesHelper(server_name='fr.paris-10', time=158) + TestSeriesHelper.MySeriesHelper(server_name='lu.lux', time=157) + TestSeriesHelper.MySeriesHelper(server_name='uk.london', time=156) + expectation = [{'points': [[157, 'lu.lux']], + 'name': 'events.stats.lu.lux', + 'columns': ['time', 'server_name']}, + {'points': [[156, 'uk.london']], + 'name': 'events.stats.uk.london', + 'columns': ['time', 'server_name']}, + {'points': [[158, 'fr.paris-10']], + 'name': 'events.stats.fr.paris-10', + 'columns': ['time', 'server_name']}, + {'points': [[159, 'us.east-1']], + 'name': 'events.stats.us.east-1', + 'columns': ['time', 'server_name']}] + + rcvd = TestSeriesHelper.MySeriesHelper._json_body_() + self.assertTrue(all([el in expectation for el in rcvd]) and + all([el in rcvd for el in expectation]), + 'Invalid JSON body of time series returned from ' + '_json_body_ for several series names: {}.' + .format(rcvd)) + TestSeriesHelper.MySeriesHelper._reset_() + self.assertEqual( + TestSeriesHelper.MySeriesHelper._json_body_(), + [], + 'Resetting helper did not empty datapoints.') + + def testInvalidHelpers(self): + ''' + Tests errors in invalid helpers. + ''' + class MissingMeta(SeriesHelper): + pass + + class MissingClient(SeriesHelper): + + class Meta: + series_name = 'events.stats.{server_name}' + fields = ['time', 'server_name'] + autocommit = True + + class MissingSeriesName(SeriesHelper): + + class Meta: + fields = ['time', 'server_name'] + + class MissingFields(SeriesHelper): + + class Meta: + series_name = 'events.stats.{server_name}' + + for cls in [MissingMeta, MissingClient, MissingFields, + MissingSeriesName]: + self.assertRaises( + AttributeError, cls, **{'time': 159, + 'server_name': 'us.east-1'}) + + def testWarnBulkSizeZero(self): + """ + Tests warning for an invalid bulk size. 
+ """ + class WarnBulkSizeZero(SeriesHelper): + + class Meta: + client = TestSeriesHelper.client + series_name = 'events.stats.{server_name}' + fields = ['time', 'server_name'] + bulk_size = 0 + autocommit = True + + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + try: + WarnBulkSizeZero(time=159, server_name='us.east-1') + except ConnectionError: + # Server defined in the client is invalid, we're testing + # the warning only. + pass + self.assertEqual(len(w), 1, + '{} call should have generated one warning.' + .format(WarnBulkSizeZero)) + self.assertIn('forced to 1', str(w[-1].message), + 'Warning message did not contain "forced to 1".') + + def testWarnBulkSizeNoEffect(self): + """ + Tests warning for a set bulk size but autocommit False. + """ + class WarnBulkSizeNoEffect(SeriesHelper): + + class Meta: + series_name = 'events.stats.{server_name}' + fields = ['time', 'server_name'] + bulk_size = 5 + autocommit = False + + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + WarnBulkSizeNoEffect(time=159, server_name='us.east-1') + self.assertEqual(len(w), 1, + '{} call should have generated one warning.' 
+ .format(WarnBulkSizeNoEffect)) + self.assertIn('has no affect', str(w[-1].message), + 'Warning message did not contain "has not affect".') From 08f1c3d3f69537dd0a755675f1dc140385abb2de Mon Sep 17 00:00:00 2001 From: aviau Date: Thu, 19 Mar 2015 12:21:14 -0400 Subject: [PATCH 037/536] client_test.py: chmod 644 --- tests/influxdb/client_test.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) mode change 100755 => 100644 tests/influxdb/client_test.py diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py old mode 100755 new mode 100644 From f95bd8212ae4221162cc5452704e620cf57e45d5 Mon Sep 17 00:00:00 2001 From: aviau Date: Thu, 19 Mar 2015 16:33:27 -0400 Subject: [PATCH 038/536] Run UDP tests on random ports --- tests/influxdb/client_test.py | 6 ++++-- tests/influxdb/influxdb08/client_test.py | 6 ++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index 7b73e63f..69858c42 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -8,6 +8,7 @@ import socket import unittest import requests_mock +import random from nose.tools import raises from mock import patch import warnings @@ -160,11 +161,12 @@ def test_write_points_batch(self): def test_write_points_udp(self): s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - s.bind(('0.0.0.0', 4444)) + port = random.randint(4000, 8000) + s.bind(('0.0.0.0', port)) cli = InfluxDBClient( 'localhost', 8086, 'root', 'root', - 'test', use_udp=True, udp_port=4444 + 'test', use_udp=True, udp_port=port ) cli.write_points(self.dummy_points) diff --git a/tests/influxdb/influxdb08/client_test.py b/tests/influxdb/influxdb08/client_test.py index f5631388..ca83004a 100644 --- a/tests/influxdb/influxdb08/client_test.py +++ b/tests/influxdb/influxdb08/client_test.py @@ -8,6 +8,7 @@ import socket import unittest import requests_mock +import random from nose.tools import raises from mock import patch import warnings @@ 
-176,11 +177,12 @@ def test_write_points_batch(self): def test_write_points_udp(self): s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - s.bind(('0.0.0.0', 4444)) + port = random.randint(4000, 8000) + s.bind(('0.0.0.0', port)) cli = InfluxDBClient( 'localhost', 8086, 'root', 'root', - 'test', use_udp=True, udp_port=4444 + 'test', use_udp=True, udp_port=port ) cli.write_points(self.dummy_points) From 17602b256758e4130daf7fd56c0cca85f8487d35 Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 20 Mar 2015 13:39:06 -0400 Subject: [PATCH 039/536] Fixed bad string format syntax --- influxdb/client.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 1997d2be..67d063ca 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -325,7 +325,7 @@ def create_retention_policy( query_string = \ "CREATE RETENTION POLICY %s ON %s " \ "DURATION %s REPLICATION %s" % \ - (name, database or self._database, duration, replication) + (name, (database or self._database), duration, replication) if default is True: query_string += " DEFAULT" @@ -337,14 +337,14 @@ def get_list_retention_policies(self, database=None): Get the list of retention policies """ return self.query( - "SHOW RETENTION POLICIES %s" % database or self._database + "SHOW RETENTION POLICIES %s" % (database or self._database) ) def get_list_series(self, database=None): """ Get the list of series """ - return self.query("SHOW SERIES", database=database or self._database) + return self.query("SHOW SERIES", database=(database or self._database)) def get_list_users(self): """ From c957248cf1e998080a97cc5a66d488dd89debfaf Mon Sep 17 00:00:00 2001 From: Alexandre Viau Date: Fri, 20 Mar 2015 16:37:56 -0400 Subject: [PATCH 040/536] README: Added InfluxDB 0.9 support notice --- README.rst | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/README.rst b/README.rst index 2d24281e..247e5019 100644 --- a/README.rst +++ b/README.rst @@ -36,6 +36,12 @@ InfluxDB is 
an open-source distributed time series database, find more about Inf .. _installation: +InfluxDB > v0.9 support +======================= + +The 1.0.0 version of this library now supports InfluxDB 0.9. Please note that InfluxDB 0.9 is still pre-release software. For stability, you should use the ``influxdb.influxdb08`` module in conjunction with InfluxDB 0.8. + + InfluxDB v0.8.X users ===================== From f44665fa4821a9b421daca99ce2c859bf333c138 Mon Sep 17 00:00:00 2001 From: Tim Martin Date: Tue, 24 Mar 2015 13:40:30 -0400 Subject: [PATCH 041/536] Updated README.rst Fixed link to readthedocs.org. Now correctly links to the documentation instead of the project home. --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 247e5019..57240512 100644 --- a/README.rst +++ b/README.rst @@ -6,7 +6,7 @@ InfluxDB-Python is a client for interacting with InfluxDB_. :target: https://travis-ci.org/influxdb/influxdb-python .. image:: https://readthedocs.org/projects/influxdb-python/badge/?version=latest&style - :target: https://readthedocs.org/projects/influxdb-python/?badge=latest + :target: http://influxdb-python.readthedocs.org/en/latest/ :alt: Documentation Status .. image:: https://img.shields.io/coveralls/influxdb/influxdb-python.svg From 3693aa9ade195771d03db6d820961a0f4467d147 Mon Sep 17 00:00:00 2001 From: Tim Martin Date: Tue, 24 Mar 2015 13:43:28 -0400 Subject: [PATCH 042/536] Targeted english branch inappropriately. --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 57240512..85d496ac 100644 --- a/README.rst +++ b/README.rst @@ -6,7 +6,7 @@ InfluxDB-Python is a client for interacting with InfluxDB_. :target: https://travis-ci.org/influxdb/influxdb-python .. 
image:: https://readthedocs.org/projects/influxdb-python/badge/?version=latest&style - :target: http://influxdb-python.readthedocs.org/en/latest/ + :target: http://influxdb-python.readthedocs.org/ :alt: Documentation Status .. image:: https://img.shields.io/coveralls/influxdb/influxdb-python.svg From e45f18ceeede9fe865bacf601a650af3f3655c56 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Wed, 18 Mar 2015 17:19:54 -0400 Subject: [PATCH 043/536] slightly better to raise a ValueError here. --- influxdb/client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 67d063ca..be6f44b6 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -261,12 +261,12 @@ def _write_points(self, database, retention_policy): if time_precision not in ['n', 'u', 'ms', 's', 'm', 'h', None]: - raise Exception( + raise ValueError( "Invalid time precision is given. " "(use 'n', 'u', 'ms', 's', 'm' or 'h')") if self.use_udp and time_precision and time_precision != 's': - raise Exception( + raise ValueError( "InfluxDB only supports seconds precision for udp writes" ) From ad7159aab0f9e3da7af31e1470a43aac15034a8c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Wed, 18 Mar 2015 17:24:37 -0400 Subject: [PATCH 044/536] slightly better to use super(..) here. --- influxdb/dataframe_client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/dataframe_client.py b/influxdb/dataframe_client.py index 8191bcba..c5fb0c49 100644 --- a/influxdb/dataframe_client.py +++ b/influxdb/dataframe_client.py @@ -86,7 +86,7 @@ def query(self, query, time_precision='s', chunked=False): retrieved in chunks, False otherwise. 
""" - result = InfluxDBClient.query(self, + result = super(DataFrameClient, self).query( query=query, time_precision=time_precision, chunked=chunked) From 60b58d67f076d9554ab4821101b1031da461b054 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Thu, 19 Mar 2015 09:15:50 -0400 Subject: [PATCH 045/536] Use enumerate. --- influxdb/client.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index be6f44b6..e0d7adba 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -113,10 +113,9 @@ def format_query_response(response): if 'columns' in row.keys() and 'values' in row.keys(): for value in row['values']: item = {} - current_col = 0 - for field in value: - item[row['columns'][current_col]] = field - current_col += 1 + for cur_col, field in enumerate(value): + item[row['columns'][cur_col]] = field + cur_col += 1 items.append(item) return series From fb273cd5aa63158c0e00734d64bfbde573f2f7dc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Thu, 19 Mar 2015 11:33:46 -0400 Subject: [PATCH 046/536] Rename dataframe_client to _dataframe_client. --- influxdb/{dataframe_client.py => _dataframe_client.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename influxdb/{dataframe_client.py => _dataframe_client.py} (100%) diff --git a/influxdb/dataframe_client.py b/influxdb/_dataframe_client.py similarity index 100% rename from influxdb/dataframe_client.py rename to influxdb/_dataframe_client.py From 1c5fed3568aec629ff1b482ecd3a8c2d7f91b15b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Mon, 23 Mar 2015 09:28:42 -0400 Subject: [PATCH 047/536] Slightly better dataframe_client. 
--- influxdb/_dataframe_client.py | 31 ++++++++----------------- influxdb/dataframe_client.py | 19 +++++++++++++++ tests/influxdb/dataframe_client_test.py | 25 ++++++++++++++------ 3 files changed, 47 insertions(+), 28 deletions(-) create mode 100644 influxdb/dataframe_client.py diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index c5fb0c49..7e834464 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -7,10 +7,7 @@ from .client import InfluxDBClient -try: - import pandas as pd -except ImportError: - pd = None +import pandas as pd class DataFrameClient(InfluxDBClient): @@ -20,14 +17,7 @@ class DataFrameClient(InfluxDBClient): The client reads and writes from pandas DataFrames. """ - def __init__(self, *args, **kwargs): - super(DataFrameClient, self).__init__(*args, **kwargs) - if not pd: - raise ImportError( - 'DataFrameClient requires Pandas' - ) - - self.EPOCH = pd.Timestamp('1970-01-01 00:00:00.000+00:00') + EPOCH = pd.Timestamp('1970-01-01 00:00:00.000+00:00') def write_points(self, data, *args, **kwargs): """ @@ -56,13 +46,15 @@ def write_points(self, data, *args, **kwargs): name=key, dataframe=data_frame.ix[start_index:end_index].copy(), time_precision=time_precision)] - InfluxDBClient.write_points(self, data, *args, **kwargs) + super(DataFrameClient, self).write_points(data, + *args, **kwargs) return True else: data = [self._convert_dataframe_to_json( name=key, dataframe=dataframe, time_precision=time_precision) for key, dataframe in data.items()] - return InfluxDBClient.write_points(self, data, *args, **kwargs) + return super(DataFrameClient, self).write_points(data, + *args, **kwargs) def write_points_with_precision(self, data, time_precision='s'): """ @@ -86,14 +78,11 @@ def query(self, query, time_precision='s', chunked=False): retrieved in chunks, False otherwise. 
""" - result = super(DataFrameClient, self).query( - query=query, - time_precision=time_precision, - chunked=chunked) - if len(result['results'][0]) > 0: - return self._to_dataframe(result['results'][0], time_precision) + results = super(DataFrameClient, self).query(query, database=database) + if len(results) > 0: + return self._to_dataframe(results, time_precision) else: - return result + return results def _to_dataframe(self, json_result, time_precision): dataframe = pd.DataFrame(data=json_result['points'], diff --git a/influxdb/dataframe_client.py b/influxdb/dataframe_client.py new file mode 100644 index 00000000..41a6796c --- /dev/null +++ b/influxdb/dataframe_client.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +""" +DataFrame client for InfluxDB +""" + +__all__ = ['DataFrameClient'] + +try: + import pandas + del pandas +except ImportError as err: + from .client import InfluxDBClient + + class DataFrameClient(InfluxDBClient): + def __init__(self, *a, **kw): + raise ImportError("DataFrameClient requires Pandas " + "which couldn't be imported: %s" % err) +else: + from ._dataframe_client import DataFrameClient diff --git a/tests/influxdb/dataframe_client_test.py b/tests/influxdb/dataframe_client_test.py index 135e718b..fd4af039 100644 --- a/tests/influxdb/dataframe_client_test.py +++ b/tests/influxdb/dataframe_client_test.py @@ -211,13 +211,24 @@ def test_query_with_empty_result(self): assert result == [] def test_list_series(self): - response = [ - { - 'columns': ['time', 'name'], - 'name': 'list_series_result', - 'points': [[0, 'seriesA'], [0, 'seriesB']] - } - ] + response = { + 'results': [ + { + 'series': [{ + 'columns': ['id'], + 'name': 'seriesA', + 'values': [[0]], + }] + }, + { + 'series': [{ + 'columns': ['id'], + 'name': 'seriesB', + 'values': [[1]], + }] + }, + ] + } with _mocked_session('get', 200, response): cli = DataFrameClient('host', 8086, 'username', 'password', 'db') series_list = cli.get_list_series() From 
9ae3e1759496dafe7032c732b7cdbd3247884c81 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Fri, 20 Mar 2015 15:56:27 -0400 Subject: [PATCH 048/536] DRY: "database or self._database" is already handled at self.query() level. --- influxdb/client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index e0d7adba..e582491f 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -324,7 +324,7 @@ def create_retention_policy( query_string = \ "CREATE RETENTION POLICY %s ON %s " \ "DURATION %s REPLICATION %s" % \ - (name, (database or self._database), duration, replication) + (name, database or self._database, duration, replication) if default is True: query_string += " DEFAULT" @@ -343,7 +343,7 @@ def get_list_series(self, database=None): """ Get the list of series """ - return self.query("SHOW SERIES", database=(database or self._database)) + return self.query("SHOW SERIES", database=database) def get_list_users(self): """ From beda89daf46c22455981d72305bf024e3550605e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Fri, 20 Mar 2015 16:48:29 -0400 Subject: [PATCH 049/536] Better to not accept any *args+**kwargs that we simply don't use. Caller has not to give us that. --- influxdb/client.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index e582491f..e1c75d13 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -236,8 +236,7 @@ def write_points(self, time_precision=None, database=None, retention_policy=None, - *args, - **kwargs): + ): """ Write to multiple time series names. 
From 421fe5474c66da11981947d0cd0fd6c291089e9e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Wed, 25 Mar 2015 10:52:21 -0400 Subject: [PATCH 050/536] Useless when using enumerate() --- influxdb/client.py | 1 - 1 file changed, 1 deletion(-) diff --git a/influxdb/client.py b/influxdb/client.py index e1c75d13..6170e397 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -115,7 +115,6 @@ def format_query_response(response): item = {} for cur_col, field in enumerate(value): item[row['columns'][cur_col]] = field - cur_col += 1 items.append(item) return series From 48e08a169eb1aa5166ba86cf1357eb5d751d1808 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Fri, 20 Mar 2015 15:55:43 -0400 Subject: [PATCH 051/536] Add: implemented a "real" client test module. By *real* we mean : one which really connect to a server instance. --- tests/influxdb/client_test.py | 14 +- tests/influxdb/client_test_with_server.py | 513 ++++++++++++++++++++++ tests/influxdb/influxdb.conf.template | 99 +++++ tests/influxdb/influxdb.udp_conf.template | 99 +++++ tests/influxdb/misc.py | 24 + 5 files changed, 748 insertions(+), 1 deletion(-) create mode 100644 tests/influxdb/client_test_with_server.py create mode 100644 tests/influxdb/influxdb.conf.template create mode 100644 tests/influxdb/influxdb.udp_conf.template create mode 100644 tests/influxdb/misc.py diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index 69858c42..3f0a08fb 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -1,6 +1,18 @@ # -*- coding: utf-8 -*- """ -unit tests +unit tests for the InfluxDBClient. + +NB/WARNING : +This module implements tests for the InfluxDBClient class +but does so + + without any server instance running + + by mocking all the expected responses. + +So any change of (response format from) the server will **NOT** be +detected by this module. 
+ +See client_test_with_server.py for tests against a running server instance. + """ import json import requests diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py new file mode 100644 index 00000000..af7c7aae --- /dev/null +++ b/tests/influxdb/client_test_with_server.py @@ -0,0 +1,513 @@ +# -*- coding: utf-8 -*- +""" +unit tests for checking the good/expected interaction between : + ++ the python client.. (obviously) ++ and a *_real_* server instance running. + +This basically duplicates what's in client_test.py + but without mocking around every call. + +""" + +from __future__ import print_function + +import datetime +import distutils.spawn +import os +import re +import shutil +import subprocess +import sys +import tempfile +import time +import unittest + +import warnings + +from influxdb import InfluxDBClient +from influxdb.client import InfluxDBClientError + +from .misc import get_free_port, is_port_open + + +THIS_DIR = os.path.abspath(os.path.dirname(__file__)) + + +# try to find where is located the 'influxd' binary: +# You can define 'InfluxDbPythonClientTest_SERVER_BIN_PATH' +# env var to force it : +influxdb_bin_path = influxdb_forced_bin = os.environ.get( + 'InfluxDbPythonClientTest_SERVER_BIN_PATH', '') +if not influxdb_bin_path: + try: + influxdb_bin_path = distutils.spawn.find_executable('influxd') + if not influxdb_bin_path: + raise Exception('not found via distutils') + except Exception as err: + try: + influxdb_bin_path = subprocess.check_output( + ['which', 'influxdb']).strip() + except subprocess.CalledProcessError as err: + # fallback on : + influxdb_bin_path = '/opt/influxdb/influxd' + +is_influxdb_bin_ok = ( + # if the env var is set then consider the influxdb_bin as OK.. 
+ influxdb_forced_bin + or (os.path.isfile(influxdb_bin_path) + and os.access(influxdb_bin_path, os.X_OK)) +) + +if is_influxdb_bin_ok: + # read version : + version = subprocess.check_output([influxdb_bin_path, 'version']) + print(version, file=sys.stderr) + + +dummy_point = [ # some dummy points .. :o + { + "name": "cpu_load_short", + "tags": { + "host": "server01", + "region": "us-west" + }, + "timestamp": "2009-11-10T23:00:00Z", + "fields": { + "value": 0.64 + } + } +] + +dummy_points = [ # some dummy points .. :o + dummy_point[0], + { + "name": "memory", + "tags": { + "host": "server01", + "region": "us-west" + }, + "timestamp": "2009-11-10T23:01:35Z", + "fields": { + "value": 33 + } + } +] + +dummy_point_without_timestamp = [ + { + "name": "cpu_load_short", + "tags": { + "host": "server02", + "region": "us-west" + }, + "fields": { + "value": 0.64 + } + } +] + + +class InfluxDbInstance(object): + + def __init__(self, conf_template): + # create a fresh temporary place for storing all needed files + # for the influxdb server instance : + self.temp_dir_base = tempfile.mkdtemp() + # "temp_dir_base" will be used for conf file and logs, + # while "temp_dir_influxdb" is for the databases files/dirs : + self.temp_dir_influxdb = tempfile.mkdtemp( + dir=self.temp_dir_base) + # we need some "free" ports : + self.broker_port = get_free_port() + self.admin_port = get_free_port() + # as it's UDP we can reuse the same port than the broker: + self.udp_port = get_free_port() + + self.logs_file = os.path.join( + self.temp_dir_base, 'logs.txt') + + with open(conf_template) as fh: + conf = fh.read().format( + broker_port=self.broker_port, + admin_port=self.admin_port, + udp_port=self.udp_port, + broker_raft_dir=os.path.join( + self.temp_dir_influxdb, 'raft'), + broker_node_dir=os.path.join( + self.temp_dir_influxdb, 'db'), + influxdb_cluster_dir=os.path.join( + self.temp_dir_influxdb, 'state'), + influxdb_logfile=self.logs_file + ) + + conf_file = os.path.join(self.temp_dir_base, 
'influxdb.conf') + with open(conf_file, "w") as fh: + fh.write(conf) + + # now start the server instance: + proc = self.proc = subprocess.Popen( + [influxdb_bin_path, '-config', conf_file], + stdout=subprocess.PIPE, stderr=subprocess.PIPE) + + print("%s > Started influxdb bin in %r with ports %s and %s.." % ( + datetime.datetime.now(), + self.temp_dir_base, + self.admin_port, self.broker_port)) + + # wait for it to listen on the broker and admin ports: + # usually a fresh instance is ready in less than 1 sec .. + timeout = time.time() + 10 # so 10 secs should be rather enough, + # otherwise either your system load is rather high, + # or you run a 286 @ 1Mhz ? + try: + while time.time() < timeout: + if (is_port_open(self.broker_port) + and is_port_open(self.admin_port)): + break + time.sleep(0.5) + if proc.poll() is not None: + raise RuntimeError('influxdb prematurely exited') + else: + proc.terminate() + proc.wait() + raise RuntimeError('Timeout waiting for influxdb to listen' + ' on its broker port') + except RuntimeError as err: + data = self.get_logs_and_output() + data['reason'] = str(err) + data['now'] = datetime.datetime.now() + raise RuntimeError("%(now)s > %(reason)s. RC=%(rc)s\n" + "stdout=%(out)r\nstderr=%(err)r\nlogs=%(logs)r" + % data) + + def get_logs_and_output(self): + proc = self.proc + with open(self.logs_file) as fh: + return { + 'rc': proc.returncode, + 'out': proc.stdout.read(), + 'err': proc.stderr.read(), + 'logs': fh.read() + } + + def close(self, remove_tree=True): + self.proc.terminate() + self.proc.wait() + if remove_tree: + shutil.rmtree(self.temp_dir_base) + + +class InfluxDbClientTestWithServerInstanceMixin(object): + ''' A mixin for unittest.TestCase to start an influxdb server instance + in a fresh temporary place. 
+ ''' + + # 'influxdb_template_conf' attribute must be set on the class or instance + + def setUp(self): + # By default, raise exceptions on warnings + warnings.simplefilter('error', FutureWarning) + + self.influxd_inst = InfluxDbInstance(self.influxdb_template_conf) + self.cli = InfluxDBClient('localhost', + self.influxd_inst.broker_port, + 'root', '', database='db') + + def tearDown(self): + remove_tree = sys.exc_info() == (None, None, None) + self.influxd_inst.close(remove_tree=remove_tree) + + +@unittest.skipIf(not is_influxdb_bin_ok, "not found any influxd binary") +class TestInfluxDBClient(InfluxDbClientTestWithServerInstanceMixin, + unittest.TestCase): + + influxdb_template_conf = os.path.join(THIS_DIR, 'influxdb.conf.template') + + def test_create_database(self): + self.assertIsNone(self.cli.create_database('new_db_1')) + self.assertIsNone(self.cli.create_database('new_db_2')) + self.assertEqual( + self.cli.get_list_database(), + ['new_db_1', 'new_db_2'] + ) + + def test_create_database_fails(self): + self.assertIsNone(self.cli.create_database('new_db')) + with self.assertRaises(InfluxDBClientError) as ctx: + self.cli.create_database('new_db') + self.assertEqual(500, ctx.exception.code) + self.assertEqual('{"results":[{"error":"database exists"}]}', + ctx.exception.content) + + def test_drop_database(self): + self.test_create_database() + self.assertIsNone(self.cli.drop_database('new_db_1')) + self.assertEqual(['new_db_2'], self.cli.get_list_database()) + + def test_drop_database_fails(self): + with self.assertRaises(InfluxDBClientError) as ctx: + self.cli.drop_database('db') + self.assertEqual(500, ctx.exception.code) + self.assertEqual('{"results":[{"error":"database not found"}]}', + ctx.exception.content) + + def test_write(self): + new_dummy_point = dummy_point[0].copy() + new_dummy_point['database'] = 'db' + self.cli.create_database('db') + self.assertIs(True, self.cli.write(new_dummy_point)) + + @unittest.skip("fail against real server instance, " + 
"don't know if it should succeed actually..") + def test_write_check_read(self): + self.test_write() + # hmmmm damn, + # after write has returned, if we directly query for the data it's not + # directly available.. (don't know if this is expected behavior ( + # but it maybe)) + # So we have to : + time.sleep(5) + # so that then the data is available through select : + rsp = self.cli.query('SELECT * FROM cpu_load_short', database='db') + self.assertEqual( + {'cpu_load_short': [ + {'value': 0.64, 'time': '2009-11-10T23:00:00Z'}]}, + rsp + ) + + def test_write_points(self): + ''' same as test_write() but with write_points \o/ ''' + self.cli.create_database('db') + self.assertIs(True, self.cli.write_points(dummy_point)) + + def test_write_points_check_read(self): + ''' same as test_write_check_read() but with write_points \o/ ''' + self.test_write_points() + time.sleep(1) # same as test_write_check_read() + self.assertEqual( + {'cpu_load_short': [ + {'value': 0.64, 'time': '2009-11-10T23:00:00Z'}]}, + self.cli.query('SELECT * FROM cpu_load_short')) + + def test_write_multiple_points_different_series(self): + self.cli.create_database('db') + self.assertIs(True, self.cli.write_points(dummy_points)) + time.sleep(1) + self.assertEqual( + {'cpu_load_short': [ + {'value': 0.64, 'time': '2009-11-10T23:00:00Z'}]}, + self.cli.query('SELECT * FROM cpu_load_short')) + self.assertEqual( + {'memory': [ + {'time': '2009-11-10T23:01:35Z', 'value': 33}]}, + self.cli.query('SELECT * FROM memory')) + + @unittest.skip('Not implemented for 0.9') + def test_write_points_batch(self): + self.cli.create_database('db') + self.cli.write_points( + points=dummy_point * 3, + batch_size=2 + ) + + def test_write_points_with_precision(self): + ''' check that points written with an explicit precision have + actually that precision used. + ''' + # for that we'll check that - for each precision - the actual 'time' + # value returned by a select has the correct regex format.. 
+ # n : u'2015-03-20T15:23:36.615654966Z' + # u : u'2015-03-20T15:24:10.542554Z' + # ms : u'2015-03-20T15:24:50.878Z' + # s : u'2015-03-20T15:20:24Z' + # m : u'2015-03-20T15:25:00Z' + # h : u'2015-03-20T15:00:00Z' + base_regex = '\d{4}-\d{2}-\d{2}T\d{2}:' # YYYY-MM-DD 'T' hh: + base_s_regex = base_regex + '\d{2}:\d{2}' # base_regex + mm:ss + + # As far as we can see the values aren't directly available depending + # on the precision used. + # The less the precision, the more to wait for the value to be + # actually written/available. + for idx, (precision, expected_regex, sleep_time) in enumerate(( + ('n', base_s_regex + '\.\d{1,9}Z', 1), + ('u', base_s_regex + '\.\d{1,6}Z', 1), + ('ms', base_s_regex + '\.\d{1,3}Z', 1), + ('s', base_s_regex + 'Z', 1), + ('m', base_regex + '\d{2}:00Z', 60), + + # ('h', base_regex + '00:00Z', ), + # that would require a sleep of possibly up to 3600 secs (/ 2 ?).. + )): + db = 'db' + self.cli.create_database(db) + before = datetime.datetime.now() + self.assertIs( + True, + self.cli.write_points( + dummy_point_without_timestamp, + time_precision=precision, + database=db)) + + # sys.stderr.write('checking presision with %r : + # before=%s\n' % (precision, before)) + after = datetime.datetime.now() + + if sleep_time > 1: + sleep_time -= (after if before.min != after.min + else before).second + + start = time.time() + timeout = start + sleep_time + # sys.stderr.write('should sleep %s ..\n' % sleep_time) + while time.time() < timeout: + rsp = self.cli.query('SELECT * FROM cpu_load_short', + database=db) + if rsp != {'cpu_load_short': []}: + # sys.stderr.write('already ? 
only slept %s\n' % ( + # time.time() - start)) + break + time.sleep(1) + else: + pass + # sys.stderr.write('ok !\n') + sleep_time = 0 + + if sleep_time: + # sys.stderr.write('sleeping %s..\n' % sleep_time) + time.sleep(sleep_time) + + rsp = self.cli.query('SELECT * FROM cpu_load_short', database=db) + + # sys.stderr.write('precision=%s rsp_timestamp = %r\n' % ( + # precision, rsp['cpu_load_short'][0]['time'])) + m = re.match(expected_regex, rsp['cpu_load_short'][0]['time']) + self.assertIsNotNone(m) + self.cli.drop_database(db) + + def test_query(self): + self.cli.create_database('db') + self.assertIs(True, self.cli.write_points(dummy_point)) + + @unittest.skip('Not implemented for 0.9') + def test_query_chunked(self): + cli = InfluxDBClient(database='db') + example_object = { + 'points': [ + [1415206250119, 40001, 667], + [1415206244555, 30001, 7], + [1415206228241, 20001, 788], + [1415206212980, 10001, 555], + [1415197271586, 10001, 23] + ], + 'name': 'foo', + 'columns': [ + 'time', + 'sequence_number', + 'val' + ] + } + del cli + del example_object + # TODO + + def test_query_fail(self): + with self.assertRaises(InfluxDBClientError) as ctx: + self.cli.query('select column_one from foo') + self.assertEqual( + ('500: {"results":[{"error":"database not found: db"}]}',), + ctx.exception.args) + + def test_get_list_series_empty(self): + self.cli.create_database('mydb') + rsp = self.cli.get_list_series('mydb') + self.assertEqual({}, rsp) + + def test_get_list_series_non_empty(self): + self.cli.create_database('mydb') + self.cli.write_points(dummy_point, database='mydb') + rsp = self.cli.get_list_series('mydb') + self.assertEqual( + {'cpu_load_short': [ + {'region': 'us-west', 'host': 'server01', '_id': 1}]}, + rsp + ) + + def test_default_retention_policy(self): + self.cli.create_database('db') + rsp = self.cli.get_list_retention_policies('db') + self.assertEqual( + [ + {'duration': '0', 'default': True, + 'replicaN': 1, 'name': 'default'}], + rsp + ) + + def 
test_create_retention_policy_default(self): + self.cli.create_database('db') + rsp = self.cli.create_retention_policy( + 'somename', '1d', 4, default=True, database='db' + ) + self.assertIsNone(rsp) + rsp = self.cli.get_list_retention_policies('db') + self.assertEqual( + [ + {'duration': '0', 'default': False, + 'replicaN': 1, 'name': 'default'}, + {'duration': '24h0m0s', 'default': True, + 'replicaN': 4, 'name': 'somename'} + ], + rsp + ) + + def test_create_retention_policy(self): + self.cli.create_database('db') + self.cli.create_retention_policy( + 'somename', '1d', 4, database='db' + ) + rsp = self.cli.get_list_retention_policies('db') + self.assertEqual( + [ + {'duration': '0', 'default': True, 'replicaN': 1, + 'name': 'default'}, + {'duration': '24h0m0s', 'default': False, 'replicaN': 4, + 'name': 'somename'} + ], + rsp + ) + + +@unittest.skipIf(not is_influxdb_bin_ok, "not found any influxd binary") +class UdpTests(InfluxDbClientTestWithServerInstanceMixin, + unittest.TestCase): + + influxdb_template_conf = os.path.join(THIS_DIR, + 'influxdb.udp_conf.template') + + def test_write_points_udp(self): + + cli = InfluxDBClient( + 'localhost', self.influxd_inst.broker_port, + 'dont', 'care', + database='db', + use_udp=True, udp_port=self.influxd_inst.udp_port + ) + cli.create_database('db') + cli.write_points(dummy_point) + + # ho boy, + # once write_points finishes then the points aren't actually + # already directly available !! + # Well, it's normal because we sent by udp (no response !). + # So we have to wait some enough time, + time.sleep(1) # 1 sec seems to be a good choice. 
+ rsp = cli.query('SELECT * FROM cpu_load_short') + + self.assertEqual( + # this is dummy_points : + {'cpu_load_short': [ + {'value': 0.64, 'time': '2009-11-10T23:00:00Z'}]}, + rsp + ) diff --git a/tests/influxdb/influxdb.conf.template b/tests/influxdb/influxdb.conf.template new file mode 100644 index 00000000..4efbf70c --- /dev/null +++ b/tests/influxdb/influxdb.conf.template @@ -0,0 +1,99 @@ +# Welcome to the InfluxDB configuration file. + +# If hostname (on the OS) doesn't return a name that can be resolved by the other +# systems in the cluster, you'll have to set the hostname to an IP or something +# that can be resolved here. +# hostname = "" +bind-address = "0.0.0.0" + +# Once every 24 hours InfluxDB will report anonymous data to m.influxdb.com +# The data includes raft id (random 8 bytes), os, arch and version +# We don't track ip addresses of servers reporting. This is only used +# to track the number of instances running and the versions, which +# is very helpful for us. +# Change this option to true to disable reporting. +reporting-disabled = false + +# Controls settings for initial start-up. Once a node a successfully started, +# these settings are ignored. +[initialization] +join-urls = "" # Comma-delimited URLs, in the form http://host:port, for joining another cluster. + +# Control authentication +# If not set authetication is DISABLED. Be sure to explicitly set this flag to +# true if you want authentication. +[authentication] +enabled = false + +# Configure the admin server +[admin] +enabled = true +port = {admin_port} + +# Configure the HTTP API endpoint. All time-series data and queries uses this endpoint. +[api] +# ssl-port = 8087 # SSL support is enabled if you set a port and cert +# ssl-cert = "/path/to/cert.pem" + +# Configure the Graphite plugins. +[[graphite]] # 1 or more of these sections may be present. +enabled = false +# protocol = "" # Set to "tcp" or "udp" +# address = "0.0.0.0" # If not set, is actually set to bind-address. 
+# port = 2003 +# name-position = "last" +# name-separator = "-" +# database = "" # store graphite data in this database + +# Configure the collectd input. +[collectd] +enabled = false +#address = "0.0.0.0" # If not set, is actually set to bind-address. +#port = 25827 +#database = "collectd_database" +#typesdb = "types.db" + +# Configure UDP listener for series data. +[udp] +enabled = false +#bind-address = "0.0.0.0" +#port = 4444 + +# Broker configuration. Brokers are nodes which participate in distributed +# consensus. +[broker] +# Where the Raft logs are stored. The user running InfluxDB will need read/write access. +dir = "{broker_raft_dir}" +port = {broker_port} + +# Data node configuration. Data nodes are where the time-series data, in the form of +# shards, is stored. +[data] + dir = "{broker_node_dir}" + port = {broker_port} + + # Auto-create a retention policy when a database is created. Defaults to true. + retention-auto-create = true + + # Control whether retention policies are enforced and how long the system waits between + # enforcing those policies. + retention-check-enabled = true + retention-check-period = "10m" + +[cluster] +# Location for cluster state storage. For storing state persistently across restarts. +dir = "{influxdb_cluster_dir}" + +[logging] +file = "{influxdb_logfile}" # Leave blank to redirect logs to stderr. +write-tracing = false # If true, enables detailed logging of the write system. +raft-tracing = false # If true, enables detailed logging of Raft consensus. + +# InfluxDB can store statistics about itself. This is useful for monitoring purposes. +# This feature is disabled by default, but if enabled, these statistics can be queried +# as any other data. +[statistics] +enabled = false +database = "internal" # The database to which the data is written. +retention-policy = "default" # The retention policy within the database. +write-interval = "1m" # Period between writing the data. 
\ No newline at end of file diff --git a/tests/influxdb/influxdb.udp_conf.template b/tests/influxdb/influxdb.udp_conf.template new file mode 100644 index 00000000..79ed4d72 --- /dev/null +++ b/tests/influxdb/influxdb.udp_conf.template @@ -0,0 +1,99 @@ +# Welcome to the InfluxDB configuration file. + +# If hostname (on the OS) doesn't return a name that can be resolved by the other +# systems in the cluster, you'll have to set the hostname to an IP or something +# that can be resolved here. +# hostname = "" +bind-address = "0.0.0.0" + +# Once every 24 hours InfluxDB will report anonymous data to m.influxdb.com +# The data includes raft id (random 8 bytes), os, arch and version +# We don't track ip addresses of servers reporting. This is only used +# to track the number of instances running and the versions, which +# is very helpful for us. +# Change this option to true to disable reporting. +reporting-disabled = false + +# Controls settings for initial start-up. Once a node a successfully started, +# these settings are ignored. +[initialization] +join-urls = "" # Comma-delimited URLs, in the form http://host:port, for joining another cluster. + +# Control authentication +# If not set authetication is DISABLED. Be sure to explicitly set this flag to +# true if you want authentication. +[authentication] +enabled = false + +# Configure the admin server +[admin] +enabled = true +port = {admin_port} + +# Configure the HTTP API endpoint. All time-series data and queries uses this endpoint. +[api] +# ssl-port = 8087 # SSL support is enabled if you set a port and cert +# ssl-cert = "/path/to/cert.pem" + +# Configure the Graphite plugins. +[[graphite]] # 1 or more of these sections may be present. +enabled = false +# protocol = "" # Set to "tcp" or "udp" +# address = "0.0.0.0" # If not set, is actually set to bind-address. 
+# port = 2003 +# name-position = "last" +# name-separator = "-" +# database = "" # store graphite data in this database + +# Configure the collectd input. +[collectd] +enabled = false +#address = "0.0.0.0" # If not set, is actually set to bind-address. +#port = 25827 +#database = "collectd_database" +#typesdb = "types.db" + +# Configure UDP listener for series data. +[udp] +enabled = true +#bind-address = "0.0.0.0" +port = {udp_port} + +# Broker configuration. Brokers are nodes which participate in distributed +# consensus. +[broker] +# Where the Raft logs are stored. The user running InfluxDB will need read/write access. +dir = "{broker_raft_dir}" +port = {broker_port} + +# Data node configuration. Data nodes are where the time-series data, in the form of +# shards, is stored. +[data] + dir = "{broker_node_dir}" + port = {broker_port} + + # Auto-create a retention policy when a database is created. Defaults to true. + retention-auto-create = true + + # Control whether retention policies are enforced and how long the system waits between + # enforcing those policies. + retention-check-enabled = true + retention-check-period = "10m" + +[cluster] +# Location for cluster state storage. For storing state persistently across restarts. +dir = "{influxdb_cluster_dir}" + +[logging] +file = "{influxdb_logfile}" # Leave blank to redirect logs to stderr. +write-tracing = false # If true, enables detailed logging of the write system. +raft-tracing = false # If true, enables detailed logging of Raft consensus. + +# InfluxDB can store statistics about itself. This is useful for monitoring purposes. +# This feature is disabled by default, but if enabled, these statistics can be queried +# as any other data. +[statistics] +enabled = false +database = "internal" # The database to which the data is written. +retention-policy = "default" # The retention policy within the database. +write-interval = "1m" # Period between writing the data. 
\ No newline at end of file diff --git a/tests/influxdb/misc.py b/tests/influxdb/misc.py new file mode 100644 index 00000000..4761d0e9 --- /dev/null +++ b/tests/influxdb/misc.py @@ -0,0 +1,24 @@ + + +import socket + + +def get_free_port(ip='127.0.0.1'): + sock = socket.socket() + try: + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + sock.bind((ip, 0)) + return sock.getsockname()[1] + finally: + sock.close() + + +def is_port_open(port, ip='127.0.0.1'): + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + try: + result = sock.connect_ex((ip, port)) + if not result: + sock.shutdown(socket.SHUT_RDWR) + return result == 0 + finally: + sock.close() From d86ad1fb0b31b9d1038c56c18275c705dc2c24df Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Mon, 23 Mar 2015 13:48:18 -0400 Subject: [PATCH 052/536] Add: build server package and install it. So to be able to run client_test_with_server.py. --- .travis.yml | 1 + build_influxdb_server.sh | 40 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 41 insertions(+) create mode 100755 build_influxdb_server.sh diff --git a/.travis.yml b/.travis.yml index d35046cd..d8c0a437 100644 --- a/.travis.yml +++ b/.travis.yml @@ -12,6 +12,7 @@ env: install: - sudo pip install tox - sudo pip install coveralls + - ./build_influxdb_server.sh script: - travis_wait tox -e $TOX_ENV after_success: diff --git a/build_influxdb_server.sh b/build_influxdb_server.sh new file mode 100755 index 00000000..7b0bf957 --- /dev/null +++ b/build_influxdb_server.sh @@ -0,0 +1,40 @@ +#!/usr/bin/env bash + +# +# build and install, +# the latest influxdb server master +# + +set -e + +tmpdir=$(mktemp -d) + +echo "Using tempdir $tmpdir .." 
+ +cd "$tmpdir" + +# rpm for package.sh (below) which will also build an .rpm +sudo apt-get install ruby ruby-dev build-essential rpm + +echo $PATH +echo $(which gem) +echo $(which ruby) + +gem=$(which gem) + +sudo $gem install fpm + +mkdir -p go/src/github.com/influxdb +cd go/src/github.com/influxdb + +git clone --depth 5 https://github.com/influxdb/influxdb +cd influxdb + +version=0.0.0-$(git describe --always | sed 's/^v//') +echo "describe: $version" + +export GOPATH="$tmpdir/go" +{ echo y ; yes no ; } | ./package.sh "$version" + +deb=$(ls *.deb) +sudo dpkg -i "$deb" From e8ea370e9ab1fb1b9d3a860ed93a6b6e468ce117 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Mon, 23 Mar 2015 11:57:53 -0400 Subject: [PATCH 053/536] For python2/3 compat. --- influxdb/client.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/influxdb/client.py b/influxdb/client.py index e1c75d13..d61baf15 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -19,6 +19,8 @@ class InfluxDBClientError(Exception): """Raised when an error occurs in the request""" def __init__(self, content, code): + if isinstance(content, type(b'')): + content = content.decode('UTF-8', errors='replace') super(InfluxDBClientError, self).__init__( "{0}: {1}".format(code, content)) self.content = content From b00409a8d08bd0c90fa64934ca2b13c526f28ed2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Thu, 19 Mar 2015 12:11:53 -0400 Subject: [PATCH 054/536] Fix: query() requires database argument. 
--- influxdb/_dataframe_client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index 7e834464..361624c2 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -68,7 +68,7 @@ def write_points_with_precision(self, data, time_precision='s'): FutureWarning) return self.write_points(data, time_precision='s') - def query(self, query, time_precision='s', chunked=False): + def query(self, query, time_precision='s', chunked=False, database=None): """ Quering data into a DataFrame. From 53a3d3350f9bd11f0da5298c41a86deb75837a06 Mon Sep 17 00:00:00 2001 From: aviau Date: Wed, 25 Mar 2015 13:43:21 -0400 Subject: [PATCH 055/536] Fixed spelling mistakes + newline + relative import --- tests/influxdb/client_test_with_server.py | 30 +++++++++++------------ tests/influxdb/influxdb.conf.template | 2 +- tests/influxdb/influxdb.udp_conf.template | 2 +- 3 files changed, 16 insertions(+), 18 deletions(-) diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index af7c7aae..2c331446 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -28,13 +28,13 @@ from influxdb import InfluxDBClient from influxdb.client import InfluxDBClientError -from .misc import get_free_port, is_port_open +from tests.influxdb.misc import get_free_port, is_port_open THIS_DIR = os.path.abspath(os.path.dirname(__file__)) -# try to find where is located the 'influxd' binary: +# try to find where the 'influxd' binary is located: # You can define 'InfluxDbPythonClientTest_SERVER_BIN_PATH' # env var to force it : influxdb_bin_path = influxdb_forced_bin = os.environ.get( @@ -65,7 +65,7 @@ print(version, file=sys.stderr) -dummy_point = [ # some dummy points .. :o +dummy_point = [ # some dummy points { "name": "cpu_load_short", "tags": { @@ -79,7 +79,7 @@ } ] -dummy_points = [ # some dummy points .. 
:o +dummy_points = [ # some dummy points dummy_point[0], { "name": "memory", @@ -111,7 +111,7 @@ class InfluxDbInstance(object): def __init__(self, conf_template): - # create a fresh temporary place for storing all needed files + # create a temporary dir to store all needed files # for the influxdb server instance : self.temp_dir_base = tempfile.mkdtemp() # "temp_dir_base" will be used for conf file and logs, @@ -121,7 +121,7 @@ def __init__(self, conf_template): # we need some "free" ports : self.broker_port = get_free_port() self.admin_port = get_free_port() - # as it's UDP we can reuse the same port than the broker: + # as it's UDP we can reuse the same port as the broker: self.udp_port = get_free_port() self.logs_file = os.path.join( @@ -157,8 +157,8 @@ def __init__(self, conf_template): # wait for it to listen on the broker and admin ports: # usually a fresh instance is ready in less than 1 sec .. - timeout = time.time() + 10 # so 10 secs should be rather enough, - # otherwise either your system load is rather high, + timeout = time.time() + 10 # so 10 secs should be enough, + # otherwise either your system load is high, # or you run a 286 @ 1Mhz ? try: while time.time() < timeout: @@ -200,7 +200,7 @@ def close(self, remove_tree=True): class InfluxDbClientTestWithServerInstanceMixin(object): ''' A mixin for unittest.TestCase to start an influxdb server instance - in a fresh temporary place. + in a temporary directory. 
''' # 'influxdb_template_conf' attribute must be set on the class or instance @@ -219,7 +219,7 @@ def tearDown(self): self.influxd_inst.close(remove_tree=remove_tree) -@unittest.skipIf(not is_influxdb_bin_ok, "not found any influxd binary") +@unittest.skipIf(not is_influxdb_bin_ok, "could not find influxd binary") class TestInfluxDBClient(InfluxDbClientTestWithServerInstanceMixin, unittest.TestCase): @@ -479,7 +479,7 @@ def test_create_retention_policy(self): ) -@unittest.skipIf(not is_influxdb_bin_ok, "not found any influxd binary") +@unittest.skipIf(not is_influxdb_bin_ok, "could not find influxd binary") class UdpTests(InfluxDbClientTestWithServerInstanceMixin, unittest.TestCase): @@ -497,11 +497,9 @@ def test_write_points_udp(self): cli.create_database('db') cli.write_points(dummy_point) - # ho boy, - # once write_points finishes then the points aren't actually - # already directly available !! - # Well, it's normal because we sent by udp (no response !). - # So we have to wait some enough time, + # The points are not immediately available after write_points. + # This is to be expected because we are using udp (no response !). + # So we have to wait some time, time.sleep(1) # 1 sec seems to be a good choice. rsp = cli.query('SELECT * FROM cpu_load_short') diff --git a/tests/influxdb/influxdb.conf.template b/tests/influxdb/influxdb.conf.template index 4efbf70c..7a511781 100644 --- a/tests/influxdb/influxdb.conf.template +++ b/tests/influxdb/influxdb.conf.template @@ -96,4 +96,4 @@ raft-tracing = false # If true, enables detailed logging of Raft consensus. enabled = false database = "internal" # The database to which the data is written. retention-policy = "default" # The retention policy within the database. -write-interval = "1m" # Period between writing the data. \ No newline at end of file +write-interval = "1m" # Period between writing the data. 
diff --git a/tests/influxdb/influxdb.udp_conf.template b/tests/influxdb/influxdb.udp_conf.template index 79ed4d72..3eb14ed6 100644 --- a/tests/influxdb/influxdb.udp_conf.template +++ b/tests/influxdb/influxdb.udp_conf.template @@ -96,4 +96,4 @@ raft-tracing = false # If true, enables detailed logging of Raft consensus. enabled = false database = "internal" # The database to which the data is written. retention-policy = "default" # The retention policy within the database. -write-interval = "1m" # Period between writing the data. \ No newline at end of file +write-interval = "1m" # Period between writing the data. From 00913fa63fd5a60cea2cfca20ff6394be94c8bc4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Thu, 26 Mar 2015 17:39:01 -0400 Subject: [PATCH 056/536] Add: also set a snapshot port in test config templates --- tests/influxdb/client_test_with_server.py | 19 ++++++++----------- tests/influxdb/influxdb.conf.template | 9 +++++++-- tests/influxdb/influxdb.udp_conf.template | 8 ++++++-- 3 files changed, 21 insertions(+), 15 deletions(-) diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index 2c331446..179ee8a7 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -116,29 +116,26 @@ def __init__(self, conf_template): self.temp_dir_base = tempfile.mkdtemp() # "temp_dir_base" will be used for conf file and logs, # while "temp_dir_influxdb" is for the databases files/dirs : - self.temp_dir_influxdb = tempfile.mkdtemp( + tempdir = self.temp_dir_influxdb = tempfile.mkdtemp( dir=self.temp_dir_base) # we need some "free" ports : self.broker_port = get_free_port() self.admin_port = get_free_port() - # as it's UDP we can reuse the same port as the broker: self.udp_port = get_free_port() + self.snapshot_port = get_free_port() - self.logs_file = os.path.join( - self.temp_dir_base, 'logs.txt') + self.logs_file = os.path.join(self.temp_dir_base, 
'logs.txt') with open(conf_template) as fh: conf = fh.read().format( broker_port=self.broker_port, admin_port=self.admin_port, udp_port=self.udp_port, - broker_raft_dir=os.path.join( - self.temp_dir_influxdb, 'raft'), - broker_node_dir=os.path.join( - self.temp_dir_influxdb, 'db'), - influxdb_cluster_dir=os.path.join( - self.temp_dir_influxdb, 'state'), - influxdb_logfile=self.logs_file + broker_raft_dir=os.path.join(tempdir, 'raft'), + broker_node_dir=os.path.join(tempdir, 'db'), + cluster_dir=os.path.join(tempdir, 'state'), + logfile=self.logs_file, + snapshot_port=self.snapshot_port, ) conf_file = os.path.join(self.temp_dir_base, 'influxdb.conf') diff --git a/tests/influxdb/influxdb.conf.template b/tests/influxdb/influxdb.conf.template index 7a511781..82608f73 100644 --- a/tests/influxdb/influxdb.conf.template +++ b/tests/influxdb/influxdb.conf.template @@ -82,10 +82,10 @@ port = {broker_port} [cluster] # Location for cluster state storage. For storing state persistently across restarts. -dir = "{influxdb_cluster_dir}" +dir = "{cluster_dir}" [logging] -file = "{influxdb_logfile}" # Leave blank to redirect logs to stderr. +file = "{logfile}" # Leave blank to redirect logs to stderr. write-tracing = false # If true, enables detailed logging of the write system. raft-tracing = false # If true, enables detailed logging of Raft consensus. @@ -97,3 +97,8 @@ enabled = false database = "internal" # The database to which the data is written. retention-policy = "default" # The retention policy within the database. write-interval = "1m" # Period between writing the data. + + +[snapshot] +bind-address = "127.0.0.1" +port = {snapshot_port} diff --git a/tests/influxdb/influxdb.udp_conf.template b/tests/influxdb/influxdb.udp_conf.template index 3eb14ed6..4134172f 100644 --- a/tests/influxdb/influxdb.udp_conf.template +++ b/tests/influxdb/influxdb.udp_conf.template @@ -82,10 +82,10 @@ port = {broker_port} [cluster] # Location for cluster state storage. 
For storing state persistently across restarts. -dir = "{influxdb_cluster_dir}" +dir = "{cluster_dir}" [logging] -file = "{influxdb_logfile}" # Leave blank to redirect logs to stderr. +file = "{logfile}" # Leave blank to redirect logs to stderr. write-tracing = false # If true, enables detailed logging of the write system. raft-tracing = false # If true, enables detailed logging of Raft consensus. @@ -97,3 +97,7 @@ enabled = false database = "internal" # The database to which the data is written. retention-policy = "default" # The retention policy within the database. write-interval = "1m" # Period between writing the data. + +[snapshot] +bind-address = "127.0.0.1" +port = {snapshot_port} From 1a116d92df7b7aa750dfe6e0f84b9ed13b4f90bd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Thu, 26 Mar 2015 17:40:54 -0400 Subject: [PATCH 057/536] workaround: last influxd master doesn't anymore store a log file unfortunately :s So don't error if it's not readable. For now keeping the code for the log file as it might be reintroduced later. 
--- tests/influxdb/client_test_with_server.py | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index 179ee8a7..10aab84e 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -180,13 +180,17 @@ def __init__(self, conf_template): def get_logs_and_output(self): proc = self.proc - with open(self.logs_file) as fh: - return { - 'rc': proc.returncode, - 'out': proc.stdout.read(), - 'err': proc.stderr.read(), - 'logs': fh.read() - } + try: + with open(self.logs_file) as fh: + logs = fh.read() + except IOError as err: + logs = "Couldn't read logs: %s" % err + return { + 'rc': proc.returncode, + 'out': proc.stdout.read(), + 'err': proc.stderr.read(), + 'logs': logs + } def close(self, remove_tree=True): self.proc.terminate() From 8d20673d2d84e7f332b2875a3088ebec033031b4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Fri, 27 Mar 2015 10:09:34 -0400 Subject: [PATCH 058/536] Move warnings setup to module level --- tests/influxdb/client_test_with_server.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index 10aab84e..d78e3d5d 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -25,6 +25,9 @@ import warnings +# By default, raise exceptions on warnings +warnings.simplefilter('error', FutureWarning) + from influxdb import InfluxDBClient from influxdb.client import InfluxDBClientError @@ -207,8 +210,6 @@ class InfluxDbClientTestWithServerInstanceMixin(object): # 'influxdb_template_conf' attribute must be set on the class or instance def setUp(self): - # By default, raise exceptions on warnings - warnings.simplefilter('error', FutureWarning) self.influxd_inst = InfluxDbInstance(self.influxdb_template_conf) self.cli = 
InfluxDBClient('localhost', From 4b571a58a4ca40036634233514d285d09db316bc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Fri, 27 Mar 2015 10:19:36 -0400 Subject: [PATCH 059/536] Small enh: split TestInfluxDBClient in 2 : SimpleTests + CommonTests And so use more dedicated mixins for the server: + SingleTestCaseWithServerMixin + ManyTestCasesWithServerMixin --- tests/influxdb/client_test_with_server.py | 116 +++++++++++++--------- 1 file changed, 71 insertions(+), 45 deletions(-) diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index d78e3d5d..e88a62d9 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -112,6 +112,9 @@ class InfluxDbInstance(object): + ''' A class to launch of fresh influxdb server instance + in a temporary place, using a config file template. + ''' def __init__(self, conf_template): # create a temporary dir to store all needed files @@ -202,31 +205,60 @@ def close(self, remove_tree=True): shutil.rmtree(self.temp_dir_base) -class InfluxDbClientTestWithServerInstanceMixin(object): + +def _setup_influxdb_server(inst): + inst.influxd_inst = InfluxDbInstance(inst.influxdb_template_conf) + inst.cli = InfluxDBClient('localhost', + inst.influxd_inst.broker_port, + 'root', '', database='db') +def _unsetup_influxdb_server(inst): + remove_tree = sys.exc_info() == (None, None, None) + inst.influxd_inst.close(remove_tree=remove_tree) + + +class SingleTestCaseWithServerMixin(object): ''' A mixin for unittest.TestCase to start an influxdb server instance - in a temporary directory. + in a temporary directory **for each test function/case** ''' - # 'influxdb_template_conf' attribute must be set on the class or instance + # 'influxdb_template_conf' attribute must be set on the TestCase class or instance. 
+ + setUp = _setup_influxdb_server + tearDown = _unsetup_influxdb_server + + +class ManyTestCasesWithServerMixin(object): + ''' Same than SingleTestCaseWithServerMixin + but creates a single instance for the whole class. + Also pre-creates a fresh database: 'db'. + ''' + + # 'influxdb_template_conf' attribute must be set on the class itself ! + + @classmethod + def setUpClass(cls): + _setup_influxdb_server(cls) def setUp(self): + self.cli.create_database('db') - self.influxd_inst = InfluxDbInstance(self.influxdb_template_conf) - self.cli = InfluxDBClient('localhost', - self.influxd_inst.broker_port, - 'root', '', database='db') + @classmethod + def tearDownClass(cls): + _unsetup_influxdb_server(cls) def tearDown(self): - remove_tree = sys.exc_info() == (None, None, None) - self.influxd_inst.close(remove_tree=remove_tree) + self.cli.drop_database('db') @unittest.skipIf(not is_influxdb_bin_ok, "could not find influxd binary") -class TestInfluxDBClient(InfluxDbClientTestWithServerInstanceMixin, - unittest.TestCase): +class SimpleTests(SingleTestCaseWithServerMixin, + unittest.TestCase): influxdb_template_conf = os.path.join(THIS_DIR, 'influxdb.conf.template') + def test_fresh_server_no_db(self): + self.assertEqual([], self.cli.get_list_database()) + def test_create_database(self): self.assertIsNone(self.cli.create_database('new_db_1')) self.assertIsNone(self.cli.create_database('new_db_2')) @@ -255,10 +287,24 @@ def test_drop_database_fails(self): self.assertEqual('{"results":[{"error":"database not found"}]}', ctx.exception.content) + def test_query_fail(self): + with self.assertRaises(InfluxDBClientError) as ctx: + self.cli.query('select column_one from foo') + self.assertEqual( + ('500: {"results":[{"error":"database not found: db"}]}',), + ctx.exception.args) + + + +@unittest.skipIf(not is_influxdb_bin_ok, "could not find influxd binary") +class CommonTests(ManyTestCasesWithServerMixin, + unittest.TestCase): + + influxdb_template_conf = os.path.join(THIS_DIR, 
'influxdb.conf.template') + def test_write(self): new_dummy_point = dummy_point[0].copy() new_dummy_point['database'] = 'db' - self.cli.create_database('db') self.assertIs(True, self.cli.write(new_dummy_point)) @unittest.skip("fail against real server instance, " @@ -281,7 +327,6 @@ def test_write_check_read(self): def test_write_points(self): ''' same as test_write() but with write_points \o/ ''' - self.cli.create_database('db') self.assertIs(True, self.cli.write_points(dummy_point)) def test_write_points_check_read(self): @@ -294,7 +339,6 @@ def test_write_points_check_read(self): self.cli.query('SELECT * FROM cpu_load_short')) def test_write_multiple_points_different_series(self): - self.cli.create_database('db') self.assertIs(True, self.cli.write_points(dummy_points)) time.sleep(1) self.assertEqual( @@ -308,7 +352,6 @@ def test_write_multiple_points_different_series(self): @unittest.skip('Not implemented for 0.9') def test_write_points_batch(self): - self.cli.create_database('db') self.cli.write_points( points=dummy_point * 3, batch_size=2 @@ -343,7 +386,8 @@ def test_write_points_with_precision(self): # ('h', base_regex + '00:00Z', ), # that would require a sleep of possibly up to 3600 secs (/ 2 ?).. )): - db = 'db' + db = 'db1' # to not shoot us in the foot/head, + # we work on a fresh db each time: self.cli.create_database(db) before = datetime.datetime.now() self.assertIs( @@ -390,7 +434,6 @@ def test_write_points_with_precision(self): self.cli.drop_database(db) def test_query(self): - self.cli.create_database('db') self.assertIs(True, self.cli.write_points(dummy_point)) @unittest.skip('Not implemented for 0.9') @@ -413,24 +456,15 @@ def test_query_chunked(self): } del cli del example_object - # TODO - - def test_query_fail(self): - with self.assertRaises(InfluxDBClientError) as ctx: - self.cli.query('select column_one from foo') - self.assertEqual( - ('500: {"results":[{"error":"database not found: db"}]}',), - ctx.exception.args) + # TODO ? 
def test_get_list_series_empty(self): - self.cli.create_database('mydb') - rsp = self.cli.get_list_series('mydb') + rsp = self.cli.get_list_series() self.assertEqual({}, rsp) def test_get_list_series_non_empty(self): - self.cli.create_database('mydb') - self.cli.write_points(dummy_point, database='mydb') - rsp = self.cli.get_list_series('mydb') + self.cli.write_points(dummy_point) + rsp = self.cli.get_list_series() self.assertEqual( {'cpu_load_short': [ {'region': 'us-west', 'host': 'server01', '_id': 1}]}, @@ -438,8 +472,7 @@ def test_get_list_series_non_empty(self): ) def test_default_retention_policy(self): - self.cli.create_database('db') - rsp = self.cli.get_list_retention_policies('db') + rsp = self.cli.get_list_retention_policies() self.assertEqual( [ {'duration': '0', 'default': True, @@ -448,12 +481,10 @@ def test_default_retention_policy(self): ) def test_create_retention_policy_default(self): - self.cli.create_database('db') - rsp = self.cli.create_retention_policy( - 'somename', '1d', 4, default=True, database='db' - ) + rsp = self.cli.create_retention_policy('somename', '1d', 4, + default=True) self.assertIsNone(rsp) - rsp = self.cli.get_list_retention_policies('db') + rsp = self.cli.get_list_retention_policies() self.assertEqual( [ {'duration': '0', 'default': False, @@ -465,11 +496,8 @@ def test_create_retention_policy_default(self): ) def test_create_retention_policy(self): - self.cli.create_database('db') - self.cli.create_retention_policy( - 'somename', '1d', 4, database='db' - ) - rsp = self.cli.get_list_retention_policies('db') + self.cli.create_retention_policy('somename', '1d', 4) + rsp = self.cli.get_list_retention_policies() self.assertEqual( [ {'duration': '0', 'default': True, 'replicaN': 1, @@ -482,21 +510,19 @@ def test_create_retention_policy(self): @unittest.skipIf(not is_influxdb_bin_ok, "could not find influxd binary") -class UdpTests(InfluxDbClientTestWithServerInstanceMixin, +class UdpTests(ManyTestCasesWithServerMixin, 
unittest.TestCase): influxdb_template_conf = os.path.join(THIS_DIR, 'influxdb.udp_conf.template') def test_write_points_udp(self): - cli = InfluxDBClient( 'localhost', self.influxd_inst.broker_port, 'dont', 'care', database='db', use_udp=True, udp_port=self.influxd_inst.udp_port ) - cli.create_database('db') cli.write_points(dummy_point) # The points are not immediately available after write_points. From d9c6187872b64bd1b9224825689956e91153c989 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Fri, 27 Mar 2015 10:24:14 -0400 Subject: [PATCH 060/536] flake8 --- tests/influxdb/client_test_with_server.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index e88a62d9..397ba000 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -204,6 +204,7 @@ def close(self, remove_tree=True): if remove_tree: shutil.rmtree(self.temp_dir_base) +############################################################################ def _setup_influxdb_server(inst): @@ -211,17 +212,22 @@ def _setup_influxdb_server(inst): inst.cli = InfluxDBClient('localhost', inst.influxd_inst.broker_port, 'root', '', database='db') + + def _unsetup_influxdb_server(inst): remove_tree = sys.exc_info() == (None, None, None) inst.influxd_inst.close(remove_tree=remove_tree) +############################################################################ + class SingleTestCaseWithServerMixin(object): ''' A mixin for unittest.TestCase to start an influxdb server instance in a temporary directory **for each test function/case** ''' - # 'influxdb_template_conf' attribute must be set on the TestCase class or instance. + # 'influxdb_template_conf' attribute must be set + # on the TestCase class or instance. 
setUp = _setup_influxdb_server tearDown = _unsetup_influxdb_server @@ -249,6 +255,8 @@ def tearDownClass(cls): def tearDown(self): self.cli.drop_database('db') +############################################################################ + @unittest.skipIf(not is_influxdb_bin_ok, "could not find influxd binary") class SimpleTests(SingleTestCaseWithServerMixin, @@ -294,6 +302,7 @@ def test_query_fail(self): ('500: {"results":[{"error":"database not found: db"}]}',), ctx.exception.args) +############################################################################ @unittest.skipIf(not is_influxdb_bin_ok, "could not find influxd binary") @@ -508,6 +517,8 @@ def test_create_retention_policy(self): rsp ) +############################################################################ + @unittest.skipIf(not is_influxdb_bin_ok, "could not find influxd binary") class UdpTests(ManyTestCasesWithServerMixin, From 28eda55e54af34cf026c6f260ba42c4a5ff07b21 Mon Sep 17 00:00:00 2001 From: Matt Robenolt Date: Sat, 28 Mar 2015 17:16:13 -0700 Subject: [PATCH 061/536] Add wheel/twine support --- dev-requirements.txt | 2 ++ release.sh | 3 ++- setup.cfg | 2 ++ 3 files changed, 6 insertions(+), 1 deletion(-) create mode 100644 setup.cfg diff --git a/dev-requirements.txt b/dev-requirements.txt index c4473b8a..78d40a24 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -4,3 +4,5 @@ mock pandas Sphinx==1.2.3 sphinx_rtd_theme +wheel +twine diff --git a/release.sh b/release.sh index d0337897..d94e16fd 100755 --- a/release.sh +++ b/release.sh @@ -1,3 +1,4 @@ #!/usr/bin/env bash -python setup.py sdist upload +python setup.py sdist bdist_wheel +twine upload dist/* diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 00000000..5e409001 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,2 @@ +[wheel] +universal = 1 From 15b652f2bf50db892292b0220b30d6ff93d7ad26 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Mon, 30 Mar 2015 10:31:04 -0400 Subject: [PATCH 062/536] 
Adapt get_list_database to new 'show databases' format --- influxdb/client.py | 3 ++- tests/influxdb/client_test.py | 11 +++-------- 2 files changed, 5 insertions(+), 9 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 3ddd794d..8b4d7a43 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -295,7 +295,8 @@ def get_list_database(self): """ Get the list of databases """ - return [db['name'] for db in self.query("SHOW DATABASES")] + rsp = self.query("SHOW DATABASES") + return [db['name'] for db in rsp['databases']] def create_database(self, dbname): """ diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index 3f0a08fb..01550b9c 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -350,14 +350,9 @@ def test_drop_database_fails(self): cli.drop_database('old_db') def test_get_list_database(self): - data = { - "results": - [ - {"series": [ - {"columns": ["name"], - "values":[["mydb"], ["myotherdb"]]}]} - ] - } + data = {'results': [{'series': [ + {'name': 'databases', 'columns': ['name'], + 'values': [['mydb'], ['myotherdb']]}]}]} with _mocked_session('get', 200, json.dumps(data)): self.assertListEqual( From f338c5767769730dccc2ab73f5c0339f977dff36 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Mon, 30 Mar 2015 12:29:20 -0400 Subject: [PATCH 063/536] Add test which trigger issue #143 --- tests/influxdb/client_test_with_server.py | 64 ++++++++++++++++++++++- 1 file changed, 63 insertions(+), 1 deletion(-) diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index 397ba000..3e89b677 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -14,6 +14,7 @@ import datetime import distutils.spawn +from functools import partial import os import re import shutil @@ -36,7 +37,7 @@ THIS_DIR = os.path.abspath(os.path.dirname(__file__)) - 
+############################################################################# # try to find where the 'influxd' binary is located: # You can define 'InfluxDbPythonClientTest_SERVER_BIN_PATH' # env var to force it : @@ -68,6 +69,18 @@ print(version, file=sys.stderr) +############################################################################# + +def point(serie_name, timestamp=None, tags=None, **fields): + res = {'name': serie_name} + if timestamp: + res['timestamp'] = timestamp + if tags: + res['tags'] = tags + res['fields'] = fields + return res + + dummy_point = [ # some dummy points { "name": "cpu_load_short", @@ -110,6 +123,8 @@ } ] +############################################################################# + class InfluxDbInstance(object): ''' A class to launch of fresh influxdb server instance @@ -517,6 +532,53 @@ def test_create_retention_policy(self): rsp ) + def test_issue_143(self): + pt = partial(point, 'serie', timestamp='2015-03-30T16:16:37Z') + pts = [ + pt(value=15), + pt(tags={'tag_1': 'value1'}, value=5), + pt(tags={'tag_1': 'value2'}, value=10), + ] + self.cli.write_points(pts) + time.sleep(1) + rsp = self.cli.query('SELECT * FROM serie GROUP BY tag_1') + # print(rsp, file=sys.stderr) + self.assertEqual({ + ('serie', (('tag_1', ''),)): [ + {'time': '2015-03-30T16:16:37Z', 'value': 15}], + ('serie', (('tag_1', 'value1'),)): [ + {'time': '2015-03-30T16:16:37Z', 'value': 5}], + ('serie', (('tag_1', 'value2'),)): [ + {'time': '2015-03-30T16:16:37Z', 'value': 10}]}, + rsp + ) + + # a slightly more complex one with 2 tags values: + pt = partial(point, 'serie2', timestamp='2015-03-30T16:16:37Z') + pts = [ + pt(tags={'tag1': 'value1', 'tag2': 'v1'}, value=0), + pt(tags={'tag1': 'value1', 'tag2': 'v2'}, value=5), + pt(tags={'tag1': 'value2', 'tag2': 'v1'}, value=10), + ] + self.cli.write_points(pts) + time.sleep(1) + rsp = self.cli.query('SELECT * FROM serie2 GROUP BY tag1,tag2') + # print(rsp, file=sys.stderr) + self.assertEqual( + { + ('serie2', 
(('tag1', 'value1'), ('tag2', 'v1'))): [ + {'time': '2015-03-30T16:16:37Z', 'value': 0} + ], + ('serie2', (('tag1', 'value1'), ('tag2', 'v2'))): [ + {'time': '2015-03-30T16:16:37Z', 'value': 5} + ], + ('serie2', (('tag1', 'value2'), ('tag2', 'v1'))): [ + {'time': '2015-03-30T16:16:37Z', 'value': 10}] + }, + rsp + ) + + ############################################################################ From a5f075ff7e5a4c74bd96deb8f4f7a4a51aee5a6b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Mon, 30 Mar 2015 14:15:48 -0400 Subject: [PATCH 064/536] Fix issue #143 --- influxdb/client.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/influxdb/client.py b/influxdb/client.py index 8b4d7a43..303f9032 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -109,7 +109,14 @@ def format_query_response(response): for row in result['series']: items = [] if 'name' in row.keys(): - series[row['name']] = items + name = row['name'] + tags = row.get('tags', None) + if tags: + name = (row['name'], tuple(tags.items())) + if name not in series: + series[name] = items + else: + series[name].extend(items) else: series = items # Special case for system queries. if 'columns' in row.keys() and 'values' in row.keys(): From 89bfbf5a05ca129437c6b7df7bdd4cdffe22ac9c Mon Sep 17 00:00:00 2001 From: aviau Date: Mon, 30 Mar 2015 14:20:46 -0400 Subject: [PATCH 065/536] Released 1.0.2 --- influxdb/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/__init__.py b/influxdb/__init__.py index 782a2005..77c3f582 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -11,4 +11,4 @@ ] -__version__ = '1.0.0' +__version__ = '1.0.2' From 8333619133d166e7d29f0cc85b0a9f222dcc9f8d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Tue, 31 Mar 2015 10:26:28 -0400 Subject: [PATCH 066/536] Add a test to verify that influxd always keep the same order for tags name when used in a group by query. 
--- tests/influxdb/client_test_with_server.py | 55 +++++++++++++++++++++++ 1 file changed, 55 insertions(+) diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index 3e89b677..0cc1b36e 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -11,10 +11,12 @@ """ from __future__ import print_function +import random import datetime import distutils.spawn from functools import partial +import itertools import os import re import shutil @@ -578,6 +580,59 @@ def test_issue_143(self): rsp ) + def test_tags_json_order(self): + n_pts = 100 + n_tags = 5 # that will make 120 possible orders (fact(5) == 120) + all_tags = ['tag%s' % i for i in range(n_tags)] + n_tags_values = 1 + n_tags // 3 + all_tags_values = ['value%s' % random.randint(0, i) + for i in range(n_tags_values)] + pt = partial(point, 'serie', timestamp='2015-03-30T16:16:37Z') + pts = [ + pt(value=random.randint(0, 100)) + for _ in range(n_pts) + ] + for pt in pts: + tags = pt['tags'] = {} + for tag in all_tags: + tags[tag] = random.choice(all_tags_values) + + self.cli.write_points(pts) + time.sleep(1) + + # Influxd, when queried with a "group by tag1(, tag2, ..)" and as far + # as we currently see, always returns the tags (alphabetically-) + # ordered by their name in the json response.. + # That might not always be the case so here we will also be + # asserting that behavior. 
+ expected_ordered_tags = tuple(sorted(all_tags)) + + # try all the possible orders of tags for the group by in the query: + for tags in itertools.permutations(all_tags): + query = ('SELECT * FROM serie ' + 'GROUP BY %s' % ','.join(tags)) + rsp = self.cli.query(query) + # and verify that, for each "serie_key" in the response, + # the tags names are ordered as we expect it: + for serie_key in rsp: + # first also asserts that the serie key is a 2-tuple: + self.assertTrue(isinstance(serie_key, tuple)) + self.assertEqual(2, len(serie_key)) + # also assert that the first component is an str instance: + self.assertIsInstance(serie_key[0], type(b''.decode())) + self.assertIsInstance(serie_key[1], tuple) + # also assert that the number of items in the second component + # is the number of tags requested in the group by actually, + # and that each one has correct format/type/.. + self.assertEqual(n_tags, len(serie_key[1])) + for tag_data in serie_key[1]: + self.assertIsInstance(tag_data, tuple) + self.assertEqual(2, len(tag_data)) + tag_name = tag_data[0] + self.assertIsInstance(tag_name, type(b''.decode())) + # then check the tags order: + rsp_tags = tuple(t[0] for t in serie_key[1]) + self.assertEqual(expected_ordered_tags, rsp_tags) ############################################################################ From e89876f7dc8056ec15875fdcbecae7d94659036f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Tue, 31 Mar 2015 10:23:37 -0400 Subject: [PATCH 067/536] Fix: by default always keep the order from the server json responses Undecoding a json encoded dictionnary in python by default will lose the order of the json. You have to explicitely give a good object_pairs_hook to json dumps in order for it the keep the order of dictionnaries. clean: also removed (now) useless if normally, with tags now appropriately used in the "name", we shouldn't get the same "serie-tags" multiple times in the same answer. 
So this test, more specifically the else part, is/was rather useless/odd.. --- influxdb/client.py | 46 ++++++++++++++++++++++++++++++++++++---------- 1 file changed, 36 insertions(+), 10 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 303f9032..1bc14d1a 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -2,6 +2,7 @@ """ Python client for InfluxDB """ +from collections import OrderedDict import json import socket import requests @@ -28,6 +29,7 @@ def __init__(self, content, code): class InfluxDBClient(object): + """ The ``InfluxDBClient`` object holds information necessary to connect to InfluxDB. Requests can be made to InfluxDB directly through the client. @@ -67,7 +69,8 @@ def __init__(self, verify_ssl=False, timeout=None, use_udp=False, - udp_port=4444): + udp_port=4444, + ): """ Construct a new InfluxDBClient object. """ @@ -99,6 +102,27 @@ def __init__(self, 'Content-type': 'application/json', 'Accept': 'text/plain'} + # + # By default we keep the "order" of the json responses: + # more clearly: any dict contained in the json response will have + # its key-value items order kept as in the raw answer, thanks to + # `collections.OrderedDict`. + # if one doesn't care in that, then it can simply change its client + # instance 'keep_json_response_order' attribute value (to a falsy one). + # This will then eventually help for performance considerations. + _keep_json_response_order = True + # NB: For "group by" query type : + # This setting is actually necessary in order to have a consistent and + # reproducible rsp format if you "group by" on more than 1 tag. 
+ + @property + def keep_json_response_order(self): + return self._keep_json_response_order + + @keep_json_response_order.setter + def keep_json_response_order(self, new_value): + self._keep_json_response_order = new_value + @staticmethod def format_query_response(response): """Returns a list of items from a query response""" @@ -113,17 +137,16 @@ def format_query_response(response): tags = row.get('tags', None) if tags: name = (row['name'], tuple(tags.items())) - if name not in series: - series[name] = items - else: - series[name].extend(items) + assert name not in series + series[name] = items else: series = items # Special case for system queries. if 'columns' in row.keys() and 'values' in row.keys(): + columns = row['columns'] for value in row['values']: item = {} for cur_col, field in enumerate(value): - item[row['columns'][cur_col]] = field + item[columns[cur_col]] = field items.append(item) return series @@ -234,10 +257,13 @@ def query(self, expected_response_code=expected_response_code ) - if raw: - return response.json() - else: - return self.format_query_response(response.json()) + json_kw = {} + if self.keep_json_response_order: + json_kw.update(object_pairs_hook=OrderedDict) + data = response.json(**json_kw) + + return (data if raw + else self.format_query_response(data)) def write_points(self, points, From aee6d934cb6181b20c2886eeb005872b6b7cee3e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Tue, 31 Mar 2015 15:24:47 -0400 Subject: [PATCH 068/536] Test fix: precision should only be used with timestamp provided for refs see: https://github.com/influxdb/influxdb/issues/2124 https://groups.google.com/forum/#!topic/influxdb/yWpv_WJ6-uU --- tests/influxdb/client_test_with_server.py | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index 0cc1b36e..968d234b 100644 --- a/tests/influxdb/client_test_with_server.py 
+++ b/tests/influxdb/client_test_with_server.py @@ -398,14 +398,26 @@ def test_write_points_with_precision(self): base_regex = '\d{4}-\d{2}-\d{2}T\d{2}:' # YYYY-MM-DD 'T' hh: base_s_regex = base_regex + '\d{2}:\d{2}' # base_regex + mm:ss + point = { + "name": "cpu_load_short", + "tags": { + "host": "server01", + "region": "us-west" + }, + "timestamp": "2009-11-10T12:34:56.123456789Z", + "fields": { + "value": 0.64 + } + } + # As far as we can see the values aren't directly available depending # on the precision used. # The less the precision, the more to wait for the value to be # actually written/available. for idx, (precision, expected_regex, sleep_time) in enumerate(( - ('n', base_s_regex + '\.\d{1,9}Z', 1), - ('u', base_s_regex + '\.\d{1,6}Z', 1), - ('ms', base_s_regex + '\.\d{1,3}Z', 1), + ('n', base_s_regex + '\.\d{9}Z', 1), + ('u', base_s_regex + '\.\d{6}Z', 1), + ('ms', base_s_regex + '\.\d{3}Z', 1), ('s', base_s_regex + 'Z', 1), ('m', base_regex + '\d{2}:00Z', 60), @@ -419,7 +431,7 @@ def test_write_points_with_precision(self): self.assertIs( True, self.cli.write_points( - dummy_point_without_timestamp, + [point], time_precision=precision, database=db)) From 5977f63d4193af6dd8528db008d915fd2190c932 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Tue, 31 Mar 2015 17:01:19 -0400 Subject: [PATCH 069/536] Pythonic: `x in dict.keys()` => `x in dict` --- influxdb/_dataframe_client.py | 2 +- influxdb/client.py | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index 361624c2..959ecb98 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -87,7 +87,7 @@ def query(self, query, time_precision='s', chunked=False, database=None): def _to_dataframe(self, json_result, time_precision): dataframe = pd.DataFrame(data=json_result['points'], columns=json_result['columns']) - if 'sequence_number' in dataframe.keys(): + if 'sequence_number' 
in dataframe: dataframe.sort(['time', 'sequence_number'], inplace=True) else: dataframe.sort(['time'], inplace=True) diff --git a/influxdb/client.py b/influxdb/client.py index 1bc14d1a..5fb8d3b3 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -127,12 +127,12 @@ def keep_json_response_order(self, new_value): def format_query_response(response): """Returns a list of items from a query response""" series = {} - if 'results' in response.keys(): + if 'results' in response: for result in response['results']: - if 'series' in result.keys(): + if 'series' in result: for row in result['series']: items = [] - if 'name' in row.keys(): + if 'name' in row: name = row['name'] tags = row.get('tags', None) if tags: @@ -141,7 +141,7 @@ def format_query_response(response): series[name] = items else: series = items # Special case for system queries. - if 'columns' in row.keys() and 'values' in row.keys(): + if 'columns' in row and 'values' in row: columns = row['columns'] for value in row['values']: item = {} From 03ec9010e187330f768b1d91aa50e22847d3c179 Mon Sep 17 00:00:00 2001 From: Christopher Rabotin Date: Wed, 1 Apr 2015 19:58:11 +0100 Subject: [PATCH 070/536] Added alter database user for InfluxDB 0.8 and its tests. --- influxdb/influxdb08/client.py | 29 +++++++++++-- tests/influxdb/influxdb08/client_test.py | 54 ++++++++++++++++++++++-- 2 files changed, 75 insertions(+), 8 deletions(-) diff --git a/influxdb/influxdb08/client.py b/influxdb/influxdb08/client.py index f17921b7..9b7682e3 100644 --- a/influxdb/influxdb08/client.py +++ b/influxdb/influxdb08/client.py @@ -736,11 +736,32 @@ def update_database_user_password(self, username, new_password): """ Update password """ + return self.alter_database_user(username, new_password) + + def alter_database_user(self, username, password=None, permissions=None): + """ + Alters a database user and/or their permissions. + :param permissions: A ``(readFrom, writeTo)`` tuple + :raise TypeError: if permissions cannot be read. 
+ :raise ValueError: if neither password nor permissions provided. + """ url = "db/{0}/users/{1}".format(self._database, username) - data = { - 'password': new_password - } + if not password and not permissions: + raise ValueError("Nothing to alter for user {}.".format(username)) + + data = {} + + if password: + data['password'] = password + + if permissions: + try: + data['readFrom'], data['writeTo'] = permissions + except (ValueError, TypeError): + raise TypeError( + "'permissions' must be (readFrom, writeTo) tuple" + ) self.request( url=url, @@ -750,7 +771,7 @@ def update_database_user_password(self, username, new_password): ) if username == self._username: - self._password = new_password + self._password = password return True diff --git a/tests/influxdb/influxdb08/client_test.py b/tests/influxdb/influxdb08/client_test.py index ca83004a..1b7384f3 100644 --- a/tests/influxdb/influxdb08/client_test.py +++ b/tests/influxdb/influxdb08/client_test.py @@ -591,7 +591,7 @@ def test_add_database_user_bad_permissions(self): permissions=('hello', 'hello', 'hello') ) - def test_update_database_user_password(self): + def test_alter_database_user_password(self): with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, @@ -599,14 +599,60 @@ def test_update_database_user_password(self): ) cli = InfluxDBClient(database='db') - cli.update_database_user_password( + cli.alter_database_user( username='paul', - new_password='laup' + password='n3wp4ss!' ) self.assertDictEqual( json.loads(m.last_request.body), - {'password': 'laup'} + { + 'password': 'n3wp4ss!' 
+ } + ) + + def test_alter_database_user_permissions(self): + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.POST, + "http://localhost:8086/db/db/users/paul" + ) + + cli = InfluxDBClient(database='db') + cli.alter_database_user( + username='paul', + permissions=('^$', '.*') + ) + + self.assertDictEqual( + json.loads(m.last_request.body), + { + 'readFrom': '^$', + 'writeTo': '.*' + } + ) + + def test_alter_database_user_password_and_permissions(self): + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.POST, + "http://localhost:8086/db/db/users/paul" + ) + + cli = InfluxDBClient(database='db') + cli.alter_database_user( + username='paul', + password='n3wp4ss!', + permissions=('^$', '.*') + ) + + self.assertDictEqual( + json.loads(m.last_request.body), + { + 'password': 'n3wp4ss!', + 'readFrom': '^$', + 'writeTo': '.*' + } ) def test_update_database_user_password_current_user(self): From f59d06e6075351796c051dc58b9334672c213d97 Mon Sep 17 00:00:00 2001 From: Can ZHANG Date: Fri, 3 Apr 2015 16:08:38 +0800 Subject: [PATCH 071/536] Remove dict comprehension for py2.6 Signed-off-by: Can ZHANG --- influxdb/influxdb08/dataframe_client.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/influxdb/influxdb08/dataframe_client.py b/influxdb/influxdb08/dataframe_client.py index 641b60c1..f73fc269 100644 --- a/influxdb/influxdb08/dataframe_client.py +++ b/influxdb/influxdb08/dataframe_client.py @@ -96,9 +96,11 @@ def query(self, query, time_precision='s', chunked=False): elif len(result) == 1: return self._to_dataframe(result[0], time_precision) else: - return {time_series['name']: self._to_dataframe(time_series, - time_precision) - for time_series in result} + ret = {} + for time_series in result: + ret[time_series['name']] = self._to_dataframe(time_series, + time_precision) + return ret def _to_dataframe(self, json_result, time_precision): dataframe = pd.DataFrame(data=json_result['points'], From 
92ba774c7356155ca1a281a47c5061ce54119e20 Mon Sep 17 00:00:00 2001 From: Matt Robenolt Date: Sat, 28 Mar 2015 18:09:45 -0700 Subject: [PATCH 072/536] Use a Session per InfluxDBClient instance --- influxdb/client.py | 5 ++--- tests/influxdb/client_test.py | 31 +++++++++++++++---------------- 2 files changed, 17 insertions(+), 19 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 5fb8d3b3..4941ebeb 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -14,8 +14,6 @@ except NameError: xrange = range -session = requests.Session() - class InfluxDBClientError(Exception): """Raised when an error occurs in the request""" @@ -85,6 +83,7 @@ def __init__(self, self.use_udp = use_udp self.udp_port = udp_port + self._session = requests.Session() if use_udp: self.udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) @@ -199,7 +198,7 @@ def request(self, url, method='GET', params=None, data=None, # TODO (aviau): Make this configurable. for i in range(0, 3): try: - response = session.request( + response = self._session.request( method=method, url=url, params=params, diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index 01550b9c..436e5aad 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -27,7 +27,6 @@ import mock from influxdb import InfluxDBClient -from influxdb.client import session def _build_response_object(status_code=200, content=""): @@ -37,7 +36,7 @@ def _build_response_object(status_code=200, content=""): return resp -def _mocked_session(method="GET", status_code=200, content=""): +def _mocked_session(cli, method="GET", status_code=200, content=""): method = method.upper() @@ -66,7 +65,7 @@ def request(*args, **kwargs): return _build_response_object(status_code=status_code, content=c) mocked = patch.object( - session, + cli._session, 'request', side_effect=request ) @@ -164,8 +163,8 @@ def test_write_points(self): @unittest.skip('Not implemented for 0.9') def 
test_write_points_batch(self): - with _mocked_session('post', 200, self.dummy_points): - cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + with _mocked_session(cli, 'post', 200, self.dummy_points): assert cli.write_points( data=self.dummy_points, batch_size=2 @@ -209,8 +208,8 @@ def test_write_bad_precision_udp(self): @raises(Exception) def test_write_points_fails(self): - with _mocked_session('post', 500): - cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + with _mocked_session(cli, 'post', 500): cli.write_points([]) def test_write_points_with_precision(self): @@ -248,8 +247,8 @@ def test_write_points_bad_precision(self): @raises(Exception) def test_write_points_with_precision_fails(self): - with _mocked_session('post', 500): - cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + with _mocked_session(cli, 'post', 500): cli.write_points_with_precision([]) def test_query(self): @@ -309,7 +308,7 @@ def test_query_chunked(self): @raises(Exception) def test_query_fail(self): - with _mocked_session('get', 401): + with _mocked_session(self.cli, 'get', 401): self.cli.query('select column_one from foo;') def test_create_database(self): @@ -327,7 +326,7 @@ def test_create_database(self): @raises(Exception) def test_create_database_fails(self): - with _mocked_session('post', 401): + with _mocked_session(self.cli, 'post', 401): self.cli.create_database('new_db') def test_drop_database(self): @@ -345,8 +344,8 @@ def test_drop_database(self): @raises(Exception) def test_drop_database_fails(self): - with _mocked_session('delete', 401): - cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') + with _mocked_session(cli, 'delete', 401): 
cli.drop_database('old_db') def test_get_list_database(self): @@ -354,7 +353,7 @@ def test_get_list_database(self): {'name': 'databases', 'columns': ['name'], 'values': [['mydb'], ['myotherdb']]}]}]} - with _mocked_session('get', 200, json.dumps(data)): + with _mocked_session(self.cli, 'get', 200, json.dumps(data)): self.assertListEqual( self.cli.get_list_database(), ['mydb', 'myotherdb'] @@ -362,8 +361,8 @@ def test_get_list_database(self): @raises(Exception) def test_get_list_database_fails(self): - with _mocked_session('get', 401): - cli = InfluxDBClient('host', 8086, 'username', 'password') + cli = InfluxDBClient('host', 8086, 'username', 'password') + with _mocked_session(cli, 'get', 401): cli.get_list_database() def test_get_list_series(self): From 4c95d658524afba5580da9c82db835de64a25a6a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Wed, 8 Apr 2015 11:37:37 -0400 Subject: [PATCH 073/536] Update configuration template for server config Server configuration file changed. Adapted the test template config file. Slightly adapted InfluxDbInstance.__init__() : The client have now to connect to the "webui_port". --- tests/influxdb/client_test_with_server.py | 63 +++++++------ tests/influxdb/influxdb.conf.template | 59 +++++++------ tests/influxdb/influxdb.udp_conf.template | 103 ---------------------- 3 files changed, 66 insertions(+), 159 deletions(-) delete mode 100644 tests/influxdb/influxdb.udp_conf.template diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index 968d234b..dc8ad72f 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -133,7 +133,7 @@ class InfluxDbInstance(object): in a temporary place, using a config file template. 
''' - def __init__(self, conf_template): + def __init__(self, conf_template, udp_enabled=False): # create a temporary dir to store all needed files # for the influxdb server instance : self.temp_dir_base = tempfile.mkdtemp() @@ -142,28 +142,28 @@ def __init__(self, conf_template): tempdir = self.temp_dir_influxdb = tempfile.mkdtemp( dir=self.temp_dir_base) # we need some "free" ports : - self.broker_port = get_free_port() - self.admin_port = get_free_port() - self.udp_port = get_free_port() - self.snapshot_port = get_free_port() - - self.logs_file = os.path.join(self.temp_dir_base, 'logs.txt') - - with open(conf_template) as fh: - conf = fh.read().format( - broker_port=self.broker_port, - admin_port=self.admin_port, - udp_port=self.udp_port, - broker_raft_dir=os.path.join(tempdir, 'raft'), - broker_node_dir=os.path.join(tempdir, 'db'), - cluster_dir=os.path.join(tempdir, 'state'), - logfile=self.logs_file, - snapshot_port=self.snapshot_port, - ) + + ports = dict( + broker_port=get_free_port(), + webui_port=get_free_port(), + admin_port=get_free_port(), + udp_port=get_free_port() if udp_enabled else -1, + ) + + conf_data = dict( + broker_raft_dir=os.path.join(tempdir, 'raft'), + broker_node_dir=os.path.join(tempdir, 'db'), + cluster_dir=os.path.join(tempdir, 'state'), + logs_file=os.path.join(self.temp_dir_base, 'logs.txt'), + udp_enabled='true' if udp_enabled else 'false', + ) + conf_data.update(ports) + self.__dict__.update(conf_data) conf_file = os.path.join(self.temp_dir_base, 'influxdb.conf') with open(conf_file, "w") as fh: - fh.write(conf) + with open(conf_template) as fh_template: + fh.write(fh_template.read().format(**conf_data)) # now start the server instance: proc = self.proc = subprocess.Popen( @@ -182,8 +182,13 @@ def __init__(self, conf_template): # or you run a 286 @ 1Mhz ? 
try: while time.time() < timeout: - if (is_port_open(self.broker_port) + if (is_port_open(self.webui_port) and is_port_open(self.admin_port)): + # it's hard to check if a UDP port is open.. + if udp_enabled: + # so let's just sleep 0.5 sec in this case + # to be sure that the server has open the port + time.sleep(0.5) break time.sleep(0.5) if proc.poll() is not None: @@ -192,13 +197,13 @@ def __init__(self, conf_template): proc.terminate() proc.wait() raise RuntimeError('Timeout waiting for influxdb to listen' - ' on its broker port') + ' on its ports (%s)' % ports) except RuntimeError as err: data = self.get_logs_and_output() data['reason'] = str(err) data['now'] = datetime.datetime.now() raise RuntimeError("%(now)s > %(reason)s. RC=%(rc)s\n" - "stdout=%(out)r\nstderr=%(err)r\nlogs=%(logs)r" + "stdout=%(out)s\nstderr=%(err)s\nlogs=%(logs)r" % data) def get_logs_and_output(self): @@ -225,9 +230,11 @@ def close(self, remove_tree=True): def _setup_influxdb_server(inst): - inst.influxd_inst = InfluxDbInstance(inst.influxdb_template_conf) + inst.influxd_inst = InfluxDbInstance( + inst.influxdb_template_conf, + udp_enabled=getattr(inst, 'influxdb_udp_enabled', False)) inst.cli = InfluxDBClient('localhost', - inst.influxd_inst.broker_port, + inst.influxd_inst.webui_port, 'root', '', database='db') @@ -653,12 +660,14 @@ def test_tags_json_order(self): class UdpTests(ManyTestCasesWithServerMixin, unittest.TestCase): + influxdb_udp_enabled = True + influxdb_template_conf = os.path.join(THIS_DIR, - 'influxdb.udp_conf.template') + 'influxdb.conf.template') def test_write_points_udp(self): cli = InfluxDBClient( - 'localhost', self.influxd_inst.broker_port, + 'localhost', self.influxd_inst.webui_port, 'dont', 'care', database='db', use_udp=True, udp_port=self.influxd_inst.udp_port diff --git a/tests/influxdb/influxdb.conf.template b/tests/influxdb/influxdb.conf.template index 82608f73..dde1a4ae 100644 --- a/tests/influxdb/influxdb.conf.template +++ 
b/tests/influxdb/influxdb.conf.template @@ -5,6 +5,7 @@ # that can be resolved here. # hostname = "" bind-address = "0.0.0.0" +port = {webui_port} # Once every 24 hours InfluxDB will report anonymous data to m.influxdb.com # The data includes raft id (random 8 bytes), os, arch and version @@ -14,11 +15,6 @@ bind-address = "0.0.0.0" # Change this option to true to disable reporting. reporting-disabled = false -# Controls settings for initial start-up. Once a node a successfully started, -# these settings are ignored. -[initialization] -join-urls = "" # Comma-delimited URLs, in the form http://host:port, for joining another cluster. - # Control authentication # If not set authetication is DISABLED. Be sure to explicitly set this flag to # true if you want authentication. @@ -53,52 +49,57 @@ enabled = false #database = "collectd_database" #typesdb = "types.db" +# Configure the OpenTSDB input. +[opentsdb] +enabled = false +#address = "0.0.0.0" # If not set, is actually set to bind-address. +#port = 4242 +#database = "opentsdb_database" + # Configure UDP listener for series data. [udp] -enabled = false +enabled = {udp_enabled} #bind-address = "0.0.0.0" #port = 4444 +port = {udp_port} # Broker configuration. Brokers are nodes which participate in distributed # consensus. [broker] +enabled = true # Where the Raft logs are stored. The user running InfluxDB will need read/write access. +#dir = "/var/opt/influxdb/raft" dir = "{broker_raft_dir}" port = {broker_port} # Data node configuration. Data nodes are where the time-series data, in the form of # shards, is stored. [data] - dir = "{broker_node_dir}" - port = {broker_port} +enabled = true +#dir = "/var/opt/influxdb/db" +dir = "{broker_node_dir}" - # Auto-create a retention policy when a database is created. Defaults to true. - retention-auto-create = true +# Auto-create a retention policy when a database is created. Defaults to true. 
+retention-auto-create = true - # Control whether retention policies are enforced and how long the system waits between - # enforcing those policies. - retention-check-enabled = true - retention-check-period = "10m" +# Control whether retention policies are enforced and how long the system waits between +# enforcing those policies. +retention-check-enabled = true +retention-check-period = "10m" -[cluster] -# Location for cluster state storage. For storing state persistently across restarts. -dir = "{cluster_dir}" +# Configuration for snapshot endpoint. +[snapshot] +enabled = false # Enabled by default if not set. +bind-address = "127.0.0.1" +port = 8087 [logging] -file = "{logfile}" # Leave blank to redirect logs to stderr. write-tracing = false # If true, enables detailed logging of the write system. raft-tracing = false # If true, enables detailed logging of Raft consensus. -# InfluxDB can store statistics about itself. This is useful for monitoring purposes. -# This feature is disabled by default, but if enabled, these statistics can be queried -# as any other data. -[statistics] +# InfluxDB can store statistical and diagnostic information about itself. This is useful for +# monitoring purposes. This feature is disabled by default, but if enabled, these data can be +# queried like any other data. +[monitoring] enabled = false -database = "internal" # The database to which the data is written. -retention-policy = "default" # The retention policy within the database. write-interval = "1m" # Period between writing the data. - - -[snapshot] -bind-address = "127.0.0.1" -port = {snapshot_port} diff --git a/tests/influxdb/influxdb.udp_conf.template b/tests/influxdb/influxdb.udp_conf.template deleted file mode 100644 index 4134172f..00000000 --- a/tests/influxdb/influxdb.udp_conf.template +++ /dev/null @@ -1,103 +0,0 @@ -# Welcome to the InfluxDB configuration file. 
- -# If hostname (on the OS) doesn't return a name that can be resolved by the other -# systems in the cluster, you'll have to set the hostname to an IP or something -# that can be resolved here. -# hostname = "" -bind-address = "0.0.0.0" - -# Once every 24 hours InfluxDB will report anonymous data to m.influxdb.com -# The data includes raft id (random 8 bytes), os, arch and version -# We don't track ip addresses of servers reporting. This is only used -# to track the number of instances running and the versions, which -# is very helpful for us. -# Change this option to true to disable reporting. -reporting-disabled = false - -# Controls settings for initial start-up. Once a node a successfully started, -# these settings are ignored. -[initialization] -join-urls = "" # Comma-delimited URLs, in the form http://host:port, for joining another cluster. - -# Control authentication -# If not set authetication is DISABLED. Be sure to explicitly set this flag to -# true if you want authentication. -[authentication] -enabled = false - -# Configure the admin server -[admin] -enabled = true -port = {admin_port} - -# Configure the HTTP API endpoint. All time-series data and queries uses this endpoint. -[api] -# ssl-port = 8087 # SSL support is enabled if you set a port and cert -# ssl-cert = "/path/to/cert.pem" - -# Configure the Graphite plugins. -[[graphite]] # 1 or more of these sections may be present. -enabled = false -# protocol = "" # Set to "tcp" or "udp" -# address = "0.0.0.0" # If not set, is actually set to bind-address. -# port = 2003 -# name-position = "last" -# name-separator = "-" -# database = "" # store graphite data in this database - -# Configure the collectd input. -[collectd] -enabled = false -#address = "0.0.0.0" # If not set, is actually set to bind-address. -#port = 25827 -#database = "collectd_database" -#typesdb = "types.db" - -# Configure UDP listener for series data. 
-[udp] -enabled = true -#bind-address = "0.0.0.0" -port = {udp_port} - -# Broker configuration. Brokers are nodes which participate in distributed -# consensus. -[broker] -# Where the Raft logs are stored. The user running InfluxDB will need read/write access. -dir = "{broker_raft_dir}" -port = {broker_port} - -# Data node configuration. Data nodes are where the time-series data, in the form of -# shards, is stored. -[data] - dir = "{broker_node_dir}" - port = {broker_port} - - # Auto-create a retention policy when a database is created. Defaults to true. - retention-auto-create = true - - # Control whether retention policies are enforced and how long the system waits between - # enforcing those policies. - retention-check-enabled = true - retention-check-period = "10m" - -[cluster] -# Location for cluster state storage. For storing state persistently across restarts. -dir = "{cluster_dir}" - -[logging] -file = "{logfile}" # Leave blank to redirect logs to stderr. -write-tracing = false # If true, enables detailed logging of the write system. -raft-tracing = false # If true, enables detailed logging of Raft consensus. - -# InfluxDB can store statistics about itself. This is useful for monitoring purposes. -# This feature is disabled by default, but if enabled, these statistics can be queried -# as any other data. -[statistics] -enabled = false -database = "internal" # The database to which the data is written. -retention-policy = "default" # The retention policy within the database. -write-interval = "1m" # Period between writing the data. - -[snapshot] -bind-address = "127.0.0.1" -port = {snapshot_port} From f8990c796ae59fc611a7ce1b5ae4734c10a39400 Mon Sep 17 00:00:00 2001 From: Christopher Rabotin Date: Wed, 8 Apr 2015 18:41:47 +0100 Subject: [PATCH 074/536] Adds from_DSN to InfluxDBClient for v0.9. 
--- influxdb/client.py | 49 +++++++++++++++++++++++++++++++++++ tests/influxdb/client_test.py | 14 ++++++++++ 2 files changed, 63 insertions(+) diff --git a/influxdb/client.py b/influxdb/client.py index 4941ebeb..6670b93a 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -7,6 +7,7 @@ import socket import requests import requests.exceptions +from sys import version_info try: @@ -14,6 +15,11 @@ except NameError: xrange = range +if version_info.major == 3: + from urllib.parse import urlparse +else: + from urlparse import urlparse + class InfluxDBClientError(Exception): """Raised when an error occurs in the request""" @@ -101,6 +107,49 @@ def __init__(self, 'Content-type': 'application/json', 'Accept': 'text/plain'} + @staticmethod + def from_DSN(dsn): + """ + Returns an instance of InfluxDBClient from the provided data source + name. + :param dsn: data source name + :type dsn: string + :raise ValueError: if the provided DSN has any unexpected value. + """ + dsn = dsn.lower() + + init_args = {} + conn_params = urlparse(dsn) + scheme_info = conn_params.scheme.split('+') + if len(scheme_info) == 1: + scheme = scheme_info[0] + modifier = None + else: + modifier, scheme = scheme_info + + if scheme != 'influxdb': + raise ValueError('Unknown scheme "{}".'.format(scheme)) + if modifier: + if modifier == 'udp': + init_args['use_udp'] = True + elif modifier == 'https': + init_args['ssl'] = True + else: + raise ValueError('Unknown scheme modifier "{}".'.format(modifier)) + + if conn_params.hostname: + init_args['host'] = conn_params.hostname + if conn_params.port: + init_args['port'] = conn_params.port + if conn_params.username: + init_args['username'] = conn_params.username + if conn_params.password: + init_args['password'] = conn_params.password + if conn_params.path and len(conn_params.path) > 1: + init_args['database'] = conn_params.path[1:] + + return InfluxDBClient(**init_args) + # # By default we keep the "order" of the json responses: # more clearly: any dict 
contained in the json response will have diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index 436e5aad..4388b214 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -103,6 +103,20 @@ def test_scheme(self): ) assert cli._baseurl == 'https://host:8086' + def test_dsn(self): + cli = InfluxDBClient.from_DSN('influxdb://usr:pwd@host:1886/db') + assert cli._baseurl == 'http://host:1886' + assert cli._username == 'usr' + assert cli._password == 'pwd' + assert cli._database == 'db' + assert cli.use_udp == False + + cli = InfluxDBClient.from_DSN('udp+influxdb://usr:pwd@host:1886/db') + assert cli.use_udp == True + + cli = InfluxDBClient.from_DSN('https+influxdb://usr:pwd@host:1886/db') + assert cli._baseurl == 'https://host:1886' + def test_switch_database(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'database') cli.switch_database('another_database') From c8f700fe539df4ddbecc2b5626556bc8bad36912 Mon Sep 17 00:00:00 2001 From: Christopher Rabotin Date: Wed, 8 Apr 2015 18:47:49 +0100 Subject: [PATCH 075/536] Adds from_DSN to InfluxDBClient for v0.8. 
--- influxdb/influxdb08/client.py | 49 ++++++++++++++++++++++++ tests/influxdb/influxdb08/client_test.py | 14 +++++++ 2 files changed, 63 insertions(+) diff --git a/influxdb/influxdb08/client.py b/influxdb/influxdb08/client.py index 9b7682e3..c24ce30e 100644 --- a/influxdb/influxdb08/client.py +++ b/influxdb/influxdb08/client.py @@ -7,6 +7,7 @@ import requests import requests.exceptions import warnings +from sys import version_info from influxdb import chunked_json @@ -15,6 +16,11 @@ except NameError: xrange = range +if version_info.major == 3: + from urllib.parse import urlparse +else: + from urlparse import urlparse + session = requests.Session() @@ -100,6 +106,49 @@ def __init__(self, 'Content-type': 'application/json', 'Accept': 'text/plain'} + @staticmethod + def from_DSN(dsn): + """ + Returns an instance of InfluxDBClient from the provided data source + name. + :param dsn: data source name + :type dsn: string + :raise ValueError: if the provided DSN has any unexpected value. + """ + dsn = dsn.lower() + + init_args = {} + conn_params = urlparse(dsn) + scheme_info = conn_params.scheme.split('+') + if len(scheme_info) == 1: + scheme = scheme_info[0] + modifier = None + else: + modifier, scheme = scheme_info + + if scheme != 'influxdb': + raise ValueError('Unknown scheme "{}".'.format(scheme)) + if modifier: + if modifier == 'udp': + init_args['use_udp'] = True + elif modifier == 'https': + init_args['ssl'] = True + else: + raise ValueError('Unknown scheme modifier "{}".'.format(modifier)) + + if conn_params.hostname: + init_args['host'] = conn_params.hostname + if conn_params.port: + init_args['port'] = conn_params.port + if conn_params.username: + init_args['username'] = conn_params.username + if conn_params.password: + init_args['password'] = conn_params.password + if conn_params.path and len(conn_params.path) > 1: + init_args['database'] = conn_params.path[1:] + + return InfluxDBClient(**init_args) + # Change member variables def switch_database(self, 
database): diff --git a/tests/influxdb/influxdb08/client_test.py b/tests/influxdb/influxdb08/client_test.py index 1b7384f3..468ba86b 100644 --- a/tests/influxdb/influxdb08/client_test.py +++ b/tests/influxdb/influxdb08/client_test.py @@ -88,6 +88,20 @@ def test_scheme(self): ) assert cli._baseurl == 'https://host:8086' + def test_dsn(self): + cli = InfluxDBClient.from_DSN('influxdb://usr:pwd@host:1886/db') + assert cli._baseurl == 'http://host:1886' + assert cli._username == 'usr' + assert cli._password == 'pwd' + assert cli._database == 'db' + assert cli.use_udp == False + + cli = InfluxDBClient.from_DSN('udp+influxdb://usr:pwd@host:1886/db') + assert cli.use_udp == True + + cli = InfluxDBClient.from_DSN('https+influxdb://usr:pwd@host:1886/db') + assert cli._baseurl == 'https://host:1886' + def test_switch_database(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'database') cli.switch_database('another_database') From d3bb67d4793435274a6096169835715179cbe8d4 Mon Sep 17 00:00:00 2001 From: Alexandre Viau Date: Wed, 8 Apr 2015 15:46:09 -0400 Subject: [PATCH 076/536] Revert "Sync tests against last master update" --- tests/influxdb/client_test_with_server.py | 63 ++++++------- tests/influxdb/influxdb.conf.template | 59 ++++++------- tests/influxdb/influxdb.udp_conf.template | 103 ++++++++++++++++++++++ 3 files changed, 159 insertions(+), 66 deletions(-) create mode 100644 tests/influxdb/influxdb.udp_conf.template diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index dc8ad72f..968d234b 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -133,7 +133,7 @@ class InfluxDbInstance(object): in a temporary place, using a config file template. 
''' - def __init__(self, conf_template, udp_enabled=False): + def __init__(self, conf_template): # create a temporary dir to store all needed files # for the influxdb server instance : self.temp_dir_base = tempfile.mkdtemp() @@ -142,28 +142,28 @@ def __init__(self, conf_template, udp_enabled=False): tempdir = self.temp_dir_influxdb = tempfile.mkdtemp( dir=self.temp_dir_base) # we need some "free" ports : - - ports = dict( - broker_port=get_free_port(), - webui_port=get_free_port(), - admin_port=get_free_port(), - udp_port=get_free_port() if udp_enabled else -1, - ) - - conf_data = dict( - broker_raft_dir=os.path.join(tempdir, 'raft'), - broker_node_dir=os.path.join(tempdir, 'db'), - cluster_dir=os.path.join(tempdir, 'state'), - logs_file=os.path.join(self.temp_dir_base, 'logs.txt'), - udp_enabled='true' if udp_enabled else 'false', - ) - conf_data.update(ports) - self.__dict__.update(conf_data) + self.broker_port = get_free_port() + self.admin_port = get_free_port() + self.udp_port = get_free_port() + self.snapshot_port = get_free_port() + + self.logs_file = os.path.join(self.temp_dir_base, 'logs.txt') + + with open(conf_template) as fh: + conf = fh.read().format( + broker_port=self.broker_port, + admin_port=self.admin_port, + udp_port=self.udp_port, + broker_raft_dir=os.path.join(tempdir, 'raft'), + broker_node_dir=os.path.join(tempdir, 'db'), + cluster_dir=os.path.join(tempdir, 'state'), + logfile=self.logs_file, + snapshot_port=self.snapshot_port, + ) conf_file = os.path.join(self.temp_dir_base, 'influxdb.conf') with open(conf_file, "w") as fh: - with open(conf_template) as fh_template: - fh.write(fh_template.read().format(**conf_data)) + fh.write(conf) # now start the server instance: proc = self.proc = subprocess.Popen( @@ -182,13 +182,8 @@ def __init__(self, conf_template, udp_enabled=False): # or you run a 286 @ 1Mhz ? 
try: while time.time() < timeout: - if (is_port_open(self.webui_port) + if (is_port_open(self.broker_port) and is_port_open(self.admin_port)): - # it's hard to check if a UDP port is open.. - if udp_enabled: - # so let's just sleep 0.5 sec in this case - # to be sure that the server has open the port - time.sleep(0.5) break time.sleep(0.5) if proc.poll() is not None: @@ -197,13 +192,13 @@ def __init__(self, conf_template, udp_enabled=False): proc.terminate() proc.wait() raise RuntimeError('Timeout waiting for influxdb to listen' - ' on its ports (%s)' % ports) + ' on its broker port') except RuntimeError as err: data = self.get_logs_and_output() data['reason'] = str(err) data['now'] = datetime.datetime.now() raise RuntimeError("%(now)s > %(reason)s. RC=%(rc)s\n" - "stdout=%(out)s\nstderr=%(err)s\nlogs=%(logs)r" + "stdout=%(out)r\nstderr=%(err)r\nlogs=%(logs)r" % data) def get_logs_and_output(self): @@ -230,11 +225,9 @@ def close(self, remove_tree=True): def _setup_influxdb_server(inst): - inst.influxd_inst = InfluxDbInstance( - inst.influxdb_template_conf, - udp_enabled=getattr(inst, 'influxdb_udp_enabled', False)) + inst.influxd_inst = InfluxDbInstance(inst.influxdb_template_conf) inst.cli = InfluxDBClient('localhost', - inst.influxd_inst.webui_port, + inst.influxd_inst.broker_port, 'root', '', database='db') @@ -660,14 +653,12 @@ def test_tags_json_order(self): class UdpTests(ManyTestCasesWithServerMixin, unittest.TestCase): - influxdb_udp_enabled = True - influxdb_template_conf = os.path.join(THIS_DIR, - 'influxdb.conf.template') + 'influxdb.udp_conf.template') def test_write_points_udp(self): cli = InfluxDBClient( - 'localhost', self.influxd_inst.webui_port, + 'localhost', self.influxd_inst.broker_port, 'dont', 'care', database='db', use_udp=True, udp_port=self.influxd_inst.udp_port diff --git a/tests/influxdb/influxdb.conf.template b/tests/influxdb/influxdb.conf.template index dde1a4ae..82608f73 100644 --- a/tests/influxdb/influxdb.conf.template +++ 
b/tests/influxdb/influxdb.conf.template @@ -5,7 +5,6 @@ # that can be resolved here. # hostname = "" bind-address = "0.0.0.0" -port = {webui_port} # Once every 24 hours InfluxDB will report anonymous data to m.influxdb.com # The data includes raft id (random 8 bytes), os, arch and version @@ -15,6 +14,11 @@ port = {webui_port} # Change this option to true to disable reporting. reporting-disabled = false +# Controls settings for initial start-up. Once a node a successfully started, +# these settings are ignored. +[initialization] +join-urls = "" # Comma-delimited URLs, in the form http://host:port, for joining another cluster. + # Control authentication # If not set authetication is DISABLED. Be sure to explicitly set this flag to # true if you want authentication. @@ -49,57 +53,52 @@ enabled = false #database = "collectd_database" #typesdb = "types.db" -# Configure the OpenTSDB input. -[opentsdb] -enabled = false -#address = "0.0.0.0" # If not set, is actually set to bind-address. -#port = 4242 -#database = "opentsdb_database" - # Configure UDP listener for series data. [udp] -enabled = {udp_enabled} +enabled = false #bind-address = "0.0.0.0" #port = 4444 -port = {udp_port} # Broker configuration. Brokers are nodes which participate in distributed # consensus. [broker] -enabled = true # Where the Raft logs are stored. The user running InfluxDB will need read/write access. -#dir = "/var/opt/influxdb/raft" dir = "{broker_raft_dir}" port = {broker_port} # Data node configuration. Data nodes are where the time-series data, in the form of # shards, is stored. [data] -enabled = true -#dir = "/var/opt/influxdb/db" -dir = "{broker_node_dir}" + dir = "{broker_node_dir}" + port = {broker_port} -# Auto-create a retention policy when a database is created. Defaults to true. -retention-auto-create = true + # Auto-create a retention policy when a database is created. Defaults to true. 
+ retention-auto-create = true -# Control whether retention policies are enforced and how long the system waits between -# enforcing those policies. -retention-check-enabled = true -retention-check-period = "10m" + # Control whether retention policies are enforced and how long the system waits between + # enforcing those policies. + retention-check-enabled = true + retention-check-period = "10m" -# Configuration for snapshot endpoint. -[snapshot] -enabled = false # Enabled by default if not set. -bind-address = "127.0.0.1" -port = 8087 +[cluster] +# Location for cluster state storage. For storing state persistently across restarts. +dir = "{cluster_dir}" [logging] +file = "{logfile}" # Leave blank to redirect logs to stderr. write-tracing = false # If true, enables detailed logging of the write system. raft-tracing = false # If true, enables detailed logging of Raft consensus. -# InfluxDB can store statistical and diagnostic information about itself. This is useful for -# monitoring purposes. This feature is disabled by default, but if enabled, these data can be -# queried like any other data. -[monitoring] +# InfluxDB can store statistics about itself. This is useful for monitoring purposes. +# This feature is disabled by default, but if enabled, these statistics can be queried +# as any other data. +[statistics] enabled = false +database = "internal" # The database to which the data is written. +retention-policy = "default" # The retention policy within the database. write-interval = "1m" # Period between writing the data. + + +[snapshot] +bind-address = "127.0.0.1" +port = {snapshot_port} diff --git a/tests/influxdb/influxdb.udp_conf.template b/tests/influxdb/influxdb.udp_conf.template new file mode 100644 index 00000000..4134172f --- /dev/null +++ b/tests/influxdb/influxdb.udp_conf.template @@ -0,0 +1,103 @@ +# Welcome to the InfluxDB configuration file. 
+ +# If hostname (on the OS) doesn't return a name that can be resolved by the other +# systems in the cluster, you'll have to set the hostname to an IP or something +# that can be resolved here. +# hostname = "" +bind-address = "0.0.0.0" + +# Once every 24 hours InfluxDB will report anonymous data to m.influxdb.com +# The data includes raft id (random 8 bytes), os, arch and version +# We don't track ip addresses of servers reporting. This is only used +# to track the number of instances running and the versions, which +# is very helpful for us. +# Change this option to true to disable reporting. +reporting-disabled = false + +# Controls settings for initial start-up. Once a node a successfully started, +# these settings are ignored. +[initialization] +join-urls = "" # Comma-delimited URLs, in the form http://host:port, for joining another cluster. + +# Control authentication +# If not set authetication is DISABLED. Be sure to explicitly set this flag to +# true if you want authentication. +[authentication] +enabled = false + +# Configure the admin server +[admin] +enabled = true +port = {admin_port} + +# Configure the HTTP API endpoint. All time-series data and queries uses this endpoint. +[api] +# ssl-port = 8087 # SSL support is enabled if you set a port and cert +# ssl-cert = "/path/to/cert.pem" + +# Configure the Graphite plugins. +[[graphite]] # 1 or more of these sections may be present. +enabled = false +# protocol = "" # Set to "tcp" or "udp" +# address = "0.0.0.0" # If not set, is actually set to bind-address. +# port = 2003 +# name-position = "last" +# name-separator = "-" +# database = "" # store graphite data in this database + +# Configure the collectd input. +[collectd] +enabled = false +#address = "0.0.0.0" # If not set, is actually set to bind-address. +#port = 25827 +#database = "collectd_database" +#typesdb = "types.db" + +# Configure UDP listener for series data. 
+[udp] +enabled = true +#bind-address = "0.0.0.0" +port = {udp_port} + +# Broker configuration. Brokers are nodes which participate in distributed +# consensus. +[broker] +# Where the Raft logs are stored. The user running InfluxDB will need read/write access. +dir = "{broker_raft_dir}" +port = {broker_port} + +# Data node configuration. Data nodes are where the time-series data, in the form of +# shards, is stored. +[data] + dir = "{broker_node_dir}" + port = {broker_port} + + # Auto-create a retention policy when a database is created. Defaults to true. + retention-auto-create = true + + # Control whether retention policies are enforced and how long the system waits between + # enforcing those policies. + retention-check-enabled = true + retention-check-period = "10m" + +[cluster] +# Location for cluster state storage. For storing state persistently across restarts. +dir = "{cluster_dir}" + +[logging] +file = "{logfile}" # Leave blank to redirect logs to stderr. +write-tracing = false # If true, enables detailed logging of the write system. +raft-tracing = false # If true, enables detailed logging of Raft consensus. + +# InfluxDB can store statistics about itself. This is useful for monitoring purposes. +# This feature is disabled by default, but if enabled, these statistics can be queried +# as any other data. +[statistics] +enabled = false +database = "internal" # The database to which the data is written. +retention-policy = "default" # The retention policy within the database. +write-interval = "1m" # Period between writing the data. 
+ +[snapshot] +bind-address = "127.0.0.1" +port = {snapshot_port} From 92c964c672117ad94dbfc145794246253bd5fb13 Mon Sep 17 00:00:00 2001 From: aviau Date: Wed, 8 Apr 2015 15:51:11 -0400 Subject: [PATCH 077/536] Test against v0.9.0-rc18 --- .travis.yml | 2 +- build_influxdb_server.sh | 40 ---------------------------------------- 2 files changed, 1 insertion(+), 41 deletions(-) delete mode 100755 build_influxdb_server.sh diff --git a/.travis.yml b/.travis.yml index d8c0a437..8c8b81d2 100644 --- a/.travis.yml +++ b/.travis.yml @@ -12,7 +12,7 @@ env: install: - sudo pip install tox - sudo pip install coveralls - - ./build_influxdb_server.sh + - wget http://get.influxdb.org/influxdb_0.9.0-rc18_amd64.deb && sudo dpkg -i influxdb_0.9.0-rc18_amd64.deb script: - travis_wait tox -e $TOX_ENV after_success: diff --git a/build_influxdb_server.sh b/build_influxdb_server.sh deleted file mode 100755 index 7b0bf957..00000000 --- a/build_influxdb_server.sh +++ /dev/null @@ -1,40 +0,0 @@ -#!/usr/bin/env bash - -# -# build and install, -# the latest influxdb server master -# - -set -e - -tmpdir=$(mktemp -d) - -echo "Using tempdir $tmpdir .." 
- -cd "$tmpdir" - -# rpm for package.sh (below) which will also build an .rpm -sudo apt-get install ruby ruby-dev build-essential rpm - -echo $PATH -echo $(which gem) -echo $(which ruby) - -gem=$(which gem) - -sudo $gem install fpm - -mkdir -p go/src/github.com/influxdb -cd go/src/github.com/influxdb - -git clone --depth 5 https://github.com/influxdb/influxdb -cd influxdb - -version=0.0.0-$(git describe --always | sed 's/^v//') -echo "describe: $version" - -export GOPATH="$tmpdir/go" -{ echo y ; yes no ; } | ./package.sh "$version" - -deb=$(ls *.deb) -sudo dpkg -i "$deb" From 6f757e8b741b79257ec83c58185a0252c9f7e940 Mon Sep 17 00:00:00 2001 From: aviau Date: Wed, 1 Apr 2015 10:58:15 -0400 Subject: [PATCH 078/536] Query now returns a ResultSet --- influxdb/client.py | 9 +++--- influxdb/resultset.py | 53 ++++++++++++++++++++++++++++++++ tests/influxdb/resultset_test.py | 51 ++++++++++++++++++++++++++++++ 3 files changed, 108 insertions(+), 5 deletions(-) create mode 100644 influxdb/resultset.py create mode 100644 tests/influxdb/resultset_test.py diff --git a/influxdb/client.py b/influxdb/client.py index 4941ebeb..668be519 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -8,6 +8,7 @@ import requests import requests.exceptions +from influxdb.resultset import ResultSet try: xrange @@ -132,10 +133,8 @@ def format_query_response(response): for row in result['series']: items = [] if 'name' in row: - name = row['name'] - tags = row.get('tags', None) - if tags: - name = (row['name'], tuple(tags.items())) + tags = row.get('tags', {}) + name = (row['name'], tuple(tags.items())) assert name not in series series[name] = items else: @@ -147,7 +146,7 @@ def format_query_response(response): for cur_col, field in enumerate(value): item[columns[cur_col]] = field items.append(item) - return series + return ResultSet(series) def switch_database(self, database): """ diff --git a/influxdb/resultset.py b/influxdb/resultset.py new file mode 100644 index 00000000..ca3712d3 --- 
/dev/null +++ b/influxdb/resultset.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +import collections + + +class ResultSet(collections.MutableMapping): + """A dictionary that applies an arbitrary key-altering + function before accessing the keys""" + + def __init__(self, series): + self.store = dict() + self.update(series) # use the free update to set keys + + def __getitem__(self, key): + if isinstance(key, tuple): + name = key[0] + tags = key[1] + else: + name = key + tags = None + + for serie in self.store.keys(): + if serie[0] == name: + serie_matches = True + serie_tags = dict((tag, value) for tag, value in serie[1]) + if tags is not None: + for tag in tags.items(): + try: + if serie_tags[tag[0]] != tag[1]: + serie_matches = False + break + except KeyError: + serie_matches = False + break + if serie_matches: + yield {"points": self.store[serie], "tags": serie_tags} + + def __setitem__(self, key, value): + self.store[key] = value + + def __repr__(self): + rep = "" + for serie in self.store.keys(): + rep += "%s: %s" % (dict((tag, value) for tag, value in serie[1]), self.store[serie]) + return '%s(%s)' % (type(self).__name__, rep) + + def __delitem__(self, key): + del self.store[key] + + def __iter__(self): + return iter(self.store) + + def __len__(self): + return len(self.store) diff --git a/tests/influxdb/resultset_test.py b/tests/influxdb/resultset_test.py new file mode 100644 index 00000000..0b71af5c --- /dev/null +++ b/tests/influxdb/resultset_test.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- + +import unittest + +from influxdb.resultset import ResultSet + + +class TestResultSet(unittest.TestCase): + + def setUp(self): + self.query_response = { + ('serie', (('tag_1', ''),)): [ + {'time': '2015-03-30T16:16:37Z', 'value': 15}], + ('serie', (('tag_1', 'value1'),)): [ + {'time': '2015-03-30T16:16:37Z', 'value': 5}], + ('serie', (('tag_1', 'value2'),)): [ + {'time': '2015-03-30T16:16:37Z', 'value': 10}] + } + self.rs = ResultSet(self.query_response) + + def 
test_filter_by_name(self): + self.assertItemsEqual( + self.rs['serie'], + [ + {'points': [{'value': 10, 'time': '2015-03-30T16:16:37Z'}], + 'tags': {'tag_1': 'value2'}}, + {'points': [{'value': 15, 'time': '2015-03-30T16:16:37Z'}], + 'tags': {'tag_1': ''}}, + {'points': [{'value': 5, 'time': '2015-03-30T16:16:37Z'}], + 'tags': {'tag_1': 'value1'}} + ] + ) + + def test_filter_by_tags(self): + self.assertItemsEqual( + self.rs[('serie', {'tag_1': 'value2'})], + [{'points': [{'value': 10, 'time': '2015-03-30T16:16:37Z'}], + 'tags': {'tag_1': 'value2'}}] + ) + + def test_repr(self): + expected = \ + "ResultSet({'tag_1': 'value2'}: [{'value': 10, 'time': " \ + "'2015-03-30T16:16:37Z'}]{'tag_1': ''}: [{'value': 15, 'time': " \ + "'2015-03-30T16:16:37Z'}]{'tag_1': 'value1'}: " \ + "[{'value': 5, 'time': '2015-03-30T16:16:37Z'}])" + + self.assertEqual( + str(self.rs), + expected + ) From a4078c4253b9a07a9babb2ad81853ee5b7a637cf Mon Sep 17 00:00:00 2001 From: aviau Date: Wed, 1 Apr 2015 12:31:19 -0400 Subject: [PATCH 079/536] Fixed repr to include tuple --- influxdb/resultset.py | 2 +- tests/influxdb/resultset_test.py | 9 +++++---- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/influxdb/resultset.py b/influxdb/resultset.py index ca3712d3..54fd3e63 100644 --- a/influxdb/resultset.py +++ b/influxdb/resultset.py @@ -40,7 +40,7 @@ def __setitem__(self, key, value): def __repr__(self): rep = "" for serie in self.store.keys(): - rep += "%s: %s" % (dict((tag, value) for tag, value in serie[1]), self.store[serie]) + rep += "('%s', %s): %s" % (serie[0], dict((tag, value) for tag, value in serie[1]), self.store[serie]) return '%s(%s)' % (type(self).__name__, rep) def __delitem__(self, key): diff --git a/tests/influxdb/resultset_test.py b/tests/influxdb/resultset_test.py index 0b71af5c..35b49cd1 100644 --- a/tests/influxdb/resultset_test.py +++ b/tests/influxdb/resultset_test.py @@ -40,10 +40,11 @@ def test_filter_by_tags(self): def test_repr(self): expected = \ - 
"ResultSet({'tag_1': 'value2'}: [{'value': 10, 'time': " \ - "'2015-03-30T16:16:37Z'}]{'tag_1': ''}: [{'value': 15, 'time': " \ - "'2015-03-30T16:16:37Z'}]{'tag_1': 'value1'}: " \ - "[{'value': 5, 'time': '2015-03-30T16:16:37Z'}])" + "ResultSet(('serie', {'tag_1': 'value2'}): [{'value': 10, " \ + "'time': '2015-03-30T16:16:37Z'}]('serie', {'tag_1': ''}):" \ + " [{'value': 15, 'time': '2015-03-30T16:16:37Z'}]('serie'," \ + " {'tag_1': 'value1'}): [{'value': 5," \ + " 'time': '2015-03-30T16:16:37Z'}])" self.assertEqual( str(self.rs), From d6ea8329eda28a0ff05601ec21d97d3666731cd2 Mon Sep 17 00:00:00 2001 From: aviau Date: Wed, 1 Apr 2015 16:26:10 -0400 Subject: [PATCH 080/536] Refactored resultset --- influxdb/client.py | 45 ++++++----------- influxdb/resultset.py | 51 ++++++++++--------- tests/influxdb/client_test.py | 8 ++- tests/influxdb/resultset_test.py | 84 ++++++++++++++++++++++++++------ 4 files changed, 117 insertions(+), 71 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 668be519..2e44a95c 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -123,31 +123,6 @@ def keep_json_response_order(self): def keep_json_response_order(self, new_value): self._keep_json_response_order = new_value - @staticmethod - def format_query_response(response): - """Returns a list of items from a query response""" - series = {} - if 'results' in response: - for result in response['results']: - if 'series' in result: - for row in result['series']: - items = [] - if 'name' in row: - tags = row.get('tags', {}) - name = (row['name'], tuple(tags.items())) - assert name not in series - series[name] = items - else: - series = items # Special case for system queries. 
- if 'columns' in row and 'values' in row: - columns = row['columns'] - for value in row['values']: - item = {} - for cur_col, field in enumerate(value): - item[columns[cur_col]] = field - items.append(item) - return ResultSet(series) - def switch_database(self, database): """ switch_database() @@ -233,8 +208,7 @@ def query(self, query, params={}, expected_response_code=200, - database=None, - raw=False): + database=None): """ Query data @@ -260,8 +234,7 @@ def query(self, json_kw.update(object_pairs_hook=OrderedDict) data = response.json(**json_kw) - return (data if raw - else self.format_query_response(data)) + return ResultSet(data) def write_points(self, points, @@ -327,7 +300,8 @@ def get_list_database(self): Get the list of databases """ rsp = self.query("SHOW DATABASES") - return [db['name'] for db in rsp['databases']] + print rsp + return [db['name'] for db in list(rsp[('databases', {})])[0]['points']] def create_database(self, dbname): """ @@ -375,7 +349,16 @@ def get_list_series(self, database=None): """ Get the list of series """ - return self.query("SHOW SERIES", database=database) + + rs = self.query("SHOW SERIES", database=database) + series = list(rs) + result = [] + for serie in rs.keys(): + print "EEEEE", serie + + #x = series[0] + #return x['points'] + return list(self.query("SHOW SERIES", database=database))[0]['points'] def get_list_users(self): """ diff --git a/influxdb/resultset.py b/influxdb/resultset.py index 54fd3e63..c783836e 100644 --- a/influxdb/resultset.py +++ b/influxdb/resultset.py @@ -7,7 +7,7 @@ class ResultSet(collections.MutableMapping): function before accessing the keys""" def __init__(self, series): - self.store = dict() + self.raw = dict() self.update(series) # use the free update to set keys def __getitem__(self, key): @@ -18,36 +18,43 @@ def __getitem__(self, key): name = key tags = None - for serie in self.store.keys(): - if serie[0] == name: - serie_matches = True - serie_tags = dict((tag, value) for tag, value in 
serie[1]) - if tags is not None: - for tag in tags.items(): - try: - if serie_tags[tag[0]] != tag[1]: + results = [] + + for result in self.raw['results']: + for serie in result['series']: + if serie['name'] == name: + serie_matches = True + + if tags is not None: + for tag in tags.items(): + if serie['tags'][tag[0]] != tag[1]: serie_matches = False break - except KeyError: - serie_matches = False - break - if serie_matches: - yield {"points": self.store[serie], "tags": serie_tags} + + if serie_matches: + items = [] + for value in serie['values']: + item = {} + for cur_col, field in enumerate(value): + item[serie['columns'][cur_col]] = field + items.append(item) + + results.append({"points": items, "tags": serie['tags']}) + continue + return results def __setitem__(self, key, value): - self.store[key] = value + self.raw[key] = value def __repr__(self): - rep = "" - for serie in self.store.keys(): - rep += "('%s', %s): %s" % (serie[0], dict((tag, value) for tag, value in serie[1]), self.store[serie]) - return '%s(%s)' % (type(self).__name__, rep) + return str(self.raw) def __delitem__(self, key): - del self.store[key] + del self.raw[key] def __iter__(self): - return iter(self.store) + for serie in self.raw: + yield {"points": self.raw[serie], "tags": dict((tag, value) for tag, value in serie[1])} def __len__(self): - return len(self.store) + return len(self.raw) diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index 436e5aad..54a66ef1 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -367,8 +367,9 @@ def test_get_list_database_fails(self): def test_get_list_series(self): example_response = \ - '{"results": [{"series": [{"values": [["fsfdsdf", "24h0m0s", 2]],'\ - ' "columns": ["name", "duration", "replicaN"]}]}]}' + '{"results": [{"series": [{"name": "cpu_load_short", "columns": ' \ + '["_id", "host", "region"], "values": ' \ + '[[1, "server01", "us-west"]]}]}]}' with requests_mock.Mocker() as m: 
m.register_uri( @@ -376,6 +377,9 @@ def test_get_list_series(self): "http://localhost:8086/query", text=example_response ) + + print self.cli.get_list_series() + self.assertListEqual( self.cli.get_list_series(), [{'duration': '24h0m0s', diff --git a/tests/influxdb/resultset_test.py b/tests/influxdb/resultset_test.py index 35b49cd1..6bf37915 100644 --- a/tests/influxdb/resultset_test.py +++ b/tests/influxdb/resultset_test.py @@ -9,33 +9,85 @@ class TestResultSet(unittest.TestCase): def setUp(self): self.query_response = { - ('serie', (('tag_1', ''),)): [ - {'time': '2015-03-30T16:16:37Z', 'value': 15}], - ('serie', (('tag_1', 'value1'),)): [ - {'time': '2015-03-30T16:16:37Z', 'value': 5}], - ('serie', (('tag_1', 'value2'),)): [ - {'time': '2015-03-30T16:16:37Z', 'value': 10}] + "results": [ + {"series": [{"name": "cpu_load_short", + "tags": {"host": "server01", + "region": "us-west"}, + "columns": ["time", "value"], + "values": [ + ["2015-01-29T21:51:28.968422294Z", 0.64] + ]}, + {"name": "cpu_load_short", + "tags": {"host": "server02", + "region": "us-west"}, + "columns": ["time", "value"], + "values": [ + ["2015-01-29T21:51:28.968422294Z", 0.64] + ]}, + {"name": "other_serie", + "tags": {"host": "server01", + "region": "us-west"}, + "columns": ["time", "value"], + "values": [ + ["2015-01-29T21:51:28.968422294Z", 0.64] + ]}]} + ] } self.rs = ResultSet(self.query_response) def test_filter_by_name(self): self.assertItemsEqual( - self.rs['serie'], + self.rs['cpu_load_short'], [ - {'points': [{'value': 10, 'time': '2015-03-30T16:16:37Z'}], - 'tags': {'tag_1': 'value2'}}, - {'points': [{'value': 15, 'time': '2015-03-30T16:16:37Z'}], - 'tags': {'tag_1': ''}}, - {'points': [{'value': 5, 'time': '2015-03-30T16:16:37Z'}], - 'tags': {'tag_1': 'value1'}} + { + "tags": {"host": "server01", "region": "us-west"}, + "points": [ + {"time": "2015-01-29T21:51:28.968422294Z", + "value": 0.64} + ] + }, + { + "tags": {"host": "server02", "region": "us-west"}, + "points": [ + {"time": 
"2015-01-29T21:51:28.968422294Z", + "value": 0.64} + ] + } ] ) def test_filter_by_tags(self): self.assertItemsEqual( - self.rs[('serie', {'tag_1': 'value2'})], - [{'points': [{'value': 10, 'time': '2015-03-30T16:16:37Z'}], - 'tags': {'tag_1': 'value2'}}] + self.rs[('cpu_load_short', {"host": "server01"})], + [ + { + "tags": {"host": "server01", "region": "us-west"}, + "points": [ + {"time": "2015-01-29T21:51:28.968422294Z", + "value": 0.64} + ] + } + ] + ) + + self.assertItemsEqual( + self.rs[('cpu_load_short', {"region": "us-west"})], + [ + { + "tags": {"host": "server01", "region": "us-west"}, + "points": [ + {"time": "2015-01-29T21:51:28.968422294Z", + "value": 0.64} + ] + }, + { + "tags": {"host": "server02", "region": "us-west"}, + "points": [ + {"time": "2015-01-29T21:51:28.968422294Z", + "value": 0.64} + ] + } + ] ) def test_repr(self): From ba2b16891b357ecbc08ba23508a5353d67834300 Mon Sep 17 00:00:00 2001 From: aviau Date: Wed, 1 Apr 2015 18:05:40 -0400 Subject: [PATCH 081/536] begin working on tests --- influxdb/client.py | 20 ++++++-------------- influxdb/resultset.py | 19 +++++++++++++------ tests/influxdb/client_test.py | 2 -- tests/influxdb/client_test_with_server.py | 6 +++--- tests/influxdb/resultset_test.py | 12 ------------ 5 files changed, 22 insertions(+), 37 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 2e44a95c..60b8e1bb 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -299,9 +299,7 @@ def get_list_database(self): """ Get the list of databases """ - rsp = self.query("SHOW DATABASES") - print rsp - return [db['name'] for db in list(rsp[('databases', {})])[0]['points']] + return self.query("SHOW DATABASES")['results'][0]['points'] def create_database(self, dbname): """ @@ -343,22 +341,16 @@ def get_list_retention_policies(self, database=None): """ return self.query( "SHOW RETENTION POLICIES %s" % (database or self._database) - ) + )['results'][0]['points'] def get_list_series(self, database=None): """ Get the 
list of series """ - - rs = self.query("SHOW SERIES", database=database) - series = list(rs) - result = [] - for serie in rs.keys(): - print "EEEEE", serie - - #x = series[0] - #return x['points'] - return list(self.query("SHOW SERIES", database=database))[0]['points'] + rsp = self.query("SHOW SERIES", database=database) + print "RSP", rsp.raw + print "RSP", rsp['results'] + return rsp def get_list_users(self): """ diff --git a/influxdb/resultset.py b/influxdb/resultset.py index c783836e..6631cc8c 100644 --- a/influxdb/resultset.py +++ b/influxdb/resultset.py @@ -22,24 +22,32 @@ def __getitem__(self, key): for result in self.raw['results']: for serie in result['series']: - if serie['name'] == name: + serie_name = serie.get('name', 'results') + if serie_name == name or serie_name == 'results': serie_matches = True + serie_tags = serie.get('tags', {}) + if tags is not None: for tag in tags.items(): - if serie['tags'][tag[0]] != tag[1]: + try: + if serie_tags[tag[0]] != tag[1]: + serie_matches = False + break + except KeyError: serie_matches = False break if serie_matches: items = [] - for value in serie['values']: + serie_values = serie.get('values', []) + for value in serie_values: item = {} for cur_col, field in enumerate(value): item[serie['columns'][cur_col]] = field items.append(item) - results.append({"points": items, "tags": serie['tags']}) + results.append({"points": items, "tags": serie_tags}) continue return results @@ -53,8 +61,7 @@ def __delitem__(self, key): del self.raw[key] def __iter__(self): - for serie in self.raw: - yield {"points": self.raw[serie], "tags": dict((tag, value) for tag, value in serie[1])} + return iter(self.raw) def __len__(self): return len(self.raw) diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index 54a66ef1..5dc03e3d 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -378,8 +378,6 @@ def test_get_list_series(self): text=example_response ) - print 
self.cli.get_list_series() - self.assertListEqual( self.cli.get_list_series(), [{'duration': '24h0m0s', diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index 968d234b..4ffef3f2 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -289,7 +289,7 @@ def test_create_database(self): self.assertIsNone(self.cli.create_database('new_db_2')) self.assertEqual( self.cli.get_list_database(), - ['new_db_1', 'new_db_2'] + [{u'name': u'new_db_1'}, {u'name': u'new_db_2'}] ) def test_create_database_fails(self): @@ -303,7 +303,7 @@ def test_create_database_fails(self): def test_drop_database(self): self.test_create_database() self.assertIsNone(self.cli.drop_database('new_db_1')) - self.assertEqual(['new_db_2'], self.cli.get_list_database()) + self.assertEqual([{u'name': u'new_db_2'}], self.cli.get_list_database()) def test_drop_database_fails(self): with self.assertRaises(InfluxDBClientError) as ctx: @@ -498,7 +498,7 @@ def test_query_chunked(self): def test_get_list_series_empty(self): rsp = self.cli.get_list_series() - self.assertEqual({}, rsp) + self.assertEqual([], rsp) def test_get_list_series_non_empty(self): self.cli.write_points(dummy_point) diff --git a/tests/influxdb/resultset_test.py b/tests/influxdb/resultset_test.py index 6bf37915..8f1768e8 100644 --- a/tests/influxdb/resultset_test.py +++ b/tests/influxdb/resultset_test.py @@ -90,15 +90,3 @@ def test_filter_by_tags(self): ] ) - def test_repr(self): - expected = \ - "ResultSet(('serie', {'tag_1': 'value2'}): [{'value': 10, " \ - "'time': '2015-03-30T16:16:37Z'}]('serie', {'tag_1': ''}):" \ - " [{'value': 15, 'time': '2015-03-30T16:16:37Z'}]('serie'," \ - " {'tag_1': 'value1'}): [{'value': 5," \ - " 'time': '2015-03-30T16:16:37Z'}])" - - self.assertEqual( - str(self.rs), - expected - ) From 3da7d94ec84dcb704196146ab7bf0ecfcc73fcde Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Thu, 2 Apr 2015 
17:59:02 -0400 Subject: [PATCH 082/536] after rethink about it : not completly sure we really need it.. --- influxdb/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/client.py b/influxdb/client.py index 60b8e1bb..e236057f 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -110,7 +110,7 @@ def __init__(self, # if one doesn't care in that, then it can simply change its client # instance 'keep_json_response_order' attribute value (to a falsy one). # This will then eventually help for performance considerations. - _keep_json_response_order = True + _keep_json_response_order = False # NB: For "group by" query type : # This setting is actually necessary in order to have a consistent and # reproducible rsp format if you "group by" on more than 1 tag. From 450e118b71387b174b971652cc841f4e3df961ba Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Thu, 2 Apr 2015 18:01:54 -0400 Subject: [PATCH 083/536] README ! MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit list(resultset) -> liste de série (chacune contenant liste de points) list(resultset["key"]) -> liste de points (correspondant à "key") --- influxdb/resultset.py | 143 ++++++++++++++++++++++++++++++------------ 1 file changed, 104 insertions(+), 39 deletions(-) diff --git a/influxdb/resultset.py b/influxdb/resultset.py index 6631cc8c..1e4dc650 100644 --- a/influxdb/resultset.py +++ b/influxdb/resultset.py @@ -1,67 +1,132 @@ # -*- coding: utf-8 -*- + import collections +from collections import namedtuple + + +_sentinel = object() + + +class NamedValues(object): + def __init__(self, point): + self._point = point + + def __getattr__(self, item): + try: + index = self._point.columns.index(item) + except ValueError: + raise AttributeError('Point have no such attribute (%r)' % item) + return self._point._point_values[index] + + def __repr__(self): + return 'Values(%s)' % ', '.join( + '%s=%r' % (k, 
self._point._point_values[k]) + for k in self._point.columns) + + +class Point(object): + + def __init__(self, serie, columns, values, tags=None): + assert len(columns) == len(values) + self.columns = columns + self._point_values = values + if tags is None: + tags = {} + self.serie = serie + self.tags = tags + self.values = NamedValues(self) + def __getitem__(self, tag_name): + """Indexing a Point return the tag value associated with + the given tag name""" + return self._tags[tag_name] -class ResultSet(collections.MutableMapping): - """A dictionary that applies an arbitrary key-altering - function before accessing the keys""" + def __iter__(self): + """Iterating over a Point will return its tags names one per one""" + return iter(self._tags) + + def __len__(self): + """The len of a Point is its number of columns/values""" + return len(self.columns) + + def __repr__(self): + return 'Point(values=%s, tags=%s)' % ( + ', '.join('%s=%r' % ( + k, getattr(self.values, k)) for k in self.columns), + self.tags) + + + +class ResultSet(object): + """A wrapper around series results """ def __init__(self, series): - self.raw = dict() - self.update(series) # use the free update to set keys + self.raw = series # ['results'] + results = series['results'] + if False: + self.have_tags = ( + results + and 'series' in results[0] + and results[0]['series'] + and 'tags' in results[0]['series'][0] + ) + # self.raw.update(series) # use the free update to set keys def __getitem__(self, key): + ''' + :param key: Either a serie name or a 2-tuple(serie_name, tags_dict) + If the given serie name is None then any serie (matching the eventual + given tags) will be given its points one after the other. 
+ :return: A generator yielding `Point`s matching the given key + ''' if isinstance(key, tuple): + if 2 != len(key): + raise TypeError('only 2-tuples allowed') name = key[0] tags = key[1] + if not isinstance(tags, dict): + raise TypeError('should be a dict') else: name = key tags = None - results = [] - for result in self.raw['results']: for serie in result['series']: - serie_name = serie.get('name', 'results') - if serie_name == name or serie_name == 'results': - serie_matches = True + serie_name = serie.get('name', None) + if serie_name is None: + # this is a "system" query or a query which doesn't returned a named "serie" + # 'list retention' is in this case.. + if key is None: + for point in serie['values']: + yield Point('$SYSTEM$', serie['columns'], point) + elif name in (None, serie_name): + # by default if no tags was provided then + # we will matches every returned serie + serie_matches = True serie_tags = serie.get('tags', {}) - - if tags is not None: - for tag in tags.items(): - try: - if serie_tags[tag[0]] != tag[1]: - serie_matches = False - break - except KeyError: - serie_matches = False + if tags: + serie_matches = False + for tag_name, tag_value in tags.items(): + serie_tag_value = serie_tags.get(tag_name, _sentinel) + if serie_tag_value != tag_value: break + else: + serie_matches = True if serie_matches: - items = [] - serie_values = serie.get('values', []) - for value in serie_values: - item = {} - for cur_col, field in enumerate(value): - item[serie['columns'][cur_col]] = field - items.append(item) - - results.append({"points": items, "tags": serie_tags}) - continue - return results - - def __setitem__(self, key, value): - self.raw[key] = value + for point in serie['values']: + yield Point(serie_name, serie['columns'], point, serie_tags) def __repr__(self): return str(self.raw) - def __delitem__(self, key): - del self.raw[key] - def __iter__(self): - return iter(self.raw) + ''' Iterating a ResultSet will yield one dict instance per serie result. 
+ ''' + for results in self.raw['results']: + for many_series in results['series']: + yield many_series - def __len__(self): - return len(self.raw) + #def __len__(self): + # return len(self.raw) From 1a4b4ebf0c8a4f35553ab8ae64915d7ae59d48e8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Thu, 2 Apr 2015 18:02:24 -0400 Subject: [PATCH 084/536] adapted get_list_database().. --- influxdb/client.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index e236057f..989405f7 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -215,7 +215,9 @@ def query(self, :param params: Additional parameters to be passed to requests. :param database: Database to query, default to None. :param expected_response_code: Expected response code. Defaults to 200. - :param raw: Wether or not to return the raw influxdb response. + + :rtype : ResultSet + """ params['q'] = query @@ -299,7 +301,10 @@ def get_list_database(self): """ Get the list of databases """ - return self.query("SHOW DATABASES")['results'][0]['points'] + rsp = self.query("SHOW DATABASES") #['results'][0]['points'] + lrsp = list(rsp) + return [value[0] for value in lrsp[0].get('values', [])] + #return rsp['results'][0]['points'] def create_database(self, dbname): """ From 37be5b2f476ed17c1e31e2edc3068f808c460ed8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Thu, 2 Apr 2015 18:03:30 -0400 Subject: [PATCH 085/536] =?UTF-8?q?r=C3=A9-adapt=C3=A9=20pour=20get=5Flist?= =?UTF-8?q?=5Fdatabase()?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- tests/influxdb/client_test_with_server.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index 4ffef3f2..43797d2d 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ 
-287,9 +287,10 @@ def test_fresh_server_no_db(self): def test_create_database(self): self.assertIsNone(self.cli.create_database('new_db_1')) self.assertIsNone(self.cli.create_database('new_db_2')) + rsp = self.cli.get_list_database() self.assertEqual( - self.cli.get_list_database(), - [{u'name': u'new_db_1'}, {u'name': u'new_db_2'}] + rsp, + ['new_db_1', 'new_db_2'] ) def test_create_database_fails(self): @@ -303,7 +304,7 @@ def test_create_database_fails(self): def test_drop_database(self): self.test_create_database() self.assertIsNone(self.cli.drop_database('new_db_1')) - self.assertEqual([{u'name': u'new_db_2'}], self.cli.get_list_database()) + self.assertEqual(['new_db_2'], self.cli.get_list_database()) def test_drop_database_fails(self): with self.assertRaises(InfluxDBClientError) as ctx: From a2173902fe8ea53b874e11b129df564bd61df927 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Fri, 3 Apr 2015 09:25:48 -0400 Subject: [PATCH 086/536] =?UTF-8?q?adapt=C3=A9:?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit + test_write_points_check_read + test_write_multiple_points_different_series + test_create_retention_policy_default --- tests/influxdb/client_test_with_server.py | 140 ++++++++++++++++------ 1 file changed, 102 insertions(+), 38 deletions(-) diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index 43797d2d..a54a3125 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -12,7 +12,7 @@ from __future__ import print_function import random - +from collections import OrderedDict import datetime import distutils.spawn from functools import partial @@ -360,22 +360,40 @@ def test_write_points_check_read(self): ''' same as test_write_check_read() but with write_points \o/ ''' self.test_write_points() time.sleep(1) # same as test_write_check_read() + rsp = self.cli.query('SELECT * FROM cpu_load_short') 
self.assertEqual( - {'cpu_load_short': [ - {'value': 0.64, 'time': '2009-11-10T23:00:00Z'}]}, - self.cli.query('SELECT * FROM cpu_load_short')) + [{'values': [['2009-11-10T23:00:00Z', 0.64]], + 'name': 'cpu_load_short', + 'columns': ['time', 'value']}], + list(rsp) + ) + + rsp2 = list(rsp['cpu_load_short']) + self.assertEqual(len(rsp2), 1) + pt = rsp2[0] + + self.assertEqual( + ['cpu_load_short', ['time', 'value'], {}, ['2009-11-10T23:00:00Z', 0.64]], + [pt.serie, pt.columns, pt.tags, [pt.values.time, pt.values.value]] + ) def test_write_multiple_points_different_series(self): self.assertIs(True, self.cli.write_points(dummy_points)) time.sleep(1) + rsp = self.cli.query('SELECT * FROM cpu_load_short') + lrsp = list(rsp) self.assertEqual( - {'cpu_load_short': [ - {'value': 0.64, 'time': '2009-11-10T23:00:00Z'}]}, - self.cli.query('SELECT * FROM cpu_load_short')) + [{'values': [['2009-11-10T23:00:00Z', 0.64]], + 'name': 'cpu_load_short', + 'columns': ['time', 'value']}], + lrsp) + rsp = list(self.cli.query('SELECT * FROM memory')) self.assertEqual( - {'memory': [ - {'time': '2009-11-10T23:01:35Z', 'value': 33}]}, - self.cli.query('SELECT * FROM memory')) + [{ + 'values': [['2009-11-10T23:01:35Z', 33]], + 'name': 'memory', 'columns': ['time', 'value']}], + rsp + ) @unittest.skip('Not implemented for 0.9') def test_write_points_batch(self): @@ -465,7 +483,7 @@ def test_write_points_with_precision(self): time.sleep(sleep_time) rsp = self.cli.query('SELECT * FROM cpu_load_short', database=db) - + rsp = list(rsp)[0] # sys.stderr.write('precision=%s rsp_timestamp = %r\n' % ( # precision, rsp['cpu_load_short'][0]['time'])) m = re.match(expected_regex, rsp['cpu_load_short'][0]['time']) @@ -524,13 +542,10 @@ def test_create_retention_policy_default(self): default=True) self.assertIsNone(rsp) rsp = self.cli.get_list_retention_policies() - self.assertEqual( - [ - {'duration': '0', 'default': False, - 'replicaN': 1, 'name': 'default'}, - {'duration': '24h0m0s', 'default': True, - 
'replicaN': 4, 'name': 'somename'} - ], + self.assertEqual([ + {'columns': ['name', 'duration', 'replicaN', 'default'], + 'values': [['default', '0', 1, False], + ['somename', '24h0m0s', 4, True]]}], rsp ) @@ -548,7 +563,7 @@ def test_create_retention_policy(self): ) def test_issue_143(self): - pt = partial(point, 'serie', timestamp='2015-03-30T16:16:37Z') + pt = partial(point, 'a_serie_name', timestamp='2015-03-30T16:16:37Z') pts = [ pt(value=15), pt(tags={'tag_1': 'value1'}, value=5), @@ -556,15 +571,19 @@ def test_issue_143(self): ] self.cli.write_points(pts) time.sleep(1) - rsp = self.cli.query('SELECT * FROM serie GROUP BY tag_1') + rsp = list(self.cli.query('SELECT * FROM a_serie_name GROUP BY tag_1')) # print(rsp, file=sys.stderr) - self.assertEqual({ - ('serie', (('tag_1', ''),)): [ - {'time': '2015-03-30T16:16:37Z', 'value': 15}], - ('serie', (('tag_1', 'value1'),)): [ - {'time': '2015-03-30T16:16:37Z', 'value': 5}], - ('serie', (('tag_1', 'value2'),)): [ - {'time': '2015-03-30T16:16:37Z', 'value': 10}]}, + + self.assertEqual([ + {'name': 'a_serie_name', 'columns': ['time', 'value'], + 'values': [['2015-03-30T16:16:37Z', 15]], + 'tags': {'tag_1': ''}}, + {'name': 'a_serie_name', 'columns': ['time', 'value'], + 'values': [['2015-03-30T16:16:37Z', 5]], + 'tags': {'tag_1': 'value1'}}, + {'name': 'a_serie_name', 'columns': ['time', 'value'], + 'values': [['2015-03-30T16:16:37Z', 10]], + 'tags': {'tag_1': 'value2'}}], rsp ) @@ -579,20 +598,38 @@ def test_issue_143(self): time.sleep(1) rsp = self.cli.query('SELECT * FROM serie2 GROUP BY tag1,tag2') # print(rsp, file=sys.stderr) + self.assertEqual([ + {'name': 'serie2', 'columns': ['time', 'value'], + 'values': [['2015-03-30T16:16:37Z', 0]], + 'tags': {'tag2': 'v1', 'tag1': 'value1'}}, + {'name': 'serie2', 'columns': ['time', 'value'], + 'values': [['2015-03-30T16:16:37Z', 5]], + 'tags': {'tag2': 'v2', 'tag1': 'value1'}}, + {'name': 'serie2', 'columns': ['time', 'value'], + 'values': [['2015-03-30T16:16:37Z', 
10]], + 'tags': {'tag2': 'v1', 'tag1': 'value2'}}], + list(rsp) + ) + + d = all_tag2_equal_v1 = list(rsp[None, {'tag2': 'v1'}]) self.assertEqual( - { - ('serie2', (('tag1', 'value1'), ('tag2', 'v1'))): [ - {'time': '2015-03-30T16:16:37Z', 'value': 0} - ], - ('serie2', (('tag1', 'value1'), ('tag2', 'v2'))): [ - {'time': '2015-03-30T16:16:37Z', 'value': 5} - ], - ('serie2', (('tag1', 'value2'), ('tag2', 'v1'))): [ - {'time': '2015-03-30T16:16:37Z', 'value': 10}] - }, - rsp + [ + 2, + ['time', 'value'], + {'tag2': 'v1', 'tag1': 'value1'}, + ['2015-03-30T16:16:37Z', 0], + {'tag2': 'v1', 'tag1': 'value2'}, + ['2015-03-30T16:16:37Z', 10] + ], + [ + len(d), + d[0].columns, + d[0].tags, [d[0].values.time, d[0].values.value], + d[1].tags, [d[1].values.time, d[1].values.value] + ] ) + def test_tags_json_order(self): n_pts = 100 n_tags = 5 # that will make 120 possible orders (fact(5) == 120) @@ -647,6 +684,33 @@ def test_tags_json_order(self): rsp_tags = tuple(t[0] for t in serie_key[1]) self.assertEqual(expected_ordered_tags, rsp_tags) + + def test_query_multiple_series(self): + pt = partial(point, 'serie1', timestamp='2015-03-30T16:16:37Z') + pts = [ + pt(tags={'tag1': 'value1', 'tag2': 'v1'}, value=0), + #pt(tags={'tag1': 'value1', 'tag2': 'v2'}, value=5), + #pt(tags={'tag1': 'value2', 'tag2': 'v1'}, value=10), + ] + self.cli.write_points(pts) + + pt = partial(point, 'serie2', timestamp='1970-03-30T16:16:37Z') + pts = [ + pt(tags={'tag1': 'value1', 'tag2': 'v1'}, value=0, data1=33, data2="bla"), + #pt(tags={'tag1': 'value1', 'tag2': 'v2'}, value=5), + #pt(tags={'tag1': 'value2', 'tag2': 'v3'}, value=10), # data2="what"), + ] + self.cli.write_points(pts) + + rsp = self.cli.query('SELECT * FROM serie1, serie2') + print(rsp) + + # same but with the tags given : + #rsp = self.cli.query('SELECT * FROM serie1, serie2 GROUP BY *') + print(rsp) + + + ############################################################################ From 1c6df3f9280a7f7fe1a87baf819e51b9f82eea67 Mon Sep 
17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Fri, 3 Apr 2015 09:26:28 -0400 Subject: [PATCH 087/536] keep None as serie name in case of "system" query.. --- influxdb/resultset.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/resultset.py b/influxdb/resultset.py index 1e4dc650..2840614d 100644 --- a/influxdb/resultset.py +++ b/influxdb/resultset.py @@ -98,7 +98,7 @@ def __getitem__(self, key): # 'list retention' is in this case.. if key is None: for point in serie['values']: - yield Point('$SYSTEM$', serie['columns'], point) + yield Point(None, serie['columns'], point) elif name in (None, serie_name): # by default if no tags was provided then From 9d02c1c59c338ccfad52f5f57f68058903cfda64 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Fri, 3 Apr 2015 09:27:48 -0400 Subject: [PATCH 088/536] docstrings + some better comments + 1 type check --- influxdb/resultset.py | 57 +++++++++++++++++++++++++++---------------- 1 file changed, 36 insertions(+), 21 deletions(-) diff --git a/influxdb/resultset.py b/influxdb/resultset.py index 2840614d..394de607 100644 --- a/influxdb/resultset.py +++ b/influxdb/resultset.py @@ -7,7 +7,9 @@ _sentinel = object() +# could it be a namedtuple .. ?? class NamedValues(object): + def __init__(self, point): self._point = point @@ -27,6 +29,15 @@ def __repr__(self): class Point(object): def __init__(self, serie, columns, values, tags=None): + ''' + + :param serie: The name of the serie in which this point resides. + If None then it's a "system" point/result. + :param columns: The ordered list of the columns. + :param values: The actualy list of values of this point. Same order than columns. + :param tags: The eventual tags (dict) associated with the point. 
+ :return: + ''' assert len(columns) == len(values) self.columns = columns self._point_values = values @@ -38,11 +49,11 @@ def __init__(self, serie, columns, values, tags=None): def __getitem__(self, tag_name): """Indexing a Point return the tag value associated with - the given tag name""" + the given tag name, if it exists""" return self._tags[tag_name] def __iter__(self): - """Iterating over a Point will return its tags names one per one""" + """Iterating over a Point will return its eventual tag names one per one""" return iter(self._tags) def __len__(self): @@ -50,7 +61,7 @@ def __len__(self): return len(self.columns) def __repr__(self): - return 'Point(values=%s, tags=%s)' % ( + return 'Point(values=(%s), tags=%s)' % ( ', '.join('%s=%r' % ( k, getattr(self.values, k)) for k in self.columns), self.tags) @@ -61,23 +72,18 @@ class ResultSet(object): """A wrapper around series results """ def __init__(self, series): - self.raw = series # ['results'] - results = series['results'] - if False: - self.have_tags = ( - results - and 'series' in results[0] - and results[0]['series'] - and 'tags' in results[0]['series'][0] - ) - # self.raw.update(series) # use the free update to set keys + self.raw = series def __getitem__(self, key): ''' :param key: Either a serie name or a 2-tuple(serie_name, tags_dict) - If the given serie name is None then any serie (matching the eventual - given tags) will be given its points one after the other. - :return: A generator yielding `Point`s matching the given key + If the given serie name is None then any serie (matching + the eventual given tags) will be given its points one + after the other. + :return: A generator yielding `Point`s matching the given key. + NB: + The order in which the points are yielded is actually undefined but + it might change.. 
''' if isinstance(key, tuple): if 2 != len(key): @@ -85,17 +91,20 @@ def __getitem__(self, key): name = key[0] tags = key[1] if not isinstance(tags, dict): - raise TypeError('should be a dict') + raise TypeError('tags should be a dict') else: name = key tags = None + if not isinstance(name, (str, type(None))): + raise TypeError('serie_name must be an str or None') for result in self.raw['results']: for serie in result['series']: serie_name = serie.get('name', None) if serie_name is None: - # this is a "system" query or a query which doesn't returned a named "serie" - # 'list retention' is in this case.. + # this is a "system" query or a query which + # doesn't return a name attribute. + # like 'show retention policies' .. if key is None: for point in serie['values']: yield Point(None, serie['columns'], point) @@ -107,7 +116,13 @@ def __getitem__(self, key): serie_tags = serie.get('tags', {}) if tags: serie_matches = False + # if there are some tags requested, + # let's check them: for tag_name, tag_value in tags.items(): + # using _sentinel as I'm not sure that "None" + # could be used, because it could be a valid + # serie_tags value : when a serie has no such tag + # then I think it's set to /null/None/.. TBC.. serie_tag_value = serie_tags.get(tag_name, _sentinel) if serie_tag_value != tag_value: break @@ -125,8 +140,8 @@ def __iter__(self): ''' Iterating a ResultSet will yield one dict instance per serie result. ''' for results in self.raw['results']: - for many_series in results['series']: - yield many_series + for serie in results['series']: + yield serie #def __len__(self): # return len(self.raw) From a93b2178fe55de5c124477b90f3f7d625c1e51b0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Fri, 3 Apr 2015 09:28:00 -0400 Subject: [PATCH 089/536] to be decided .. 
--- influxdb/client.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 989405f7..dc3ae377 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -301,8 +301,11 @@ def get_list_database(self): """ Get the list of databases """ + rsp = self.query("SHOW DATABASES") #['results'][0]['points'] + # TODO: to be decided the format of what we return ;) lrsp = list(rsp) + return [value[0] for value in lrsp[0].get('values', [])] #return rsp['results'][0]['points'] @@ -344,9 +347,13 @@ def get_list_retention_policies(self, database=None): """ Get the list of retention policies """ - return self.query( + rsp = self.query( "SHOW RETENTION POLICIES %s" % (database or self._database) - )['results'][0]['points'] + ) + # TODO: same here + lrsp = list(rsp) + points = list(rsp[None]) + return lrsp def get_list_series(self, database=None): """ From 863a5d4e804783d327f411dd7af83eb0d79a3508 Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 3 Apr 2015 10:46:03 -0400 Subject: [PATCH 090/536] Fixed TestResultSet + Added TestPoint --- influxdb/point.py | 71 +++++++++++++++++++++++++++++++ influxdb/resultset.py | 73 +++----------------------------- tests/influxdb/point_test.py | 38 +++++++++++++++++ tests/influxdb/resultset_test.py | 66 +++++++++++------------------ 4 files changed, 138 insertions(+), 110 deletions(-) create mode 100644 influxdb/point.py create mode 100644 tests/influxdb/point_test.py diff --git a/influxdb/point.py b/influxdb/point.py new file mode 100644 index 00000000..51127d81 --- /dev/null +++ b/influxdb/point.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 -*- + + +# could it be a namedtuple .. ?? 
+class NamedValues(object): + + def __init__(self, point): + self._point = point + + def __getattr__(self, item): + try: + index = self._point.columns.index(item) + except ValueError: + raise AttributeError('Point have no such attribute (%r)' % item) + return self._point._point_values[index] + + def __repr__(self): + return 'Values(%s)' % ', '.join( + '%s=%r' % (k, self._point._point_values[k]) + for k in self._point.columns) + + +class Point(object): + + def __init__(self, serie, columns, values, tags=None): + """ + + :param serie: The name of the serie in which this point resides. + If None then it's a "system" point/result. + :param columns: The ordered list of the columns. + :param values: The actualy list of values of this point. Same order than columns. + :param tags: The eventual tags (dict) associated with the point. + :return: + """ + assert len(columns) == len(values) + self.columns = columns + self._point_values = values + if tags is None: + tags = {} + self.serie = serie + self.tags = tags + self.values = NamedValues(self) + + def __getitem__(self, tag_name): + """Indexing a Point return the tag value associated with + the given tag name, if it exists""" + return self._tags[tag_name] + + def __iter__(self): + """Iterating over a Point will return its eventual tag names one per one""" + return iter(self._tags) + + def __len__(self): + """The len of a Point is its number of columns/values""" + return len(self.columns) + + def __repr__(self): + return 'Point(values=(%s), tags=%s)' % ( + ', '.join('%s=%r' % ( + k, getattr(self.values, k)) for k in self.columns), + self.tags) + + def __eq__(self, other): + return (isinstance(other, self.__class__) + and self.tags == other.tags + and self._point_values == other._point_values + and self.serie == other.serie + and self.columns == other.columns) + + def __ne__(self, other): + return not self.__eq__(other) diff --git a/influxdb/resultset.py b/influxdb/resultset.py index 394de607..ce9f665b 100644 --- 
a/influxdb/resultset.py +++ b/influxdb/resultset.py @@ -1,73 +1,10 @@ # -*- coding: utf-8 -*- -import collections -from collections import namedtuple - +from influxdb.point import Point _sentinel = object() -# could it be a namedtuple .. ?? -class NamedValues(object): - - def __init__(self, point): - self._point = point - - def __getattr__(self, item): - try: - index = self._point.columns.index(item) - except ValueError: - raise AttributeError('Point have no such attribute (%r)' % item) - return self._point._point_values[index] - - def __repr__(self): - return 'Values(%s)' % ', '.join( - '%s=%r' % (k, self._point._point_values[k]) - for k in self._point.columns) - - -class Point(object): - - def __init__(self, serie, columns, values, tags=None): - ''' - - :param serie: The name of the serie in which this point resides. - If None then it's a "system" point/result. - :param columns: The ordered list of the columns. - :param values: The actualy list of values of this point. Same order than columns. - :param tags: The eventual tags (dict) associated with the point. 
- :return: - ''' - assert len(columns) == len(values) - self.columns = columns - self._point_values = values - if tags is None: - tags = {} - self.serie = serie - self.tags = tags - self.values = NamedValues(self) - - def __getitem__(self, tag_name): - """Indexing a Point return the tag value associated with - the given tag name, if it exists""" - return self._tags[tag_name] - - def __iter__(self): - """Iterating over a Point will return its eventual tag names one per one""" - return iter(self._tags) - - def __len__(self): - """The len of a Point is its number of columns/values""" - return len(self.columns) - - def __repr__(self): - return 'Point(values=(%s), tags=%s)' % ( - ', '.join('%s=%r' % ( - k, getattr(self.values, k)) for k in self.columns), - self.tags) - - - class ResultSet(object): """A wrapper around series results """ @@ -75,7 +12,7 @@ def __init__(self, series): self.raw = series def __getitem__(self, key): - ''' + """ :param key: Either a serie name or a 2-tuple(serie_name, tags_dict) If the given serie name is None then any serie (matching the eventual given tags) will be given its points one @@ -84,7 +21,7 @@ def __getitem__(self, key): NB: The order in which the points are yielded is actually undefined but it might change.. - ''' + """ if isinstance(key, tuple): if 2 != len(key): raise TypeError('only 2-tuples allowed') @@ -137,8 +74,8 @@ def __repr__(self): return str(self.raw) def __iter__(self): - ''' Iterating a ResultSet will yield one dict instance per serie result. - ''' + """ Iterating a ResultSet will yield one dict instance per serie result. 
+ """ for results in self.raw['results']: for serie in results['series']: yield serie diff --git a/tests/influxdb/point_test.py b/tests/influxdb/point_test.py new file mode 100644 index 00000000..b50e2174 --- /dev/null +++ b/tests/influxdb/point_test.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- + +import unittest + +from influxdb.point import Point + + +class TestPoint(unittest.TestCase): + + def test_point(self): + point = Point( + "serie_name", + ['col1', 'col2'], + [1, '2'], + tags={ + "SWAG": True, + "ALLO": "BYE" + } + ) + + self.assertEqual(point.columns, ['col1', 'col2']) + self.assertEqual(point.tags, {"SWAG": True, "ALLO": "BYE"}) + self.assertEqual(point.values.col1, 1) + self.assertEqual(point.values.col2, '2') + self.assertEqual( + str(point), + "Point(values=(col1=1, col2='2')," + " tags={'ALLO': 'BYE', 'SWAG': True})" + ) + + def test_point_eq(self): + point1 = Point("serie_name", ['col1', 'col2'], [1, '2'], + tags={"SWAG": True, "ALLO": "BYE"}) + + point2 = Point("serie_name", ['col1', 'col2'], [1, '2'], + tags={"SWAG": True, "ALLO": "BYE"}) + + self.assertEqual(point1, point2) diff --git a/tests/influxdb/resultset_test.py b/tests/influxdb/resultset_test.py index 8f1768e8..f77a5a22 100644 --- a/tests/influxdb/resultset_test.py +++ b/tests/influxdb/resultset_test.py @@ -3,6 +3,7 @@ import unittest from influxdb.resultset import ResultSet +from influxdb.point import Point class TestResultSet(unittest.TestCase): @@ -36,57 +37,38 @@ def setUp(self): self.rs = ResultSet(self.query_response) def test_filter_by_name(self): - self.assertItemsEqual( - self.rs['cpu_load_short'], + self.assertEqual( + list(self.rs['cpu_load_short']), [ - { - "tags": {"host": "server01", "region": "us-west"}, - "points": [ - {"time": "2015-01-29T21:51:28.968422294Z", - "value": 0.64} - ] - }, - { - "tags": {"host": "server02", "region": "us-west"}, - "points": [ - {"time": "2015-01-29T21:51:28.968422294Z", - "value": 0.64} - ] - } + Point("cpu_load_short", ["time", "value"], + 
["2015-01-29T21:51:28.968422294Z", 0.64], + tags={"host": "server01", "region": "us-west"}), + Point("cpu_load_short", ["time", "value"], + ["2015-01-29T21:51:28.968422294Z", 0.64], + tags={"host": "server02", "region": "us-west"}) ] ) def test_filter_by_tags(self): - self.assertItemsEqual( - self.rs[('cpu_load_short', {"host": "server01"})], + self.assertEqual( + list(self.rs[('cpu_load_short', {"host": "server01"})]), [ - { - "tags": {"host": "server01", "region": "us-west"}, - "points": [ - {"time": "2015-01-29T21:51:28.968422294Z", - "value": 0.64} - ] - } + Point( + "cpu_load_short", ["time", "value"], + ["2015-01-29T21:51:28.968422294Z", 0.64], + tags={"host": "server01", "region": "us-west"} + ) ] ) - self.assertItemsEqual( - self.rs[('cpu_load_short', {"region": "us-west"})], + self.assertEqual( + list(self.rs[('cpu_load_short', {"region": "us-west"})]), [ - { - "tags": {"host": "server01", "region": "us-west"}, - "points": [ - {"time": "2015-01-29T21:51:28.968422294Z", - "value": 0.64} - ] - }, - { - "tags": {"host": "server02", "region": "us-west"}, - "points": [ - {"time": "2015-01-29T21:51:28.968422294Z", - "value": 0.64} - ] - } + Point("cpu_load_short", ["time", "value"], + ["2015-01-29T21:51:28.968422294Z", 0.64], + tags={"host": "server01", "region": "us-west"}), + Point("cpu_load_short", ["time", "value"], + ["2015-01-29T21:51:28.968422294Z", 0.64], + tags={"host": "server02", "region": "us-west"}), ] ) - From 922a0a58c550c2672834e373899db57f93983dee Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 3 Apr 2015 11:08:33 -0400 Subject: [PATCH 091/536] ResultSet: added Keys() and items() functions --- influxdb/resultset.py | 90 +++++++++++++++++++------------- tests/influxdb/resultset_test.py | 44 ++++++++++++++++ 2 files changed, 99 insertions(+), 35 deletions(-) diff --git a/influxdb/resultset.py b/influxdb/resultset.py index ce9f665b..e3e3d37c 100644 --- a/influxdb/resultset.py +++ b/influxdb/resultset.py @@ -35,40 +35,23 @@ def __getitem__(self, 
key): if not isinstance(name, (str, type(None))): raise TypeError('serie_name must be an str or None') - for result in self.raw['results']: - for serie in result['series']: - serie_name = serie.get('name', None) - if serie_name is None: - # this is a "system" query or a query which - # doesn't return a name attribute. - # like 'show retention policies' .. - if key is None: - for point in serie['values']: - yield Point(None, serie['columns'], point) + for serie in self._get_series(): + serie_name = serie.get('name', None) + if serie_name is None: + # this is a "system" query or a query which + # doesn't return a name attribute. + # like 'show retention policies' .. + if key is None: + for point in serie['values']: + yield Point(None, serie['columns'], point) - elif name in (None, serie_name): - # by default if no tags was provided then - # we will matches every returned serie - serie_matches = True - serie_tags = serie.get('tags', {}) - if tags: - serie_matches = False - # if there are some tags requested, - # let's check them: - for tag_name, tag_value in tags.items(): - # using _sentinel as I'm not sure that "None" - # could be used, because it could be a valid - # serie_tags value : when a serie has no such tag - # then I think it's set to /null/None/.. TBC.. 
- serie_tag_value = serie_tags.get(tag_name, _sentinel) - if serie_tag_value != tag_value: - break - else: - serie_matches = True - - if serie_matches: - for point in serie['values']: - yield Point(serie_name, serie['columns'], point, serie_tags) + elif name in (None, serie_name): + # by default if no tags was provided then + # we will matches every returned serie + serie_tags = serie.get('tags', {}) + if tags is None or self._tag_matches(serie_tags, tags): + for point in serie['values']: + yield Point(serie_name, serie['columns'], point, serie_tags) def __repr__(self): return str(self.raw) @@ -80,5 +63,42 @@ def __iter__(self): for serie in results['series']: yield serie - #def __len__(self): - # return len(self.raw) + def _tag_matches(self, tags, filter): + """Checks if all key/values in filter match in tags""" + for tag_name, tag_value in filter.items(): + # using _sentinel as I'm not sure that "None" + # could be used, because it could be a valid + # serie_tags value : when a serie has no such tag + # then I think it's set to /null/None/.. TBC.. 
+ serie_tag_value = tags.get(tag_name, _sentinel) + if serie_tag_value != tag_value: + return False + return True + + def _get_series(self): + """Returns all series""" + series = [] + try: + for result in self.raw['results']: + series.extend(result['series']) + except KeyError: + pass + return series + + def __len__(self): + return len(self.keys()) + + def keys(self): + keys = [] + for serie in self._get_series(): + keys.append((serie['name'], serie['tags'])) + return keys + + def items(self): + items = [] + for serie in self._get_series(): + serie_key = (serie['name'], serie['tags']) + items.append( + (serie_key, self.__getitem__(serie_key)) + ) + return items \ No newline at end of file diff --git a/tests/influxdb/resultset_test.py b/tests/influxdb/resultset_test.py index f77a5a22..0a727e8d 100644 --- a/tests/influxdb/resultset_test.py +++ b/tests/influxdb/resultset_test.py @@ -72,3 +72,47 @@ def test_filter_by_tags(self): tags={"host": "server02", "region": "us-west"}), ] ) + + def test_keys(self): + self.assertItemsEqual( + self.rs.keys(), + [ + ('cpu_load_short', {'host': 'server01', 'region': 'us-west'}), + ('cpu_load_short', {'host': 'server02', 'region': 'us-west'}), + ('other_serie', {'host': 'server01', 'region': 'us-west'}) + ] + ) + + def test_len(self): + self.assertEqual( + len(self.rs), + 3 + ) + + def test_items(self): + items = list(self.rs.items()) + items_lists = [(item[0], list(item[1])) for item in items] + + self.assertEqual( + items_lists, + [ + ( + ('cpu_load_short', {'host': 'server01', 'region': 'us-west'}), + [Point("cpu_load_short", ["time", "value"], + ["2015-01-29T21:51:28.968422294Z", 0.64], + tags={"host": "server01", "region": "us-west"})] + ), + ( + ('cpu_load_short', {'host': 'server02', 'region': 'us-west'}), + [Point("cpu_load_short", ["time", "value"], + ["2015-01-29T21:51:28.968422294Z", 0.64], + tags={"host": "server02", "region": "us-west"})] + ), + ( + ('other_serie', {'host': 'server01', 'region': 'us-west'}), + 
[Point("other_serie", ["time", "value"], + ["2015-01-29T21:51:28.968422294Z", 0.64], + tags={"host": "server01", "region": "us-west"})] + ) + ] + ) From 75b6e480a38c4e5d8ed20eb5ee02a532b905a412 Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 3 Apr 2015 11:19:25 -0400 Subject: [PATCH 092/536] ResultSet: added as_dict() --- influxdb/point.py | 6 ++++++ tests/influxdb/point_test.py | 22 ++++++++++++++-------- 2 files changed, 20 insertions(+), 8 deletions(-) diff --git a/influxdb/point.py b/influxdb/point.py index 51127d81..90aac7d4 100644 --- a/influxdb/point.py +++ b/influxdb/point.py @@ -69,3 +69,9 @@ def __eq__(self, other): def __ne__(self, other): return not self.__eq__(other) + + def as_dict(self): + return { + "serie": self.serie, + "point": [{col: getattr(self.values, col)} for col in self.columns] + } diff --git a/tests/influxdb/point_test.py b/tests/influxdb/point_test.py index b50e2174..96ffb9da 100644 --- a/tests/influxdb/point_test.py +++ b/tests/influxdb/point_test.py @@ -6,9 +6,8 @@ class TestPoint(unittest.TestCase): - - def test_point(self): - point = Point( + def setUp(self): + self.point = Point( "serie_name", ['col1', 'col2'], [1, '2'], @@ -18,12 +17,13 @@ def test_point(self): } ) - self.assertEqual(point.columns, ['col1', 'col2']) - self.assertEqual(point.tags, {"SWAG": True, "ALLO": "BYE"}) - self.assertEqual(point.values.col1, 1) - self.assertEqual(point.values.col2, '2') + def test_point(self): + self.assertEqual(self.point.columns, ['col1', 'col2']) + self.assertEqual(self.point.tags, {"SWAG": True, "ALLO": "BYE"}) + self.assertEqual(self.point.values.col1, 1) + self.assertEqual(self.point.values.col2, '2') self.assertEqual( - str(point), + str(self.point), "Point(values=(col1=1, col2='2')," " tags={'ALLO': 'BYE', 'SWAG': True})" ) @@ -36,3 +36,9 @@ def test_point_eq(self): tags={"SWAG": True, "ALLO": "BYE"}) self.assertEqual(point1, point2) + + def test_as_dict(self): + self.assertEqual( + self.point.as_dict(), + {'point': [{'col1': 1}, 
{'col2': '2'}], 'serie': 'serie_name'} + ) From 2bf9e421c7909b4f4a02e1656829a47425c5a826 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Tue, 7 Apr 2015 10:01:58 -0400 Subject: [PATCH 093/536] Fix: serie can have no associated tags --- influxdb/resultset.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/resultset.py b/influxdb/resultset.py index e3e3d37c..8011f80a 100644 --- a/influxdb/resultset.py +++ b/influxdb/resultset.py @@ -91,7 +91,7 @@ def __len__(self): def keys(self): keys = [] for serie in self._get_series(): - keys.append((serie['name'], serie['tags'])) + keys.append((serie['name'], serie.get('tags', None))) return keys def items(self): From c2f22844e4666780a202493e62d05263b08593d1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Tue, 7 Apr 2015 10:15:42 -0400 Subject: [PATCH 094/536] double Fix: serie can miss its 'name' & 'tags' attribute + if a serie is the result of a "system" query: it has no name attribute. + a serie can also have no associated tags. 
--- influxdb/resultset.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/influxdb/resultset.py b/influxdb/resultset.py index 8011f80a..0a6c99b4 100644 --- a/influxdb/resultset.py +++ b/influxdb/resultset.py @@ -91,14 +91,14 @@ def __len__(self): def keys(self): keys = [] for serie in self._get_series(): - keys.append((serie['name'], serie.get('tags', None))) + keys.append((serie.get('name', None), serie.get('tags', None))) return keys def items(self): items = [] for serie in self._get_series(): - serie_key = (serie['name'], serie['tags']) + serie_key = (serie.get('name', None), serie.get('tags', None)) items.append( - (serie_key, self.__getitem__(serie_key)) + (serie_key, self[serie_key]) ) return items \ No newline at end of file From 670fdea1953aa6c7c77c865babf5dd4f4f90f530 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Tue, 7 Apr 2015 10:36:05 -0400 Subject: [PATCH 095/536] client_test: Fixed some things: 1) cli.query() which now returns a ResultSet, which must be list()ed in order to obtain the list of series. 2) assertListEqual instead of assertDictEqual for a list result. TODO: but now we have to fix the results format. 
--- tests/influxdb/client_test.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index 5dc03e3d..14c3aa8d 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -265,12 +265,13 @@ def test_query(self): "http://localhost:8086/query", text=example_response ) - self.assertDictEqual( - self.cli.query('select * from foo'), - {'cpu_load_short': + rs = self.cli.query('select * from foo') + self.assertListEqual( + list(rs), + [{'cpu_load_short': [{'value': 0.64, 'time': '2009-11-10T23:00:00Z'}], 'sdfsdfsdf': - [{'value': 0.64, 'time': '2009-11-10T23:00:00Z'}]} + [{'value': 0.64, 'time': '2009-11-10T23:00:00Z'}]}] ) @unittest.skip('Not implemented for 0.9') From 48be83720ade3efc6d54a0ab66ba3946a522baf6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Tue, 7 Apr 2015 10:36:51 -0400 Subject: [PATCH 096/536] clean --- tests/influxdb/client_test_with_server.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index a54a3125..bad3c0d2 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -287,9 +287,8 @@ def test_fresh_server_no_db(self): def test_create_database(self): self.assertIsNone(self.cli.create_database('new_db_1')) self.assertIsNone(self.cli.create_database('new_db_2')) - rsp = self.cli.get_list_database() self.assertEqual( - rsp, + self.cli.get_list_database(), ['new_db_1', 'new_db_2'] ) From 8ba6ad1705f4a7c4651e3cdccc2afa0931ac4370 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Tue, 7 Apr 2015 10:39:22 -0400 Subject: [PATCH 097/536] workaround see: https://groups.google.com/forum/?hl=fr#!topic/influxdb/ZgwXfh1sqRU https://github.com/influxdb/influxdb/issues/2169 --- tests/influxdb/client_test_with_server.py | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index bad3c0d2..a0d2cb75 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -701,7 +701,7 @@ def test_query_multiple_series(self): ] self.cli.write_points(pts) - rsp = self.cli.query('SELECT * FROM serie1, serie2') + rsp = self.cli.query('SELECT * FROM serie1; SELECT * FROM serie2') print(rsp) # same but with the tags given : From f487e0ea6bfff68d6ede64e2f58caed58296f285 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Tue, 7 Apr 2015 10:40:48 -0400 Subject: [PATCH 098/536] Proposed Fix: expand the ResultSet response by using list() on it. Other solution is to return the ResultSet instance but then we must adapt all call sites.. To be decided.. --- influxdb/client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index dc3ae377..db5bbdfc 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -362,13 +362,13 @@ def get_list_series(self, database=None): rsp = self.query("SHOW SERIES", database=database) print "RSP", rsp.raw print "RSP", rsp['results'] - return rsp + return list(rsp) def get_list_users(self): """ Get the list of users """ - return self.query("SHOW USERS") + return list(self.query("SHOW USERS")) def delete_series(self, name, database=None): database = database or self._database From c226c13e091034b4767b8fbf60b83b2b00de53a7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Wed, 8 Apr 2015 10:30:53 -0400 Subject: [PATCH 099/536] Changed/decided of format for response of "system" queries show series show list retention --- tests/influxdb/client_test_with_server.py | 35 ++++++++++++----------- 1 file changed, 18 insertions(+), 17 deletions(-) diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index a0d2cb75..0961455f 100644 --- 
a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -521,30 +521,34 @@ def test_get_list_series_empty(self): def test_get_list_series_non_empty(self): self.cli.write_points(dummy_point) rsp = self.cli.get_list_series() - self.assertEqual( - {'cpu_load_short': [ - {'region': 'us-west', 'host': 'server01', '_id': 1}]}, - rsp - ) + self.assertEqual([ + { + 'serie_name': 'cpu_load_short', + '_id': 1, + "tags": { + "host": "server01", + "region": "us-west" + } + }], + rsp) def test_default_retention_policy(self): rsp = self.cli.get_list_retention_policies() self.assertEqual( [ - {'duration': '0', 'default': True, - 'replicaN': 1, 'name': 'default'}], + {'name': 'default', 'duration': '0', 'replicaN': 1, 'default': True} + ], rsp ) def test_create_retention_policy_default(self): - rsp = self.cli.create_retention_policy('somename', '1d', 4, - default=True) - self.assertIsNone(rsp) + self.cli.create_retention_policy('somename', '1d', 4, default=True) + self.cli.create_retention_policy('another', '2d', 3, default=False) rsp = self.cli.get_list_retention_policies() self.assertEqual([ - {'columns': ['name', 'duration', 'replicaN', 'default'], - 'values': [['default', '0', 1, False], - ['somename', '24h0m0s', 4, True]]}], + {'name': 'somename', 'duration': '1d', 'replicaN': 4, 'default': True}, + {'name': 'another', 'duration': '2d', 'replicaN': 3, 'default': False} + ], rsp ) @@ -553,10 +557,7 @@ def test_create_retention_policy(self): rsp = self.cli.get_list_retention_policies() self.assertEqual( [ - {'duration': '0', 'default': True, 'replicaN': 1, - 'name': 'default'}, - {'duration': '24h0m0s', 'default': False, 'replicaN': 4, - 'name': 'somename'} + {'name': 'somename', 'duration': '1d', 'replicaN': 4, 'default': False}, ], rsp ) From 141e664b0f9f6f181a6d8dbff1bced85bab85405 Mon Sep 17 00:00:00 2001 From: aviau Date: Wed, 8 Apr 2015 10:34:51 -0400 Subject: [PATCH 100/536] Removed Points object --- influxdb/point.py | 77 
-------------------------------- influxdb/resultset.py | 15 ++++--- tests/influxdb/point_test.py | 44 ------------------ tests/influxdb/resultset_test.py | 47 ++++++++----------- 4 files changed, 28 insertions(+), 155 deletions(-) delete mode 100644 influxdb/point.py delete mode 100644 tests/influxdb/point_test.py diff --git a/influxdb/point.py b/influxdb/point.py deleted file mode 100644 index 90aac7d4..00000000 --- a/influxdb/point.py +++ /dev/null @@ -1,77 +0,0 @@ -# -*- coding: utf-8 -*- - - -# could it be a namedtuple .. ?? -class NamedValues(object): - - def __init__(self, point): - self._point = point - - def __getattr__(self, item): - try: - index = self._point.columns.index(item) - except ValueError: - raise AttributeError('Point have no such attribute (%r)' % item) - return self._point._point_values[index] - - def __repr__(self): - return 'Values(%s)' % ', '.join( - '%s=%r' % (k, self._point._point_values[k]) - for k in self._point.columns) - - -class Point(object): - - def __init__(self, serie, columns, values, tags=None): - """ - - :param serie: The name of the serie in which this point resides. - If None then it's a "system" point/result. - :param columns: The ordered list of the columns. - :param values: The actualy list of values of this point. Same order than columns. - :param tags: The eventual tags (dict) associated with the point. 
- :return: - """ - assert len(columns) == len(values) - self.columns = columns - self._point_values = values - if tags is None: - tags = {} - self.serie = serie - self.tags = tags - self.values = NamedValues(self) - - def __getitem__(self, tag_name): - """Indexing a Point return the tag value associated with - the given tag name, if it exists""" - return self._tags[tag_name] - - def __iter__(self): - """Iterating over a Point will return its eventual tag names one per one""" - return iter(self._tags) - - def __len__(self): - """The len of a Point is its number of columns/values""" - return len(self.columns) - - def __repr__(self): - return 'Point(values=(%s), tags=%s)' % ( - ', '.join('%s=%r' % ( - k, getattr(self.values, k)) for k in self.columns), - self.tags) - - def __eq__(self, other): - return (isinstance(other, self.__class__) - and self.tags == other.tags - and self._point_values == other._point_values - and self.serie == other.serie - and self.columns == other.columns) - - def __ne__(self, other): - return not self.__eq__(other) - - def as_dict(self): - return { - "serie": self.serie, - "point": [{col: getattr(self.values, col)} for col in self.columns] - } diff --git a/influxdb/resultset.py b/influxdb/resultset.py index 0a6c99b4..0d8ce1c3 100644 --- a/influxdb/resultset.py +++ b/influxdb/resultset.py @@ -1,7 +1,5 @@ # -*- coding: utf-8 -*- -from influxdb.point import Point - _sentinel = object() @@ -43,7 +41,7 @@ def __getitem__(self, key): # like 'show retention policies' .. 
if key is None: for point in serie['values']: - yield Point(None, serie['columns'], point) + yield self.point_from_cols_vals(serie['columns'], point) elif name in (None, serie_name): # by default if no tags was provided then @@ -51,7 +49,7 @@ def __getitem__(self, key): serie_tags = serie.get('tags', {}) if tags is None or self._tag_matches(serie_tags, tags): for point in serie['values']: - yield Point(serie_name, serie['columns'], point, serie_tags) + yield self.point_from_cols_vals(serie['columns'], point) def __repr__(self): return str(self.raw) @@ -101,4 +99,11 @@ def items(self): items.append( (serie_key, self[serie_key]) ) - return items \ No newline at end of file + return items + + @staticmethod + def point_from_cols_vals(cols, vals): + point = {} + for col_index, col_name in enumerate(cols): + point[col_name] = vals[col_index] + return point diff --git a/tests/influxdb/point_test.py b/tests/influxdb/point_test.py deleted file mode 100644 index 96ffb9da..00000000 --- a/tests/influxdb/point_test.py +++ /dev/null @@ -1,44 +0,0 @@ -# -*- coding: utf-8 -*- - -import unittest - -from influxdb.point import Point - - -class TestPoint(unittest.TestCase): - def setUp(self): - self.point = Point( - "serie_name", - ['col1', 'col2'], - [1, '2'], - tags={ - "SWAG": True, - "ALLO": "BYE" - } - ) - - def test_point(self): - self.assertEqual(self.point.columns, ['col1', 'col2']) - self.assertEqual(self.point.tags, {"SWAG": True, "ALLO": "BYE"}) - self.assertEqual(self.point.values.col1, 1) - self.assertEqual(self.point.values.col2, '2') - self.assertEqual( - str(self.point), - "Point(values=(col1=1, col2='2')," - " tags={'ALLO': 'BYE', 'SWAG': True})" - ) - - def test_point_eq(self): - point1 = Point("serie_name", ['col1', 'col2'], [1, '2'], - tags={"SWAG": True, "ALLO": "BYE"}) - - point2 = Point("serie_name", ['col1', 'col2'], [1, '2'], - tags={"SWAG": True, "ALLO": "BYE"}) - - self.assertEqual(point1, point2) - - def test_as_dict(self): - self.assertEqual( - 
self.point.as_dict(), - {'point': [{'col1': 1}, {'col2': '2'}], 'serie': 'serie_name'} - ) diff --git a/tests/influxdb/resultset_test.py b/tests/influxdb/resultset_test.py index 0a727e8d..00872f15 100644 --- a/tests/influxdb/resultset_test.py +++ b/tests/influxdb/resultset_test.py @@ -3,7 +3,6 @@ import unittest from influxdb.resultset import ResultSet -from influxdb.point import Point class TestResultSet(unittest.TestCase): @@ -40,36 +39,22 @@ def test_filter_by_name(self): self.assertEqual( list(self.rs['cpu_load_short']), [ - Point("cpu_load_short", ["time", "value"], - ["2015-01-29T21:51:28.968422294Z", 0.64], - tags={"host": "server01", "region": "us-west"}), - Point("cpu_load_short", ["time", "value"], - ["2015-01-29T21:51:28.968422294Z", 0.64], - tags={"host": "server02", "region": "us-west"}) + {'value': 0.64, 'time': '2015-01-29T21:51:28.968422294Z'}, + {'value': 0.64, 'time': '2015-01-29T21:51:28.968422294Z'} ] ) def test_filter_by_tags(self): self.assertEqual( list(self.rs[('cpu_load_short', {"host": "server01"})]), - [ - Point( - "cpu_load_short", ["time", "value"], - ["2015-01-29T21:51:28.968422294Z", 0.64], - tags={"host": "server01", "region": "us-west"} - ) - ] + [{'time': '2015-01-29T21:51:28.968422294Z', 'value': 0.64}] ) self.assertEqual( list(self.rs[('cpu_load_short', {"region": "us-west"})]), [ - Point("cpu_load_short", ["time", "value"], - ["2015-01-29T21:51:28.968422294Z", 0.64], - tags={"host": "server01", "region": "us-west"}), - Point("cpu_load_short", ["time", "value"], - ["2015-01-29T21:51:28.968422294Z", 0.64], - tags={"host": "server02", "region": "us-west"}), + {'value': 0.64, 'time': '2015-01-29T21:51:28.968422294Z'}, + {'value': 0.64, 'time': '2015-01-29T21:51:28.968422294Z'} ] ) @@ -98,21 +83,25 @@ def test_items(self): [ ( ('cpu_load_short', {'host': 'server01', 'region': 'us-west'}), - [Point("cpu_load_short", ["time", "value"], - ["2015-01-29T21:51:28.968422294Z", 0.64], - tags={"host": "server01", "region": "us-west"})] + 
[{'value': 0.64, 'time': '2015-01-29T21:51:28.968422294Z'}] ), ( ('cpu_load_short', {'host': 'server02', 'region': 'us-west'}), - [Point("cpu_load_short", ["time", "value"], - ["2015-01-29T21:51:28.968422294Z", 0.64], - tags={"host": "server02", "region": "us-west"})] + [{'value': 0.64, 'time': '2015-01-29T21:51:28.968422294Z'}] ), ( ('other_serie', {'host': 'server01', 'region': 'us-west'}), - [Point("other_serie", ["time", "value"], - ["2015-01-29T21:51:28.968422294Z", 0.64], - tags={"host": "server01", "region": "us-west"})] + [{'value': 0.64, 'time': '2015-01-29T21:51:28.968422294Z'}] ) ] ) + + def test_point_from_cols_vals(self): + cols = ['col1', 'col2'] + vals = [1, '2'] + + point = ResultSet.point_from_cols_vals(cols, vals) + self.assertDictEqual( + point, + {'col1': 1, 'col2': '2'} + ) From 728a4882b57e668f760ee86062cc7b88cb99cd38 Mon Sep 17 00:00:00 2001 From: aviau Date: Wed, 8 Apr 2015 14:52:13 -0400 Subject: [PATCH 101/536] Finalize ResultSet --- influxdb/client.py | 25 ++- influxdb/resultset.py | 29 +-- tests/influxdb/client_test.py | 26 +-- tests/influxdb/client_test_with_server.py | 226 +++++++--------------- tests/influxdb/resultset_test.py | 47 ++++- 5 files changed, 156 insertions(+), 197 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index db5bbdfc..4438b091 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -301,13 +301,7 @@ def get_list_database(self): """ Get the list of databases """ - - rsp = self.query("SHOW DATABASES") #['results'][0]['points'] - # TODO: to be decided the format of what we return ;) - lrsp = list(rsp) - - return [value[0] for value in lrsp[0].get('values', [])] - #return rsp['results'][0]['points'] + return list(self.query("SHOW DATABASES")['results']) def create_database(self, dbname): """ @@ -350,19 +344,22 @@ def get_list_retention_policies(self, database=None): rsp = self.query( "SHOW RETENTION POLICIES %s" % (database or self._database) ) - # TODO: same here - lrsp = list(rsp) - points = 
list(rsp[None]) - return lrsp + return list(rsp['results']) def get_list_series(self, database=None): """ Get the list of series """ rsp = self.query("SHOW SERIES", database=database) - print "RSP", rsp.raw - print "RSP", rsp['results'] - return list(rsp) + series = [] + for serie in rsp.items(): + series.append( + { + "name": serie[0][0], + "tags": list(serie[1]) + } + ) + return series def get_list_users(self): """ diff --git a/influxdb/resultset.py b/influxdb/resultset.py index 0d8ce1c3..1a0cd0b5 100644 --- a/influxdb/resultset.py +++ b/influxdb/resultset.py @@ -25,31 +25,37 @@ def __getitem__(self, key): raise TypeError('only 2-tuples allowed') name = key[0] tags = key[1] - if not isinstance(tags, dict): + if not isinstance(tags, dict) and tags is not None: raise TypeError('tags should be a dict') else: name = key tags = None - if not isinstance(name, (str, type(None))): + if not isinstance(name, (str, type(None), type(u''))): raise TypeError('serie_name must be an str or None') for serie in self._get_series(): - serie_name = serie.get('name', None) + serie_name = serie.get('name', 'results') if serie_name is None: # this is a "system" query or a query which # doesn't return a name attribute. # like 'show retention policies' .. if key is None: for point in serie['values']: - yield self.point_from_cols_vals(serie['columns'], point) + yield self.point_from_cols_vals( + serie['columns'], + point + ) elif name in (None, serie_name): # by default if no tags was provided then # we will matches every returned serie serie_tags = serie.get('tags', {}) if tags is None or self._tag_matches(serie_tags, tags): - for point in serie['values']: - yield self.point_from_cols_vals(serie['columns'], point) + for point in serie.get('values', []): + yield self.point_from_cols_vals( + serie['columns'], + point + ) def __repr__(self): return str(self.raw) @@ -57,9 +63,8 @@ def __repr__(self): def __iter__(self): """ Iterating a ResultSet will yield one dict instance per serie result. 
""" - for results in self.raw['results']: - for serie in results['series']: - yield serie + for key in self.keys(): + yield list(self.__getitem__(key)) def _tag_matches(self, tags, filter): """Checks if all key/values in filter match in tags""" @@ -89,13 +94,15 @@ def __len__(self): def keys(self): keys = [] for serie in self._get_series(): - keys.append((serie.get('name', None), serie.get('tags', None))) + keys.append( + (serie.get('name', 'results'), serie.get('tags', None)) + ) return keys def items(self): items = [] for serie in self._get_series(): - serie_key = (serie.get('name', None), serie.get('tags', None)) + serie_key = (serie.get('name', 'results'), serie.get('tags', None)) items.append( (serie_key, self[serie_key]) ) diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index 14c3aa8d..c9bfbb26 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -266,12 +266,10 @@ def test_query(self): text=example_response ) rs = self.cli.query('select * from foo') + self.assertListEqual( - list(rs), - [{'cpu_load_short': - [{'value': 0.64, 'time': '2009-11-10T23:00:00Z'}], - 'sdfsdfsdf': - [{'value': 0.64, 'time': '2009-11-10T23:00:00Z'}]}] + list(rs['cpu_load_short']), + [{'value': 0.64, 'time': u'2009-11-10T23:00:00Z'}] ) @unittest.skip('Not implemented for 0.9') @@ -350,14 +348,18 @@ def test_drop_database_fails(self): cli.drop_database('old_db') def test_get_list_database(self): - data = {'results': [{'series': [ - {'name': 'databases', 'columns': ['name'], - 'values': [['mydb'], ['myotherdb']]}]}]} + data = {'results': [ + {'series': [ + {'values': [ + ['new_db_1'], + ['new_db_2']], + 'columns': ['name']}]} + ]} with _mocked_session(self.cli, 'get', 200, json.dumps(data)): self.assertListEqual( self.cli.get_list_database(), - ['mydb', 'myotherdb'] + [{'name': 'new_db_1'}, {'name': 'new_db_2'}] ) @raises(Exception) @@ -381,8 +383,10 @@ def test_get_list_series(self): self.assertListEqual( 
self.cli.get_list_series(), - [{'duration': '24h0m0s', - 'name': 'fsfdsdf', 'replicaN': 2}] + [{'name': 'cpu_load_short', + 'tags': [ + {'host': 'server01', '_id': 1, 'region': u'us-west'} + ]}] ) def test_create_retention_policy_default(self): diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index 0961455f..fc016398 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -11,12 +11,9 @@ """ from __future__ import print_function -import random -from collections import OrderedDict import datetime import distutils.spawn from functools import partial -import itertools import os import re import shutil @@ -289,7 +286,7 @@ def test_create_database(self): self.assertIsNone(self.cli.create_database('new_db_2')) self.assertEqual( self.cli.get_list_database(), - ['new_db_1', 'new_db_2'] + [{'name': 'new_db_1'}, {'name': 'new_db_2'}] ) def test_create_database_fails(self): @@ -303,7 +300,7 @@ def test_create_database_fails(self): def test_drop_database(self): self.test_create_database() self.assertIsNone(self.cli.drop_database('new_db_1')) - self.assertEqual(['new_db_2'], self.cli.get_list_database()) + self.assertEqual([{'name': 'new_db_2'}], self.cli.get_list_database()) def test_drop_database_fails(self): with self.assertRaises(InfluxDBClientError) as ctx: @@ -352,28 +349,27 @@ def test_write_check_read(self): ) def test_write_points(self): - ''' same as test_write() but with write_points \o/ ''' + """ same as test_write() but with write_points \o/ """ self.assertIs(True, self.cli.write_points(dummy_point)) def test_write_points_check_read(self): - ''' same as test_write_check_read() but with write_points \o/ ''' + """ same as test_write_check_read() but with write_points \o/ """ self.test_write_points() time.sleep(1) # same as test_write_check_read() rsp = self.cli.query('SELECT * FROM cpu_load_short') + self.assertEqual( - [{'values': [['2009-11-10T23:00:00Z', 0.64]], - 
'name': 'cpu_load_short', - 'columns': ['time', 'value']}], - list(rsp) - ) + list(rsp), + [[{'value': 0.64, 'time': '2009-11-10T23:00:00Z'}]] + ) rsp2 = list(rsp['cpu_load_short']) self.assertEqual(len(rsp2), 1) pt = rsp2[0] self.assertEqual( - ['cpu_load_short', ['time', 'value'], {}, ['2009-11-10T23:00:00Z', 0.64]], - [pt.serie, pt.columns, pt.tags, [pt.values.time, pt.values.value]] + pt, + {'time': '2009-11-10T23:00:00Z', 'value': 0.64} ) def test_write_multiple_points_different_series(self): @@ -381,18 +377,18 @@ def test_write_multiple_points_different_series(self): time.sleep(1) rsp = self.cli.query('SELECT * FROM cpu_load_short') lrsp = list(rsp) + self.assertEqual( - [{'values': [['2009-11-10T23:00:00Z', 0.64]], - 'name': 'cpu_load_short', - 'columns': ['time', 'value']}], - lrsp) + [[{'value': 0.64, 'time': '2009-11-10T23:00:00Z'}]], + lrsp + ) + rsp = list(self.cli.query('SELECT * FROM memory')) + self.assertEqual( - [{ - 'values': [['2009-11-10T23:01:35Z', 33]], - 'name': 'memory', 'columns': ['time', 'value']}], - rsp - ) + rsp, + [[{'value': 33, 'time': '2009-11-10T23:01:35Z'}]] + ) @unittest.skip('Not implemented for 0.9') def test_write_points_batch(self): @@ -437,7 +433,6 @@ def test_write_points_with_precision(self): ('u', base_s_regex + '\.\d{6}Z', 1), ('ms', base_s_regex + '\.\d{3}Z', 1), ('s', base_s_regex + 'Z', 1), - ('m', base_regex + '\d{2}:00Z', 60), # ('h', base_regex + '00:00Z', ), # that would require a sleep of possibly up to 3600 secs (/ 2 ?).. 
@@ -475,17 +470,20 @@ def test_write_points_with_precision(self): else: pass # sys.stderr.write('ok !\n') - sleep_time = 0 + + # sys.stderr.write('sleeping %s..\n' % sleep_time) if sleep_time: - # sys.stderr.write('sleeping %s..\n' % sleep_time) time.sleep(sleep_time) rsp = self.cli.query('SELECT * FROM cpu_load_short', database=db) - rsp = list(rsp)[0] # sys.stderr.write('precision=%s rsp_timestamp = %r\n' % ( # precision, rsp['cpu_load_short'][0]['time'])) - m = re.match(expected_regex, rsp['cpu_load_short'][0]['time']) + + m = re.match( + expected_regex, + list(rsp['cpu_load_short'])[0]['time'] + ) self.assertIsNotNone(m) self.cli.drop_database(db) @@ -521,22 +519,24 @@ def test_get_list_series_empty(self): def test_get_list_series_non_empty(self): self.cli.write_points(dummy_point) rsp = self.cli.get_list_series() - self.assertEqual([ - { - 'serie_name': 'cpu_load_short', - '_id': 1, - "tags": { - "host": "server01", - "region": "us-west" - } - }], - rsp) + + self.assertEqual( + [ + {'name': 'cpu_load_short', + 'tags': [{u'host': u'server01', u'_id': 1, + u'region': u'us-west'}]} + ], + rsp + ) def test_default_retention_policy(self): rsp = self.cli.get_list_retention_policies() self.assertEqual( [ - {'name': 'default', 'duration': '0', 'replicaN': 1, 'default': True} + {'name': 'default', + 'duration': '0', + 'replicaN': 1, + 'default': True} ], rsp ) @@ -545,10 +545,14 @@ def test_create_retention_policy_default(self): self.cli.create_retention_policy('somename', '1d', 4, default=True) self.cli.create_retention_policy('another', '2d', 3, default=False) rsp = self.cli.get_list_retention_policies() - self.assertEqual([ - {'name': 'somename', 'duration': '1d', 'replicaN': 4, 'default': True}, - {'name': 'another', 'duration': '2d', 'replicaN': 3, 'default': False} - ], + + self.assertEqual( + [{'duration': '48h0m0s', 'default': False, + 'replicaN': 3, 'name': 'another'}, + {'duration': '0', 'default': False, + 'replicaN': 1, 'name': 'default'}, + {'duration': 
'24h0m0s', 'default': True, + 'replicaN': 4, 'name': 'somename'}], rsp ) @@ -556,9 +560,10 @@ def test_create_retention_policy(self): self.cli.create_retention_policy('somename', '1d', 4) rsp = self.cli.get_list_retention_policies() self.assertEqual( - [ - {'name': 'somename', 'duration': '1d', 'replicaN': 4, 'default': False}, - ], + [{'duration': '0', 'default': True, + 'replicaN': 1, 'name': 'default'}, + {'duration': '24h0m0s', 'default': False, + 'replicaN': 4, 'name': 'somename'}], rsp ) @@ -572,18 +577,13 @@ def test_issue_143(self): self.cli.write_points(pts) time.sleep(1) rsp = list(self.cli.query('SELECT * FROM a_serie_name GROUP BY tag_1')) - # print(rsp, file=sys.stderr) - - self.assertEqual([ - {'name': 'a_serie_name', 'columns': ['time', 'value'], - 'values': [['2015-03-30T16:16:37Z', 15]], - 'tags': {'tag_1': ''}}, - {'name': 'a_serie_name', 'columns': ['time', 'value'], - 'values': [['2015-03-30T16:16:37Z', 5]], - 'tags': {'tag_1': 'value1'}}, - {'name': 'a_serie_name', 'columns': ['time', 'value'], - 'values': [['2015-03-30T16:16:37Z', 10]], - 'tags': {'tag_1': 'value2'}}], + + self.assertEqual( + [ + [{'value': 15, 'time': '2015-03-30T16:16:37Z'}], + [{'value': 5, 'time': '2015-03-30T16:16:37Z'}], + [{'value': 10, 'time': '2015-03-30T16:16:37Z'}] + ], rsp ) @@ -597,119 +597,38 @@ def test_issue_143(self): self.cli.write_points(pts) time.sleep(1) rsp = self.cli.query('SELECT * FROM serie2 GROUP BY tag1,tag2') - # print(rsp, file=sys.stderr) - self.assertEqual([ - {'name': 'serie2', 'columns': ['time', 'value'], - 'values': [['2015-03-30T16:16:37Z', 0]], - 'tags': {'tag2': 'v1', 'tag1': 'value1'}}, - {'name': 'serie2', 'columns': ['time', 'value'], - 'values': [['2015-03-30T16:16:37Z', 5]], - 'tags': {'tag2': 'v2', 'tag1': 'value1'}}, - {'name': 'serie2', 'columns': ['time', 'value'], - 'values': [['2015-03-30T16:16:37Z', 10]], - 'tags': {'tag2': 'v1', 'tag1': 'value2'}}], - list(rsp) - ) - d = all_tag2_equal_v1 = list(rsp[None, {'tag2': 'v1'}]) 
self.assertEqual( [ - 2, - ['time', 'value'], - {'tag2': 'v1', 'tag1': 'value1'}, - ['2015-03-30T16:16:37Z', 0], - {'tag2': 'v1', 'tag1': 'value2'}, - ['2015-03-30T16:16:37Z', 10] + [{'value': 0, 'time': '2015-03-30T16:16:37Z'}], + [{'value': 5, 'time': '2015-03-30T16:16:37Z'}], + [{'value': 10, 'time': '2015-03-30T16:16:37Z'}] ], - [ - len(d), - d[0].columns, - d[0].tags, [d[0].values.time, d[0].values.value], - d[1].tags, [d[1].values.time, d[1].values.value] - ] + list(rsp) ) + all_tag2_equal_v1 = list(rsp[None, {'tag2': 'v1'}]) - def test_tags_json_order(self): - n_pts = 100 - n_tags = 5 # that will make 120 possible orders (fact(5) == 120) - all_tags = ['tag%s' % i for i in range(n_tags)] - n_tags_values = 1 + n_tags // 3 - all_tags_values = ['value%s' % random.randint(0, i) - for i in range(n_tags_values)] - pt = partial(point, 'serie', timestamp='2015-03-30T16:16:37Z') - pts = [ - pt(value=random.randint(0, 100)) - for _ in range(n_pts) - ] - for pt in pts: - tags = pt['tags'] = {} - for tag in all_tags: - tags[tag] = random.choice(all_tags_values) - - self.cli.write_points(pts) - time.sleep(1) - - # Influxd, when queried with a "group by tag1(, tag2, ..)" and as far - # as we currently see, always returns the tags (alphabetically-) - # ordered by their name in the json response.. - # That might not always be the case so here we will also be - # asserting that behavior. 
- expected_ordered_tags = tuple(sorted(all_tags)) - - # try all the possible orders of tags for the group by in the query: - for tags in itertools.permutations(all_tags): - query = ('SELECT * FROM serie ' - 'GROUP BY %s' % ','.join(tags)) - rsp = self.cli.query(query) - # and verify that, for each "serie_key" in the response, - # the tags names are ordered as we expect it: - for serie_key in rsp: - # first also asserts that the serie key is a 2-tuple: - self.assertTrue(isinstance(serie_key, tuple)) - self.assertEqual(2, len(serie_key)) - # also assert that the first component is an str instance: - self.assertIsInstance(serie_key[0], type(b''.decode())) - self.assertIsInstance(serie_key[1], tuple) - # also assert that the number of items in the second component - # is the number of tags requested in the group by actually, - # and that each one has correct format/type/.. - self.assertEqual(n_tags, len(serie_key[1])) - for tag_data in serie_key[1]: - self.assertIsInstance(tag_data, tuple) - self.assertEqual(2, len(tag_data)) - tag_name = tag_data[0] - self.assertIsInstance(tag_name, type(b''.decode())) - # then check the tags order: - rsp_tags = tuple(t[0] for t in serie_key[1]) - self.assertEqual(expected_ordered_tags, rsp_tags) - + self.assertEqual( + [{'value': 0, 'time': '2015-03-30T16:16:37Z'}, + {'value': 10, 'time': '2015-03-30T16:16:37Z'}], + all_tag2_equal_v1, + ) def test_query_multiple_series(self): pt = partial(point, 'serie1', timestamp='2015-03-30T16:16:37Z') pts = [ pt(tags={'tag1': 'value1', 'tag2': 'v1'}, value=0), - #pt(tags={'tag1': 'value1', 'tag2': 'v2'}, value=5), - #pt(tags={'tag1': 'value2', 'tag2': 'v1'}, value=10), ] self.cli.write_points(pts) pt = partial(point, 'serie2', timestamp='1970-03-30T16:16:37Z') pts = [ - pt(tags={'tag1': 'value1', 'tag2': 'v1'}, value=0, data1=33, data2="bla"), - #pt(tags={'tag1': 'value1', 'tag2': 'v2'}, value=5), - #pt(tags={'tag1': 'value2', 'tag2': 'v3'}, value=10), # data2="what"), + pt(tags={'tag1': 
'value1', 'tag2': 'v1'}, + value=0, data1=33, data2="bla"), ] self.cli.write_points(pts) - rsp = self.cli.query('SELECT * FROM serie1; SELECT * FROM serie2') - print(rsp) - - # same but with the tags given : - #rsp = self.cli.query('SELECT * FROM serie1, serie2 GROUP BY *') - print(rsp) - - ############################################################################ @@ -738,7 +657,6 @@ def test_write_points_udp(self): self.assertEqual( # this is dummy_points : - {'cpu_load_short': [ - {'value': 0.64, 'time': '2009-11-10T23:00:00Z'}]}, - rsp + [{'value': 0.64, 'time': '2009-11-10T23:00:00Z'}], + list(rsp['cpu_load_short']) ) diff --git a/tests/influxdb/resultset_test.py b/tests/influxdb/resultset_test.py index 00872f15..7c7b1650 100644 --- a/tests/influxdb/resultset_test.py +++ b/tests/influxdb/resultset_test.py @@ -16,21 +16,21 @@ def setUp(self): "columns": ["time", "value"], "values": [ ["2015-01-29T21:51:28.968422294Z", 0.64] - ]}, + ]}, {"name": "cpu_load_short", "tags": {"host": "server02", "region": "us-west"}, "columns": ["time", "value"], "values": [ ["2015-01-29T21:51:28.968422294Z", 0.64] - ]}, + ]}, {"name": "other_serie", "tags": {"host": "server01", "region": "us-west"}, "columns": ["time", "value"], "values": [ ["2015-01-29T21:51:28.968422294Z", 0.64] - ]}]} + ]}]} ] } self.rs = ResultSet(self.query_response) @@ -59,7 +59,7 @@ def test_filter_by_tags(self): ) def test_keys(self): - self.assertItemsEqual( + self.assertEqual( self.rs.keys(), [ ('cpu_load_short', {'host': 'server01', 'region': 'us-west'}), @@ -82,15 +82,18 @@ def test_items(self): items_lists, [ ( - ('cpu_load_short', {'host': 'server01', 'region': 'us-west'}), + ('cpu_load_short', + {'host': 'server01', 'region': 'us-west'}), [{'value': 0.64, 'time': '2015-01-29T21:51:28.968422294Z'}] ), ( - ('cpu_load_short', {'host': 'server02', 'region': 'us-west'}), + ('cpu_load_short', + {'host': 'server02', 'region': 'us-west'}), [{'value': 0.64, 'time': '2015-01-29T21:51:28.968422294Z'}] ), ( - 
('other_serie', {'host': 'server01', 'region': 'us-west'}), + ('other_serie', + {'host': 'server01', 'region': 'us-west'}), [{'value': 0.64, 'time': '2015-01-29T21:51:28.968422294Z'}] ) ] @@ -105,3 +108,33 @@ def test_point_from_cols_vals(self): point, {'col1': 1, 'col2': '2'} ) + + def test_system_query(self): + rs = ResultSet( + {u'results': [ + {u'series': [ + {u'values': [[u'another', u'48h0m0s', 3, False], + [u'default', u'0', 1, False], + [u'somename', u'24h0m0s', 4, True]], + u'columns': [u'name', u'duration', + u'replicaN', u'default']}]} + ] + } + ) + + self.assertEqual( + rs.keys(), + [('results', None)] + ) + + self.assertEqual( + list(rs['results']), + [ + {'duration': u'48h0m0s', u'default': False, u'replicaN': 3, + u'name': u'another'}, + {u'duration': u'0', u'default': False, u'replicaN': 1, + u'name': u'default'}, + {u'duration': u'24h0m0s', u'default': True, u'replicaN': 4, + u'name': u'somename'} + ] + ) From 2402662d6f7e2cc9467d988f2f4d6f389c04c42b Mon Sep 17 00:00:00 2001 From: aviau Date: Wed, 8 Apr 2015 16:03:51 -0400 Subject: [PATCH 102/536] SHOW DATABASES now returns a databases serie --- influxdb/client.py | 2 +- tests/influxdb/client_test.py | 7 ++++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 4438b091..da14887d 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -301,7 +301,7 @@ def get_list_database(self): """ Get the list of databases """ - return list(self.query("SHOW DATABASES")['results']) + return list(self.query("SHOW DATABASES")['databases']) def create_database(self, dbname): """ diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index c9bfbb26..f2cca2f0 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -350,9 +350,10 @@ def test_drop_database_fails(self): def test_get_list_database(self): data = {'results': [ {'series': [ - {'values': [ - ['new_db_1'], - ['new_db_2']], + {'name': 'databases', + 
'values': [ + ['new_db_1'], + ['new_db_2']], 'columns': ['name']}]} ]} From f32c8fad186368453bf43f7504d7ac92df944b10 Mon Sep 17 00:00:00 2001 From: Christopher Rabotin Date: Wed, 8 Apr 2015 23:25:18 +0100 Subject: [PATCH 103/536] Adds kwargs to from_DSN. --- influxdb/client.py | 15 ++++++++++----- influxdb/influxdb08/client.py | 15 ++++++++++----- tests/influxdb/client_test.py | 3 +++ tests/influxdb/influxdb08/client_test.py | 3 +++ 4 files changed, 26 insertions(+), 10 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 6670b93a..05d5a262 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -108,16 +108,19 @@ def __init__(self, 'Accept': 'text/plain'} @staticmethod - def from_DSN(dsn): + def from_DSN(dsn, **kwargs): """ Returns an instance of InfluxDBClient from the provided data source name. :param dsn: data source name :type dsn: string + :param **kwargs: additional parameters for InfluxDBClient. + :type **kwargs: dict + :note: parameters provided in **kwargs may override dsn parameters. :raise ValueError: if the provided DSN has any unexpected value. 
""" dsn = dsn.lower() - + init_args = {} conn_params = urlparse(dsn) scheme_info = conn_params.scheme.split('+') @@ -126,7 +129,7 @@ def from_DSN(dsn): modifier = None else: modifier, scheme = scheme_info - + if scheme != 'influxdb': raise ValueError('Unknown scheme "{}".'.format(scheme)) if modifier: @@ -136,7 +139,7 @@ def from_DSN(dsn): init_args['ssl'] = True else: raise ValueError('Unknown scheme modifier "{}".'.format(modifier)) - + if conn_params.hostname: init_args['host'] = conn_params.hostname if conn_params.port: @@ -147,7 +150,9 @@ def from_DSN(dsn): init_args['password'] = conn_params.password if conn_params.path and len(conn_params.path) > 1: init_args['database'] = conn_params.path[1:] - + + init_args.update(kwargs) + return InfluxDBClient(**init_args) # diff --git a/influxdb/influxdb08/client.py b/influxdb/influxdb08/client.py index c24ce30e..11e20355 100644 --- a/influxdb/influxdb08/client.py +++ b/influxdb/influxdb08/client.py @@ -107,16 +107,19 @@ def __init__(self, 'Accept': 'text/plain'} @staticmethod - def from_DSN(dsn): + def from_DSN(dsn, **kwargs): """ Returns an instance of InfluxDBClient from the provided data source name. :param dsn: data source name :type dsn: string + :param **kwargs: additional parameters for InfluxDBClient. + :type **kwargs: dict + :note: parameters provided in **kwargs may override dsn parameters. :raise ValueError: if the provided DSN has any unexpected value. 
""" dsn = dsn.lower() - + init_args = {} conn_params = urlparse(dsn) scheme_info = conn_params.scheme.split('+') @@ -125,7 +128,7 @@ def from_DSN(dsn): modifier = None else: modifier, scheme = scheme_info - + if scheme != 'influxdb': raise ValueError('Unknown scheme "{}".'.format(scheme)) if modifier: @@ -135,7 +138,7 @@ def from_DSN(dsn): init_args['ssl'] = True else: raise ValueError('Unknown scheme modifier "{}".'.format(modifier)) - + if conn_params.hostname: init_args['host'] = conn_params.hostname if conn_params.port: @@ -146,7 +149,9 @@ def from_DSN(dsn): init_args['password'] = conn_params.password if conn_params.path and len(conn_params.path) > 1: init_args['database'] = conn_params.path[1:] - + + init_args.update(kwargs) + return InfluxDBClient(**init_args) # Change member variables diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index 4388b214..19269658 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -117,6 +117,9 @@ def test_dsn(self): cli = InfluxDBClient.from_DSN('https+influxdb://usr:pwd@host:1886/db') assert cli._baseurl == 'https://host:1886' + cli = InfluxDBClient.from_DSN('https+influxdb://usr:pwd@host:1886/db', **{'ssl': False}) + assert cli._baseurl == 'http://host:1886' + def test_switch_database(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'database') cli.switch_database('another_database') diff --git a/tests/influxdb/influxdb08/client_test.py b/tests/influxdb/influxdb08/client_test.py index 468ba86b..e8992eca 100644 --- a/tests/influxdb/influxdb08/client_test.py +++ b/tests/influxdb/influxdb08/client_test.py @@ -102,6 +102,9 @@ def test_dsn(self): cli = InfluxDBClient.from_DSN('https+influxdb://usr:pwd@host:1886/db') assert cli._baseurl == 'https://host:1886' + cli = InfluxDBClient.from_DSN('https+influxdb://usr:pwd@host:1886/db', **{'ssl': False}) + assert cli._baseurl == 'http://host:1886' + def test_switch_database(self): cli = InfluxDBClient('host', 8086, 
'username', 'password', 'database') cli.switch_database('another_database') From b9430f9ad6369cab8561fef392cfaebae835335b Mon Sep 17 00:00:00 2001 From: aviau Date: Wed, 8 Apr 2015 19:00:43 -0400 Subject: [PATCH 104/536] Python3.2 fixes --- influxdb/resultset.py | 6 ++++-- tests/influxdb/client_test.py | 4 ++-- tests/influxdb/client_test_with_server.py | 4 ++-- tests/influxdb/resultset_test.py | 26 +++++++++++------------ 4 files changed, 21 insertions(+), 19 deletions(-) diff --git a/influxdb/resultset.py b/influxdb/resultset.py index 1a0cd0b5..9be87020 100644 --- a/influxdb/resultset.py +++ b/influxdb/resultset.py @@ -30,8 +30,10 @@ def __getitem__(self, key): else: name = key tags = None - if not isinstance(name, (str, type(None), type(u''))): - raise TypeError('serie_name must be an str or None') + # TODO(aviau): Fix for python 3.2 + # if not isinstance(name, (str, bytes, type(None))) \ + # and not isinstance(name, type("".decode("utf-8"))): + # raise TypeError('serie_name must be an str or None') for serie in self._get_series(): serie_name = serie.get('name', 'results') diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index f2cca2f0..9bb8bec8 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -269,7 +269,7 @@ def test_query(self): self.assertListEqual( list(rs['cpu_load_short']), - [{'value': 0.64, 'time': u'2009-11-10T23:00:00Z'}] + [{'value': 0.64, 'time': '2009-11-10T23:00:00Z'}] ) @unittest.skip('Not implemented for 0.9') @@ -386,7 +386,7 @@ def test_get_list_series(self): self.cli.get_list_series(), [{'name': 'cpu_load_short', 'tags': [ - {'host': 'server01', '_id': 1, 'region': u'us-west'} + {'host': 'server01', '_id': 1, 'region': 'us-west'} ]}] ) diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index fc016398..6bdbb4af 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -523,8 +523,8 @@ def 
test_get_list_series_non_empty(self): self.assertEqual( [ {'name': 'cpu_load_short', - 'tags': [{u'host': u'server01', u'_id': 1, - u'region': u'us-west'}]} + 'tags': [{'host': 'server01', '_id': 1, + 'region': 'us-west'}]} ], rsp ) diff --git a/tests/influxdb/resultset_test.py b/tests/influxdb/resultset_test.py index 7c7b1650..0f047c8b 100644 --- a/tests/influxdb/resultset_test.py +++ b/tests/influxdb/resultset_test.py @@ -111,13 +111,13 @@ def test_point_from_cols_vals(self): def test_system_query(self): rs = ResultSet( - {u'results': [ - {u'series': [ - {u'values': [[u'another', u'48h0m0s', 3, False], - [u'default', u'0', 1, False], - [u'somename', u'24h0m0s', 4, True]], - u'columns': [u'name', u'duration', - u'replicaN', u'default']}]} + {'results': [ + {'series': [ + {'values': [['another', '48h0m0s', 3, False], + ['default', '0', 1, False], + ['somename', '24h0m0s', 4, True]], + 'columns': ['name', 'duration', + 'replicaN', 'default']}]} ] } ) @@ -130,11 +130,11 @@ def test_system_query(self): self.assertEqual( list(rs['results']), [ - {'duration': u'48h0m0s', u'default': False, u'replicaN': 3, - u'name': u'another'}, - {u'duration': u'0', u'default': False, u'replicaN': 1, - u'name': u'default'}, - {u'duration': u'24h0m0s', u'default': True, u'replicaN': 4, - u'name': u'somename'} + {'duration': '48h0m0s', 'default': False, 'replicaN': 3, + 'name': 'another'}, + {'duration': '0', 'default': False, 'replicaN': 1, + 'name': 'default'}, + {'duration': '24h0m0s', 'default': True, 'replicaN': 4, + 'name': 'somename'} ] ) From 2130d0f6579392b3d4fd4d2ac90f493659a72892 Mon Sep 17 00:00:00 2001 From: Christopher Rabotin Date: Thu, 9 Apr 2015 11:16:11 +0100 Subject: [PATCH 105/536] Fixes for flake8 complains. 
--- influxdb/client.py | 4 ++-- influxdb/influxdb08/client.py | 4 ++-- tests/influxdb/client_test.py | 7 ++++--- tests/influxdb/influxdb08/client_test.py | 7 ++++--- 4 files changed, 12 insertions(+), 10 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 05d5a262..888426c1 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -110,7 +110,7 @@ def __init__(self, @staticmethod def from_DSN(dsn, **kwargs): """ - Returns an instance of InfluxDBClient from the provided data source + Returns an instance of InfluxDBClient from the provided data source name. :param dsn: data source name :type dsn: string @@ -138,7 +138,7 @@ def from_DSN(dsn, **kwargs): elif modifier == 'https': init_args['ssl'] = True else: - raise ValueError('Unknown scheme modifier "{}".'.format(modifier)) + raise ValueError('Unknown modifier "{}".'.format(modifier)) if conn_params.hostname: init_args['host'] = conn_params.hostname diff --git a/influxdb/influxdb08/client.py b/influxdb/influxdb08/client.py index 11e20355..03bf0196 100644 --- a/influxdb/influxdb08/client.py +++ b/influxdb/influxdb08/client.py @@ -109,7 +109,7 @@ def __init__(self, @staticmethod def from_DSN(dsn, **kwargs): """ - Returns an instance of InfluxDBClient from the provided data source + Returns an instance of InfluxDBClient from the provided data source name. 
:param dsn: data source name :type dsn: string @@ -137,7 +137,7 @@ def from_DSN(dsn, **kwargs): elif modifier == 'https': init_args['ssl'] = True else: - raise ValueError('Unknown scheme modifier "{}".'.format(modifier)) + raise ValueError('Unknown modifier "{}".'.format(modifier)) if conn_params.hostname: init_args['host'] = conn_params.hostname diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index 19269658..2dcbfa96 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -109,15 +109,16 @@ def test_dsn(self): assert cli._username == 'usr' assert cli._password == 'pwd' assert cli._database == 'db' - assert cli.use_udp == False + assert cli.use_udp is False cli = InfluxDBClient.from_DSN('udp+influxdb://usr:pwd@host:1886/db') - assert cli.use_udp == True + assert cli.use_udp is True cli = InfluxDBClient.from_DSN('https+influxdb://usr:pwd@host:1886/db') assert cli._baseurl == 'https://host:1886' - cli = InfluxDBClient.from_DSN('https+influxdb://usr:pwd@host:1886/db', **{'ssl': False}) + cli = InfluxDBClient.from_DSN('https+influxdb://usr:pwd@host:1886/db', + **{'ssl': False}) assert cli._baseurl == 'http://host:1886' def test_switch_database(self): diff --git a/tests/influxdb/influxdb08/client_test.py b/tests/influxdb/influxdb08/client_test.py index e8992eca..010de673 100644 --- a/tests/influxdb/influxdb08/client_test.py +++ b/tests/influxdb/influxdb08/client_test.py @@ -94,15 +94,16 @@ def test_dsn(self): assert cli._username == 'usr' assert cli._password == 'pwd' assert cli._database == 'db' - assert cli.use_udp == False + assert cli.use_udp is False cli = InfluxDBClient.from_DSN('udp+influxdb://usr:pwd@host:1886/db') - assert cli.use_udp == True + assert cli.use_udp is True cli = InfluxDBClient.from_DSN('https+influxdb://usr:pwd@host:1886/db') assert cli._baseurl == 'https://host:1886' - cli = InfluxDBClient.from_DSN('https+influxdb://usr:pwd@host:1886/db', **{'ssl': False}) + cli = 
InfluxDBClient.from_DSN('https+influxdb://usr:pwd@host:1886/db', + **{'ssl': False}) assert cli._baseurl == 'http://host:1886' def test_switch_database(self): From 3ab875aa70b117208746d3b21f76ae038e7cf657 Mon Sep 17 00:00:00 2001 From: aviau Date: Thu, 9 Apr 2015 09:13:55 -0400 Subject: [PATCH 106/536] Allow top-level tags with write_points --- influxdb/client.py | 10 ++++++++-- tests/influxdb/client_test.py | 24 ++++++++++++++++++++++++ 2 files changed, 32 insertions(+), 2 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index da14887d..da1921c9 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -243,6 +243,7 @@ def write_points(self, time_precision=None, database=None, retention_policy=None, + tags=None, ): """ Write to multiple time series names. @@ -258,13 +259,15 @@ def write_points(self, return self._write_points(points=points, time_precision=time_precision, database=database, - retention_policy=retention_policy) + retention_policy=retention_policy, + tags=tags) def _write_points(self, points, time_precision, database, - retention_policy): + retention_policy, + tags): if time_precision not in ['n', 'u', 'ms', 's', 'm', 'h', None]: raise ValueError( "Invalid time precision is given. 
" @@ -285,6 +288,9 @@ def _write_points(self, if retention_policy: data['retentionPolicy'] = retention_policy + if tags: + data['tags'] = tags + data['database'] = database or self._database if self.use_udp: diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index 9bb8bec8..5579d212 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -161,6 +161,30 @@ def test_write_points(self): json.loads(m.last_request.body) ) + def test_write_points_toplevel_attributes(self): + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.POST, + "http://localhost:8086/write" + ) + + cli = InfluxDBClient(database='db') + cli.write_points( + self.dummy_points, + database='testdb', + tags={"tag": "hello"}, + retention_policy="somepolicy" + ) + self.assertDictEqual( + { + "database": "testdb", + "tags": {"tag": "hello"}, + "points": self.dummy_points, + "retentionPolicy": "somepolicy" + }, + json.loads(m.last_request.body) + ) + @unittest.skip('Not implemented for 0.9') def test_write_points_batch(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') From d61f1d090a52c2f12c9b771b4defe94b3aa3dc24 Mon Sep 17 00:00:00 2001 From: aviau Date: Thu, 9 Apr 2015 09:51:51 -0400 Subject: [PATCH 107/536] ResultSet: query using only tags --- influxdb/resultset.py | 4 ++++ tests/influxdb/resultset_test.py | 21 +++++++++++++++------ 2 files changed, 19 insertions(+), 6 deletions(-) diff --git a/influxdb/resultset.py b/influxdb/resultset.py index 9be87020..69c25370 100644 --- a/influxdb/resultset.py +++ b/influxdb/resultset.py @@ -27,9 +27,13 @@ def __getitem__(self, key): tags = key[1] if not isinstance(tags, dict) and tags is not None: raise TypeError('tags should be a dict') + elif isinstance(key, dict): + name = None + tags = key else: name = key tags = None + # TODO(aviau): Fix for python 3.2 # if not isinstance(name, (str, bytes, type(None))) \ # and not isinstance(name, type("".decode("utf-8"))): diff --git 
a/tests/influxdb/resultset_test.py b/tests/influxdb/resultset_test.py index 0f047c8b..2ba647d8 100644 --- a/tests/influxdb/resultset_test.py +++ b/tests/influxdb/resultset_test.py @@ -22,14 +22,14 @@ def setUp(self): "region": "us-west"}, "columns": ["time", "value"], "values": [ - ["2015-01-29T21:51:28.968422294Z", 0.64] + ["2015-01-29T21:51:28.968422294Z", 0.65] ]}, {"name": "other_serie", "tags": {"host": "server01", "region": "us-west"}, "columns": ["time", "value"], "values": [ - ["2015-01-29T21:51:28.968422294Z", 0.64] + ["2015-01-29T21:51:28.968422294Z", 0.66] ]}]} ] } @@ -40,11 +40,20 @@ def test_filter_by_name(self): list(self.rs['cpu_load_short']), [ {'value': 0.64, 'time': '2015-01-29T21:51:28.968422294Z'}, - {'value': 0.64, 'time': '2015-01-29T21:51:28.968422294Z'} + {'value': 0.65, 'time': '2015-01-29T21:51:28.968422294Z'} ] ) def test_filter_by_tags(self): + self.assertEqual( + list(self.rs[{"host": "server01"}]), + [ + {'time': '2015-01-29T21:51:28.968422294Z', 'value': 0.64}, + {'time': '2015-01-29T21:51:28.968422294Z', 'value': 0.66} + ] + ) + + def test_filter_by_name_and_tags(self): self.assertEqual( list(self.rs[('cpu_load_short', {"host": "server01"})]), [{'time': '2015-01-29T21:51:28.968422294Z', 'value': 0.64}] @@ -54,7 +63,7 @@ def test_filter_by_tags(self): list(self.rs[('cpu_load_short', {"region": "us-west"})]), [ {'value': 0.64, 'time': '2015-01-29T21:51:28.968422294Z'}, - {'value': 0.64, 'time': '2015-01-29T21:51:28.968422294Z'} + {'value': 0.65, 'time': '2015-01-29T21:51:28.968422294Z'} ] ) @@ -89,12 +98,12 @@ def test_items(self): ( ('cpu_load_short', {'host': 'server02', 'region': 'us-west'}), - [{'value': 0.64, 'time': '2015-01-29T21:51:28.968422294Z'}] + [{'value': 0.65, 'time': '2015-01-29T21:51:28.968422294Z'}] ), ( ('other_serie', {'host': 'server01', 'region': 'us-west'}), - [{'value': 0.64, 'time': '2015-01-29T21:51:28.968422294Z'}] + [{'value': 0.66, 'time': '2015-01-29T21:51:28.968422294Z'}] ) ] ) From 
3e0858c4a6c895c203a2632efa04ba355e5efc1d Mon Sep 17 00:00:00 2001 From: aviau Date: Thu, 9 Apr 2015 10:30:41 -0400 Subject: [PATCH 108/536] Documented ResultSet --- docs/source/api-documentation.rst | 9 ++++++++ docs/source/index.rst | 1 + docs/source/resultset.rst | 36 +++++++++++++++++++++++++++++++ 3 files changed, 46 insertions(+) create mode 100644 docs/source/resultset.rst diff --git a/docs/source/api-documentation.rst b/docs/source/api-documentation.rst index 92a023c0..8a84e1d9 100644 --- a/docs/source/api-documentation.rst +++ b/docs/source/api-documentation.rst @@ -64,3 +64,12 @@ These clients are initiated in the same way as the .. autoclass:: influxdb.SeriesHelper :members: :undoc-members: + +----------------------- +:class:`ResultSet` +----------------------- + +.. currentmodule:: influxdb.ResultSet +.. autoclass:: influxdb.resultset.ResultSet + :members: + :undoc-members: diff --git a/docs/source/index.rst b/docs/source/index.rst index 077f681b..a110ad2a 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -19,6 +19,7 @@ Contents include-readme api-documentation + resultset examples diff --git a/docs/source/resultset.rst b/docs/source/resultset.rst new file mode 100644 index 00000000..b5cac4cf --- /dev/null +++ b/docs/source/resultset.rst @@ -0,0 +1,36 @@ +================================ +Query response object: ResultSet +================================ + +Using the ``InfluxDBClient.query()`` function will return a ``ResultSet`` Object. + +A ResultSet behaves like a dict. Its keys are series and values are points. However, it is a little bit smarter than a regular dict. Its ``__getitem__`` method can be used to query the ResultSet in several ways. + +Filtering by serie name +----------------------- + +Using ``rs['cpu']`` will return a generator for all the points that are in a serie named ``cpu``, no matter the tags. 
+:: + + rs = cli.query("SELECT * from cpu") + cpu_points = list(rs['cpu']) + +Filtering by tags +----------------- + +Using ``rs[{'host_name': 'influxdb.com'}]`` will return a generator for all the points that are tagged with the specified tags, no matter the serie name. +:: + + rs = cli.query("SELECT * from cpu") + cpu_influxdb_com_points = list(rs[{"host_name": "influxdb.com"}]) + +Filtering by serie name and tags +-------------------------------- + +Using a tuple with a serie name and a dict will return a generator for all the points that are in a serie with the given name AND whose tags match the given tags. +:: + + rs = cli.query("SELECT * from cpu") + points = list(rs[('cpu', {'host_name': 'influxdb.com'})]) + +See the :ref:`api-documentation` page for more information. From dc84223c0178f58402174c881559d929bcbd0b0e Mon Sep 17 00:00:00 2001 From: Christopher Rabotin Date: Thu, 9 Apr 2015 16:07:39 +0100 Subject: [PATCH 109/536] Adds documentation to from_DSN for v0.8 and v0.9 clients. --- influxdb/client.py | 18 +++++++++++++++++- influxdb/influxdb08/client.py | 18 +++++++++++++++++- 2 files changed, 34 insertions(+), 2 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 888426c1..59e4c30c 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -111,12 +111,28 @@ def __init__(self, def from_DSN(dsn, **kwargs): """ Returns an instance of InfluxDBClient from the provided data source - name. + name. Supported schemes are "influxdb", "https+influxdb", + "udp+influxdb". Parameters for the InfluxDBClient constructor may be + also be passed to this function. + + Examples: + >>> cli = InfluxDBClient.from_DSN('influxdb://username:password@\ + ... localhost:8086/databasename', timeout=5) + >>> type(cli) + + >>> cli = InfluxDBClient.from_DSN('udp+influxdb://username:pass@\ + ... 
localhost:8086/databasename', timeout=5, udp_port=159) + >>> print('{0._baseurl} - {0.use_udp} {0.udp_port}'.format(cli)) + http://localhost:8086 - True 159 + :param dsn: data source name :type dsn: string :param **kwargs: additional parameters for InfluxDBClient. :type **kwargs: dict :note: parameters provided in **kwargs may override dsn parameters. + :note: when using "udp+influxdb" the specified port (if any) will be + used for the TCP connection; specify the udp port with the additional + udp_port parameter (cf. examples). :raise ValueError: if the provided DSN has any unexpected value. """ dsn = dsn.lower() diff --git a/influxdb/influxdb08/client.py b/influxdb/influxdb08/client.py index 03bf0196..a4c73c17 100644 --- a/influxdb/influxdb08/client.py +++ b/influxdb/influxdb08/client.py @@ -110,12 +110,28 @@ def __init__(self, def from_DSN(dsn, **kwargs): """ Returns an instance of InfluxDBClient from the provided data source - name. + name. Supported schemes are "influxdb", "https+influxdb", + "udp+influxdb". Parameters for the InfluxDBClient constructor may be + also be passed to this function. + + Examples: + >>> cli = InfluxDBClient.from_DSN('influxdb://username:password@\ + ... localhost:8086/databasename', timeout=5) + >>> type(cli) + + >>> cli = InfluxDBClient.from_DSN('udp+influxdb://username:pass@\ + ... localhost:8086/databasename', timeout=5, udp_port=159) + >>> print('{0._baseurl} - {0.use_udp} {0.udp_port}'.format(cli)) + http://localhost:8086 - True 159 + :param dsn: data source name :type dsn: string :param **kwargs: additional parameters for InfluxDBClient. :type **kwargs: dict :note: parameters provided in **kwargs may override dsn parameters. + :note: when using "udp+influxdb" the specified port (if any) will be + used for the TCP connection; specify the udp port with the additional + udp_port parameter (cf. examples). :raise ValueError: if the provided DSN has any unexpected value. 
""" dsn = dsn.lower() From 0e4bf34add039d3d261b99ac9d7d09f476c900e1 Mon Sep 17 00:00:00 2001 From: aviau Date: Thu, 9 Apr 2015 11:50:44 -0400 Subject: [PATCH 110/536] from_dns: docstring fixes for sphinx --- influxdb/client.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 35030a71..33aeceef 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -117,12 +117,12 @@ def from_DSN(dsn, **kwargs): also be passed to this function. Examples: - >>> cli = InfluxDBClient.from_DSN('influxdb://username:password@\ - ... localhost:8086/databasename', timeout=5) + >>> cli = InfluxDBClient.from_DSN('influxdb://username:password@ + localhost:8086/databasename', timeout=5) >>> type(cli) >>> cli = InfluxDBClient.from_DSN('udp+influxdb://username:pass@\ - ... localhost:8086/databasename', timeout=5, udp_port=159) + localhost:8086/databasename', timeout=5, udp_port=159) >>> print('{0._baseurl} - {0.use_udp} {0.udp_port}'.format(cli)) http://localhost:8086 - True 159 From 54eb8bcfa486ae13463823588d84e99424b4745a Mon Sep 17 00:00:00 2001 From: aviau Date: Thu, 9 Apr 2015 12:08:59 -0400 Subject: [PATCH 111/536] from_DSN: Improved docstring for sphinx --- influxdb/client.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 33aeceef..48adcf9b 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -117,12 +117,12 @@ def from_DSN(dsn, **kwargs): also be passed to this function. 
Examples: - >>> cli = InfluxDBClient.from_DSN('influxdb://username:password@ - localhost:8086/databasename', timeout=5) + >>> cli = InfluxDBClient.from_DSN('influxdb://username:password@\ +localhost:8086/databasename', timeout=5) >>> type(cli) >>> cli = InfluxDBClient.from_DSN('udp+influxdb://username:pass@\ - localhost:8086/databasename', timeout=5, udp_port=159) +localhost:8086/databasename', timeout=5, udp_port=159) >>> print('{0._baseurl} - {0.use_udp} {0.udp_port}'.format(cli)) http://localhost:8086 - True 159 From 3772a711d907cea1dec99a64cfeda45a812a07b7 Mon Sep 17 00:00:00 2001 From: aviau Date: Thu, 9 Apr 2015 12:23:07 -0400 Subject: [PATCH 112/536] More improvements to from_DSN docstring --- influxdb/client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 48adcf9b..fe48fc3c 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -132,8 +132,8 @@ def from_DSN(dsn, **kwargs): :type **kwargs: dict :note: parameters provided in **kwargs may override dsn parameters. :note: when using "udp+influxdb" the specified port (if any) will be - used for the TCP connection; specify the udp port with the additional - udp_port parameter (cf. examples). + used for the TCP connection; specify the udp port with the + additional udp_port parameter (cf. examples). :raise ValueError: if the provided DSN has any unexpected value. """ dsn = dsn.lower() From 05104ff54cd64b4d3f3172313d4cbf755b23ec2f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Thu, 9 Apr 2015 16:13:37 -0400 Subject: [PATCH 113/536] Correctly check that the given serie name has the correct type type(b''.decode()) should give unicode or str depending if on python<3 or python>=3. 
--- influxdb/resultset.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/influxdb/resultset.py b/influxdb/resultset.py index 69c25370..94aac663 100644 --- a/influxdb/resultset.py +++ b/influxdb/resultset.py @@ -11,10 +11,13 @@ def __init__(self, series): def __getitem__(self, key): """ - :param key: Either a serie name or a 2-tuple(serie_name, tags_dict) - If the given serie name is None then any serie (matching - the eventual given tags) will be given its points one - after the other. + :param key: Either a serie name, or a tags_dict, or + a 2-tuple(serie_name, tags_dict). + If the serie name is None (or not given) then any serie + matching the eventual given tags will be given its points + one after the other. + To get the points of every serie in this resultset then + you have to provide None as key. :return: A generator yielding `Point`s matching the given key. NB: The order in which the points are yielded is actually undefined but @@ -34,10 +37,8 @@ def __getitem__(self, key): name = key tags = None - # TODO(aviau): Fix for python 3.2 - # if not isinstance(name, (str, bytes, type(None))) \ - # and not isinstance(name, type("".decode("utf-8"))): - # raise TypeError('serie_name must be an str or None') + if not isinstance(name, (bytes, type(b''.decode()), type(None))): + raise TypeError('serie_name must be an str or None') for serie in self._get_series(): serie_name = serie.get('name', 'results') From e44f74ee029d6510941f8a310b7683becc973804 Mon Sep 17 00:00:00 2001 From: Can ZHANG Date: Thu, 2 Apr 2015 11:45:40 +0800 Subject: [PATCH 114/536] Add client `InfluxDBClusterClient` to handle a cluster of InfluxDB servers Signed-off-by: Can ZHANG --- influxdb/__init__.py | 2 + influxdb/client.py | 88 ++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 90 insertions(+) diff --git a/influxdb/__init__.py b/influxdb/__init__.py index 77c3f582..034e1d7a 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -1,11 
+1,13 @@ # -*- coding: utf-8 -*- from .client import InfluxDBClient +from .client import InfluxDBClusterClient from .dataframe_client import DataFrameClient from .helper import SeriesHelper __all__ = [ 'InfluxDBClient', + 'InfluxDBClusterClient', 'DataFrameClient', 'SeriesHelper', ] diff --git a/influxdb/client.py b/influxdb/client.py index fe48fc3c..be53e598 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -5,6 +5,7 @@ from collections import OrderedDict import json import socket +import random import requests import requests.exceptions from sys import version_info @@ -33,6 +34,12 @@ def __init__(self, content, code): self.code = code +class InfluxDBServerError(Exception): + """Raised when server error occurs""" + def __init__(self, content): + super(InfluxDBServerError, self).__init__(content) + + class InfluxDBClient(object): """ @@ -451,3 +458,84 @@ def send_packet(self, packet): data = json.dumps(packet) byte = data.encode('utf-8') self.udp_socket.sendto(byte, (self._host, self.udp_port)) + + +class InfluxDBClusterClient(object): + """ + The ``InfluxDBClusterClient`` is the client for connecting to a cluster of + InfluxDB Servers. It basically is a proxy to multiple ``InfluxDBClient``s. + + :param hosts: A list of hosts, where a host should be in format + (address, port) + e.g. 
[('127.0.0.1', 8086), ('127.0.0.1', 9096)] + """ + + def __init__(self, + hosts=[('localhost', 8086)], + username='root', + password='root', + database=None, + ssl=False, + verify_ssl=False, + timeout=None, + use_udp=False, + udp_port=4444, + shuffle=True, + client_base_class=InfluxDBClient, + ): + self.clients = [] + self.bad_clients = [] # Corresponding server has failures in history + self.shuffle = shuffle # if true, queries will hit servers evenly + self.client_base_class = client_base_class # For simpler test code + for h in hosts: + self.clients.append(client_base_class(host=h[0], port=h[1], + username=username, + password=password, + database=database, + ssl=ssl, + verify_ssl=verify_ssl, + timeout=timeout, + use_udp=use_udp, + udp_port=udp_port)) + for method in dir(client_base_class): + if method.startswith('_'): + continue + if not callable(getattr(client_base_class, method)): + continue + setattr(self, method, self._make_func(method)) + + def _make_func(self, func_name): + orig_func = getattr(self.client_base_class, func_name) + + def func(*args, **kwargs): + if self.shuffle: + random.shuffle(self.clients) + clients = self.clients + self.bad_clients + for c in clients: + bad_client = False + try: + return orig_func(c, *args, **kwargs) + except InfluxDBClientError as e: + # Errors caused by user's requests, re-raise + raise e + except Exception as e: + # Errors that might caused by server failure, try another + bad_client = True + finally: + if bad_client: + if c not in self.bad_clients: + self.bad_clients.append(c) + for idx, val in enumerate(self.clients): + if val == c: + del self.clients[idx] + break + else: + if c not in self.clients: + self.clients.append(c) + for idx, val in enumerate(self.bad_clients): + if val == c: + del self.bad_clients[idx] + break + raise InfluxDBServerError("InfluxDB: no viable server!") + + return func From e213ecd57a903fc0cb144c57f26827648d84f914 Mon Sep 17 00:00:00 2001 From: Can ZHANG Date: Fri, 10 Apr 2015 17:27:19 
+0800 Subject: [PATCH 115/536] tests for InfluxDBClusterClient Signed-off-by: Can ZHANG --- tests/influxdb/client_test.py | 97 ++++++++++++++++++++++++++++++++++- 1 file changed, 96 insertions(+), 1 deletion(-) diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index 2bf8eb19..81ec6784 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -26,7 +26,8 @@ import warnings import mock -from influxdb import InfluxDBClient +from influxdb import InfluxDBClient, InfluxDBClusterClient +from influxdb.client import InfluxDBServerError def _build_response_object(status_code=200, content=""): @@ -534,3 +535,97 @@ def connection_error(self, *args, **kwargs): with self.assertRaises(requests.exceptions.ConnectionError): cli.write_points(self.dummy_points) + + +class FakeClient(InfluxDBClient): + fail = False + + def query(self, + query, + params={}, + expected_response_code=200, + database=None): + if query == 'Fail': + raise Exception("Fail") + + if self.fail: + raise Exception("Fail") + else: + return "Success" + + +class TestInfluxDBClusterClient(unittest.TestCase): + + def setUp(self): + # By default, raise exceptions on warnings + warnings.simplefilter('error', FutureWarning) + + self.hosts = [('host1', 8086), ('host2', 8086), ('host3', 8086)] + + def test_init(self): + cluster = InfluxDBClusterClient(hosts=self.hosts, + username='username', + password='password', + database='database', + shuffle=False, + client_base_class=FakeClient) + assert len(cluster.clients) == 3 + assert len(cluster.bad_clients) == 0 + for idx, client in enumerate(cluster.clients): + assert client._host == self.hosts[idx][0] + assert client._port == self.hosts[idx][1] + + def test_one_server_fails(self): + cluster = InfluxDBClusterClient(hosts=self.hosts, + database='database', + shuffle=False, + client_base_class=FakeClient) + cluster.clients[0].fail = True + assert cluster.query('') == 'Success' + assert len(cluster.clients) == 2 + assert 
len(cluster.bad_clients) == 1 + + def test_two_servers_fail(self): + cluster = InfluxDBClusterClient(hosts=self.hosts, + database='database', + shuffle=False, + client_base_class=FakeClient) + cluster.clients[0].fail = True + cluster.clients[1].fail = True + assert cluster.query('') == 'Success' + assert len(cluster.clients) == 1 + assert len(cluster.bad_clients) == 2 + + def test_all_fail(self): + cluster = InfluxDBClusterClient(hosts=self.hosts, + database='database', + shuffle=True, + client_base_class=FakeClient) + try: + cluster.query('Fail') + except InfluxDBServerError: + pass + assert len(cluster.clients) == 0 + assert len(cluster.bad_clients) == 3 + + def test_all_good(self): + cluster = InfluxDBClusterClient(hosts=self.hosts, + database='database', + shuffle=True, + client_base_class=FakeClient) + assert cluster.query('') == 'Success' + assert len(cluster.clients) == 3 + assert len(cluster.bad_clients) == 0 + + def test_recovery(self): + cluster = InfluxDBClusterClient(hosts=self.hosts, + database='database', + shuffle=True, + client_base_class=FakeClient) + try: + cluster.query('Fail') + except InfluxDBServerError: + pass + assert cluster.query('') == 'Success' + assert len(cluster.clients) == 1 + assert len(cluster.bad_clients) == 2 From 91eb6d47207bf01aaa7bde279adaf0e7df7b37d5 Mon Sep 17 00:00:00 2001 From: Can ZHANG Date: Fri, 10 Apr 2015 18:33:59 +0800 Subject: [PATCH 116/536] docs for InfluxDBClusterClient Signed-off-by: Can ZHANG --- README.rst | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/README.rst b/README.rst index 85d496ac..32840c5a 100644 --- a/README.rst +++ b/README.rst @@ -126,6 +126,20 @@ Here's a basic example (for more see the examples directory):: >>> print("Result: {0}".format(result)) +If you want to connect to a cluster, you could initialize a ``InfluxDBClusterClient``:: + + $ python + + >>> from influxdb import InfluxDBClusterClient + + >>> cc = InfluxDBClusterClient(hosts = [('192.168.0.1', 8086), + 
('192.168.0.2', 8086), + ('192.168.0.3', 8086)], + username='root', + password='root', + database='example') + +``InfluxDBClusterClient`` has the same methods as ``InfluxDBClient``, it basically is a proxy to multiple InfluxDBClients. Testing ======= From ca486fcb84c51c368178395f45d4423b808be0d4 Mon Sep 17 00:00:00 2001 From: aviau Date: Sat, 11 Apr 2015 12:01:08 -0400 Subject: [PATCH 117/536] Various documentation improvements --- docs/source/api-documentation.rst | 2 ++ docs/source/resultset.rst | 3 +++ influxdb/client.py | 4 ++-- influxdb/resultset.py | 12 ++++++++++++ 4 files changed, 19 insertions(+), 2 deletions(-) diff --git a/docs/source/api-documentation.rst b/docs/source/api-documentation.rst index 8a84e1d9..5449cef5 100644 --- a/docs/source/api-documentation.rst +++ b/docs/source/api-documentation.rst @@ -69,6 +69,8 @@ These clients are initiated in the same way as the :class:`ResultSet` ----------------------- +See the :ref:`resultset` page for more information. + .. currentmodule:: influxdb.ResultSet .. autoclass:: influxdb.resultset.ResultSet :members: diff --git a/docs/source/resultset.rst b/docs/source/resultset.rst index b5cac4cf..48ddf709 100644 --- a/docs/source/resultset.rst +++ b/docs/source/resultset.rst @@ -1,3 +1,6 @@ + +.. _resultset: + ================================ Query response object: ResultSet ================================ diff --git a/influxdb/client.py b/influxdb/client.py index fe48fc3c..5191a571 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -321,9 +321,9 @@ def write_points(self, :param points: A list of dicts. :param time_precision: [Optional, default None] Either 's', 'm', 'ms' or 'u'. - :param database The database to write the points to. Defaults to + :param database: The database to write the points to. Defaults to the client's current db. - :param retention_policy The retention policy for the points. + :param retention_policy: The retention policy for the points. """ # TODO: re-implement chunks. 
return self._write_points(points=points, diff --git a/influxdb/resultset.py b/influxdb/resultset.py index 69c25370..40ac4af2 100644 --- a/influxdb/resultset.py +++ b/influxdb/resultset.py @@ -98,6 +98,9 @@ def __len__(self): return len(self.keys()) def keys(self): + """ + :return: List of keys. Keys are tuples (serie_name, tags) + """ keys = [] for serie in self._get_series(): keys.append( @@ -106,6 +109,9 @@ def keys(self): return keys def items(self): + """ + :return: List of tuples, (key, generator) + """ items = [] for serie in self._get_series(): serie_key = (serie.get('name', 'results'), serie.get('tags', None)) @@ -116,6 +122,12 @@ def items(self): @staticmethod def point_from_cols_vals(cols, vals): + """ Creates a dict from columns and values lists + + :param cols: List of columns + :param vals: List of values + :return: Dict where keys are columns. + """ point = {} for col_index, col_name in enumerate(cols): point[col_name] = vals[col_index] From 7cc5cc79ead60baaf17a20195c35e2df275f8123 Mon Sep 17 00:00:00 2001 From: aviau Date: Sat, 11 Apr 2015 15:08:14 -0400 Subject: [PATCH 118/536] README: Fixed query in example --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 85d496ac..158d7c5d 100644 --- a/README.rst +++ b/README.rst @@ -122,7 +122,7 @@ Here's a basic example (for more see the examples directory):: >>> client.write_points(json_body) - >>> result = client.query('select column_one from foo;') + >>> result = client.query('select value from cpu_load_short;') >>> print("Result: {0}".format(result)) From 59ca6a69904bd52c4ea6775dc3be3e67946e6c0c Mon Sep 17 00:00:00 2001 From: Can ZHANG Date: Tue, 14 Apr 2015 13:50:46 +0800 Subject: [PATCH 119/536] Improvements based on code review. 
--- influxdb/client.py | 34 +++++++--------- tests/influxdb/client_test.py | 76 ++++++++++++++++------------------- 2 files changed, 49 insertions(+), 61 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index be53e598..e47bd326 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -3,6 +3,7 @@ Python client for InfluxDB """ from collections import OrderedDict +from functools import wraps import json import socket import random @@ -481,12 +482,11 @@ def __init__(self, use_udp=False, udp_port=4444, shuffle=True, - client_base_class=InfluxDBClient, + client_base_class=InfluxDBClient, # For simpler test code ): self.clients = [] self.bad_clients = [] # Corresponding server has failures in history self.shuffle = shuffle # if true, queries will hit servers evenly - self.client_base_class = client_base_class # For simpler test code for h in hosts: self.clients.append(client_base_class(host=h[0], port=h[1], username=username, @@ -500,13 +500,14 @@ def __init__(self, for method in dir(client_base_class): if method.startswith('_'): continue - if not callable(getattr(client_base_class, method)): + orig_func = getattr(client_base_class, method) + if not callable(orig_func): continue - setattr(self, method, self._make_func(method)) + setattr(self, method, self._make_func(orig_func)) - def _make_func(self, func_name): - orig_func = getattr(self.client_base_class, func_name) + def _make_func(self, orig_func): + @wraps(orig_func) def func(*args, **kwargs): if self.shuffle: random.shuffle(self.clients) @@ -521,21 +522,14 @@ def func(*args, **kwargs): except Exception as e: # Errors that might caused by server failure, try another bad_client = True + if c in self.clients: + self.clients.remove(c) + self.bad_clients.append(c) finally: - if bad_client: - if c not in self.bad_clients: - self.bad_clients.append(c) - for idx, val in enumerate(self.clients): - if val == c: - del self.clients[idx] - break - else: - if c not in self.clients: - self.clients.append(c) - 
for idx, val in enumerate(self.bad_clients): - if val == c: - del self.bad_clients[idx] - break + if not bad_client and c in self.bad_clients: + self.bad_clients.remove(c) + self.clients.append(c) + raise InfluxDBServerError("InfluxDB: no viable server!") return func diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index 81ec6784..a55c65ee 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -97,41 +97,41 @@ def setUp(self): def test_scheme(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'database') - assert cli._baseurl == 'http://host:8086' + self.assertEqual('http://host:8086', cli._baseurl) cli = InfluxDBClient( 'host', 8086, 'username', 'password', 'database', ssl=True ) - assert cli._baseurl == 'https://host:8086' + self.assertEqual('https://host:8086', cli._baseurl) def test_dsn(self): cli = InfluxDBClient.from_DSN('influxdb://usr:pwd@host:1886/db') - assert cli._baseurl == 'http://host:1886' - assert cli._username == 'usr' - assert cli._password == 'pwd' - assert cli._database == 'db' - assert cli.use_udp is False + self.assertEqual('http://host:1886', cli._baseurl) + self.assertEqual('usr', cli._username) + self.assertEqual('pwd', cli._password) + self.assertEqual('db', cli._database) + self.assertFalse(cli.use_udp) cli = InfluxDBClient.from_DSN('udp+influxdb://usr:pwd@host:1886/db') - assert cli.use_udp is True + self.assertTrue(cli.use_udp) cli = InfluxDBClient.from_DSN('https+influxdb://usr:pwd@host:1886/db') - assert cli._baseurl == 'https://host:1886' + self.assertEqual('https://host:1886', cli._baseurl) cli = InfluxDBClient.from_DSN('https+influxdb://usr:pwd@host:1886/db', **{'ssl': False}) - assert cli._baseurl == 'http://host:1886' + self.assertEqual('http://host:1886', cli._baseurl) def test_switch_database(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'database') cli.switch_database('another_database') - assert cli._database == 'another_database' + 
self.assertEqual('another_database', cli._database) def test_switch_user(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'database') cli.switch_user('another_username', 'another_password') - assert cli._username == 'another_username' - assert cli._password == 'another_password' + self.assertEqual('another_username', cli._username) + self.assertEqual('another_password', cli._password) def test_write(self): with requests_mock.Mocker() as m: @@ -208,10 +208,8 @@ def test_write_points_toplevel_attributes(self): def test_write_points_batch(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') with _mocked_session(cli, 'post', 200, self.dummy_points): - assert cli.write_points( - data=self.dummy_points, - batch_size=2 - ) is True + self.assertTrue(cli.write_points(data=self.dummy_points, + batch_size=2)) def test_write_points_udp(self): s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) @@ -569,11 +567,11 @@ def test_init(self): database='database', shuffle=False, client_base_class=FakeClient) - assert len(cluster.clients) == 3 - assert len(cluster.bad_clients) == 0 + self.assertEqual(3, len(cluster.clients)) + self.assertEqual(0, len(cluster.bad_clients)) for idx, client in enumerate(cluster.clients): - assert client._host == self.hosts[idx][0] - assert client._port == self.hosts[idx][1] + self.assertEqual(self.hosts[idx][0], client._host) + self.assertEqual(self.hosts[idx][1], client._port) def test_one_server_fails(self): cluster = InfluxDBClusterClient(hosts=self.hosts, @@ -581,9 +579,9 @@ def test_one_server_fails(self): shuffle=False, client_base_class=FakeClient) cluster.clients[0].fail = True - assert cluster.query('') == 'Success' - assert len(cluster.clients) == 2 - assert len(cluster.bad_clients) == 1 + self.assertEqual('Success', cluster.query('')) + self.assertEqual(2, len(cluster.clients)) + self.assertEqual(1, len(cluster.bad_clients)) def test_two_servers_fail(self): cluster = InfluxDBClusterClient(hosts=self.hosts, 
@@ -592,40 +590,36 @@ def test_two_servers_fail(self): client_base_class=FakeClient) cluster.clients[0].fail = True cluster.clients[1].fail = True - assert cluster.query('') == 'Success' - assert len(cluster.clients) == 1 - assert len(cluster.bad_clients) == 2 + self.assertEqual('Success', cluster.query('')) + self.assertEqual(1, len(cluster.clients)) + self.assertEqual(2, len(cluster.bad_clients)) def test_all_fail(self): cluster = InfluxDBClusterClient(hosts=self.hosts, database='database', shuffle=True, client_base_class=FakeClient) - try: + with self.assertRaises(InfluxDBServerError): cluster.query('Fail') - except InfluxDBServerError: - pass - assert len(cluster.clients) == 0 - assert len(cluster.bad_clients) == 3 + self.assertEqual(0, len(cluster.clients)) + self.assertEqual(3, len(cluster.bad_clients)) def test_all_good(self): cluster = InfluxDBClusterClient(hosts=self.hosts, database='database', shuffle=True, client_base_class=FakeClient) - assert cluster.query('') == 'Success' - assert len(cluster.clients) == 3 - assert len(cluster.bad_clients) == 0 + self.assertEqual('Success', cluster.query('')) + self.assertEqual(3, len(cluster.clients)) + self.assertEqual(0, len(cluster.bad_clients)) def test_recovery(self): cluster = InfluxDBClusterClient(hosts=self.hosts, database='database', shuffle=True, client_base_class=FakeClient) - try: + with self.assertRaises(InfluxDBServerError): cluster.query('Fail') - except InfluxDBServerError: - pass - assert cluster.query('') == 'Success' - assert len(cluster.clients) == 1 - assert len(cluster.bad_clients) == 2 + self.assertEqual('Success', cluster.query('')) + self.assertEqual(1, len(cluster.clients)) + self.assertEqual(2, len(cluster.bad_clients)) From 9777010bb88064102081e21df821e19e075404c8 Mon Sep 17 00:00:00 2001 From: aviau Date: Thu, 16 Apr 2015 09:30:20 -0400 Subject: [PATCH 120/536] ResultSet: Added __repr__ --- influxdb/resultset.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git 
a/influxdb/resultset.py b/influxdb/resultset.py index f46cf56f..fa2e7ee3 100644 --- a/influxdb/resultset.py +++ b/influxdb/resultset.py @@ -65,7 +65,12 @@ def __getitem__(self, key): ) def __repr__(self): - return str(self.raw) + items = [] + + for item in self.items(): + items.append("'%s': %s" % (item[0], list(item[1]))) + + return "ResultSet({%s})" % ", ".join(items) def __iter__(self): """ Iterating a ResultSet will yield one dict instance per serie result. From 818699b50d8b056a67b992b2cafefccf4e5df477 Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 17 Apr 2015 17:29:00 -0400 Subject: [PATCH 121/536] Released 2.0.0 --- influxdb/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/__init__.py b/influxdb/__init__.py index 77c3f582..fe829feb 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -11,4 +11,4 @@ ] -__version__ = '1.0.2' +__version__ = '2.0.0' From aaee3fe3684189dbcf4d55c55e0ee4bdfb2fb5e3 Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 17 Apr 2015 17:50:38 -0400 Subject: [PATCH 122/536] Documented ResultSet.raw --- influxdb/resultset.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/influxdb/resultset.py b/influxdb/resultset.py index fa2e7ee3..b084a3ae 100644 --- a/influxdb/resultset.py +++ b/influxdb/resultset.py @@ -9,6 +9,15 @@ class ResultSet(object): def __init__(self, series): self.raw = series + @property + def raw(self): + """Raw JSON from InfluxDB""" + return self.raw + + @raw.setter + def raw(self, value): + self.raw = value + def __getitem__(self, key): """ :param key: Either a serie name, or a tags_dict, or From ec4fdcf446e80c0f8a7d8c0f18c9c4e261a592e2 Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 17 Apr 2015 20:10:51 -0400 Subject: [PATCH 123/536] Fixed recursion issue with raw attribute --- influxdb/__init__.py | 2 +- influxdb/resultset.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/influxdb/__init__.py b/influxdb/__init__.py index fe829feb..edb024b1 
100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -11,4 +11,4 @@ ] -__version__ = '2.0.0' +__version__ = '2.0.1' diff --git a/influxdb/resultset.py b/influxdb/resultset.py index b084a3ae..1c7bdc6e 100644 --- a/influxdb/resultset.py +++ b/influxdb/resultset.py @@ -7,16 +7,16 @@ class ResultSet(object): """A wrapper around series results """ def __init__(self, series): - self.raw = series + self._raw = series @property def raw(self): """Raw JSON from InfluxDB""" - return self.raw + return self._raw @raw.setter def raw(self, value): - self.raw = value + self._raw = value def __getitem__(self, key): """ From 43550b39bb0aabb559f9bf53ad02ec383dce6a96 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Mon, 13 Apr 2015 13:53:36 -0400 Subject: [PATCH 124/536] Revert "Revert "Sync tests against last master update"" This reverts commit d3bb67d4793435274a6096169835715179cbe8d4. --- tests/influxdb/client_test_with_server.py | 63 +++++++------ tests/influxdb/influxdb.conf.template | 59 +++++++------ tests/influxdb/influxdb.udp_conf.template | 103 ---------------------- 3 files changed, 66 insertions(+), 159 deletions(-) delete mode 100644 tests/influxdb/influxdb.udp_conf.template diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index 6bdbb4af..24fc5181 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -130,7 +130,7 @@ class InfluxDbInstance(object): in a temporary place, using a config file template. 
''' - def __init__(self, conf_template): + def __init__(self, conf_template, udp_enabled=False): # create a temporary dir to store all needed files # for the influxdb server instance : self.temp_dir_base = tempfile.mkdtemp() @@ -139,28 +139,28 @@ def __init__(self, conf_template): tempdir = self.temp_dir_influxdb = tempfile.mkdtemp( dir=self.temp_dir_base) # we need some "free" ports : - self.broker_port = get_free_port() - self.admin_port = get_free_port() - self.udp_port = get_free_port() - self.snapshot_port = get_free_port() - - self.logs_file = os.path.join(self.temp_dir_base, 'logs.txt') - - with open(conf_template) as fh: - conf = fh.read().format( - broker_port=self.broker_port, - admin_port=self.admin_port, - udp_port=self.udp_port, - broker_raft_dir=os.path.join(tempdir, 'raft'), - broker_node_dir=os.path.join(tempdir, 'db'), - cluster_dir=os.path.join(tempdir, 'state'), - logfile=self.logs_file, - snapshot_port=self.snapshot_port, - ) + + ports = dict( + broker_port=get_free_port(), + webui_port=get_free_port(), + admin_port=get_free_port(), + udp_port=get_free_port() if udp_enabled else -1, + ) + + conf_data = dict( + broker_raft_dir=os.path.join(tempdir, 'raft'), + broker_node_dir=os.path.join(tempdir, 'db'), + cluster_dir=os.path.join(tempdir, 'state'), + logs_file=os.path.join(self.temp_dir_base, 'logs.txt'), + udp_enabled='true' if udp_enabled else 'false', + ) + conf_data.update(ports) + self.__dict__.update(conf_data) conf_file = os.path.join(self.temp_dir_base, 'influxdb.conf') with open(conf_file, "w") as fh: - fh.write(conf) + with open(conf_template) as fh_template: + fh.write(fh_template.read().format(**conf_data)) # now start the server instance: proc = self.proc = subprocess.Popen( @@ -179,8 +179,13 @@ def __init__(self, conf_template): # or you run a 286 @ 1Mhz ? 
try: while time.time() < timeout: - if (is_port_open(self.broker_port) + if (is_port_open(self.webui_port) and is_port_open(self.admin_port)): + # it's hard to check if a UDP port is open.. + if udp_enabled: + # so let's just sleep 0.5 sec in this case + # to be sure that the server has open the port + time.sleep(0.5) break time.sleep(0.5) if proc.poll() is not None: @@ -189,13 +194,13 @@ def __init__(self, conf_template): proc.terminate() proc.wait() raise RuntimeError('Timeout waiting for influxdb to listen' - ' on its broker port') + ' on its ports (%s)' % ports) except RuntimeError as err: data = self.get_logs_and_output() data['reason'] = str(err) data['now'] = datetime.datetime.now() raise RuntimeError("%(now)s > %(reason)s. RC=%(rc)s\n" - "stdout=%(out)r\nstderr=%(err)r\nlogs=%(logs)r" + "stdout=%(out)s\nstderr=%(err)s\nlogs=%(logs)r" % data) def get_logs_and_output(self): @@ -222,9 +227,11 @@ def close(self, remove_tree=True): def _setup_influxdb_server(inst): - inst.influxd_inst = InfluxDbInstance(inst.influxdb_template_conf) + inst.influxd_inst = InfluxDbInstance( + inst.influxdb_template_conf, + udp_enabled=getattr(inst, 'influxdb_udp_enabled', False)) inst.cli = InfluxDBClient('localhost', - inst.influxd_inst.broker_port, + inst.influxd_inst.webui_port, 'root', '', database='db') @@ -637,12 +644,14 @@ def test_query_multiple_series(self): class UdpTests(ManyTestCasesWithServerMixin, unittest.TestCase): + influxdb_udp_enabled = True + influxdb_template_conf = os.path.join(THIS_DIR, - 'influxdb.udp_conf.template') + 'influxdb.conf.template') def test_write_points_udp(self): cli = InfluxDBClient( - 'localhost', self.influxd_inst.broker_port, + 'localhost', self.influxd_inst.webui_port, 'dont', 'care', database='db', use_udp=True, udp_port=self.influxd_inst.udp_port diff --git a/tests/influxdb/influxdb.conf.template b/tests/influxdb/influxdb.conf.template index 82608f73..dde1a4ae 100644 --- a/tests/influxdb/influxdb.conf.template +++ 
b/tests/influxdb/influxdb.conf.template @@ -5,6 +5,7 @@ # that can be resolved here. # hostname = "" bind-address = "0.0.0.0" +port = {webui_port} # Once every 24 hours InfluxDB will report anonymous data to m.influxdb.com # The data includes raft id (random 8 bytes), os, arch and version @@ -14,11 +15,6 @@ bind-address = "0.0.0.0" # Change this option to true to disable reporting. reporting-disabled = false -# Controls settings for initial start-up. Once a node a successfully started, -# these settings are ignored. -[initialization] -join-urls = "" # Comma-delimited URLs, in the form http://host:port, for joining another cluster. - # Control authentication # If not set authetication is DISABLED. Be sure to explicitly set this flag to # true if you want authentication. @@ -53,52 +49,57 @@ enabled = false #database = "collectd_database" #typesdb = "types.db" +# Configure the OpenTSDB input. +[opentsdb] +enabled = false +#address = "0.0.0.0" # If not set, is actually set to bind-address. +#port = 4242 +#database = "opentsdb_database" + # Configure UDP listener for series data. [udp] -enabled = false +enabled = {udp_enabled} #bind-address = "0.0.0.0" #port = 4444 +port = {udp_port} # Broker configuration. Brokers are nodes which participate in distributed # consensus. [broker] +enabled = true # Where the Raft logs are stored. The user running InfluxDB will need read/write access. +#dir = "/var/opt/influxdb/raft" dir = "{broker_raft_dir}" port = {broker_port} # Data node configuration. Data nodes are where the time-series data, in the form of # shards, is stored. [data] - dir = "{broker_node_dir}" - port = {broker_port} +enabled = true +#dir = "/var/opt/influxdb/db" +dir = "{broker_node_dir}" - # Auto-create a retention policy when a database is created. Defaults to true. - retention-auto-create = true +# Auto-create a retention policy when a database is created. Defaults to true. 
+retention-auto-create = true - # Control whether retention policies are enforced and how long the system waits between - # enforcing those policies. - retention-check-enabled = true - retention-check-period = "10m" +# Control whether retention policies are enforced and how long the system waits between +# enforcing those policies. +retention-check-enabled = true +retention-check-period = "10m" -[cluster] -# Location for cluster state storage. For storing state persistently across restarts. -dir = "{cluster_dir}" +# Configuration for snapshot endpoint. +[snapshot] +enabled = false # Enabled by default if not set. +bind-address = "127.0.0.1" +port = 8087 [logging] -file = "{logfile}" # Leave blank to redirect logs to stderr. write-tracing = false # If true, enables detailed logging of the write system. raft-tracing = false # If true, enables detailed logging of Raft consensus. -# InfluxDB can store statistics about itself. This is useful for monitoring purposes. -# This feature is disabled by default, but if enabled, these statistics can be queried -# as any other data. -[statistics] +# InfluxDB can store statistical and diagnostic information about itself. This is useful for +# monitoring purposes. This feature is disabled by default, but if enabled, these data can be +# queried like any other data. +[monitoring] enabled = false -database = "internal" # The database to which the data is written. -retention-policy = "default" # The retention policy within the database. write-interval = "1m" # Period between writing the data. - - -[snapshot] -bind-address = "127.0.0.1" -port = {snapshot_port} diff --git a/tests/influxdb/influxdb.udp_conf.template b/tests/influxdb/influxdb.udp_conf.template deleted file mode 100644 index 4134172f..00000000 --- a/tests/influxdb/influxdb.udp_conf.template +++ /dev/null @@ -1,103 +0,0 @@ -# Welcome to the InfluxDB configuration file. 
- -# If hostname (on the OS) doesn't return a name that can be resolved by the other -# systems in the cluster, you'll have to set the hostname to an IP or something -# that can be resolved here. -# hostname = "" -bind-address = "0.0.0.0" - -# Once every 24 hours InfluxDB will report anonymous data to m.influxdb.com -# The data includes raft id (random 8 bytes), os, arch and version -# We don't track ip addresses of servers reporting. This is only used -# to track the number of instances running and the versions, which -# is very helpful for us. -# Change this option to true to disable reporting. -reporting-disabled = false - -# Controls settings for initial start-up. Once a node a successfully started, -# these settings are ignored. -[initialization] -join-urls = "" # Comma-delimited URLs, in the form http://host:port, for joining another cluster. - -# Control authentication -# If not set authetication is DISABLED. Be sure to explicitly set this flag to -# true if you want authentication. -[authentication] -enabled = false - -# Configure the admin server -[admin] -enabled = true -port = {admin_port} - -# Configure the HTTP API endpoint. All time-series data and queries uses this endpoint. -[api] -# ssl-port = 8087 # SSL support is enabled if you set a port and cert -# ssl-cert = "/path/to/cert.pem" - -# Configure the Graphite plugins. -[[graphite]] # 1 or more of these sections may be present. -enabled = false -# protocol = "" # Set to "tcp" or "udp" -# address = "0.0.0.0" # If not set, is actually set to bind-address. -# port = 2003 -# name-position = "last" -# name-separator = "-" -# database = "" # store graphite data in this database - -# Configure the collectd input. -[collectd] -enabled = false -#address = "0.0.0.0" # If not set, is actually set to bind-address. -#port = 25827 -#database = "collectd_database" -#typesdb = "types.db" - -# Configure UDP listener for series data. 
-[udp] -enabled = true -#bind-address = "0.0.0.0" -port = {udp_port} - -# Broker configuration. Brokers are nodes which participate in distributed -# consensus. -[broker] -# Where the Raft logs are stored. The user running InfluxDB will need read/write access. -dir = "{broker_raft_dir}" -port = {broker_port} - -# Data node configuration. Data nodes are where the time-series data, in the form of -# shards, is stored. -[data] - dir = "{broker_node_dir}" - port = {broker_port} - - # Auto-create a retention policy when a database is created. Defaults to true. - retention-auto-create = true - - # Control whether retention policies are enforced and how long the system waits between - # enforcing those policies. - retention-check-enabled = true - retention-check-period = "10m" - -[cluster] -# Location for cluster state storage. For storing state persistently across restarts. -dir = "{cluster_dir}" - -[logging] -file = "{logfile}" # Leave blank to redirect logs to stderr. -write-tracing = false # If true, enables detailed logging of the write system. -raft-tracing = false # If true, enables detailed logging of Raft consensus. - -# InfluxDB can store statistics about itself. This is useful for monitoring purposes. -# This feature is disabled by default, but if enabled, these statistics can be queried -# as any other data. -[statistics] -enabled = false -database = "internal" # The database to which the data is written. -retention-policy = "default" # The retention policy within the database. -write-interval = "1m" # Period between writing the data. 
- -[snapshot] -bind-address = "127.0.0.1" -port = {snapshot_port} From cb510ec345c1e3a292940f81742684b1c7f706a1 Mon Sep 17 00:00:00 2001 From: Alexandre Viau Date: Fri, 17 Apr 2015 19:51:52 -0400 Subject: [PATCH 125/536] Updated InfluxDB to 0.9.0-rc25 --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 8c8b81d2..9e67562a 100644 --- a/.travis.yml +++ b/.travis.yml @@ -12,7 +12,7 @@ env: install: - sudo pip install tox - sudo pip install coveralls - - wget http://get.influxdb.org/influxdb_0.9.0-rc18_amd64.deb && sudo dpkg -i influxdb_0.9.0-rc18_amd64.deb + - wget http://get.influxdb.org/influxdb_0.9.0-rc25_amd64.deb && sudo dpkg -i influxdb_0.9.0-rc25_amd64.deb script: - travis_wait tox -e $TOX_ENV after_success: From 39d61c689d4b6602ce71086606b5ebbf533aaefe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Mon, 20 Apr 2015 13:55:48 -0400 Subject: [PATCH 126/536] Test workaround: server changed its response content in this case That may still change in final release.. 
--- tests/influxdb/client_test_with_server.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index 24fc5181..6448fbf7 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -313,15 +313,15 @@ def test_drop_database_fails(self): with self.assertRaises(InfluxDBClientError) as ctx: self.cli.drop_database('db') self.assertEqual(500, ctx.exception.code) - self.assertEqual('{"results":[{"error":"database not found"}]}', - ctx.exception.content) + self.assertIn('{"results":[{"error":"database not found: db', + ctx.exception.content) def test_query_fail(self): with self.assertRaises(InfluxDBClientError) as ctx: self.cli.query('select column_one from foo') - self.assertEqual( - ('500: {"results":[{"error":"database not found: db"}]}',), - ctx.exception.args) + self.assertEqual(500, ctx.exception.code) + self.assertIn('{"results":[{"error":"database not found: db', + ctx.exception.content) ############################################################################ From 9e61b29687ed2d9c66801462c38b1d7cc1b4729c Mon Sep 17 00:00:00 2001 From: Georgi Dimitrov Date: Wed, 22 Apr 2015 00:49:32 +0100 Subject: [PATCH 127/536] Fix get_list_users method return value. To be consistent with the other get methods, this should return a list of dictionaries, not a list of list of dictionaries. 
--- influxdb/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/client.py b/influxdb/client.py index 5191a571..40a4bdc7 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -441,7 +441,7 @@ def get_list_users(self): """ Get the list of users """ - return list(self.query("SHOW USERS")) + return list(self.query("SHOW USERS")["results"]) def delete_series(self, name, database=None): database = database or self._database From 398c94dd06225dec7e3354a95b45abdc567e8d28 Mon Sep 17 00:00:00 2001 From: Georgi Dimitrov Date: Wed, 22 Apr 2015 00:50:33 +0100 Subject: [PATCH 128/536] Add tests for get_list_users method. --- tests/influxdb/client_test.py | 31 +++++++++++++++++++++++ tests/influxdb/client_test_with_server.py | 13 ++++++++++ 2 files changed, 44 insertions(+) diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index 2bf8eb19..d227ce1b 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -534,3 +534,34 @@ def connection_error(self, *args, **kwargs): with self.assertRaises(requests.exceptions.ConnectionError): cli.write_points(self.dummy_points) + + def test_get_list_users(self): + example_response = ( + '{"results":[{"series":[{"columns":["user","admin"],' + '"values":[["test",false]]}]}]}' + ) + + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.GET, + "http://localhost:8086/query", + text=example_response + ) + + self.assertListEqual( + self.cli.get_list_users(), + [{'user': 'test', 'admin': False}] + ) + + def test_get_list_users_empty(self): + example_response = ( + '{"results":[{"series":[{"columns":["user","admin"]}]}]}' + ) + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.GET, + "http://localhost:8086/query", + text=example_response + ) + + self.assertListEqual(self.cli.get_list_users(), []) diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index 6448fbf7..73677460 100644 --- 
a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -536,6 +536,19 @@ def test_get_list_series_non_empty(self): rsp ) + def test_get_list_users_empty(self): + rsp = self.cli.get_list_users() + self.assertEqual([], rsp) + + def test_get_list_users_non_empty(self): + self.cli.query("CREATE USER test WITH PASSWORD 'test'") + rsp = self.cli.get_list_users() + + self.assertEqual( + [{'user': 'test', 'admin': False}], + rsp + ) + def test_default_retention_policy(self): rsp = self.cli.get_list_retention_policies() self.assertEqual( From 4555e6a560e911a0f52982dd9b7c50e79db51673 Mon Sep 17 00:00:00 2001 From: Can ZHANG Date: Wed, 22 Apr 2015 17:37:47 +0800 Subject: [PATCH 129/536] Add `from_DSN` to InfluxDBClusterClient --- influxdb/client.py | 43 +++++++++++++++++++++++++++-- tests/influxdb/client_test.py | 52 +++++++++++++++++++++++++++++++++++ 2 files changed, 92 insertions(+), 3 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index e47bd326..f13bb116 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -18,7 +18,7 @@ except NameError: xrange = range -if version_info.major == 3: +if version_info[0] == 3: from urllib.parse import urlparse else: from urlparse import urlparse @@ -469,6 +469,9 @@ class InfluxDBClusterClient(object): :param hosts: A list of hosts, where a host should be in format (address, port) e.g. 
[('127.0.0.1', 8086), ('127.0.0.1', 9096)] + :param shuffle: If true, queries will hit servers evenly(randomly) + :param client_base_class: In order to support different clients, + default to InfluxDBClient """ def __init__(self, @@ -482,11 +485,11 @@ def __init__(self, use_udp=False, udp_port=4444, shuffle=True, - client_base_class=InfluxDBClient, # For simpler test code + client_base_class=InfluxDBClient, ): self.clients = [] self.bad_clients = [] # Corresponding server has failures in history - self.shuffle = shuffle # if true, queries will hit servers evenly + self.shuffle = shuffle for h in hosts: self.clients.append(client_base_class(host=h[0], port=h[1], username=username, @@ -505,6 +508,40 @@ def __init__(self, continue setattr(self, method, self._make_func(orig_func)) + @staticmethod + def from_DSN(dsn, client_base_class=InfluxDBClient, + shuffle=True, **kwargs): + """ + Same as InfluxDBClient.from_DSN, and supports multiple servers. + + Example DSN: + influxdb://usr:pwd@host1:8086,usr:pwd@host2:8086/db_name + udp+influxdb://usr:pwd@host1:8086,usr:pwd@host2:8086/db_name + https+influxdb://usr:pwd@host1:8086,usr:pwd@host2:8086/db_name + + :param shuffle: If true, queries will hit servers evenly(randomly) + :param client_base_class: In order to support different clients, + default to InfluxDBClient + """ + dsn = dsn.lower() + conn_params = urlparse(dsn) + netlocs = conn_params.netloc.split(',') + cluster_client = InfluxDBClusterClient( + hosts=[], + client_base_class=client_base_class, + shuffle=shuffle, + **kwargs) + for netloc in netlocs: + single_dsn = '%(scheme)s://%(netloc)s%(path)s' % ( + {'scheme': conn_params.scheme, + 'netloc': netloc, + 'path': conn_params.path} + ) + cluster_client.clients.append(client_base_class.from_DSN( + single_dsn, + **kwargs)) + return cluster_client + def _make_func(self, orig_func): @wraps(orig_func) diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index a55c65ee..e4eed104 100644 --- 
a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -623,3 +623,55 @@ def test_recovery(self): self.assertEqual('Success', cluster.query('')) self.assertEqual(1, len(cluster.clients)) self.assertEqual(2, len(cluster.bad_clients)) + + def test_dsn(self): + cli = InfluxDBClusterClient.from_DSN( + 'influxdb://usr:pwd@host1:8086,usr:pwd@host2:8086/db') + self.assertEqual(2, len(cli.clients)) + self.assertEqual('http://host1:8086', cli.clients[0]._baseurl) + self.assertEqual('usr', cli.clients[0]._username) + self.assertEqual('pwd', cli.clients[0]._password) + self.assertEqual('db', cli.clients[0]._database) + self.assertFalse(cli.clients[0].use_udp) + self.assertEqual('http://host2:8086', cli.clients[1]._baseurl) + self.assertEqual('usr', cli.clients[1]._username) + self.assertEqual('pwd', cli.clients[1]._password) + self.assertEqual('db', cli.clients[1]._database) + self.assertFalse(cli.clients[1].use_udp) + + cli = InfluxDBClusterClient.from_DSN( + 'udp+influxdb://usr:pwd@host1:8086,usr:pwd@host2:8086/db') + self.assertTrue(cli.clients[0].use_udp) + self.assertTrue(cli.clients[1].use_udp) + + cli = InfluxDBClusterClient.from_DSN( + 'https+influxdb://usr:pwd@host1:8086,usr:pwd@host2:8086/db') + self.assertEqual('https://host1:8086', cli.clients[0]._baseurl) + self.assertEqual('https://host2:8086', cli.clients[1]._baseurl) + + cli = InfluxDBClusterClient.from_DSN( + 'https+influxdb://usr:pwd@host1:8086,usr:pwd@host2:8086/db', + **{'ssl': False}) + self.assertEqual('http://host1:8086', cli.clients[0]._baseurl) + self.assertEqual('http://host2:8086', cli.clients[1]._baseurl) + + def test_dsn_single_client(self): + cli = InfluxDBClusterClient.from_DSN('influxdb://usr:pwd@host:8086/db') + self.assertEqual('http://host:8086', cli.clients[0]._baseurl) + self.assertEqual('usr', cli.clients[0]._username) + self.assertEqual('pwd', cli.clients[0]._password) + self.assertEqual('db', cli.clients[0]._database) + self.assertFalse(cli.clients[0].use_udp) + + cli = 
InfluxDBClusterClient.from_DSN( + 'udp+influxdb://usr:pwd@host:8086/db') + self.assertTrue(cli.clients[0].use_udp) + + cli = InfluxDBClusterClient.from_DSN( + 'https+influxdb://usr:pwd@host:8086/db') + self.assertEqual('https://host:8086', cli.clients[0]._baseurl) + + cli = InfluxDBClusterClient.from_DSN( + 'https+influxdb://usr:pwd@host:8086/db', + **{'ssl': False}) + self.assertEqual('http://host:8086', cli.clients[0]._baseurl) From 3a68ec6d3d9c98f118c713621176964e6a93c024 Mon Sep 17 00:00:00 2001 From: Alexandre Viau Date: Wed, 22 Apr 2015 08:29:34 -0400 Subject: [PATCH 130/536] No longer deploy with Travis --- .travis.yml | 9 --------- 1 file changed, 9 deletions(-) diff --git a/.travis.yml b/.travis.yml index 9e67562a..1cb6e7e9 100644 --- a/.travis.yml +++ b/.travis.yml @@ -17,14 +17,5 @@ script: - travis_wait tox -e $TOX_ENV after_success: - if [ "$TOX_ENV" == "coverage" ] ; then coveralls; fi -deploy: - provider: pypi - user: errplane - password: - secure: C20cSqrCtd7Ng2oxSy9YSQS72aeqMjrRaZTYKIIF4eSR4JzCawasFhof6Pq/mUqx6fJCBTZ7yMUqfK22JAQ2iUoUnBF04IHASR3iwqjdCRbXGtzX1J9Bw//6iCHBE5fgGEHQc8Mw5wKDIy5RvbjiR9ADCW/cIlpVSF9QzH/RA24= - on: - tags: true - all_branches: true - repo: influxdb/influxdb-python notifications: email: false From d05359fd21ba39721e5e3346dac76d30f16de1bd Mon Sep 17 00:00:00 2001 From: aviau Date: Wed, 22 Apr 2015 08:36:20 -0400 Subject: [PATCH 131/536] Released 2.0.2 --- influxdb/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/__init__.py b/influxdb/__init__.py index edb024b1..31c2d29f 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -11,4 +11,4 @@ ] -__version__ = '2.0.1' +__version__ = '2.0.2' From cb17518a91e49d65b5cac0762b45d95547675220 Mon Sep 17 00:00:00 2001 From: Alexandre Viau Date: Wed, 22 Apr 2015 10:38:23 -0400 Subject: [PATCH 132/536] Travis: updated to influxdb_0.9.0-rc26 --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml 
b/.travis.yml index 1cb6e7e9..3e26eaf9 100644 --- a/.travis.yml +++ b/.travis.yml @@ -12,7 +12,7 @@ env: install: - sudo pip install tox - sudo pip install coveralls - - wget http://get.influxdb.org/influxdb_0.9.0-rc25_amd64.deb && sudo dpkg -i influxdb_0.9.0-rc25_amd64.deb + - wget http://get.influxdb.org/influxdb_0.9.0-rc26_amd64.deb && sudo dpkg -i influxdb_0.9.0-rc26_amd64.deb script: - travis_wait tox -e $TOX_ENV after_success: From b6352ca16f0c6f743edbc2c317cff3d292c93259 Mon Sep 17 00:00:00 2001 From: "Sergey V. Yudin" Date: Wed, 22 Apr 2015 09:53:37 -0700 Subject: [PATCH 133/536] Get syntax compatible with python <2.7 --- influxdb/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/client.py b/influxdb/client.py index 40a4bdc7..d9c1e37f 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -16,7 +16,7 @@ except NameError: xrange = range -if version_info.major == 3: +if version_info[0] == 3: from urllib.parse import urlparse else: from urlparse import urlparse From 8f0dc39be659f9a459e8d95b7ec096a467bf5327 Mon Sep 17 00:00:00 2001 From: Georgi Dimitrov Date: Wed, 22 Apr 2015 21:58:59 +0100 Subject: [PATCH 134/536] Add create_user method for InfluxDBClient. 
--- influxdb/client.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/influxdb/client.py b/influxdb/client.py index 40a4bdc7..861a864e 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -443,6 +443,18 @@ def get_list_users(self): """ return list(self.query("SHOW USERS")["results"]) + def create_user(self, username, password): + """ + Create a new user + + :param username: the new username to create + :type username: string + :param password: the password for the new user + :type password: string + """ + text = "CREATE USER {} WITH PASSWORD '{}'".format(username, password) + self.query(text) + def delete_series(self, name, database=None): database = database or self._database self.query('DROP SERIES \"%s\"' % name, database=database) From 8aa05eaa8942e5994f78f6a4cfe7f580a942f07d Mon Sep 17 00:00:00 2001 From: Georgi Dimitrov Date: Wed, 22 Apr 2015 21:59:52 +0100 Subject: [PATCH 135/536] Add tests for create_user method. --- tests/influxdb/client_test_with_server.py | 33 +++++++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index 73677460..10c631fb 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -323,6 +323,39 @@ def test_query_fail(self): self.assertIn('{"results":[{"error":"database not found: db', ctx.exception.content) + def test_create_user(self): + self.cli.create_user('test_user', 'secret_password') + rsp = list(self.cli.query("SHOW USERS")['results']) + self.assertIn({'user': 'test_user', 'admin': False}, + rsp) + + def test_create_user_blank_password(self): + self.cli.create_user('test_user', '') + rsp = list(self.cli.query("SHOW USERS")['results']) + self.assertIn({'user': 'test_user', 'admin': False}, + rsp) + + def test_create_user_blank_username(self): + with self.assertRaises(InfluxDBClientError) as ctx: + self.cli.create_user('', 'secret_password') + 
self.assertEqual(400, ctx.exception.code) + self.assertIn('{"error":"error parsing query: ' + 'found WITH, expected identifier', + ctx.exception.content) + rsp = list(self.cli.query("SHOW USERS")['results']) + self.assertEqual(rsp, []) + + def test_create_user_invalid_username(self): + with self.assertRaises(InfluxDBClientError) as ctx: + self.cli.create_user('very invalid', 'secret_password') + self.assertEqual(400, ctx.exception.code) + self.assertIn('{"error":"error parsing query: ' + 'found invalid, expected WITH', + ctx.exception.content) + rsp = list(self.cli.query("SHOW USERS")['results']) + self.assertEqual(rsp, []) + + ############################################################################ From 72f66d43153ad5cc940d19954331b579537350c0 Mon Sep 17 00:00:00 2001 From: Georgi Dimitrov Date: Wed, 22 Apr 2015 22:15:51 +0100 Subject: [PATCH 136/536] Add drop_user method for InfluxDBClient. --- influxdb/client.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/influxdb/client.py b/influxdb/client.py index 861a864e..a68904d9 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -455,6 +455,16 @@ def create_user(self, username, password): text = "CREATE USER {} WITH PASSWORD '{}'".format(username, password) self.query(text) + def drop_user(self, username): + """ + Drop a user + + :param username: the username to drop + :type username: string + """ + text = "DROP USER {}".format(username) + self.query(text) + def delete_series(self, name, database=None): database = database or self._database self.query('DROP SERIES \"%s\"' % name, database=database) From b3c04e1bc0cfd8f2f335e00df007cea32db51403 Mon Sep 17 00:00:00 2001 From: Georgi Dimitrov Date: Wed, 22 Apr 2015 22:16:12 +0100 Subject: [PATCH 137/536] Add tests for drop_user method. 
--- tests/influxdb/client_test_with_server.py | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index 10c631fb..2d1ff44c 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -355,6 +355,27 @@ def test_create_user_invalid_username(self): rsp = list(self.cli.query("SHOW USERS")['results']) self.assertEqual(rsp, []) + def test_drop_user(self): + self.cli.query("CREATE USER test WITH PASSWORD 'test'") + self.cli.drop_user('test') + users = list(self.cli.query("SHOW USERS")['results']) + self.assertEqual(users, []) + + def test_drop_user_nonexisting(self): + with self.assertRaises(InfluxDBClientError) as ctx: + self.cli.drop_user('test') + self.assertEqual(500, ctx.exception.code) + self.assertIn('{"results":[{"error":"user not found"}]}', + ctx.exception.content) + + def test_drop_user_invalid(self): + with self.assertRaises(InfluxDBClientError) as ctx: + self.cli.drop_user('very invalid') + self.assertEqual(400, ctx.exception.code) + self.assertIn('{"error":"error parsing query: ' + 'found invalid, expected', + ctx.exception.content) + ############################################################################ From 629509a0eabe1effcb917cb9a0750ce47082bc3f Mon Sep 17 00:00:00 2001 From: Georgi Dimitrov Date: Wed, 22 Apr 2015 22:51:18 +0100 Subject: [PATCH 138/536] Add set_user_password method for InfluxDBClient. 
--- influxdb/client.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/influxdb/client.py b/influxdb/client.py index a68904d9..72ff517e 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -465,6 +465,18 @@ def drop_user(self, username): text = "DROP USER {}".format(username) self.query(text) + def set_user_password(self, username, password): + """ + Change the password of an existing user + + :param username: the username whose password is being changed + :type username: string + :param password: the new password for the user + :type password: string + """ + text = "SET PASSWORD FOR {} = '{}'".format(username, password) + self.query(text) + def delete_series(self, name, database=None): database = database or self._database self.query('DROP SERIES \"%s\"' % name, database=database) From d51354277f4809bf036f9180658eeabb193388fe Mon Sep 17 00:00:00 2001 From: "Sergey V. Yudin" Date: Thu, 23 Apr 2015 00:21:02 -0700 Subject: [PATCH 139/536] Get 0.8 client compatible with python 2.6 too --- influxdb/influxdb08/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/influxdb08/client.py b/influxdb/influxdb08/client.py index a4c73c17..65b7caac 100644 --- a/influxdb/influxdb08/client.py +++ b/influxdb/influxdb08/client.py @@ -16,7 +16,7 @@ except NameError: xrange = range -if version_info.major == 3: +if version_info[0] == 3: from urllib.parse import urlparse else: from urlparse import urlparse From 7fd1138f47c745e5e86e8976f308b3875052e592 Mon Sep 17 00:00:00 2001 From: "tzonghao.chen" Date: Thu, 23 Apr 2015 12:43:12 -0400 Subject: [PATCH 140/536] Tox to accept posargs --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 05338bb0..4ec36717 100644 --- a/tox.ini +++ b/tox.ini @@ -6,7 +6,7 @@ deps = -r{toxinidir}/requirements.txt -r{toxinidir}/test-requirements.txt py27,py32,py33,py34: pandas # Only install pandas with non-pypy interpreters -commands = nosetests -v +commands = 
nosetests -v {posargs} [testenv:flake8] deps = From d68c29f19bda508a1a4b4e7a99119badd7e5c42b Mon Sep 17 00:00:00 2001 From: "tzonghao.chen" Date: Thu, 23 Apr 2015 12:44:02 -0400 Subject: [PATCH 141/536] DataFrameClient for InfluxDB 0.9 --- influxdb/_dataframe_client.py | 137 ++++++----- tests/influxdb/dataframe_client_test.py | 287 +++++++++++++++--------- 2 files changed, 256 insertions(+), 168 deletions(-) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index 959ecb98..93d51fbe 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -3,11 +3,22 @@ DataFrame client for InfluxDB """ import math -import warnings + +import pandas as pd from .client import InfluxDBClient -import pandas as pd + +def _pandas_time_unit(time_precision): + unit = time_precision + if time_precision == 'm': + unit = 'ms' + elif time_precision == 'u': + unit = 'us' + elif time_precision == 'n': + unit = 'ns' + assert unit in ('s', 'ms', 'us', 'ns') + return unit class DataFrameClient(InfluxDBClient): @@ -19,13 +30,18 @@ class DataFrameClient(InfluxDBClient): EPOCH = pd.Timestamp('1970-01-01 00:00:00.000+00:00') - def write_points(self, data, *args, **kwargs): + def write_points(self, + data, + time_precision=None, + database=None, + retention_policy=None, + tags=None, **kwargs): """ Write to multiple time series names. - :param data: A dictionary mapping series names to pandas DataFrames - :param time_precision: [Optional, default 's'] Either 's', 'm', 'ms' - or 'u'. + :param data: A dictionary mapping series to pandas DataFrames + :param time_precision: [Optional, default 's'] Either 's', 'ms', 'u' + or 'n'. :param batch_size: [Optional] Value to write the points in batches instead of all at one time. 
Useful for when doing data dumps from one database to another or when doing a massive write operation @@ -42,31 +58,19 @@ def write_points(self, data, *args, **kwargs): for batch in range(number_batches): start_index = batch * batch_size end_index = (batch + 1) * batch_size - data = [self._convert_dataframe_to_json( - name=key, + data = self._convert_dataframe_to_json( + key=key, dataframe=data_frame.ix[start_index:end_index].copy(), - time_precision=time_precision)] - super(DataFrameClient, self).write_points(data, - *args, **kwargs) + time_precision=time_precision) + super(DataFrameClient, self).write_points(data, **kwargs) return True else: - data = [self._convert_dataframe_to_json( - name=key, dataframe=dataframe, time_precision=time_precision) - for key, dataframe in data.items()] - return super(DataFrameClient, self).write_points(data, - *args, **kwargs) - - def write_points_with_precision(self, data, time_precision='s'): - """ - DEPRECATED. Write to multiple time series names - - """ - warnings.warn( - "write_points_with_precision is deprecated, and will be removed " - "in future versions. 
Please use " - "``DataFrameClient.write_points(time_precision='..')`` instead.", - FutureWarning) - return self.write_points(data, time_precision='s') + for key, data_frame in data.items(): + data = self._convert_dataframe_to_json( + key=key, dataframe=data_frame, + time_precision=time_precision) + super(DataFrameClient, self).write_points(data, **kwargs) + return True def query(self, query, time_precision='s', chunked=False, database=None): """ @@ -80,29 +84,38 @@ def query(self, query, time_precision='s', chunked=False, database=None): """ results = super(DataFrameClient, self).query(query, database=database) if len(results) > 0: - return self._to_dataframe(results, time_precision) + return self._to_dataframe(results.raw, time_precision) else: - return results + return {} + + def get_list_series(self, database=None): + """ + Get the list of series, in DataFrame + + """ + results = super(DataFrameClient, self)\ + .query("SHOW SERIES", database=database) + return dict( + (s['name'], pd.DataFrame(s['values'], columns=s['columns'])) for + s in results.raw['results'][0]['series'] + ) def _to_dataframe(self, json_result, time_precision): - dataframe = pd.DataFrame(data=json_result['points'], - columns=json_result['columns']) - if 'sequence_number' in dataframe: - dataframe.sort(['time', 'sequence_number'], inplace=True) - else: - dataframe.sort(['time'], inplace=True) - pandas_time_unit = time_precision - if time_precision == 'm': - pandas_time_unit = 'ms' - elif time_precision == 'u': - pandas_time_unit = 'us' - dataframe.index = pd.to_datetime(list(dataframe['time']), - unit=pandas_time_unit, - utc=True) - del dataframe['time'] - return dataframe - - def _convert_dataframe_to_json(self, dataframe, name, time_precision='s'): + + result = {} + series = json_result['results'][0]['series'] + for s in series: + tags = s.get('tags') + key = (s['name'], tuple(tags.items()) if tags else None) + df = pd.DataFrame(s['values'], columns=s['columns']) + df.time = pd.to_datetime( 
+ df.time, unit=_pandas_time_unit(time_precision), utc=True) + df.set_index(['time'], inplace=True) + result[key] = df + return result + + def _convert_dataframe_to_json(self, key, dataframe, time_precision='s'): + if not isinstance(dataframe, pd.DataFrame): raise TypeError('Must be DataFrame, but type was: {}.' .format(type(dataframe))) @@ -110,21 +123,31 @@ def _convert_dataframe_to_json(self, dataframe, name, time_precision='s'): isinstance(dataframe.index, pd.tseries.index.DatetimeIndex)): raise TypeError('Must be DataFrame with DatetimeIndex or \ PeriodIndex.') + dataframe.index = dataframe.index.to_datetime() if dataframe.index.tzinfo is None: dataframe.index = dataframe.index.tz_localize('UTC') - dataframe['time'] = [self._datetime_to_epoch(dt, time_precision) - for dt in dataframe.index] - data = {'name': name, - 'columns': [str(column) for column in dataframe.columns], - 'points': list([list(x) for x in dataframe.values])} - return data + + # Convert column to strings + dataframe.columns = dataframe.columns.astype('str') + + name, tags = key + points = [ + {'name': name, + 'tags': dict(tags) if tags else {}, + 'fields': rec, + 'timestamp': ts.isoformat() + } + for ts, rec in zip(dataframe.index, dataframe.to_dict('record'))] + return points def _datetime_to_epoch(self, datetime, time_precision='s'): seconds = (datetime - self.EPOCH).total_seconds() if time_precision == 's': return seconds - elif time_precision == 'm' or time_precision == 'ms': - return seconds * 1000 + elif time_precision == 'ms': + return seconds * 10 ** 3 elif time_precision == 'u': - return seconds * 1000000 + return seconds * 10 ** 6 + elif time_precision == 'n': + return seconds * 10 ** 9 diff --git a/tests/influxdb/dataframe_client_test.py b/tests/influxdb/dataframe_client_test.py index fd4af039..0d354286 100644 --- a/tests/influxdb/dataframe_client_test.py +++ b/tests/influxdb/dataframe_client_test.py @@ -10,7 +10,6 @@ from nose.tools import raises from datetime import timedelta 
from tests import skipIfPYpy, using_pypy -import copy import warnings if not using_pypy: @@ -19,7 +18,6 @@ from influxdb import DataFrameClient -@unittest.skip('Not updated for 0.9') @skipIfPYpy class TestDataFrameClient(unittest.TestCase): @@ -33,25 +31,33 @@ def test_write_points_from_dataframe(self): index=[now, now + timedelta(hours=1)], columns=["column_one", "column_two", "column_three"]) - points = [ - { - "points": [ - ["1", 1, 1.0, 0], - ["2", 2, 2.0, 3600] - ], - "name": "foo", - "columns": ["column_one", "column_two", "column_three", "time"] - } - ] + expected = { + u'database': u'db', + u'points': [ + {u'timestamp': u'1970-01-01T00:00:00+00:00', + u'fields': { + u'column_two': 1, + u'column_three': 1.0, + u'column_one': u'1'}, + u'tags': {}, + u'name': u'foo'}, + {u'timestamp': u'1970-01-01T01:00:00+00:00', + u'fields': { + u'column_two': 2, + u'column_three': 2.0, + u'column_one': u'2'}, + u'tags': {}, + u'name': u'foo'}] + } with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, - "http://localhost:8086/db/db/series") + "http://localhost:8086/write") cli = DataFrameClient(database='db') - cli.write_points({"foo": dataframe}) + cli.write_points({("foo", None): dataframe}) - self.assertListEqual(json.loads(m.last_request.body), points) + self.assertEqual(json.loads(m.last_request.body), expected) def test_write_points_from_dataframe_in_batches(self): now = pd.Timestamp('1970-01-01 00:00+00:00') @@ -61,10 +67,10 @@ def test_write_points_from_dataframe_in_batches(self): "column_three"]) with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, - "http://localhost:8086/db/db/series") + "http://localhost:8086/write") cli = DataFrameClient(database='db') - assert cli.write_points({"foo": dataframe}, + assert cli.write_points({("foo", None): dataframe}, batch_size=1) is True def test_write_points_from_dataframe_with_numeric_column_names(self): @@ -72,25 +78,34 @@ def test_write_points_from_dataframe_with_numeric_column_names(self): # 
df with numeric column names dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], index=[now, now + timedelta(hours=1)]) - points = [ - { - "points": [ - ["1", 1, 1.0, 0], - ["2", 2, 2.0, 3600] - ], - "name": "foo", - "columns": ['0', '1', '2', "time"] - } - ] + + expected = { + u'database': u'db', + u'points': [ + {u'fields': { + u'0': u'1', + u'1': 1, + u'2': 1.0}, + u'tags': {u'hello': u'there'}, + u'timestamp': u'1970-01-01T00:00:00+00:00', + u'name': u'foo'}, + {u'fields': { + u'0': u'2', + u'1': 2, + u'2': 2.0}, + u'tags': {u'hello': u'there'}, + u'timestamp': u'1970-01-01T01:00:00+00:00', + u'name': u'foo'}], + } with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, - "http://localhost:8086/db/db/series") + "http://localhost:8086/write") cli = DataFrameClient(database='db') - cli.write_points({"foo": dataframe}) + cli.write_points({("foo", (('hello', 'there'),)): dataframe}) - self.assertListEqual(json.loads(m.last_request.body), points) + self.assertEqual(json.loads(m.last_request.body), expected) def test_write_points_from_dataframe_with_period_index(self): dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], @@ -98,25 +113,33 @@ def test_write_points_from_dataframe_with_period_index(self): pd.Period('1970-01-02')], columns=["column_one", "column_two", "column_three"]) - points = [ - { - "points": [ - ["1", 1, 1.0, 0], - ["2", 2, 2.0, 86400] - ], - "name": "foo", - "columns": ["column_one", "column_two", "column_three", "time"] - } - ] + expected = { + u'points': [ + {u'name': u'foo', + u'tags': {}, + u'fields': { + u'column_one': u'1', + u'column_two': 1, + u'column_three': 1.0}, + u'timestamp': u'1970-01-01T00:00:00+00:00'}, + {u'name': u'foo', + u'tags': {}, + u'fields': { + u'column_one': u'2', + u'column_two': 2, + u'column_three': 2.0}, + u'timestamp': u'1970-01-02T00:00:00+00:00'}], + u'database': u'db', + } with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, - "http://localhost:8086/db/db/series") + 
"http://localhost:8086/write") cli = DataFrameClient(database='db') - cli.write_points({"foo": dataframe}) + cli.write_points({("foo", None): dataframe}) - self.assertListEqual(json.loads(m.last_request.body), points) + self.assertEqual(json.loads(m.last_request.body), expected) def test_write_points_from_dataframe_with_time_precision(self): now = pd.Timestamp('1970-01-01 00:00+00:00') @@ -124,37 +147,41 @@ def test_write_points_from_dataframe_with_time_precision(self): index=[now, now + timedelta(hours=1)], columns=["column_one", "column_two", "column_three"]) - points = [ - { - "points": [ - ["1", 1, 1.0, 0], - ["2", 2, 2.0, 3600] - ], - "name": "foo", - "columns": ["column_one", "column_two", "column_three", "time"] - } - ] - - points_ms = copy.deepcopy(points) - points_ms[0]["points"][1][-1] = 3600 * 1000 - - points_us = copy.deepcopy(points) - points_us[0]["points"][1][-1] = 3600 * 1000000 with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, - "http://localhost:8086/db/db/series") + "http://localhost:8086/write") + + points = { + u'database': u'db', + u'points': [ + {u'timestamp': u'1970-01-01T00:00:00+00:00', + u'fields': { + u'column_one': u'1', + u'column_three': 1.0, + u'column_two': 1}, + u'tags': {}, + u'name': u'foo'}, + {u'timestamp': u'1970-01-01T01:00:00+00:00', + u'fields': { + u'column_one': u'2', + u'column_three': 2.0, + u'column_two': 2}, + u'tags': {}, + u'name': u'foo'}] + } cli = DataFrameClient(database='db') + key = ("foo", None) - cli.write_points({"foo": dataframe}, time_precision='s') - self.assertListEqual(json.loads(m.last_request.body), points) + cli.write_points({key: dataframe}, time_precision='s') + self.assertEqual(json.loads(m.last_request.body), points) - cli.write_points({"foo": dataframe}, time_precision='m') - self.assertListEqual(json.loads(m.last_request.body), points_ms) + cli.write_points({key: dataframe}, time_precision='m') + self.assertEqual(json.loads(m.last_request.body), points) - 
cli.write_points({"foo": dataframe}, time_precision='u') - self.assertListEqual(json.loads(m.last_request.body), points_us) + cli.write_points({key: dataframe}, time_precision='u') + self.assertEqual(json.loads(m.last_request.body), points) @raises(TypeError) def test_write_points_from_dataframe_fails_without_time_index(self): @@ -183,56 +210,94 @@ def test_write_points_from_dataframe_fails_with_series(self): cli.write_points({"foo": dataframe}) def test_query_into_dataframe(self): - data = [ - { - "name": "foo", - "columns": ["time", "sequence_number", "column_one"], - "points": [ - [3600, 16, 2], [3600, 15, 1], - [0, 14, 2], [0, 13, 1] + data = { + "results": [{ + "series": [ + {"name": "network", + "tags": {"direction": ""}, + "columns": ["time", "value"], + "values":[["2009-11-10T23:00:00Z", 23422]] + }, + {"name": "network", + "tags": {"direction": "in"}, + "columns": ["time", "value"], + "values": [["2009-11-10T23:00:00Z", 23422], + ["2009-11-10T23:00:00Z", 23422], + ["2009-11-10T23:00:00Z", 23422]] + } ] - } - ] - # dataframe sorted ascending by time first, then sequence_number - dataframe = pd.DataFrame(data=[[13, 1], [14, 2], [15, 1], [16, 2]], - index=pd.to_datetime([0, 0, - 3600, 3600], - unit='s', utc=True), - columns=['sequence_number', 'column_one']) - with _mocked_session('get', 200, data): - cli = DataFrameClient('host', 8086, 'username', 'password', 'db') - result = cli.query('select column_one from foo;') - assert_frame_equal(dataframe, result) + }] + } + + pd1 = pd.DataFrame( + [[23422]], columns=['value'], + index=pd.to_datetime(["2009-11-10T23:00:00Z"])) + pd1.index.name = 'time' + pd2 = pd.DataFrame( + [[23422], [23422], [23422]], columns=['value'], + index=pd.to_datetime(["2009-11-10T23:00:00Z", + "2009-11-10T23:00:00Z", + "2009-11-10T23:00:00Z"])) + pd2.index.name = 'time' + expected = { + ('network', (('direction', ''),)): pd1, + ('network', (('direction', 'in'),)): pd2 + } + + cli = DataFrameClient('host', 8086, 'username', 'password', 
'db') + with _mocked_session(cli, 'GET', 200, data): + result = cli.query('select value from network group by direction;') + for k in expected: + assert_frame_equal(expected[k], result[k]) def test_query_with_empty_result(self): - with _mocked_session('get', 200, []): - cli = DataFrameClient('host', 8086, 'username', 'password', 'db') + cli = DataFrameClient('host', 8086, 'username', 'password', 'db') + with _mocked_session(cli, 'GET', 200, {"results": [{}]}): result = cli.query('select column_one from foo;') - assert result == [] + assert result == {} def test_list_series(self): response = { - 'results': [ - { - 'series': [{ - 'columns': ['id'], - 'name': 'seriesA', - 'values': [[0]], - }] - }, - { - 'series': [{ - 'columns': ['id'], - 'name': 'seriesB', - 'values': [[1]], - }] - }, + u'results': [ + {u'series': [ + { + u'columns': [u'host'], + u'name': u'cpu', + u'values': [ + [u'server01']] + }, + { + u'columns': [ + u'host', + u'region' + ], + u'name': u'network', + u'values': [ + [ + u'server01', + u'us-west' + ], + [ + u'server01', + u'us-east' + ] + ] + } + ]} ] } - with _mocked_session('get', 200, response): - cli = DataFrameClient('host', 8086, 'username', 'password', 'db') - series_list = cli.get_list_series() - assert series_list == ['seriesA', 'seriesB'] + + expected = { + 'cpu': pd.DataFrame([['server01']], columns=['host']), + 'network': pd.DataFrame( + [['server01', 'us-west'], ['server01', 'us-east']], + columns=['host', 'region'])} + + cli = DataFrameClient('host', 8086, 'username', 'password', 'db') + with _mocked_session(cli, 'GET', 200, response): + series = cli.get_list_series() + assert_frame_equal(series['cpu'], expected['cpu']) + assert_frame_equal(series['network'], expected['network']) def test_datetime_to_epoch(self): timestamp = pd.Timestamp('2013-01-01 00:00:00.000+00:00') @@ -246,10 +311,6 @@ def test_datetime_to_epoch(self): cli._datetime_to_epoch(timestamp, time_precision='s'), 1356998400.0 ) - self.assertEqual( - 
cli._datetime_to_epoch(timestamp, time_precision='m'), - 1356998400000.0 - ) self.assertEqual( cli._datetime_to_epoch(timestamp, time_precision='ms'), 1356998400000.0 @@ -258,3 +319,7 @@ def test_datetime_to_epoch(self): cli._datetime_to_epoch(timestamp, time_precision='u'), 1356998400000000.0 ) + self.assertEqual( + cli._datetime_to_epoch(timestamp, time_precision='n'), + 1356998400000000000.0 + ) From 58c3770ebb31dfbf146028b16ea557af24443e9b Mon Sep 17 00:00:00 2001 From: "tzonghao.chen" Date: Thu, 23 Apr 2015 13:37:58 -0400 Subject: [PATCH 142/536] Replace unicode with plain string syntax. --- tests/influxdb/dataframe_client_test.py | 158 ++++++++++++------------ 1 file changed, 79 insertions(+), 79 deletions(-) diff --git a/tests/influxdb/dataframe_client_test.py b/tests/influxdb/dataframe_client_test.py index 0d354286..c9a92801 100644 --- a/tests/influxdb/dataframe_client_test.py +++ b/tests/influxdb/dataframe_client_test.py @@ -32,22 +32,22 @@ def test_write_points_from_dataframe(self): columns=["column_one", "column_two", "column_three"]) expected = { - u'database': u'db', - u'points': [ - {u'timestamp': u'1970-01-01T00:00:00+00:00', - u'fields': { - u'column_two': 1, - u'column_three': 1.0, - u'column_one': u'1'}, - u'tags': {}, - u'name': u'foo'}, - {u'timestamp': u'1970-01-01T01:00:00+00:00', - u'fields': { - u'column_two': 2, - u'column_three': 2.0, - u'column_one': u'2'}, - u'tags': {}, - u'name': u'foo'}] + 'database': 'db', + 'points': [ + {'timestamp': '1970-01-01T00:00:00+00:00', + 'fields': { + 'column_two': 1, + 'column_three': 1.0, + 'column_one': '1'}, + 'tags': {}, + 'name': 'foo'}, + {'timestamp': '1970-01-01T01:00:00+00:00', + 'fields': { + 'column_two': 2, + 'column_three': 2.0, + 'column_one': '2'}, + 'tags': {}, + 'name': 'foo'}] } with requests_mock.Mocker() as m: @@ -80,22 +80,22 @@ def test_write_points_from_dataframe_with_numeric_column_names(self): index=[now, now + timedelta(hours=1)]) expected = { - u'database': u'db', - u'points': 
[ - {u'fields': { - u'0': u'1', - u'1': 1, - u'2': 1.0}, - u'tags': {u'hello': u'there'}, - u'timestamp': u'1970-01-01T00:00:00+00:00', - u'name': u'foo'}, - {u'fields': { - u'0': u'2', - u'1': 2, - u'2': 2.0}, - u'tags': {u'hello': u'there'}, - u'timestamp': u'1970-01-01T01:00:00+00:00', - u'name': u'foo'}], + 'database': 'db', + 'points': [ + {'fields': { + '0': '1', + '1': 1, + '2': 1.0}, + 'tags': {'hello': 'there'}, + 'timestamp': '1970-01-01T00:00:00+00:00', + 'name': 'foo'}, + {'fields': { + '0': '2', + '1': 2, + '2': 2.0}, + 'tags': {'hello': 'there'}, + 'timestamp': '1970-01-01T01:00:00+00:00', + 'name': 'foo'}], } with requests_mock.Mocker() as m: @@ -114,22 +114,22 @@ def test_write_points_from_dataframe_with_period_index(self): columns=["column_one", "column_two", "column_three"]) expected = { - u'points': [ - {u'name': u'foo', - u'tags': {}, - u'fields': { - u'column_one': u'1', - u'column_two': 1, - u'column_three': 1.0}, - u'timestamp': u'1970-01-01T00:00:00+00:00'}, - {u'name': u'foo', - u'tags': {}, - u'fields': { - u'column_one': u'2', - u'column_two': 2, - u'column_three': 2.0}, - u'timestamp': u'1970-01-02T00:00:00+00:00'}], - u'database': u'db', + 'points': [ + {'name': 'foo', + 'tags': {}, + 'fields': { + 'column_one': '1', + 'column_two': 1, + 'column_three': 1.0}, + 'timestamp': '1970-01-01T00:00:00+00:00'}, + {'name': 'foo', + 'tags': {}, + 'fields': { + 'column_one': '2', + 'column_two': 2, + 'column_three': 2.0}, + 'timestamp': '1970-01-02T00:00:00+00:00'}], + 'database': 'db', } with requests_mock.Mocker() as m: @@ -153,22 +153,22 @@ def test_write_points_from_dataframe_with_time_precision(self): "http://localhost:8086/write") points = { - u'database': u'db', - u'points': [ - {u'timestamp': u'1970-01-01T00:00:00+00:00', - u'fields': { - u'column_one': u'1', - u'column_three': 1.0, - u'column_two': 1}, - u'tags': {}, - u'name': u'foo'}, - {u'timestamp': u'1970-01-01T01:00:00+00:00', - u'fields': { - u'column_one': u'2', - u'column_three': 
2.0, - u'column_two': 2}, - u'tags': {}, - u'name': u'foo'}] + 'database': 'db', + 'points': [ + {'timestamp': '1970-01-01T00:00:00+00:00', + 'fields': { + 'column_one': '1', + 'column_three': 1.0, + 'column_two': 1}, + 'tags': {}, + 'name': 'foo'}, + {'timestamp': '1970-01-01T01:00:00+00:00', + 'fields': { + 'column_one': '2', + 'column_three': 2.0, + 'column_two': 2}, + 'tags': {}, + 'name': 'foo'}] } cli = DataFrameClient(database='db') @@ -258,28 +258,28 @@ def test_query_with_empty_result(self): def test_list_series(self): response = { - u'results': [ - {u'series': [ + 'results': [ + {'series': [ { - u'columns': [u'host'], - u'name': u'cpu', - u'values': [ - [u'server01']] + 'columns': ['host'], + 'name': 'cpu', + 'values': [ + ['server01']] }, { - u'columns': [ - u'host', - u'region' + 'columns': [ + 'host', + 'region' ], - u'name': u'network', - u'values': [ + 'name': 'network', + 'values': [ [ - u'server01', - u'us-west' + 'server01', + 'us-west' ], [ - u'server01', - u'us-east' + 'server01', + 'us-east' ] ] } From 214be2956d54d33526310ce4208b35a5f3ecbf68 Mon Sep 17 00:00:00 2001 From: "tzonghao.chen" Date: Thu, 23 Apr 2015 15:37:20 -0400 Subject: [PATCH 143/536] Fix timezone, time_precision in DataFrameClient --- influxdb/_dataframe_client.py | 48 ++++++++++++------------- tests/influxdb/dataframe_client_test.py | 7 ++-- 2 files changed, 27 insertions(+), 28 deletions(-) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index 93d51fbe..bb21568d 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -30,12 +30,8 @@ class DataFrameClient(InfluxDBClient): EPOCH = pd.Timestamp('1970-01-01 00:00:00.000+00:00') - def write_points(self, - data, - time_precision=None, - database=None, - retention_policy=None, - tags=None, **kwargs): + def write_points(self, data, time_precision=None, database=None, + retention_policy=None, tags=None, batch_size=None): """ Write to multiple time series names. 
@@ -48,10 +44,7 @@ def write_points(self, :type batch_size: int """ - batch_size = kwargs.get('batch_size') - time_precision = kwargs.get('time_precision', 's') if batch_size: - kwargs.pop('batch_size') # don't hand over to InfluxDBClient for key, data_frame in data.items(): number_batches = int(math.ceil( len(data_frame) / float(batch_size))) @@ -61,30 +54,30 @@ def write_points(self, data = self._convert_dataframe_to_json( key=key, dataframe=data_frame.ix[start_index:end_index].copy(), - time_precision=time_precision) - super(DataFrameClient, self).write_points(data, **kwargs) + ) + super(DataFrameClient, self).write_points( + data, time_precision, database, retention_policy, tags) return True else: for key, data_frame in data.items(): data = self._convert_dataframe_to_json( key=key, dataframe=data_frame, - time_precision=time_precision) - super(DataFrameClient, self).write_points(data, **kwargs) + ) + super(DataFrameClient, self).write_points( + data, time_precision, database, retention_policy, tags) return True - def query(self, query, time_precision='s', chunked=False, database=None): + def query(self, query, chunked=False, database=None): """ Quering data into a DataFrame. - :param time_precision: [Optional, default 's'] Either 's', 'm', 'ms' - or 'u'. :param chunked: [Optional, default=False] True if the data shall be retrieved in chunks, False otherwise. 
""" results = super(DataFrameClient, self).query(query, database=database) if len(results) > 0: - return self._to_dataframe(results.raw, time_precision) + return self._to_dataframe(results.raw) else: return {} @@ -95,26 +88,29 @@ def get_list_series(self, database=None): """ results = super(DataFrameClient, self)\ .query("SHOW SERIES", database=database) - return dict( - (s['name'], pd.DataFrame(s['values'], columns=s['columns'])) for - s in results.raw['results'][0]['series'] - ) - - def _to_dataframe(self, json_result, time_precision): + if len(results): + return dict( + (s['name'], pd.DataFrame(s['values'], columns=s['columns'])) for + s in results.raw['results'][0]['series'] + ) + else: + return {} + def _to_dataframe(self, json_result): result = {} series = json_result['results'][0]['series'] for s in series: tags = s.get('tags') key = (s['name'], tuple(tags.items()) if tags else None) df = pd.DataFrame(s['values'], columns=s['columns']) - df.time = pd.to_datetime( - df.time, unit=_pandas_time_unit(time_precision), utc=True) + df.time = pd.to_datetime(df.time) df.set_index(['time'], inplace=True) + df.index = df.index.tz_localize('UTC') + df.index.name = None result[key] = df return result - def _convert_dataframe_to_json(self, key, dataframe, time_precision='s'): + def _convert_dataframe_to_json(self, key, dataframe): if not isinstance(dataframe, pd.DataFrame): raise TypeError('Must be DataFrame, but type was: {}.' 
diff --git a/tests/influxdb/dataframe_client_test.py b/tests/influxdb/dataframe_client_test.py index c9a92801..63f7bb06 100644 --- a/tests/influxdb/dataframe_client_test.py +++ b/tests/influxdb/dataframe_client_test.py @@ -175,12 +175,15 @@ def test_write_points_from_dataframe_with_time_precision(self): key = ("foo", None) cli.write_points({key: dataframe}, time_precision='s') + points.update(precision='s') self.assertEqual(json.loads(m.last_request.body), points) cli.write_points({key: dataframe}, time_precision='m') + points.update(precision='m') self.assertEqual(json.loads(m.last_request.body), points) cli.write_points({key: dataframe}, time_precision='u') + points.update(precision='u') self.assertEqual(json.loads(m.last_request.body), points) @raises(TypeError) @@ -232,13 +235,13 @@ def test_query_into_dataframe(self): pd1 = pd.DataFrame( [[23422]], columns=['value'], index=pd.to_datetime(["2009-11-10T23:00:00Z"])) - pd1.index.name = 'time' + pd1.index = pd1.index.tz_localize('UTC') pd2 = pd.DataFrame( [[23422], [23422], [23422]], columns=['value'], index=pd.to_datetime(["2009-11-10T23:00:00Z", "2009-11-10T23:00:00Z", "2009-11-10T23:00:00Z"])) - pd2.index.name = 'time' + pd2.index = pd2.index.tz_localize('UTC') expected = { ('network', (('direction', ''),)): pd1, ('network', (('direction', 'in'),)): pd2 From 34d56885fa3ddf3d259c10b94227ae5071697366 Mon Sep 17 00:00:00 2001 From: "tzonghao.chen" Date: Thu, 23 Apr 2015 15:38:07 -0400 Subject: [PATCH 144/536] Add DataFrameClient server tests. 
--- tests/influxdb/client_test_with_server.py | 81 ++++++++++++++++++++++- 1 file changed, 78 insertions(+), 3 deletions(-) diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index 2d1ff44c..f2956374 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -28,7 +28,10 @@ # By default, raise exceptions on warnings warnings.simplefilter('error', FutureWarning) -from influxdb import InfluxDBClient +import pandas as pd +from pandas.util.testing import assert_frame_equal + +from influxdb import InfluxDBClient, DataFrameClient from influxdb.client import InfluxDBClientError from tests.influxdb.misc import get_free_port, is_port_open @@ -94,6 +97,13 @@ def point(serie_name, timestamp=None, tags=None, **fields): } ] +dummy_pointDF = { + ("cpu_load_short", (("host", "server01"), ("region", "us-west"))): + pd.DataFrame( + [[0.64]], columns=['value'], + index=pd.to_datetime(["2009-11-10T23:00:00Z"])) +} + dummy_points = [ # some dummy points dummy_point[0], { @@ -109,6 +119,19 @@ def point(serie_name, timestamp=None, tags=None, **fields): } ] +dummy_pointsDF = { + ("cpu_load_short", (("host", "server01"), ("region", "us-west"))): + pd.DataFrame( + [[0.64]], columns=['value'], + index=pd.to_datetime(["2009-11-10T23:00:00Z"])), + ("memory", (("host", "server01"), ("region", "us-west"))): + pd.DataFrame( + [[33]], columns=['value'], + index=pd.to_datetime(["2009-11-10T23:01:35Z"]) + ) +} + + dummy_point_without_timestamp = [ { "name": "cpu_load_short", @@ -233,6 +256,9 @@ def _setup_influxdb_server(inst): inst.cli = InfluxDBClient('localhost', inst.influxd_inst.webui_port, 'root', '', database='db') + inst.cliDF = DataFrameClient('localhost', + inst.influxd_inst.webui_port, + 'root', '', database='db') def _unsetup_influxdb_server(inst): @@ -413,6 +439,10 @@ def test_write_points(self): """ same as test_write() but with write_points \o/ """ self.assertIs(True, 
self.cli.write_points(dummy_point)) + def test_write_points_DF(self): + """ same as test_write() but with write_points \o/ """ + self.assertIs(True, self.cliDF.write_points(dummy_pointDF)) + def test_write_points_check_read(self): """ same as test_write_check_read() but with write_points \o/ """ self.test_write_points() @@ -433,6 +463,17 @@ def test_write_points_check_read(self): {'time': '2009-11-10T23:00:00Z', 'value': 0.64} ) + def test_write_points_check_read_DF(self): + """ same as test_write_check_read() but with write_points \o/ """ + self.test_write_points_DF() + time.sleep(1) # same as test_write_check_read() + rsp = self.cliDF.query('SELECT * FROM cpu_load_short') + + assert_frame_equal( + rsp[('cpu_load_short', None)], + dummy_pointDF.values()[0] + ) + def test_write_multiple_points_different_series(self): self.assertIs(True, self.cli.write_points(dummy_points)) time.sleep(1) @@ -451,6 +492,28 @@ def test_write_multiple_points_different_series(self): [[{'value': 33, 'time': '2009-11-10T23:01:35Z'}]] ) + def test_write_multiple_points_different_series_DF(self): + self.assertIs(True, self.cliDF.write_points(dummy_pointsDF)) + time.sleep(1) + rsp = self.cliDF.query('SELECT * FROM cpu_load_short') + + assert_frame_equal( + rsp[('cpu_load_short', None)], + dummy_pointsDF[ + ('cpu_load_short', (('host', 'server01'), + ('region', 'us-west'))) + ] + ) + + rsp = self.cliDF.query('SELECT * FROM memory') + assert_frame_equal( + rsp[('memory', None)], + dummy_pointsDF[ + ('memory', (('host', 'server01'), + ('region', 'us-west'))) + ] + ) + @unittest.skip('Not implemented for 0.9') def test_write_points_batch(self): self.cli.write_points( @@ -577,10 +640,13 @@ def test_get_list_series_empty(self): rsp = self.cli.get_list_series() self.assertEqual([], rsp) - def test_get_list_series_non_empty(self): + def test_get_list_series_empty_DF(self): + rsp = self.cliDF.get_list_series() + self.assertEqual({}, rsp) + + def test_get_list_series(self): 
self.cli.write_points(dummy_point) rsp = self.cli.get_list_series() - self.assertEqual( [ {'name': 'cpu_load_short', @@ -590,6 +656,15 @@ def test_get_list_series_non_empty(self): rsp ) + def test_get_list_series_DF(self): + self.cli.write_points(dummy_point) + rsp = self.cliDF.get_list_series() + + expected = pd.DataFrame( + [[1, 'server01', 'us-west']], + columns=['_id', 'host', 'region']) + assert_frame_equal(rsp['cpu_load_short'], expected) + def test_get_list_users_empty(self): rsp = self.cli.get_list_users() self.assertEqual([], rsp) From 6c5554c1da488b11fa1227c0c2a10ec0932d2590 Mon Sep 17 00:00:00 2001 From: "tzonghao.chen" Date: Thu, 23 Apr 2015 16:32:46 -0400 Subject: [PATCH 145/536] Fix Py34 compatibility. --- influxdb/_dataframe_client.py | 7 +++++-- tests/influxdb/client_test_with_server.py | 4 +++- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index bb21568d..65370198 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -90,8 +90,8 @@ def get_list_series(self, database=None): .query("SHOW SERIES", database=database) if len(results): return dict( - (s['name'], pd.DataFrame(s['values'], columns=s['columns'])) for - s in results.raw['results'][0]['series'] + (s['name'], pd.DataFrame(s['values'], columns=s['columns'])) + for s in results.raw['results'][0]['series'] ) else: return {} @@ -127,6 +127,9 @@ def _convert_dataframe_to_json(self, key, dataframe): # Convert column to strings dataframe.columns = dataframe.columns.astype('str') + # Convert dtype for json serialization + dataframe = dataframe.astype('object') + name, tags = key points = [ {'name': name, diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index f2956374..86d609c8 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -471,7 +471,9 @@ def test_write_points_check_read_DF(self): 
assert_frame_equal( rsp[('cpu_load_short', None)], - dummy_pointDF.values()[0] + dummy_pointDF[ + ('cpu_load_short', + (('host', 'server01'), ('region', 'us-west')))] ) def test_write_multiple_points_different_series(self): From 906de8fbdebef12a5329cba5180b20780c671451 Mon Sep 17 00:00:00 2001 From: Georgi Dimitrov Date: Thu, 23 Apr 2015 22:54:11 +0100 Subject: [PATCH 146/536] Fix chunk decoding in influxdb08 client. If the response contains unicode characters, they should be decoded properly. --- influxdb/influxdb08/client.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/influxdb/influxdb08/client.py b/influxdb/influxdb08/client.py index a4c73c17..8f88dc2d 100644 --- a/influxdb/influxdb08/client.py +++ b/influxdb/influxdb08/client.py @@ -448,7 +448,13 @@ def _query(self, query, time_precision='s', chunked=False): ) if chunked: - return list(chunked_json.loads(response.content.decode())) + decoded = {} + try: + decoded = chunked_json.loads(response.content.decode()) + except UnicodeDecodeError: + decoded = chunked_json.loads(response.content.decode('utf-8')) + finally: + return list(decoded) else: return response.json() From 6c832289b7b884eaa6125aad24b5d6e8ba9b2968 Mon Sep 17 00:00:00 2001 From: Georgi Dimitrov Date: Thu, 23 Apr 2015 22:55:12 +0100 Subject: [PATCH 147/536] Add a test for chunk decoding in influxdb08 client --- tests/influxdb/influxdb08/client_test.py | 29 ++++++++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/tests/influxdb/influxdb08/client_test.py b/tests/influxdb/influxdb08/client_test.py index 010de673..85382984 100644 --- a/tests/influxdb/influxdb08/client_test.py +++ b/tests/influxdb/influxdb08/client_test.py @@ -332,6 +332,35 @@ def test_query_chunked(self): [example_object, example_object] ) + def test_query_chunked_unicode(self): + cli = InfluxDBClient(database='db') + example_object = { + 'points': [ + [1415206212980, 10001, u'unicode-ω'], + [1415197271586, 10001, u'more-unicode-ノ'] + ], + 
'name': 'foo', + 'columns': [ + 'time', + 'sequence_number', + 'val' + ] + } + example_response = \ + json.dumps(example_object) + json.dumps(example_object) + + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.GET, + "http://localhost:8086/db/db/series", + text=example_response + ) + + self.assertListEqual( + cli.query('select * from foo', chunked=True), + [example_object, example_object] + ) + @raises(Exception) def test_query_fail(self): with _mocked_session('get', 401): From 67af8ad51aa6922ed86b5b402535ad76842ba938 Mon Sep 17 00:00:00 2001 From: "tzonghao.chen" Date: Thu, 23 Apr 2015 18:09:20 -0400 Subject: [PATCH 148/536] Skip DataFrameClient tests if PYPY. --- tests/influxdb/client_test_with_server.py | 59 +++++++++++++---------- 1 file changed, 34 insertions(+), 25 deletions(-) diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index 86d609c8..5c23f302 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -28,13 +28,16 @@ # By default, raise exceptions on warnings warnings.simplefilter('error', FutureWarning) -import pandas as pd -from pandas.util.testing import assert_frame_equal - -from influxdb import InfluxDBClient, DataFrameClient +from influxdb import InfluxDBClient from influxdb.client import InfluxDBClientError from tests.influxdb.misc import get_free_port, is_port_open +from tests import skipIfPYpy, using_pypy + +if not using_pypy: + import pandas as pd + from pandas.util.testing import assert_frame_equal + from influxdb import DataFrameClient THIS_DIR = os.path.abspath(os.path.dirname(__file__)) @@ -97,13 +100,6 @@ def point(serie_name, timestamp=None, tags=None, **fields): } ] -dummy_pointDF = { - ("cpu_load_short", (("host", "server01"), ("region", "us-west"))): - pd.DataFrame( - [[0.64]], columns=['value'], - index=pd.to_datetime(["2009-11-10T23:00:00Z"])) -} - dummy_points = [ # some dummy points dummy_point[0], { @@ -119,17 
+115,24 @@ def point(serie_name, timestamp=None, tags=None, **fields): } ] -dummy_pointsDF = { - ("cpu_load_short", (("host", "server01"), ("region", "us-west"))): - pd.DataFrame( - [[0.64]], columns=['value'], - index=pd.to_datetime(["2009-11-10T23:00:00Z"])), - ("memory", (("host", "server01"), ("region", "us-west"))): - pd.DataFrame( - [[33]], columns=['value'], - index=pd.to_datetime(["2009-11-10T23:01:35Z"]) - ) -} +if not using_pypy: + dummy_pointDF = { + ("cpu_load_short", (("host", "server01"), ("region", "us-west"))): + pd.DataFrame( + [[0.64]], columns=['value'], + index=pd.to_datetime(["2009-11-10T23:00:00Z"])) + } + dummy_pointsDF = { + ("cpu_load_short", (("host", "server01"), ("region", "us-west"))): + pd.DataFrame( + [[0.64]], columns=['value'], + index=pd.to_datetime(["2009-11-10T23:00:00Z"])), + ("memory", (("host", "server01"), ("region", "us-west"))): + pd.DataFrame( + [[33]], columns=['value'], + index=pd.to_datetime(["2009-11-10T23:01:35Z"]) + ) + } dummy_point_without_timestamp = [ @@ -256,9 +259,10 @@ def _setup_influxdb_server(inst): inst.cli = InfluxDBClient('localhost', inst.influxd_inst.webui_port, 'root', '', database='db') - inst.cliDF = DataFrameClient('localhost', - inst.influxd_inst.webui_port, - 'root', '', database='db') + if not using_pypy: + inst.cliDF = DataFrameClient('localhost', + inst.influxd_inst.webui_port, + 'root', '', database='db') def _unsetup_influxdb_server(inst): @@ -439,6 +443,7 @@ def test_write_points(self): """ same as test_write() but with write_points \o/ """ self.assertIs(True, self.cli.write_points(dummy_point)) + @skipIfPYpy def test_write_points_DF(self): """ same as test_write() but with write_points \o/ """ self.assertIs(True, self.cliDF.write_points(dummy_pointDF)) @@ -463,6 +468,7 @@ def test_write_points_check_read(self): {'time': '2009-11-10T23:00:00Z', 'value': 0.64} ) + @skipIfPYpy def test_write_points_check_read_DF(self): """ same as test_write_check_read() but with write_points \o/ """ 
self.test_write_points_DF() @@ -494,6 +500,7 @@ def test_write_multiple_points_different_series(self): [[{'value': 33, 'time': '2009-11-10T23:01:35Z'}]] ) + @skipIfPYpy def test_write_multiple_points_different_series_DF(self): self.assertIs(True, self.cliDF.write_points(dummy_pointsDF)) time.sleep(1) @@ -642,6 +649,7 @@ def test_get_list_series_empty(self): rsp = self.cli.get_list_series() self.assertEqual([], rsp) + @skipIfPYpy def test_get_list_series_empty_DF(self): rsp = self.cliDF.get_list_series() self.assertEqual({}, rsp) @@ -658,6 +666,7 @@ def test_get_list_series(self): rsp ) + @skipIfPYpy def test_get_list_series_DF(self): self.cli.write_points(dummy_point) rsp = self.cliDF.get_list_series() From f0690f039459a54fc2a112d94a04415c16b90c37 Mon Sep 17 00:00:00 2001 From: Georgi Dimitrov Date: Thu, 23 Apr 2015 23:26:52 +0100 Subject: [PATCH 149/536] Replace unicode with decoded literals. --- tests/influxdb/influxdb08/client_test.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/influxdb/influxdb08/client_test.py b/tests/influxdb/influxdb08/client_test.py index 85382984..455ce066 100644 --- a/tests/influxdb/influxdb08/client_test.py +++ b/tests/influxdb/influxdb08/client_test.py @@ -336,8 +336,8 @@ def test_query_chunked_unicode(self): cli = InfluxDBClient(database='db') example_object = { 'points': [ - [1415206212980, 10001, u'unicode-ω'], - [1415197271586, 10001, u'more-unicode-ノ'] + [1415206212980, 10001, 'unicode-\xcf\x89'.decode('utf-8')], + [1415197271586, 10001, 'more-unicode-\xcf\x90'.decode('utf-8')] ], 'name': 'foo', 'columns': [ From a69bc78bf9f3bcec1cb27a638b2d8d2b6fe355c8 Mon Sep 17 00:00:00 2001 From: Georgi Dimitrov Date: Fri, 24 Apr 2015 01:36:24 +0100 Subject: [PATCH 150/536] Fix unicode literals to work on all python versions --- tests/influxdb/influxdb08/client_test.py | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/tests/influxdb/influxdb08/client_test.py 
b/tests/influxdb/influxdb08/client_test.py index 455ce066..3439acd2 100644 --- a/tests/influxdb/influxdb08/client_test.py +++ b/tests/influxdb/influxdb08/client_test.py @@ -17,6 +17,16 @@ from influxdb.influxdb08 import InfluxDBClient from influxdb.influxdb08.client import session +import sys +if sys.version < '3': + import codecs + + def u(x): + return codecs.unicode_escape_decode(x)[0] +else: + def u(x): + return x + def _build_response_object(status_code=200, content=""): resp = requests.Response() @@ -336,8 +346,8 @@ def test_query_chunked_unicode(self): cli = InfluxDBClient(database='db') example_object = { 'points': [ - [1415206212980, 10001, 'unicode-\xcf\x89'.decode('utf-8')], - [1415197271586, 10001, 'more-unicode-\xcf\x90'.decode('utf-8')] + [1415206212980, 10001, u('unicode-\xcf\x89')], + [1415197271586, 10001, u('more-unicode-\xcf\x90')] ], 'name': 'foo', 'columns': [ From b58aeed864ef1fc0d47c1c0787958f3096cc6811 Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 24 Apr 2015 08:39:52 -0400 Subject: [PATCH 151/536] Released 2.1.0 --- influxdb/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/__init__.py b/influxdb/__init__.py index 1c319d2e..cff4c122 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -13,4 +13,4 @@ ] -__version__ = '2.0.2' +__version__ = '2.1.0' From 9d0b4f0c9fda967cc6d11f75ee305caa0719792f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Fri, 24 Apr 2015 13:19:27 -0400 Subject: [PATCH 152/536] clean: not anymore used/necessary. was preventing influxdb-python to be importable in python2.6 .. 
(because of OrderedDict) --- influxdb/client.py | 28 ++-------------------------- 1 file changed, 2 insertions(+), 26 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 99a6abe4..09a035fe 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -2,7 +2,7 @@ """ Python client for InfluxDB """ -from collections import OrderedDict + from functools import wraps import json import socket @@ -180,27 +180,6 @@ def from_DSN(dsn, **kwargs): return InfluxDBClient(**init_args) - # - # By default we keep the "order" of the json responses: - # more clearly: any dict contained in the json response will have - # its key-value items order kept as in the raw answer, thanks to - # `collections.OrderedDict`. - # if one doesn't care in that, then it can simply change its client - # instance 'keep_json_response_order' attribute value (to a falsy one). - # This will then eventually help for performance considerations. - _keep_json_response_order = False - # NB: For "group by" query type : - # This setting is actually necessary in order to have a consistent and - # reproducible rsp format if you "group by" on more than 1 tag. - - @property - def keep_json_response_order(self): - return self._keep_json_response_order - - @keep_json_response_order.setter - def keep_json_response_order(self, new_value): - self._keep_json_response_order = new_value - def switch_database(self, database): """ switch_database() @@ -309,10 +288,7 @@ def query(self, expected_response_code=expected_response_code ) - json_kw = {} - if self.keep_json_response_order: - json_kw.update(object_pairs_hook=OrderedDict) - data = response.json(**json_kw) + data = response.json() return ResultSet(data) From b60e8b70b33f88730228e2cbd095f9ec6914fc28 Mon Sep 17 00:00:00 2001 From: "tzonghao.chen" Date: Fri, 24 Apr 2015 13:26:13 -0400 Subject: [PATCH 153/536] DataFrameClient query without tags can use measurement name as keys. 
--- examples/tutorial_pandas.py | 3 +++ influxdb/_dataframe_client.py | 11 +++++++++-- tests/influxdb/client_test_with_server.py | 17 ++++++++++++++--- tests/influxdb/dataframe_client_test.py | 9 ++++++--- 4 files changed, 32 insertions(+), 8 deletions(-) diff --git a/examples/tutorial_pandas.py b/examples/tutorial_pandas.py index 6f483e55..f6a193cf 100644 --- a/examples/tutorial_pandas.py +++ b/examples/tutorial_pandas.py @@ -22,6 +22,9 @@ def main(host='localhost', port=8086): print("Write DataFrame") client.write_points({'demo': df}) + print("Write DataFrame with Tags") + client.write_points({('demo', (('k1', 'v1'), ('k2', 'v2'))): df}) + print("Read DataFrame") client.query("select * from demo") diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index 65370198..86703f60 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -101,7 +101,10 @@ def _to_dataframe(self, json_result): series = json_result['results'][0]['series'] for s in series: tags = s.get('tags') - key = (s['name'], tuple(tags.items()) if tags else None) + if tags is None: + key = s['name'] + else: + key = (s['name'], tuple(sorted(tags.items()))) df = pd.DataFrame(s['values'], columns=s['columns']) df.time = pd.to_datetime(df.time) df.set_index(['time'], inplace=True) @@ -130,7 +133,11 @@ def _convert_dataframe_to_json(self, key, dataframe): # Convert dtype for json serialization dataframe = dataframe.astype('object') - name, tags = key + if isinstance(key, str): + name = key + tags = None + else: + name, tags = key points = [ {'name': name, 'tags': dict(tags) if tags else {}, diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index 5c23f302..5d43d816 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -473,10 +473,21 @@ def test_write_points_check_read_DF(self): """ same as test_write_check_read() but with write_points \o/ """ 
self.test_write_points_DF() time.sleep(1) # same as test_write_check_read() + rsp = self.cliDF.query('SELECT * FROM cpu_load_short') + assert_frame_equal( + rsp['cpu_load_short'], + dummy_pointDF[ + ('cpu_load_short', + (('host', 'server01'), ('region', 'us-west')))] + ) + # Query with Tags + rsp = self.cliDF.query( + "SELECT * FROM cpu_load_short GROUP BY *") assert_frame_equal( - rsp[('cpu_load_short', None)], + rsp[('cpu_load_short', + (('host', 'server01'), ('region', 'us-west')))], dummy_pointDF[ ('cpu_load_short', (('host', 'server01'), ('region', 'us-west')))] @@ -507,7 +518,7 @@ def test_write_multiple_points_different_series_DF(self): rsp = self.cliDF.query('SELECT * FROM cpu_load_short') assert_frame_equal( - rsp[('cpu_load_short', None)], + rsp['cpu_load_short'], dummy_pointsDF[ ('cpu_load_short', (('host', 'server01'), ('region', 'us-west'))) @@ -516,7 +527,7 @@ def test_write_multiple_points_different_series_DF(self): rsp = self.cliDF.query('SELECT * FROM memory') assert_frame_equal( - rsp[('memory', None)], + rsp['memory'], dummy_pointsDF[ ('memory', (('host', 'server01'), ('region', 'us-west'))) diff --git a/tests/influxdb/dataframe_client_test.py b/tests/influxdb/dataframe_client_test.py index 63f7bb06..743f8bed 100644 --- a/tests/influxdb/dataframe_client_test.py +++ b/tests/influxdb/dataframe_client_test.py @@ -55,8 +55,11 @@ def test_write_points_from_dataframe(self): "http://localhost:8086/write") cli = DataFrameClient(database='db') - cli.write_points({("foo", None): dataframe}) + cli.write_points({"foo": dataframe}) + self.assertEqual(json.loads(m.last_request.body), expected) + + cli.write_points({("foo", None): dataframe}) self.assertEqual(json.loads(m.last_request.body), expected) def test_write_points_from_dataframe_in_batches(self): @@ -137,7 +140,7 @@ def test_write_points_from_dataframe_with_period_index(self): "http://localhost:8086/write") cli = DataFrameClient(database='db') - cli.write_points({("foo", None): dataframe}) + 
cli.write_points({"foo": dataframe}) self.assertEqual(json.loads(m.last_request.body), expected) @@ -172,7 +175,7 @@ def test_write_points_from_dataframe_with_time_precision(self): } cli = DataFrameClient(database='db') - key = ("foo", None) + key = "foo" cli.write_points({key: dataframe}, time_precision='s') points.update(precision='s') From 56560369c1311a5cbb65d5284042a2e103819cb6 Mon Sep 17 00:00:00 2001 From: "tzonghao.chen" Date: Fri, 24 Apr 2015 17:38:28 -0400 Subject: [PATCH 154/536] DataFrameClient.query only specialize "SELECT" queries. --- influxdb/_dataframe_client.py | 9 ++++++--- tests/influxdb/dataframe_client_test.py | 17 +++++++++++++++++ 2 files changed, 23 insertions(+), 3 deletions(-) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index 86703f60..122d9934 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -76,10 +76,13 @@ def query(self, query, chunked=False, database=None): """ results = super(DataFrameClient, self).query(query, database=database) - if len(results) > 0: - return self._to_dataframe(results.raw) + if query.upper().startswith("SELECT"): + if len(results) > 0: + return self._to_dataframe(results.raw) + else: + return {} else: - return {} + return results def get_list_series(self, database=None): """ diff --git a/tests/influxdb/dataframe_client_test.py b/tests/influxdb/dataframe_client_test.py index 743f8bed..830458b2 100644 --- a/tests/influxdb/dataframe_client_test.py +++ b/tests/influxdb/dataframe_client_test.py @@ -305,6 +305,23 @@ def test_list_series(self): assert_frame_equal(series['cpu'], expected['cpu']) assert_frame_equal(series['network'], expected['network']) + def test_get_list_database(self): + data = {'results': [ + {'series': [ + {'name': 'databases', + 'values': [ + ['new_db_1'], + ['new_db_2']], + 'columns': ['name']}]} + ]} + + cli = DataFrameClient('host', 8086, 'username', 'password', 'db') + with _mocked_session(cli, 'get', 200, json.dumps(data)): + 
self.assertListEqual( + cli.get_list_database(), + [{'name': 'new_db_1'}, {'name': 'new_db_2'}] + ) + def test_datetime_to_epoch(self): timestamp = pd.Timestamp('2013-01-01 00:00:00.000+00:00') cli = DataFrameClient('host', 8086, 'username', 'password', 'db') From a74224d50969eab1e5ffb6959663682d9858eb30 Mon Sep 17 00:00:00 2001 From: Georgi Dimitrov Date: Sat, 25 Apr 2015 01:53:02 +0100 Subject: [PATCH 155/536] Fix batch writing for influxdb08 client. This fixes issue #102 --- influxdb/influxdb08/client.py | 12 +++---- tests/influxdb/influxdb08/client_test.py | 46 ++++++++++++++++++++---- 2 files changed, 46 insertions(+), 12 deletions(-) diff --git a/influxdb/influxdb08/client.py b/influxdb/influxdb08/client.py index 65b7caac..96ff6102 100644 --- a/influxdb/influxdb08/client.py +++ b/influxdb/influxdb08/client.py @@ -291,11 +291,11 @@ def list_chunks(l, n): yield l[i:i + n] batch_size = kwargs.get('batch_size') - if batch_size: + if batch_size and batch_size > 0: for item in data: name = item.get('name') columns = item.get('columns') - point_list = item.get('points') + point_list = item.get('points', []) for batch in list_chunks(point_list, batch_size): item = [{ @@ -306,10 +306,10 @@ def list_chunks(l, n): self._write_points( data=item, time_precision=time_precision) - - return True - - return self._write_points(data=data, time_precision=time_precision) + return True + else: + return self._write_points(data=data, + time_precision=time_precision) def write_points_with_precision(self, data, time_precision='s'): """ diff --git a/tests/influxdb/influxdb08/client_test.py b/tests/influxdb/influxdb08/client_test.py index 010de673..587a896b 100644 --- a/tests/influxdb/influxdb08/client_test.py +++ b/tests/influxdb/influxdb08/client_test.py @@ -186,12 +186,46 @@ def test_write_points_string(self): ) def test_write_points_batch(self): - with _mocked_session('post', 200, self.dummy_points): - cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') - assert 
cli.write_points( - data=self.dummy_points, - batch_size=2 - ) is True + with requests_mock.Mocker() as m: + m.register_uri(requests_mock.POST, + "http://localhost:8086/db/db/series") + cli = InfluxDBClient('localhost', 8086, + 'username', 'password', 'db') + cli.write_points(data=self.dummy_points, batch_size=2) + self.assertEqual(1, m.call_count) + + def test_write_points_batch_invalid_size(self): + with requests_mock.Mocker() as m: + m.register_uri(requests_mock.POST, + "http://localhost:8086/db/db/series") + cli = InfluxDBClient('localhost', 8086, + 'username', 'password', 'db') + cli.write_points(data=self.dummy_points, batch_size=-2) + self.assertEqual(1, m.call_count) + + def test_write_points_batch_multiple_series(self): + dummy_points = [ + {"points": [["1", 1, 1.0], ["2", 2, 2.0], ["3", 3, 3.0], + ["4", 4, 4.0], ["5", 5, 5.0]], + "name": "foo", + "columns": ["val1", "val2", "val3"]}, + {"points": [["1", 1, 1.0], ["2", 2, 2.0], ["3", 3, 3.0], + ["4", 4, 4.0], ["5", 5, 5.0], ["6", 6, 6.0], + ["7", 7, 7.0], ["8", 8, 8.0]], + "name": "bar", + "columns": ["val1", "val2", "val3"]}, + ] + expected_last_body = [{'points': [['7', 7, 7.0], ['8', 8, 8.0]], + 'name': 'bar', + 'columns': ['val1', 'val2', 'val3']}] + with requests_mock.Mocker() as m: + m.register_uri(requests_mock.POST, + "http://localhost:8086/db/db/series") + cli = InfluxDBClient('localhost', 8086, + 'username', 'password', 'db') + cli.write_points(data=dummy_points, batch_size=3) + self.assertEqual(m.call_count, 5) + self.assertEqual(expected_last_body, m.request_history[4].json()) def test_write_points_udp(self): s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) From c23f3ff3416fb0bcdd945fefc0348403541689d8 Mon Sep 17 00:00:00 2001 From: Georgi Dimitrov Date: Sat, 25 Apr 2015 02:00:49 +0100 Subject: [PATCH 156/536] Refactor write_methods method for InflixDBClient * Implement batch writing. * Update the docstring. 
--- influxdb/client.py | 42 ++++++++++++++++++++++++++++++++++-------- 1 file changed, 34 insertions(+), 8 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 99a6abe4..89d72261 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -322,23 +322,49 @@ def write_points(self, database=None, retention_policy=None, tags=None, + batch_size=None, ): """ Write to multiple time series names. - :param points: A list of dicts. + :param points: the list of points to be written in the database + :type points: list of dictionaries, each dictionary represents a point :param time_precision: [Optional, default None] Either 's', 'm', 'ms' or 'u'. + :type time_precision: string :param database: The database to write the points to. Defaults to the client's current db. + :type database: string + :param tags: a set of key-value pairs associated with each point. Both + keys and values must be strings. These are shared tags and will be + merged with point-specific tags. + :type tags: dictionary :param retention_policy: The retention policy for the points. - """ - # TODO: re-implement chunks. - return self._write_points(points=points, - time_precision=time_precision, - database=database, - retention_policy=retention_policy, - tags=tags) + :type retention_policy: string + :param batch_size: [Optional] Value to write the points in batches + instead of all at one time. 
Useful for when doing data dumps from + one database to another or when doing a massive write operation + :type batch_size: int + """ + + if batch_size and batch_size > 0: + for batch in self._batches(points, batch_size): + self._write_points(points=batch, + time_precision=time_precision, + database=database, + retention_policy=retention_policy, + tags=tags) + return True + else: + return self._write_points(points=points, + time_precision=time_precision, + database=database, + retention_policy=retention_policy, + tags=tags) + + def _batches(self, iterable, size): + for i in xrange(0, len(iterable), size): + yield iterable[i:i + size] def _write_points(self, points, From 540befeb3737a1e3918b01dcadd9766ce3aa91a9 Mon Sep 17 00:00:00 2001 From: Georgi Dimitrov Date: Sat, 25 Apr 2015 02:02:28 +0100 Subject: [PATCH 157/536] Add tests for batched writing. --- tests/influxdb/client_test.py | 31 +++++++++++++++++++---- tests/influxdb/client_test_with_server.py | 26 +++++++++++++++---- 2 files changed, 47 insertions(+), 10 deletions(-) diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index b4115fbf..c81c3c30 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -204,12 +204,33 @@ def test_write_points_toplevel_attributes(self): json.loads(m.last_request.body) ) - @unittest.skip('Not implemented for 0.9') def test_write_points_batch(self): - cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') - with _mocked_session(cli, 'post', 200, self.dummy_points): - self.assertTrue(cli.write_points(data=self.dummy_points, - batch_size=2)) + dummy_points = [ + {"name": "cpu_usage", "tags": {"unit": "percent"}, + "timestamp": "2009-11-10T23:00:00Z", "fields": {"value": 12.34}}, + {"name": "network", "tags": {"direction": "in"}, + "timestamp": "2009-11-10T23:00:00Z", "fields": {"value": 123.00}}, + {"name": "network", "tags": {"direction": "out"}, + "timestamp": "2009-11-10T23:00:00Z", "fields": {"value": 12.00}} + ] + 
expected_last_body = {"tags": {"host": "server01", + "region": "us-west"}, + "database": "db", + "points": [{"name": "network", + "tags": {"direction": "out"}, + "timestamp": "2009-11-10T23:00:00Z", + "fields": {"value": 12.00}}]} + with requests_mock.Mocker() as m: + m.register_uri(requests_mock.POST, + "http://localhost:8086/write") + cli = InfluxDBClient(database='db') + cli.write_points(points=dummy_points, + database='db', + tags={"host": "server01", + "region": "us-west"}, + batch_size=2) + self.assertEqual(m.call_count, 2) + self.assertEqual(expected_last_body, m.last_request.json()) def test_write_points_udp(self): s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index 2d1ff44c..7b9a70ee 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -451,12 +451,28 @@ def test_write_multiple_points_different_series(self): [[{'value': 33, 'time': '2009-11-10T23:01:35Z'}]] ) - @unittest.skip('Not implemented for 0.9') def test_write_points_batch(self): - self.cli.write_points( - points=dummy_point * 3, - batch_size=2 - ) + dummy_points = [ + {"name": "cpu_usage", "tags": {"unit": "percent"}, + "timestamp": "2009-11-10T23:00:00Z", "fields": {"value": 12.34}}, + {"name": "network", "tags": {"direction": "in"}, + "timestamp": "2009-11-10T23:00:00Z", "fields": {"value": 123.00}}, + {"name": "network", "tags": {"direction": "out"}, + "timestamp": "2009-11-10T23:00:00Z", "fields": {"value": 12.00}} + ] + self.cli.write_points(points=dummy_points, + tags={"host": "server01", + "region": "us-west"}, + batch_size=2) + time.sleep(5) + net_in = self.cli.query("SELECT value FROM network " + "WHERE direction='in'").raw['results'][0] + net_out = self.cli.query("SELECT value FROM network " + "WHERE direction='out'").raw['results'][0] + cpu = self.cli.query("SELECT value FROM cpu_usage").raw['results'][0] + self.assertIn(123, 
net_in['series'][0]['values'][0]) + self.assertIn(12, net_out['series'][0]['values'][0]) + self.assertIn(12.34, cpu['series'][0]['values'][0]) def test_write_points_with_precision(self): ''' check that points written with an explicit precision have From 269549e81748253fb6921404ec4ff0ad62439368 Mon Sep 17 00:00:00 2001 From: aviau Date: Sun, 26 Apr 2015 21:40:51 -0400 Subject: [PATCH 158/536] Added 'env' to .gitignore --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index d88a1c42..7720b658 100644 --- a/.gitignore +++ b/.gitignore @@ -20,3 +20,4 @@ debian/python3-influxdb/ docs/build/ .coverage cover +env From 941b5eb45647e03038b50a28f63580a088723349 Mon Sep 17 00:00:00 2001 From: aviau Date: Mon, 27 Apr 2015 09:53:35 -0400 Subject: [PATCH 159/536] Updated tutorial_sine_wave.py --- examples/tutorial_sine_wave.py | 31 +++++++++++++++++++------------ 1 file changed, 19 insertions(+), 12 deletions(-) diff --git a/examples/tutorial_sine_wave.py b/examples/tutorial_sine_wave.py index ea8aee63..49e8973b 100644 --- a/examples/tutorial_sine_wave.py +++ b/examples/tutorial_sine_wave.py @@ -3,6 +3,7 @@ from influxdb import InfluxDBClient import math import datetime +import time USER = 'root' @@ -15,28 +16,34 @@ def main(host='localhost', port=8086): main function to generate the sin wave """ now = datetime.datetime.today() - data = [{ - 'name': "foobar", - 'columns': ["time", "value"], - 'points': [] - }] + points = [] for angle in range(0, 360): y = 10 + math.sin(math.radians(angle)) * 10 - point = [int(now.strftime('%s')) + angle, y] - data[0]['points'].append(point) + + point = { + "name": 'foobar', + "timestamp": int(now.strftime('%s')) + angle, + "fields": { + "value": y + } + } + points.append(point) client = InfluxDBClient(host, port, USER, PASSWORD, DBNAME) print("Create database: " + DBNAME) client.create_database(DBNAME) + client.switch_database(DBNAME) #Write points - client.write_points(data) + client.write_points(points) + 
+ time.sleep(3) - query = 'SELECT time, value FROM foobar GROUP BY value, time(1s)' + query = 'SELECT * FROM foobar' print("Queying data: " + query) - result = client.query(query) + result = client.query(query, database=DBNAME) print("Result: {0}".format(result)) """ @@ -46,11 +53,11 @@ def main(host='localhost', port=8086): Then run the following query: - SELECT time, value FROM foobar GROUP BY value, time(1s) + SELECT * from foobar """ print("Delete database: " + DBNAME) - client.delete_database(DBNAME) + client.drop_database(DBNAME) def parse_args(): From cd44d9e324be5614bcbd21bfecb5ab00671aaa0b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Mon, 27 Apr 2015 12:59:46 -0400 Subject: [PATCH 160/536] Enh: updated tests to be less strict / more robust. We had spurious/random test failures with travis because there had other warnings generated. (see for instance: https://travis-ci.org/savoirfairelinux/influxdb-python/jobs/59929464) Unfortunately we couldn't reproduce. So this just changed the assert to be less strict: search for the expected message/warning in the rec_warnings list. example of the failure: ====================================================================== FAIL: Tests warning for an invalid bulk size. 
---------------------------------------------------------------------- Traceback (most recent call last): File "/home/travis/build/savoirfairelinux/influxdb-python/tests/influxdb/influxdb08/helper_test.py", line 172, in testWarnBulkSizeZero .format(WarnBulkSizeZero, '\n'.join(map(str, w)))) nose.proxy.AssertionError: 5 != 1 : call should have generated one warning.Actual generated warnings: {message : UserWarning('Definition of bulk_size in WarnBulkSizeZero forced to 1, was less than 1.',), category : 'UserWarning', filename : '/home/travis/build/savoirfairelinux/influxdb-python/influxdb/influxdb08/helper.py', lineno : 79, line : None} {message : ResourceWarning('unclosed ',), category : 'ResourceWarning', filename : '/home/travis/build/savoirfairelinux/influxdb-python/.tox/pypy3/lib-python/3/os.py', lineno : 477, line : None} {message : ResourceWarning('unclosed ',), category : 'ResourceWarning', filename : '/home/travis/build/savoirfairelinux/influxdb-python/.tox/pypy3/lib-python/3/os.py', lineno : 477, line : None} {message : ResourceWarning('unclosed ',), category : 'ResourceWarning', filename : '/home/travis/build/savoirfairelinux/influxdb-python/.tox/pypy3/lib-python/3/os.py', lineno : 477, line : None} {message : ResourceWarning('unclosed ',), category : 'ResourceWarning', filename : '/home/travis/build/savoirfairelinux/influxdb-python/.tox/pypy3/lib-python/3/os.py', lineno : 477, line : None} --- tests/influxdb/influxdb08/helper_test.py | 54 ++++++++++++++++-------- 1 file changed, 37 insertions(+), 17 deletions(-) diff --git a/tests/influxdb/influxdb08/helper_test.py b/tests/influxdb/influxdb08/helper_test.py index 3f546a99..b0a7ef21 100644 --- a/tests/influxdb/influxdb08/helper_test.py +++ b/tests/influxdb/influxdb08/helper_test.py @@ -158,19 +158,26 @@ class Meta: bulk_size = 0 autocommit = True - with warnings.catch_warnings(record=True) as w: + with warnings.catch_warnings(record=True) as rec_warnings: warnings.simplefilter("always") - try: + # Server 
defined in the client is invalid, we're testing + # the warning only. + with self.assertRaises(ConnectionError): WarnBulkSizeZero(time=159, server_name='us.east-1') - except ConnectionError: - # Server defined in the client is invalid, we're testing - # the warning only. - pass - self.assertEqual(len(w), 1, - '{} call should have generated one warning.' - .format(WarnBulkSizeZero)) - self.assertIn('forced to 1', str(w[-1].message), - 'Warning message did not contain "forced to 1".') + + self.assertGreaterEqual( + len(rec_warnings), 1, + '{} call should have generated one warning.' + 'Actual generated warnings: {}'.format( + WarnBulkSizeZero, '\n'.join(map(str, rec_warnings)))) + + expected_msg = ( + 'Definition of bulk_size in WarnBulkSizeZero forced to 1, ' + 'was less than 1.') + + self.assertIn(expected_msg, list(w.message.args[0] + for w in rec_warnings), + 'Warning message did not contain "forced to 1".') def testWarnBulkSizeNoEffect(self): """ @@ -184,11 +191,24 @@ class Meta: bulk_size = 5 autocommit = False - with warnings.catch_warnings(record=True) as w: + with warnings.catch_warnings(record=True) as rec_warnings: warnings.simplefilter("always") WarnBulkSizeNoEffect(time=159, server_name='us.east-1') - self.assertEqual(len(w), 1, - '{} call should have generated one warning.' - .format(WarnBulkSizeNoEffect)) - self.assertIn('has no affect', str(w[-1].message), - 'Warning message did not contain "has not affect".') + + self.assertGreaterEqual( + len(rec_warnings), 1, + '{} call should have generated one warning.' 
+ 'Actual generated warnings: {}'.format( + WarnBulkSizeNoEffect, '\n'.join(map(str, rec_warnings)))) + + expected_msg = ( + 'Definition of bulk_size in WarnBulkSizeNoEffect has no affect ' + 'because autocommit is false.') + + self.assertIn(expected_msg, list(w.message.args[0] + for w in rec_warnings), + 'Warning message did not contain the expected_msg.') + + +if __name__ == '__main__': + unittest.main() From c5081ebfe80e997209e1476eae0cb1c5ba1f4640 Mon Sep 17 00:00:00 2001 From: "tzonghao.chen" Date: Thu, 30 Apr 2015 13:21:54 -0400 Subject: [PATCH 161/536] Construct DataFrame objects from ResultSet in place of from raw JSON. --- influxdb/_dataframe_client.py | 20 +++++++++----------- 1 file changed, 9 insertions(+), 11 deletions(-) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index 122d9934..c4508231 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -78,7 +78,7 @@ def query(self, query, chunked=False, database=None): results = super(DataFrameClient, self).query(query, database=database) if query.upper().startswith("SELECT"): if len(results) > 0: - return self._to_dataframe(results.raw) + return self._to_dataframe(results) else: return {} else: @@ -93,24 +93,22 @@ def get_list_series(self, database=None): .query("SHOW SERIES", database=database) if len(results): return dict( - (s['name'], pd.DataFrame(s['values'], columns=s['columns'])) - for s in results.raw['results'][0]['series'] + (key[0], pd.DataFrame(data)) for key, data in results.items() ) else: return {} - def _to_dataframe(self, json_result): + def _to_dataframe(self, rs): result = {} - series = json_result['results'][0]['series'] - for s in series: - tags = s.get('tags') + for key, data in rs.items(): + name, tags = key if tags is None: - key = s['name'] + key = name else: - key = (s['name'], tuple(sorted(tags.items()))) - df = pd.DataFrame(s['values'], columns=s['columns']) + key = (name, tuple(sorted(tags.items()))) + df = 
pd.DataFrame(data) df.time = pd.to_datetime(df.time) - df.set_index(['time'], inplace=True) + df.set_index('time', inplace=True) df.index = df.index.tz_localize('UTC') df.index.name = None result[key] = df From 19fe9deb3de44f5987de3cb00af68bc49cc94381 Mon Sep 17 00:00:00 2001 From: Georgi Dimitrov Date: Sun, 3 May 2015 00:12:32 +0100 Subject: [PATCH 162/536] Fix docstrings in client.py. --- influxdb/client.py | 384 +++++++++++++++++++++++++++++---------------- 1 file changed, 246 insertions(+), 138 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index dd74ae1b..35fdad4a 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -25,7 +25,7 @@ class InfluxDBClientError(Exception): - """Raised when an error occurs in the request""" + """Raised when an error occurs in the request.""" def __init__(self, content, code): if isinstance(content, type(b'')): content = content.decode('UTF-8', errors='replace') @@ -36,39 +36,38 @@ def __init__(self, content, code): class InfluxDBServerError(Exception): - """Raised when server error occurs""" + """Raised when a server error occurs.""" def __init__(self, content): super(InfluxDBServerError, self).__init__(content) class InfluxDBClient(object): - - """ - The ``InfluxDBClient`` object holds information necessary to connect - to InfluxDB. Requests can be made to InfluxDB directly through the client. + """The :class:`~.InfluxDBClient` object holds information necessary to + connect to InfluxDB. Requests can be made to InfluxDB directly through + the client. 
:param host: hostname to connect to InfluxDB, defaults to 'localhost' - :type host: string - :param port: port to connect to InfluxDB, defaults to 'localhost' + :type host: str + :param port: port to connect to InfluxDB, defaults to 8086 :type port: int :param username: user to connect, defaults to 'root' - :type username: string + :type username: str :param password: password of the user, defaults to 'root' - :type password: string - :param database: database name to connect to, defaults is None - :type database: string - :param ssl: use https instead of http to connect to InfluxDB, defaults is + :type password: str + :param database: database name to connect to, defaults to None + :type database: str + :param ssl: use https instead of http to connect to InfluxDB, defaults to False - :type ssl: boolean - :param verify_ssl: verify SSL certificates for HTTPS requests, defaults is + :type ssl: bool + :param verify_ssl: verify SSL certificates for HTTPS requests, defaults to False - :type verify_ssl: boolean + :type verify_ssl: bool :param timeout: number of seconds Requests will wait for your client to establish a connection, defaults to None :type timeout: int - :param use_udp: use UDP to connect to InfluxDB, defaults is False + :param use_udp: use UDP to connect to InfluxDB, defaults to False :type use_udp: int - :param udp_port: UDP port to connect to InfluxDB, defaults is 4444 + :param udp_port: UDP port to connect to InfluxDB, defaults to 4444 :type udp_port: int """ @@ -84,9 +83,7 @@ def __init__(self, use_udp=False, udp_port=4444, ): - """ - Construct a new InfluxDBClient object. - """ + """Construct a new InfluxDBClient object.""" self._host = host self._port = port self._username = username @@ -118,31 +115,32 @@ def __init__(self, @staticmethod def from_DSN(dsn, **kwargs): - """ - Returns an instance of InfluxDBClient from the provided data source - name. Supported schemes are "influxdb", "https+influxdb", - "udp+influxdb". 
Parameters for the InfluxDBClient constructor may be - also be passed to this function. + """Return an instance of :class:`~.InfluxDBClient` from the provided + data source name. Supported schemes are "influxdb", "https+influxdb" + and "udp+influxdb". Parameters for the :class:`~.InfluxDBClient` + constructor may also be passed to this method. + + :param dsn: data source name + :type dsn: string + :param kwargs: additional parameters for `InfluxDBClient` + :type kwargs: dict + :raises ValueError: if the provided DSN has any unexpected values - Examples: - >>> cli = InfluxDBClient.from_DSN('influxdb://username:password@\ + :Example: + + >>> cli = InfluxDBClient.from_DSN('influxdb://username:password@\ localhost:8086/databasename', timeout=5) - >>> type(cli) - - >>> cli = InfluxDBClient.from_DSN('udp+influxdb://username:pass@\ + >>> type(cli) + + >>> cli = InfluxDBClient.from_DSN('udp+influxdb://username:pass@\ localhost:8086/databasename', timeout=5, udp_port=159) - >>> print('{0._baseurl} - {0.use_udp} {0.udp_port}'.format(cli)) - http://localhost:8086 - True 159 + >>> print('{0._baseurl} - {0.use_udp} {0.udp_port}'.format(cli)) + http://localhost:8086 - True 159 - :param dsn: data source name - :type dsn: string - :param **kwargs: additional parameters for InfluxDBClient. - :type **kwargs: dict - :note: parameters provided in **kwargs may override dsn parameters. - :note: when using "udp+influxdb" the specified port (if any) will be - used for the TCP connection; specify the udp port with the - additional udp_port parameter (cf. examples). - :raise ValueError: if the provided DSN has any unexpected value. + .. note:: parameters provided in `**kwargs` may override dsn parameters + .. note:: when using "udp+influxdb" the specified port (if any) will + be used for the TCP connection; specify the UDP port with the + additional `udp_port` parameter (cf. examples). 
""" dsn = dsn.lower() @@ -181,34 +179,43 @@ def from_DSN(dsn, **kwargs): return InfluxDBClient(**init_args) def switch_database(self, database): - """ - switch_database() + """Change the client's database. - Change client database. - - :param database: the new database name to switch to - :type database: string + :param database: the name of the database to switch to + :type database: str """ self._database = database def switch_user(self, username, password): - """ - switch_user() - - Change client username. + """Change the client's username. - :param username: the new username to switch to - :type username: string - :param password: the new password to switch to - :type password: string + :param username: the username to switch to + :type username: str + :param password: the password for the username + :type password: str """ self._username = username self._password = password def request(self, url, method='GET', params=None, data=None, expected_response_code=200): - """ - Make a http request to API + """Make a HTTP request to the InfluxDB API. + + :param url: the path of the HTTP request, e.g. write, query, etc. 
+ :type url: str + :param method: the HTTP method for the request, defaults to GET + :type method: str + :param params: additional parameters for the request, defaults to None + :type params: dict + :param data: the data of the request, defaults to None + :type data: str + :param expected_response_code: the expected response code of + the request, defaults to 200 + :type expected_response_code: int + :returns: the response from the request + :rtype: :class:`requests.Response` + :raises InfluxDBClientError: if the response code is not the + same as `expected_response_code` """ url = "{0}/{1}".format(self._baseurl, url) @@ -251,7 +258,18 @@ def request(self, url, method='GET', params=None, data=None, raise InfluxDBClientError(response.content, response.status_code) def write(self, data, params=None, expected_response_code=200): - """ Write to influxdb """ + """Write data to InfluxDB. + + :param data: the data to be written + :type data: dict + :param params: additional parameters for the request, defaults to None + :type params: dict + :param expected_response_code: the expected response code of the write + operation, defaults to 200 + :type expected_response_code: int + :returns: True, if the write operation is successful + :rtype: bool + """ self.request( url="write", method='POST', @@ -266,17 +284,20 @@ def query(self, params={}, expected_response_code=200, database=None): + """Send a query to InfluxDB. + + :param query: the actual query string + :type query: str + :param params: additional parameters for the request, defaults to {} + :type params: dict + :param expected_response_code: the expected status code of response, + defaults to 200 + :type expected_response_code: int + :param database: database to query, defaults to None + :type database: str + :returns: the queried data + :rtype: :class:`~.ResultSet` """ - Query data - - :param params: Additional parameters to be passed to requests. - :param database: Database to query, default to None. 
- :param expected_response_code: Expected response code. Defaults to 200. - - :rtype : ResultSet - - """ - params['q'] = query params['db'] = database or self._database @@ -300,27 +321,32 @@ def write_points(self, tags=None, batch_size=None, ): - """ - Write to multiple time series names. + """Write to multiple time series names. :param points: the list of points to be written in the database :type points: list of dictionaries, each dictionary represents a point - :param time_precision: [Optional, default None] Either 's', 'm', 'ms' - or 'u'. - :type time_precision: string - :param database: The database to write the points to. Defaults to - the client's current db. - :type database: string + :param time_precision: Either 's', 'm', 'ms' or 'u', defaults to None + :type time_precision: str + :param database: the database to write the points to. Defaults to + the client's current database + :type database: str :param tags: a set of key-value pairs associated with each point. Both keys and values must be strings. These are shared tags and will be - merged with point-specific tags. - :type tags: dictionary - :param retention_policy: The retention policy for the points. - :type retention_policy: string - :param batch_size: [Optional] Value to write the points in batches + merged with point-specific tags, defaults to None + :type tags: dict + :param retention_policy: the retention policy for the points. Defaults + to None + :type retention_policy: str + :param batch_size: value to write the points in batches instead of all at one time. Useful for when doing data dumps from - one database to another or when doing a massive write operation + one database to another or when doing a massive write operation, + defaults to None :type batch_size: int + :returns: True, if the operation is successful + :rtype: bool + + .. 
note:: if no retention policy is specified, the default retention + policy for the database is used """ if batch_size and batch_size > 0: @@ -384,35 +410,56 @@ def _write_points(self, return True def get_list_database(self): - """ - Get the list of databases + """Get the list of databases in InfluxDB. + + :returns: all databases in InfluxDB + :rtype: list of dictionaries + + :Example: + + >>> dbs = client.get_list_database() + >>> dbs + [{u'name': u'db1'}, {u'name': u'db2'}, {u'name': u'db3'}] """ return list(self.query("SHOW DATABASES")['databases']) def create_database(self, dbname): - """ - Create a new database + """Create a new database in InfluxDB. + + :param dbname: the name of the database to create + :type dbname: str """ self.query("CREATE DATABASE %s" % dbname) def drop_database(self, dbname): - """ - Create a new database - """ - self.query("DROP DATABASE %s" % dbname) + """Drop a database from InfluxDB. - def create_retention_policy( - self, name, duration, - replication, database=None, default=False): + :param dbname: the name of the database to drop + :type dbname: str """ - Create a retention policy + self.query("DROP DATABASE %s" % dbname) - :param duration: The duration. Ex: '1d' - :param replication: The replication. - :param database: The database. Defaults to current database - :param default: (bool) Wether or not to set the policy as default + def create_retention_policy(self, name, duration, replication, + database=None, default=False): + """Create a retention policy for a database. + + :param name: the name of the new retention policy + :type name: str + :param duration: the duration of the new retention policy. + Durations such as 1h, 90m, 12h, 7d, and 4w, are all supported + and mean 1 hour, 90 minutes, 12 hours, 7 day, and 4 weeks, + respectively. For infinite retention – meaning the data will + never be deleted – use 'INF' for duration. + The minimum retention period is 1 hour. 
+ :type duration: str + :param replication: the replication of the retention policy + :type replication: str + :param database: the database for which the retention policy is + created. Defaults to current client's database + :type database: str + :param default: whether or not to set the policy as default + :type default: bool """ - query_string = \ "CREATE RETENTION POLICY %s ON %s " \ "DURATION %s REPLICATION %s" % \ @@ -424,8 +471,22 @@ def create_retention_policy( self.query(query_string) def get_list_retention_policies(self, database=None): - """ - Get the list of retention policies + """Get the list of retention policies for a database. + + :param database: the name of the database, defaults to the client's + current database + :type database: str + :returns: all retention policies for the database + :rtype: list of dictionaries + + :Example: + + >>> ret_policies = client.get_list_retention_policies('my_db') + >>> ret_policies + [{u'default': True, + u'duration': u'0', + u'name': u'default', + u'replicaN': 1}] """ rsp = self.query( "SHOW RETENTION POLICIES %s" % (database or self._database) @@ -433,8 +494,22 @@ def get_list_retention_policies(self, database=None): return list(rsp['results']) def get_list_series(self, database=None): - """ - Get the list of series + """Get the list of series for a database. + + :param database: the name of the database, defaults to the client's + current database + :type database: str + :returns: all series in the specified database + :rtype: list of dictionaries + + :Example: + + >>> series = client.get_list_series('my_database') + >>> series + [{'name': u'cpu_usage', + 'tags': [{u'_id': 1, + u'host': u'server01', + u'region': u'us-west'}]}] """ rsp = self.query("SHOW SERIES", database=database) series = [] @@ -448,66 +523,91 @@ def get_list_series(self, database=None): return series def get_list_users(self): - """ - Get the list of users + """Get the list of all users in InfluxDB. 
+ + :returns: all users in InfluxDB + :rtype: list of dictionaries + + :Example: + + >>> users = client.get_list_users() + >>> users + [{u'admin': True, u'user': u'user1'}, + {u'admin': False, u'user': u'user2'}, + {u'admin': False, u'user': u'user3'}] """ return list(self.query("SHOW USERS")["results"]) def create_user(self, username, password): - """ - Create a new user + """Create a new user in InfluxDB :param username: the new username to create - :type username: string + :type username: str :param password: the password for the new user - :type password: string + :type password: str """ text = "CREATE USER {} WITH PASSWORD '{}'".format(username, password) self.query(text) def drop_user(self, username): - """ - Drop an user + """Drop an user from InfluxDB. :param username: the username to drop - :type username: string + :type username: str """ text = "DROP USER {}".format(username) self.query(text) def set_user_password(self, username, password): - """ - Change the password of an existing user + """Change the password of an existing user. :param username: the username who's password is being changed - :type username: string + :type username: str :param password: the new password for the user - :type password: string + :type password: str """ text = "SET PASSWORD FOR {} = '{}'".format(username, password) self.query(text) def delete_series(self, name, database=None): + """Delete series from a database. + + :param name: the name of the series to be deleted + :type name: str + :param database: the database from which the series should be + deleted, defaults to client's current database + :type database: str + """ database = database or self._database self.query('DROP SERIES \"%s\"' % name, database=database) def send_packet(self, packet): + """Send an UDP packet. 
+ + :param packet: the packet to be sent + :type packet: dict + """ data = json.dumps(packet) byte = data.encode('utf-8') self.udp_socket.sendto(byte, (self._host, self.udp_port)) class InfluxDBClusterClient(object): - """ - The ``InfluxDBClusterClient`` is the client for connecting to a cluster of - InfluxDB Servers. It basically is a proxy to multiple ``InfluxDBClient``s. - - :param hosts: A list of hosts, where a host should be in format - (address, port) - e.g. [('127.0.0.1', 8086), ('127.0.0.1', 9096)] - :param shuffle: If true, queries will hit servers evenly(randomly) - :param client_base_class: In order to support different clients, - default to InfluxDBClient + """The :class:`~.InfluxDBClusterClient` is the client for connecting + to a cluster of InfluxDB servers. It's basically a proxy to multiple + InfluxDBClients. + + :param hosts: all hosts to be included in the cluster, each of which + should be in the format (address, port), + e.g. [('127.0.0.1', 8086), ('127.0.0.1', 9096)]. Defaults to + [('localhost', 8086)] + :type hosts: list of tuples + :param shuffle: whether the queries should hit servers evenly(randomly), + defaults to True + :type shuffle: bool + :param client_base_class: the base class for all clients in the cluster. + This parameter is used to enable the support of different client + types. Defaults to :class:`~.InfluxDBClient` """ def __init__(self, @@ -547,17 +647,25 @@ def __init__(self, @staticmethod def from_DSN(dsn, client_base_class=InfluxDBClient, shuffle=True, **kwargs): - """ - Same as InfluxDBClient.from_DSN, and supports multiple servers. 
- - Example DSN: - influxdb://usr:pwd@host1:8086,usr:pwd@host2:8086/db_name - udp+influxdb://usr:pwd@host1:8086,usr:pwd@host2:8086/db_name - https+influxdb://usr:pwd@host1:8086,usr:pwd@host2:8086/db_name - - :param shuffle: If true, queries will hit servers evenly(randomly) - :param client_base_class: In order to support different clients, - default to InfluxDBClient + """Same as :meth:`~.InfluxDBClient.from_DSN`, but supports + multiple servers. + + :param shuffle: whether the queries should hit servers + evenly(randomly), defaults to True + :type shuffle: bool + :param client_base_class: the base class for all clients in the + cluster. This parameter is used to enable the support of + different client types. Defaults to :class:`~.InfluxDBClient` + + :Example: + + >>> cluster = InfluxDBClusterClient.from_DSN('influxdb://usr:pwd\ +@host1:8086,usr:pwd@host2:8086/db_name', timeout=5) + >>> type(cluster) + + >>> cluster.clients + [, + ] """ dsn = dsn.lower() conn_params = urlparse(dsn) From 18f2efdd84bd876c7a949d9aee4fb9fa6e0c93d8 Mon Sep 17 00:00:00 2001 From: Georgi Dimitrov Date: Sun, 3 May 2015 00:13:11 +0100 Subject: [PATCH 163/536] Add InfluxDBClusterClient to the API docs. --- docs/source/api-documentation.rst | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/docs/source/api-documentation.rst b/docs/source/api-documentation.rst index 5449cef5..c6178fed 100644 --- a/docs/source/api-documentation.rst +++ b/docs/source/api-documentation.rst @@ -45,9 +45,19 @@ These clients are initiated in the same way as the :members: :undoc-members: ------------------------ +------------------------------ +:class:`InfluxDBClusterClient` +------------------------------ + + +.. currentmodule:: influxdb.InfluxDBClusterClient +.. autoclass:: influxdb.InfluxDBClusterClient + :members: + :undoc-members: + +------------------------ :class:`DataFrameClient` ------------------------ +------------------------ .. 
currentmodule:: influxdb.DataFrameClient From 79e4fb2dad3a43e1506c7db27f3b5cd2d750825c Mon Sep 17 00:00:00 2001 From: aviau Date: Sun, 3 May 2015 14:51:46 -0400 Subject: [PATCH 164/536] Allow mixed case in DSN (closes #176) --- influxdb/client.py | 2 -- tests/influxdb/client_test.py | 5 +++++ 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index dd74ae1b..927d654d 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -144,7 +144,6 @@ def from_DSN(dsn, **kwargs): additional udp_port parameter (cf. examples). :raise ValueError: if the provided DSN has any unexpected value. """ - dsn = dsn.lower() init_args = {} conn_params = urlparse(dsn) @@ -559,7 +558,6 @@ def from_DSN(dsn, client_base_class=InfluxDBClient, :param client_base_class: In order to support different clients, default to InfluxDBClient """ - dsn = dsn.lower() conn_params = urlparse(dsn) netlocs = conn_params.netloc.split(',') cluster_client = InfluxDBClusterClient( diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index c81c3c30..00ca9b1f 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -727,3 +727,8 @@ def test_dsn_single_client(self): 'https+influxdb://usr:pwd@host:8086/db', **{'ssl': False}) self.assertEqual('http://host:8086', cli.clients[0]._baseurl) + + def test_dsn_password_caps(self): + cli = InfluxDBClusterClient.from_DSN( + 'https+influxdb://usr:pWd@host:8086/db') + self.assertEqual('pWd', cli.clients[0]._password) From 317497b9f126bac1f8ef787df7d441f473b266ae Mon Sep 17 00:00:00 2001 From: aviau Date: Mon, 4 May 2015 08:51:36 -0400 Subject: [PATCH 165/536] Added 'test_dsn_mixed_scheme_case' --- tests/influxdb/client_test.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index 00ca9b1f..adeafe96 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -732,3 +732,13 @@ def 
test_dsn_password_caps(self): cli = InfluxDBClusterClient.from_DSN( 'https+influxdb://usr:pWd@host:8086/db') self.assertEqual('pWd', cli.clients[0]._password) + + def test_dsn_mixed_scheme_case(self): + cli = InfluxDBClusterClient.from_DSN( + 'hTTps+inFLUxdb://usr:pWd@host:8086/db') + self.assertEqual('pWd', cli.clients[0]._password) + self.assertEqual('https://host:8086', cli.clients[0]._baseurl) + + cli = InfluxDBClusterClient.from_DSN( + 'uDP+influxdb://usr:pwd@host1:8086,usr:pwd@host2:8086/db') + self.assertTrue(cli.clients[0].use_udp) From e683a1c933b2785bdf908f394e48c5d1cf5d91e3 Mon Sep 17 00:00:00 2001 From: Georgi Dimitrov Date: Mon, 4 May 2015 17:19:58 +0100 Subject: [PATCH 166/536] Add alter_retention_policy method to the client. --- influxdb/client.py | 37 +++++++++++++++ tests/influxdb/client_test.py | 38 ++++++++++++++++ tests/influxdb/client_test_with_server.py | 55 +++++++++++++++++++++++ 3 files changed, 130 insertions(+) diff --git a/influxdb/client.py b/influxdb/client.py index 35fdad4a..e4b9cbc7 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -470,6 +470,43 @@ def create_retention_policy(self, name, duration, replication, self.query(query_string) + def alter_retention_policy(self, name, database=None, + duration=None, replication=None, default=None): + """Mofidy an existing retention policy for a database. + + :param name: the name of the retention policy to modify + :type name: str + :param database: the database for which the retention policy is + modified. Defaults to current client's database + :type database: str + :param duration: the new duration of the existing retention policy. + Durations such as 1h, 90m, 12h, 7d, and 4w, are all supported + and mean 1 hour, 90 minutes, 12 hours, 7 day, and 4 weeks, + respectively. For infinite retention – meaning the data will + never be deleted – use 'INF' for duration. + The minimum retention period is 1 hour. 
+ :type duration: str + :param replication: the new replication of the existing + retention policy + :type replication: str + :param default: whether or not to set the modified policy as default + :type default: bool + + .. note:: at least one of duration, replication, or default flag + should be set. Otherwise the operation will fail. + """ + query_string = ( + "ALTER RETENTION POLICY {} ON {}" + ).format(name, database or self._database) + if duration: + query_string += " DURATION {}".format(duration) + if replication: + query_string += " REPLICATION {}".format(replication) + if default is True: + query_string += " DEFAULT" + + self.query(query_string) + def get_list_retention_policies(self, database=None): """Get the list of retention policies for a database. diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index c81c3c30..266bdd24 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -490,6 +490,44 @@ def test_create_retention_policy(self): 'db duration 1d replication 4' ) + def test_alter_retention_policy(self): + example_response = '{"results":[{}]}' + + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.GET, + "http://localhost:8086/query", + text=example_response + ) + # Test alter duration + self.cli.alter_retention_policy('somename', 'db', + duration='4d') + self.assertEqual( + m.last_request.qs['q'][0], + 'alter retention policy somename on db duration 4d' + ) + # Test alter replication + self.cli.alter_retention_policy('somename', 'db', + replication=4) + self.assertEqual( + m.last_request.qs['q'][0], + 'alter retention policy somename on db replication 4' + ) + + # Test alter default + self.cli.alter_retention_policy('somename', 'db', + default=True) + self.assertEqual( + m.last_request.qs['q'][0], + 'alter retention policy somename on db default' + ) + + @raises(Exception) + def test_alter_retention_policy_invalid(self): + cli = InfluxDBClient('host', 8086, 'username', 'password') 
+ with _mocked_session(cli, 'get', 400): + self.cli.alter_retention_policy('somename', 'db') + def test_get_list_retention_policies(self): example_response = \ '{"results": [{"series": [{"values": [["fsfdsdf", "24h0m0s", 2]],'\ diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index 7b9a70ee..997e7b06 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -657,6 +657,61 @@ def test_create_retention_policy(self): rsp ) + def test_alter_retention_policy(self): + self.cli.create_retention_policy('somename', '1d', 1) + + # Test alter duration + self.cli.alter_retention_policy('somename', 'db', + duration='4d') + rsp = self.cli.get_list_retention_policies() + self.assertEqual( + [{'duration': '0', 'default': True, + 'replicaN': 1, 'name': 'default'}, + {'duration': '96h0m0s', 'default': False, + 'replicaN': 1, 'name': 'somename'}], + rsp + ) + + # Test alter replication + self.cli.alter_retention_policy('somename', 'db', + replication=4) + rsp = self.cli.get_list_retention_policies() + self.assertEqual( + [{'duration': '0', 'default': True, + 'replicaN': 1, 'name': 'default'}, + {'duration': '96h0m0s', 'default': False, + 'replicaN': 4, 'name': 'somename'}], + rsp + ) + + # Test alter default + self.cli.alter_retention_policy('somename', 'db', + default=True) + rsp = self.cli.get_list_retention_policies() + self.assertEqual( + [{'duration': '0', 'default': False, + 'replicaN': 1, 'name': 'default'}, + {'duration': '96h0m0s', 'default': True, + 'replicaN': 4, 'name': 'somename'}], + rsp + ) + + def test_alter_retention_policy_invalid(self): + self.cli.create_retention_policy('somename', '1d', 1) + with self.assertRaises(InfluxDBClientError) as ctx: + self.cli.alter_retention_policy('somename', 'db') + self.assertEqual(400, ctx.exception.code) + self.assertIn('{"error":"error parsing query: ', + ctx.exception.content) + rsp = self.cli.get_list_retention_policies() + 
self.assertEqual( + [{'duration': '0', 'default': True, + 'replicaN': 1, 'name': 'default'}, + {'duration': '24h0m0s', 'default': False, + 'replicaN': 1, 'name': 'somename'}], + rsp + ) + def test_issue_143(self): pt = partial(point, 'a_serie_name', timestamp='2015-03-30T16:16:37Z') pts = [ From 352071492a0ff518e6f000963deb121071aca651 Mon Sep 17 00:00:00 2001 From: Georgi Dimitrov Date: Mon, 4 May 2015 18:45:54 +0100 Subject: [PATCH 167/536] Add grant_admin_privileges method to the client. --- influxdb/client.py | 12 ++++++++++++ tests/influxdb/client_test.py | 22 ++++++++++++++++++++++ tests/influxdb/client_test_with_server.py | 15 +++++++++++++++ 3 files changed, 49 insertions(+) diff --git a/influxdb/client.py b/influxdb/client.py index e4b9cbc7..15562920 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -618,6 +618,18 @@ def delete_series(self, name, database=None): database = database or self._database self.query('DROP SERIES \"%s\"' % name, database=database) + def grant_admin_privileges(self, username): + """Grant cluster administration privileges to an user. + + :param username: the username to grant privileges to + :type username: str + + .. note:: Only a cluster administrator can create/ drop databases + and manage users. + """ + text = "GRANT ALL PRIVILEGES TO {}".format(username) + self.query(text) + def send_packet(self, packet): """Send an UDP packet. 
diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index 266bdd24..5f8a0528 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -624,6 +624,28 @@ def test_get_list_users_empty(self): self.assertListEqual(self.cli.get_list_users(), []) + def test_grant_admin_privileges(self): + example_response = '{"results":[{}]}' + + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.GET, + "http://localhost:8086/query", + text=example_response + ) + self.cli.grant_admin_privileges('test') + + self.assertEqual( + m.last_request.qs['q'][0], + 'grant all privileges to test' + ) + + @raises(Exception) + def test_grant_admin_privileges_invalid(self): + cli = InfluxDBClient('host', 8086, 'username', 'password') + with _mocked_session(cli, 'get', 400): + self.cli.grant_admin_privileges('') + class FakeClient(InfluxDBClient): fail = False diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index 997e7b06..41adee55 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -376,6 +376,21 @@ def test_drop_user_invalid(self): 'found invalid, expected', ctx.exception.content) + def test_grant_admin_privileges(self): + self.cli.create_user('test', 'test') + self.assertEqual([{'user': 'test', 'admin': False}], + self.cli.get_list_users()) + self.cli.grant_admin_privileges('test') + self.assertEqual([{'user': 'test', 'admin': True}], + self.cli.get_list_users()) + + def test_grant_admin_privileges_invalid(self): + with self.assertRaises(InfluxDBClientError) as ctx: + self.cli.grant_admin_privileges('') + self.assertEqual(400, ctx.exception.code) + self.assertIn('{"error":"error parsing query: ', + ctx.exception.content) + ############################################################################ From a4072b10706e4973c9563087d4dedc0628525c4b Mon Sep 17 00:00:00 2001 From: Georgi Dimitrov Date: Mon, 4 May 2015 19:02:35 +0100 
Subject: [PATCH 168/536] Add revoke_admin_privileges to the client. --- influxdb/client.py | 12 ++++++++++++ tests/influxdb/client_test.py | 22 ++++++++++++++++++++++ tests/influxdb/client_test_with_server.py | 16 ++++++++++++++++ 3 files changed, 50 insertions(+) diff --git a/influxdb/client.py b/influxdb/client.py index 15562920..cd53a3e9 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -630,6 +630,18 @@ def grant_admin_privileges(self, username): text = "GRANT ALL PRIVILEGES TO {}".format(username) self.query(text) + def revoke_admin_privileges(self, username): + """Revoke cluster administration privileges from an user. + + :param username: the username to revoke privileges from + :type username: str + + .. note:: Only a cluster administrator can create/ drop databases + and manage users. + """ + text = "REVOKE ALL PRIVILEGES FROM {}".format(username) + self.query(text) + def send_packet(self, packet): """Send an UDP packet. diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index 5f8a0528..7720d1ed 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -646,6 +646,28 @@ def test_grant_admin_privileges_invalid(self): with _mocked_session(cli, 'get', 400): self.cli.grant_admin_privileges('') + def test_revoke_admin_privileges(self): + example_response = '{"results":[{}]}' + + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.GET, + "http://localhost:8086/query", + text=example_response + ) + self.cli.revoke_admin_privileges('test') + + self.assertEqual( + m.last_request.qs['q'][0], + 'revoke all privileges from test' + ) + + @raises(Exception) + def test_revoke_admin_privileges_invalid(self): + cli = InfluxDBClient('host', 8086, 'username', 'password') + with _mocked_session(cli, 'get', 400): + self.cli.revoke_admin_privileges('') + class FakeClient(InfluxDBClient): fail = False diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index 
41adee55..d6bf4282 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -391,6 +391,22 @@ def test_grant_admin_privileges_invalid(self): self.assertIn('{"error":"error parsing query: ', ctx.exception.content) + def test_revoke_admin_privileges(self): + self.cli.create_user('test', 'test') + self.cli.grant_admin_privileges('test') + self.assertEqual([{'user': 'test', 'admin': True}], + self.cli.get_list_users()) + self.cli.revoke_admin_privileges('test') + self.assertEqual([{'user': 'test', 'admin': False}], + self.cli.get_list_users()) + + def test_revoke_admin_privileges_invalid(self): + with self.assertRaises(InfluxDBClientError) as ctx: + self.cli.revoke_admin_privileges('') + self.assertEqual(400, ctx.exception.code) + self.assertIn('{"error":"error parsing query: ', + ctx.exception.content) + ############################################################################ From 36e50eacf3e3ad7daa83e9d9c935fe3b23d0fdf7 Mon Sep 17 00:00:00 2001 From: Georgi Dimitrov Date: Mon, 4 May 2015 20:42:07 +0100 Subject: [PATCH 169/536] Add grant_priviliege method to the client. --- influxdb/client.py | 16 ++++++++++++++++ tests/influxdb/client_test.py | 22 ++++++++++++++++++++++ tests/influxdb/client_test_with_server.py | 15 +++++++++++++++ 3 files changed, 53 insertions(+) diff --git a/influxdb/client.py b/influxdb/client.py index cd53a3e9..3efa1b49 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -642,6 +642,22 @@ def revoke_admin_privileges(self, username): text = "REVOKE ALL PRIVILEGES FROM {}".format(username) self.query(text) + def grant_privilege(self, privilege, database, username): + """Grant a privilege on a database to an user. + + :param privilege: the privilege to grant, one of 'read', 'write' + or 'all'. 
The string is case-insensitive + :type privilege: str + :param database: the database to grant the privilege on + :type database: str + :param username: the username to grant the privilege to + :type username: str + """ + text = "GRANT {} ON {} TO {}".format(privilege, + database, + username) + self.query(text) + def send_packet(self, packet): """Send an UDP packet. diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index 7720d1ed..1b368c2d 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -668,6 +668,28 @@ def test_revoke_admin_privileges_invalid(self): with _mocked_session(cli, 'get', 400): self.cli.revoke_admin_privileges('') + def test_grant_privilege(self): + example_response = '{"results":[{}]}' + + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.GET, + "http://localhost:8086/query", + text=example_response + ) + self.cli.grant_privilege('read', 'testdb', 'test') + + self.assertEqual( + m.last_request.qs['q'][0], + 'grant read on testdb to test' + ) + + @raises(Exception) + def test_grant_privilege_invalid(self): + cli = InfluxDBClient('host', 8086, 'username', 'password') + with _mocked_session(cli, 'get', 400): + self.cli.grant_privilege('', 'testdb', 'test') + class FakeClient(InfluxDBClient): fail = False diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index d6bf4282..208f265d 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -407,6 +407,21 @@ def test_revoke_admin_privileges_invalid(self): self.assertIn('{"error":"error parsing query: ', ctx.exception.content) + def test_grant_privilege(self): + self.cli.create_user('test', 'test') + self.cli.create_database('testdb') + self.cli.grant_privilege('all', 'testdb', 'test') + # TODO: when supported by InfluxDB, check if privileges are granted + + def test_grant_privilege_invalid(self): + self.cli.create_user('test', 'test') + 
self.cli.create_database('testdb') + with self.assertRaises(InfluxDBClientError) as ctx: + self.cli.grant_privilege('', 'testdb', 'test') + self.assertEqual(400, ctx.exception.code) + self.assertIn('{"error":"error parsing query: ', + ctx.exception.content) + ############################################################################ From 731c4e0730118d5f131c4b1918b50c7ddbd18a8b Mon Sep 17 00:00:00 2001 From: Georgi Dimitrov Date: Mon, 4 May 2015 20:42:28 +0100 Subject: [PATCH 170/536] Add revoke_privilege method to the client. --- influxdb/client.py | 16 ++++++++++++++++ tests/influxdb/client_test.py | 22 ++++++++++++++++++++++ tests/influxdb/client_test_with_server.py | 15 +++++++++++++++ 3 files changed, 53 insertions(+) diff --git a/influxdb/client.py b/influxdb/client.py index 3efa1b49..a31373ed 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -658,6 +658,22 @@ def grant_privilege(self, privilege, database, username): username) self.query(text) + def revoke_privilege(self, privilege, database, username): + """Revoke a privilege on a database from an user. + + :param privilege: the privilege to revoke, one of 'read', 'write' + or 'all'. The string is case-insensitive + :type privilege: str + :param database: the database to revoke the privilege on + :type database: str + :param username: the username to revoke the privilege from + :type username: str + """ + text = "REVOKE {} ON {} FROM {}".format(privilege, + database, + username) + self.query(text) + def send_packet(self, packet): """Send an UDP packet. 
diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index 1b368c2d..04342ac2 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -690,6 +690,28 @@ def test_grant_privilege_invalid(self): with _mocked_session(cli, 'get', 400): self.cli.grant_privilege('', 'testdb', 'test') + def test_revoke_privilege(self): + example_response = '{"results":[{}]}' + + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.GET, + "http://localhost:8086/query", + text=example_response + ) + self.cli.revoke_privilege('read', 'testdb', 'test') + + self.assertEqual( + m.last_request.qs['q'][0], + 'revoke read on testdb from test' + ) + + @raises(Exception) + def test_revoke_privilege_invalid(self): + cli = InfluxDBClient('host', 8086, 'username', 'password') + with _mocked_session(cli, 'get', 400): + self.cli.revoke_privilege('', 'testdb', 'test') + class FakeClient(InfluxDBClient): fail = False diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index 208f265d..8cf7c8d4 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -422,6 +422,21 @@ def test_grant_privilege_invalid(self): self.assertIn('{"error":"error parsing query: ', ctx.exception.content) + def test_revoke_privilege(self): + self.cli.create_user('test', 'test') + self.cli.create_database('testdb') + self.cli.revoke_privilege('all', 'testdb', 'test') + # TODO: when supported by InfluxDB, check if privileges are revoked + + def test_revoke_privilege_invalid(self): + self.cli.create_user('test', 'test') + self.cli.create_database('testdb') + with self.assertRaises(InfluxDBClientError) as ctx: + self.cli.revoke_privilege('', 'testdb', 'test') + self.assertEqual(400, ctx.exception.code) + self.assertIn('{"error":"error parsing query: ', + ctx.exception.content) + ############################################################################ From 
ead7d2343a623f9cfd2b2151a4180a0836b2fcae Mon Sep 17 00:00:00 2001 From: Kurt Spindler Date: Mon, 4 May 2015 22:19:24 -0700 Subject: [PATCH 171/536] improve docs for 0.8 --- influxdb/influxdb08/client.py | 20 +++++++++++++++++--- 1 file changed, 17 insertions(+), 3 deletions(-) diff --git a/influxdb/influxdb08/client.py b/influxdb/influxdb08/client.py index a9acf624..5eb0e03f 100644 --- a/influxdb/influxdb08/client.py +++ b/influxdb/influxdb08/client.py @@ -273,9 +273,23 @@ def write(self, data): def write_points(self, data, time_precision='s', *args, **kwargs): """ - Write to multiple time series names. - - :param data: A list of dicts. + Write to multiple time series names. An example data blob is: + + data = [ + { + "points": [ + [ + 12 + ] + ], + "name": "cpu_load_short", + "columns": [ + "value" + ] + } + ] + + :param data: A list of dicts in InfluxDB 0.8.x data format. :param time_precision: [Optional, default 's'] Either 's', 'm', 'ms' or 'u'. :param batch_size: [Optional] Value to write the points in batches From 62f74260b39ff50c5b90cc35bdd9b63c16a88cca Mon Sep 17 00:00:00 2001 From: "tzonghao.chen" Date: Tue, 5 May 2015 16:13:32 -0400 Subject: [PATCH 172/536] Changed `DataFrameClient.write_points` parameters. 
--- influxdb/_dataframe_client.py | 57 +++++++++++------------ tests/influxdb/client_test_with_server.py | 55 ++++++++++++---------- tests/influxdb/dataframe_client_test.py | 23 +++++---- 3 files changed, 69 insertions(+), 66 deletions(-) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index c4508231..c36af11c 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -30,41 +30,43 @@ class DataFrameClient(InfluxDBClient): EPOCH = pd.Timestamp('1970-01-01 00:00:00.000+00:00') - def write_points(self, data, time_precision=None, database=None, - retention_policy=None, tags=None, batch_size=None): + def write_points(self, dataframe, measurement, tags=None, + time_precision=None, database=None, retention_policy=None, + batch_size=None): """ Write to multiple time series names. - :param data: A dictionary mapping series to pandas DataFrames + :param dataframe: data points in a DataFrame + :param measurement: name of measurement + :param tags: dictionary of tags, with string key-values :param time_precision: [Optional, default 's'] Either 's', 'ms', 'u' or 'n'. :param batch_size: [Optional] Value to write the points in batches instead of all at one time. 
Useful for when doing data dumps from one database to another or when doing a massive write operation :type batch_size: int - """ + """ if batch_size: - for key, data_frame in data.items(): - number_batches = int(math.ceil( - len(data_frame) / float(batch_size))) - for batch in range(number_batches): - start_index = batch * batch_size - end_index = (batch + 1) * batch_size - data = self._convert_dataframe_to_json( - key=key, - dataframe=data_frame.ix[start_index:end_index].copy(), - ) - super(DataFrameClient, self).write_points( - data, time_precision, database, retention_policy, tags) - return True - else: - for key, data_frame in data.items(): - data = self._convert_dataframe_to_json( - key=key, dataframe=data_frame, + number_batches = int(math.ceil( + len(dataframe) / float(batch_size))) + for batch in range(number_batches): + start_index = batch * batch_size + end_index = (batch + 1) * batch_size + points = self._convert_dataframe_to_json( + dataframe.ix[start_index:end_index].copy(), + measurement, + tags ) super(DataFrameClient, self).write_points( - data, time_precision, database, retention_policy, tags) + points, time_precision, database, retention_policy) + return True + else: + points = self._convert_dataframe_to_json( + dataframe, measurement, tags + ) + super(DataFrameClient, self).write_points( + points, time_precision, database, retention_policy) return True def query(self, query, chunked=False, database=None): @@ -114,7 +116,7 @@ def _to_dataframe(self, rs): result[key] = df return result - def _convert_dataframe_to_json(self, key, dataframe): + def _convert_dataframe_to_json(self, dataframe, measurement, tags=None): if not isinstance(dataframe, pd.DataFrame): raise TypeError('Must be DataFrame, but type was: {}.' 
@@ -134,14 +136,9 @@ def _convert_dataframe_to_json(self, key, dataframe): # Convert dtype for json serialization dataframe = dataframe.astype('object') - if isinstance(key, str): - name = key - tags = None - else: - name, tags = key points = [ - {'name': name, - 'tags': dict(tags) if tags else {}, + {'name': measurement, + 'tags': tags if tags else {}, 'fields': rec, 'timestamp': ts.isoformat() } diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index f3334c0e..3ad30e4e 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -117,22 +117,27 @@ def point(serie_name, timestamp=None, tags=None, **fields): if not using_pypy: dummy_pointDF = { - ("cpu_load_short", (("host", "server01"), ("region", "us-west"))): - pd.DataFrame( + "measurement": "cpu_load_short", + "tags": {"host": "server01", + "region": "us-west"}, + "dataframe": pd.DataFrame( [[0.64]], columns=['value'], index=pd.to_datetime(["2009-11-10T23:00:00Z"])) } - dummy_pointsDF = { - ("cpu_load_short", (("host", "server01"), ("region", "us-west"))): - pd.DataFrame( + dummy_pointsDF = [{ + "measurement": "cpu_load_short", + "tags": {"host": "server01", "region": "us-west"}, + "dataframe": pd.DataFrame( [[0.64]], columns=['value'], index=pd.to_datetime(["2009-11-10T23:00:00Z"])), - ("memory", (("host", "server01"), ("region", "us-west"))): - pd.DataFrame( + }, { + "measurement": "memory", + "tags": {"host": "server01", "region": "us-west"}, + "dataframe": pd.DataFrame( [[33]], columns=['value'], index=pd.to_datetime(["2009-11-10T23:01:35Z"]) ) - } + }] dummy_point_without_timestamp = [ @@ -507,7 +512,14 @@ def test_write_points(self): @skipIfPYpy def test_write_points_DF(self): """ same as test_write() but with write_points \o/ """ - self.assertIs(True, self.cliDF.write_points(dummy_pointDF)) + self.assertIs( + True, + self.cliDF.write_points( + dummy_pointDF['dataframe'], + dummy_pointDF['measurement'], + 
dummy_pointDF['tags'] + ) + ) def test_write_points_check_read(self): """ same as test_write_check_read() but with write_points \o/ """ @@ -538,9 +550,7 @@ def test_write_points_check_read_DF(self): rsp = self.cliDF.query('SELECT * FROM cpu_load_short') assert_frame_equal( rsp['cpu_load_short'], - dummy_pointDF[ - ('cpu_load_short', - (('host', 'server01'), ('region', 'us-west')))] + dummy_pointDF['dataframe'] ) # Query with Tags @@ -549,9 +559,7 @@ def test_write_points_check_read_DF(self): assert_frame_equal( rsp[('cpu_load_short', (('host', 'server01'), ('region', 'us-west')))], - dummy_pointDF[ - ('cpu_load_short', - (('host', 'server01'), ('region', 'us-west')))] + dummy_pointDF['dataframe'] ) def test_write_multiple_points_different_series(self): @@ -574,25 +582,24 @@ def test_write_multiple_points_different_series(self): @skipIfPYpy def test_write_multiple_points_different_series_DF(self): - self.assertIs(True, self.cliDF.write_points(dummy_pointsDF)) + for i in range(2): + self.assertIs( + True, self.cliDF.write_points( + dummy_pointsDF[i]['dataframe'], + dummy_pointsDF[i]['measurement'], + dummy_pointsDF[i]['tags'])) time.sleep(1) rsp = self.cliDF.query('SELECT * FROM cpu_load_short') assert_frame_equal( rsp['cpu_load_short'], - dummy_pointsDF[ - ('cpu_load_short', (('host', 'server01'), - ('region', 'us-west'))) - ] + dummy_pointsDF[0]['dataframe'] ) rsp = self.cliDF.query('SELECT * FROM memory') assert_frame_equal( rsp['memory'], - dummy_pointsDF[ - ('memory', (('host', 'server01'), - ('region', 'us-west'))) - ] + dummy_pointsDF[1]['dataframe'] ) def test_write_points_batch(self): diff --git a/tests/influxdb/dataframe_client_test.py b/tests/influxdb/dataframe_client_test.py index 830458b2..be8b4b16 100644 --- a/tests/influxdb/dataframe_client_test.py +++ b/tests/influxdb/dataframe_client_test.py @@ -56,10 +56,10 @@ def test_write_points_from_dataframe(self): cli = DataFrameClient(database='db') - cli.write_points({"foo": dataframe}) + 
cli.write_points(dataframe, 'foo') self.assertEqual(json.loads(m.last_request.body), expected) - cli.write_points({("foo", None): dataframe}) + cli.write_points(dataframe, 'foo', tags=None) self.assertEqual(json.loads(m.last_request.body), expected) def test_write_points_from_dataframe_in_batches(self): @@ -73,8 +73,7 @@ def test_write_points_from_dataframe_in_batches(self): "http://localhost:8086/write") cli = DataFrameClient(database='db') - assert cli.write_points({("foo", None): dataframe}, - batch_size=1) is True + assert cli.write_points(dataframe, "foo", batch_size=1) is True def test_write_points_from_dataframe_with_numeric_column_names(self): now = pd.Timestamp('1970-01-01 00:00+00:00') @@ -106,7 +105,7 @@ def test_write_points_from_dataframe_with_numeric_column_names(self): "http://localhost:8086/write") cli = DataFrameClient(database='db') - cli.write_points({("foo", (('hello', 'there'),)): dataframe}) + cli.write_points(dataframe, "foo", {"hello": "there"}) self.assertEqual(json.loads(m.last_request.body), expected) @@ -140,7 +139,7 @@ def test_write_points_from_dataframe_with_period_index(self): "http://localhost:8086/write") cli = DataFrameClient(database='db') - cli.write_points({"foo": dataframe}) + cli.write_points(dataframe, "foo") self.assertEqual(json.loads(m.last_request.body), expected) @@ -175,17 +174,17 @@ def test_write_points_from_dataframe_with_time_precision(self): } cli = DataFrameClient(database='db') - key = "foo" + measurement = "foo" - cli.write_points({key: dataframe}, time_precision='s') + cli.write_points(dataframe, measurement, time_precision='s') points.update(precision='s') self.assertEqual(json.loads(m.last_request.body), points) - cli.write_points({key: dataframe}, time_precision='m') + cli.write_points(dataframe, measurement, time_precision='m') points.update(precision='m') self.assertEqual(json.loads(m.last_request.body), points) - cli.write_points({key: dataframe}, time_precision='u') + cli.write_points(dataframe, 
measurement, time_precision='u') points.update(precision='u') self.assertEqual(json.loads(m.last_request.body), points) @@ -200,7 +199,7 @@ def test_write_points_from_dataframe_fails_without_time_index(self): "http://localhost:8086/db/db/series") cli = DataFrameClient(database='db') - cli.write_points({"foo": dataframe}) + cli.write_points(dataframe, "foo") @raises(TypeError) def test_write_points_from_dataframe_fails_with_series(self): @@ -213,7 +212,7 @@ def test_write_points_from_dataframe_fails_with_series(self): "http://localhost:8086/db/db/series") cli = DataFrameClient(database='db') - cli.write_points({"foo": dataframe}) + cli.write_points(dataframe, "foo") def test_query_into_dataframe(self): data = { From 4934fd1312bc9b9815809989fdc1dc14a8741550 Mon Sep 17 00:00:00 2001 From: aviau Date: Tue, 5 May 2015 16:32:22 -0400 Subject: [PATCH 173/536] Released 2.2.0 --- influxdb/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/__init__.py b/influxdb/__init__.py index cff4c122..06df5859 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -13,4 +13,4 @@ ] -__version__ = '2.1.0' +__version__ = '2.2.0' From aa9aace783c7c353a742f09e22321f671604c99b Mon Sep 17 00:00:00 2001 From: "tzonghao.chen" Date: Tue, 5 May 2015 21:56:08 -0400 Subject: [PATCH 174/536] Update DataFrameClient example. 
--- examples/tutorial_pandas.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/tutorial_pandas.py b/examples/tutorial_pandas.py index f6a193cf..c9a09fde 100644 --- a/examples/tutorial_pandas.py +++ b/examples/tutorial_pandas.py @@ -20,10 +20,10 @@ def main(host='localhost', port=8086): client.create_database(dbname) print("Write DataFrame") - client.write_points({'demo': df}) + client.write_points(df, 'demo') print("Write DataFrame with Tags") - client.write_points({('demo', (('k1', 'v1'), ('k2', 'v2'))): df}) + client.write_points(df, 'demo', {'k1': 'v1', 'k2': 'v2'}) print("Read DataFrame") client.query("select * from demo") From e71a674319db41b7865078113bb6e026180ec85c Mon Sep 17 00:00:00 2001 From: "weiyoung.zhou" Date: Wed, 13 May 2015 19:10:54 +0800 Subject: [PATCH 175/536] for write-204-status --- influxdb/client.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 8cf2dcca..2d2edbff 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -256,7 +256,7 @@ def request(self, url, method='GET', params=None, data=None, else: raise InfluxDBClientError(response.content, response.status_code) - def write(self, data, params=None, expected_response_code=200): + def write(self, data, params=None, expected_response_code=204): """Write data to InfluxDB. 
:param data: the data to be written @@ -264,7 +264,7 @@ def write(self, data, params=None, expected_response_code=200): :param params: additional parameters for the request, defaults to None :type params: dict :param expected_response_code: the expected response code of the write - operation, defaults to 200 + operation, defaults to 204 :type expected_response_code: int :returns: True, if the write operation is successful :rtype: bool @@ -403,7 +403,7 @@ def _write_points(self, else: self.write( data=data, - expected_response_code=200 + expected_response_code=204 ) return True From 5d5661aec6c69645045c8a356ed0992cc5fc3784 Mon Sep 17 00:00:00 2001 From: aviau Date: Wed, 13 May 2015 08:50:30 -0400 Subject: [PATCH 176/536] Updated to influxdb_0.9.0-rc30 --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 3e26eaf9..d724a7c6 100644 --- a/.travis.yml +++ b/.travis.yml @@ -12,7 +12,7 @@ env: install: - sudo pip install tox - sudo pip install coveralls - - wget http://get.influxdb.org/influxdb_0.9.0-rc26_amd64.deb && sudo dpkg -i influxdb_0.9.0-rc26_amd64.deb + - wget http://get.influxdb.org/influxdb_0.9.0-rc30_amd64.deb && sudo dpkg -i influxdb_0.9.0-rc30_amd64.deb script: - travis_wait tox -e $TOX_ENV after_success: From 47c6fbce07f1917fecd279c08f8f49e238d5c4a1 Mon Sep 17 00:00:00 2001 From: aviau Date: Wed, 13 May 2015 09:30:17 -0400 Subject: [PATCH 177/536] Adapted tests to rc30 --- influxdb/_dataframe_client.py | 2 +- tests/influxdb/client_test.py | 17 +++++++---- tests/influxdb/client_test_with_server.py | 26 ++++++++-------- tests/influxdb/dataframe_client_test.py | 37 ++++++++++++++--------- 4 files changed, 47 insertions(+), 35 deletions(-) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index c36af11c..48ecdf40 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -140,7 +140,7 @@ def _convert_dataframe_to_json(self, dataframe, measurement, 
tags=None): {'name': measurement, 'tags': tags if tags else {}, 'fields': rec, - 'timestamp': ts.isoformat() + 'time': ts.isoformat() } for ts, rec in zip(dataframe.index, dataframe.to_dict('record'))] return points diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index a6b3fe18..05cde546 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -137,7 +137,8 @@ def test_write(self): with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, - "http://localhost:8086/write" + "http://localhost:8086/write", + status_code=204 ) cli = InfluxDBClient(database='db') cli.write( @@ -165,7 +166,8 @@ def test_write_points(self): with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, - "http://localhost:8086/write" + "http://localhost:8086/write", + status_code=204 ) cli = InfluxDBClient(database='db') @@ -184,7 +186,8 @@ def test_write_points_toplevel_attributes(self): with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, - "http://localhost:8086/write" + "http://localhost:8086/write", + status_code=204 ) cli = InfluxDBClient(database='db') @@ -222,7 +225,8 @@ def test_write_points_batch(self): "fields": {"value": 12.00}}]} with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, - "http://localhost:8086/write") + "http://localhost:8086/write", + status_code=204) cli = InfluxDBClient(database='db') cli.write_points(points=dummy_points, database='db', @@ -278,7 +282,8 @@ def test_write_points_with_precision(self): with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, - "http://localhost:8086/write" + "http://localhost:8086/write", + status_code=204 ) cli = InfluxDBClient(database='db') @@ -559,7 +564,7 @@ def connection_error(self, *args, **kwargs): raise requests.exceptions.ConnectionError else: r = requests.Response() - r.status_code = 200 + r.status_code = 204 return r mock_request.side_effect = CustomMock().connection_error diff --git 
a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index 3ad30e4e..5d25b914 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -79,7 +79,7 @@ def point(serie_name, timestamp=None, tags=None, **fields): res = {'name': serie_name} if timestamp: - res['timestamp'] = timestamp + res['time'] = timestamp if tags: res['tags'] = tags res['fields'] = fields @@ -93,7 +93,7 @@ def point(serie_name, timestamp=None, tags=None, **fields): "host": "server01", "region": "us-west" }, - "timestamp": "2009-11-10T23:00:00Z", + "time": "2009-11-10T23:00:00Z", "fields": { "value": 0.64 } @@ -108,7 +108,7 @@ def point(serie_name, timestamp=None, tags=None, **fields): "host": "server01", "region": "us-west" }, - "timestamp": "2009-11-10T23:01:35Z", + "time": "2009-11-10T23:01:35Z", "fields": { "value": 33 } @@ -331,6 +331,7 @@ def test_create_database(self): [{'name': 'new_db_1'}, {'name': 'new_db_2'}] ) + @unittest.skip("Broken as of 0.9.0-rc30") def test_create_database_fails(self): self.assertIsNone(self.cli.create_database('new_db')) with self.assertRaises(InfluxDBClientError) as ctx: @@ -344,6 +345,7 @@ def test_drop_database(self): self.assertIsNone(self.cli.drop_database('new_db_1')) self.assertEqual([{'name': 'new_db_2'}], self.cli.get_list_database()) + @unittest.skip("Broken as of 0.9.0-rc30") def test_drop_database_fails(self): with self.assertRaises(InfluxDBClientError) as ctx: self.cli.drop_database('db') @@ -351,6 +353,7 @@ def test_drop_database_fails(self): self.assertIn('{"results":[{"error":"database not found: db', ctx.exception.content) + @unittest.skip("Broken as of 0.9.0-rc30") def test_query_fail(self): with self.assertRaises(InfluxDBClientError) as ctx: self.cli.query('select column_one from foo') @@ -396,6 +399,7 @@ def test_drop_user(self): users = list(self.cli.query("SHOW USERS")['results']) self.assertEqual(users, []) + @unittest.skip("Broken as of 0.9.0-rc30") def 
test_drop_user_nonexisting(self): with self.assertRaises(InfluxDBClientError) as ctx: self.cli.drop_user('test') @@ -506,12 +510,10 @@ def test_write_check_read(self): ) def test_write_points(self): - """ same as test_write() but with write_points \o/ """ self.assertIs(True, self.cli.write_points(dummy_point)) @skipIfPYpy def test_write_points_DF(self): - """ same as test_write() but with write_points \o/ """ self.assertIs( True, self.cliDF.write_points( @@ -522,7 +524,6 @@ def test_write_points_DF(self): ) def test_write_points_check_read(self): - """ same as test_write_check_read() but with write_points \o/ """ self.test_write_points() time.sleep(1) # same as test_write_check_read() rsp = self.cli.query('SELECT * FROM cpu_load_short') @@ -543,7 +544,6 @@ def test_write_points_check_read(self): @skipIfPYpy def test_write_points_check_read_DF(self): - """ same as test_write_check_read() but with write_points \o/ """ self.test_write_points_DF() time.sleep(1) # same as test_write_check_read() @@ -605,11 +605,11 @@ def test_write_multiple_points_different_series_DF(self): def test_write_points_batch(self): dummy_points = [ {"name": "cpu_usage", "tags": {"unit": "percent"}, - "timestamp": "2009-11-10T23:00:00Z", "fields": {"value": 12.34}}, + "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.34}}, {"name": "network", "tags": {"direction": "in"}, - "timestamp": "2009-11-10T23:00:00Z", "fields": {"value": 123.00}}, + "time": "2009-11-10T23:00:00Z", "fields": {"value": 123.00}}, {"name": "network", "tags": {"direction": "out"}, - "timestamp": "2009-11-10T23:00:00Z", "fields": {"value": 12.00}} + "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.00}} ] self.cli.write_points(points=dummy_points, tags={"host": "server01", @@ -626,9 +626,9 @@ def test_write_points_batch(self): self.assertIn(12.34, cpu['series'][0]['values'][0]) def test_write_points_with_precision(self): - ''' check that points written with an explicit precision have + """ check that points written 
with an explicit precision have actually that precision used. - ''' + """ # for that we'll check that - for each precision - the actual 'time' # value returned by a select has the correct regex format.. # n : u'2015-03-20T15:23:36.615654966Z' @@ -646,7 +646,7 @@ def test_write_points_with_precision(self): "host": "server01", "region": "us-west" }, - "timestamp": "2009-11-10T12:34:56.123456789Z", + "time": "2009-11-10T12:34:56.123456789Z", "fields": { "value": 0.64 } diff --git a/tests/influxdb/dataframe_client_test.py b/tests/influxdb/dataframe_client_test.py index be8b4b16..5b517320 100644 --- a/tests/influxdb/dataframe_client_test.py +++ b/tests/influxdb/dataframe_client_test.py @@ -34,14 +34,14 @@ def test_write_points_from_dataframe(self): expected = { 'database': 'db', 'points': [ - {'timestamp': '1970-01-01T00:00:00+00:00', + {'time': '1970-01-01T00:00:00+00:00', 'fields': { 'column_two': 1, 'column_three': 1.0, 'column_one': '1'}, 'tags': {}, 'name': 'foo'}, - {'timestamp': '1970-01-01T01:00:00+00:00', + {'time': '1970-01-01T01:00:00+00:00', 'fields': { 'column_two': 2, 'column_three': 2.0, @@ -52,7 +52,8 @@ def test_write_points_from_dataframe(self): with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, - "http://localhost:8086/write") + "http://localhost:8086/write", + status_code=204) cli = DataFrameClient(database='db') @@ -70,7 +71,8 @@ def test_write_points_from_dataframe_in_batches(self): "column_three"]) with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, - "http://localhost:8086/write") + "http://localhost:8086/write", + status_code=204) cli = DataFrameClient(database='db') assert cli.write_points(dataframe, "foo", batch_size=1) is True @@ -89,20 +91,21 @@ def test_write_points_from_dataframe_with_numeric_column_names(self): '1': 1, '2': 1.0}, 'tags': {'hello': 'there'}, - 'timestamp': '1970-01-01T00:00:00+00:00', + 'time': '1970-01-01T00:00:00+00:00', 'name': 'foo'}, {'fields': { '0': '2', '1': 2, '2': 2.0}, 'tags': 
{'hello': 'there'}, - 'timestamp': '1970-01-01T01:00:00+00:00', + 'time': '1970-01-01T01:00:00+00:00', 'name': 'foo'}], } with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, - "http://localhost:8086/write") + "http://localhost:8086/write", + status_code=204) cli = DataFrameClient(database='db') cli.write_points(dataframe, "foo", {"hello": "there"}) @@ -123,20 +126,21 @@ def test_write_points_from_dataframe_with_period_index(self): 'column_one': '1', 'column_two': 1, 'column_three': 1.0}, - 'timestamp': '1970-01-01T00:00:00+00:00'}, + 'time': '1970-01-01T00:00:00+00:00'}, {'name': 'foo', 'tags': {}, 'fields': { 'column_one': '2', 'column_two': 2, 'column_three': 2.0}, - 'timestamp': '1970-01-02T00:00:00+00:00'}], + 'time': '1970-01-02T00:00:00+00:00'}], 'database': 'db', } with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, - "http://localhost:8086/write") + "http://localhost:8086/write", + status_code=204) cli = DataFrameClient(database='db') cli.write_points(dataframe, "foo") @@ -152,19 +156,20 @@ def test_write_points_from_dataframe_with_time_precision(self): with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, - "http://localhost:8086/write") + "http://localhost:8086/write", + status_code=204) points = { 'database': 'db', 'points': [ - {'timestamp': '1970-01-01T00:00:00+00:00', + {'time': '1970-01-01T00:00:00+00:00', 'fields': { 'column_one': '1', 'column_three': 1.0, 'column_two': 1}, 'tags': {}, 'name': 'foo'}, - {'timestamp': '1970-01-01T01:00:00+00:00', + {'time': '1970-01-01T01:00:00+00:00', 'fields': { 'column_one': '2', 'column_three': 2.0, @@ -196,7 +201,8 @@ def test_write_points_from_dataframe_fails_without_time_index(self): with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, - "http://localhost:8086/db/db/series") + "http://localhost:8086/db/db/series", + status_code=204) cli = DataFrameClient(database='db') cli.write_points(dataframe, "foo") @@ -209,7 +215,8 @@ def 
test_write_points_from_dataframe_fails_with_series(self): with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, - "http://localhost:8086/db/db/series") + "http://localhost:8086/db/db/series", + status_code=204) cli = DataFrameClient(database='db') cli.write_points(dataframe, "foo") From 78b840139d902ec83688a1aa0768c93170256c1b Mon Sep 17 00:00:00 2001 From: aviau Date: Wed, 13 May 2015 12:18:05 -0400 Subject: [PATCH 178/536] Released v2.3.0 --- influxdb/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/__init__.py b/influxdb/__init__.py index 06df5859..86c78d5c 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -13,4 +13,4 @@ ] -__version__ = '2.2.0' +__version__ = '2.3.0' From acc3e5d1a7f9928279f56ba3730a79ba003af9ae Mon Sep 17 00:00:00 2001 From: aviau Date: Thu, 21 May 2015 09:11:17 -0400 Subject: [PATCH 179/536] Fixed delete_series (Closes #187) --- influxdb/client.py | 8 ++++---- tests/influxdb/client_test_with_server.py | 15 ++++++++++++++- 2 files changed, 18 insertions(+), 5 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 2d2edbff..007115fc 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -605,17 +605,17 @@ def set_user_password(self, username, password): text = "SET PASSWORD FOR {} = '{}'".format(username, password) self.query(text) - def delete_series(self, name, database=None): + def delete_series(self, id, database=None): """Delete series from a database. 
- :param name: the name of the series to be deleted - :type name: str + :param id: the id of the series to be deleted + :type id: int :param database: the database from which the series should be deleted, defaults to client's current database :type database: str """ database = database or self._database - self.query('DROP SERIES \"%s\"' % name, database=database) + self.query('DROP SERIES %s' % id, database=database) def grant_admin_privileges(self, username): """Grant cluster administration privileges to an user. diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index 5d25b914..f0137088 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -749,7 +749,7 @@ def test_get_list_series_empty_DF(self): rsp = self.cliDF.get_list_series() self.assertEqual({}, rsp) - def test_get_list_series(self): + def test_get_list_series_and_delete(self): self.cli.write_points(dummy_point) rsp = self.cli.get_list_series() self.assertEqual( @@ -761,6 +761,19 @@ def test_get_list_series(self): rsp ) + def test_delete_series(self): + self.assertEqual( + len(self.cli.get_list_series()), 0 + ) + self.cli.write_points(dummy_point) + self.assertEqual( + len(self.cli.get_list_series()), 1 + ) + self.cli.delete_series(1) + self.assertEqual( + len(self.cli.get_list_series()), 0 + ) + @skipIfPYpy def test_get_list_series_DF(self): self.cli.write_points(dummy_point) From 4b6e9c29f3477fa29fb42dc5b81f7f53012640f1 Mon Sep 17 00:00:00 2001 From: aviau Date: Thu, 21 May 2015 09:33:17 -0400 Subject: [PATCH 180/536] Skip testWarnBulkSizeZero --- tests/influxdb/helper_test.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/influxdb/helper_test.py b/tests/influxdb/helper_test.py index d5f1d02e..20cb7bbb 100644 --- a/tests/influxdb/helper_test.py +++ b/tests/influxdb/helper_test.py @@ -221,6 +221,7 @@ class Meta: AttributeError, cls, **{'time': 159, 'server_name': 'us.east-1'}) + 
@unittest.skip("Fails on py32") def testWarnBulkSizeZero(self): """ Tests warning for an invalid bulk size. From 781bbde23a2ac1a1e17ec0a4bda6a4d31941e750 Mon Sep 17 00:00:00 2001 From: Alexandre Viau Date: Thu, 21 May 2015 11:53:51 -0400 Subject: [PATCH 181/536] travis_wait for 30 mins instead --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index d724a7c6..d2b9f809 100644 --- a/.travis.yml +++ b/.travis.yml @@ -14,7 +14,7 @@ install: - sudo pip install coveralls - wget http://get.influxdb.org/influxdb_0.9.0-rc30_amd64.deb && sudo dpkg -i influxdb_0.9.0-rc30_amd64.deb script: - - travis_wait tox -e $TOX_ENV + - travis_wait 30 tox -e $TOX_ENV after_success: - if [ "$TOX_ENV" == "coverage" ] ; then coveralls; fi notifications: From 85f5efd90f4ad485ffbaf0b1db37c1a5d9fc7d5a Mon Sep 17 00:00:00 2001 From: Kurt Spindler Date: Sun, 24 May 2015 20:52:55 -0700 Subject: [PATCH 182/536] fix flake8 --- influxdb/influxdb08/client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/influxdb/influxdb08/client.py b/influxdb/influxdb08/client.py index 5eb0e03f..115fa5dd 100644 --- a/influxdb/influxdb08/client.py +++ b/influxdb/influxdb08/client.py @@ -281,8 +281,8 @@ def write_points(self, data, time_precision='s', *args, **kwargs): [ 12 ] - ], - "name": "cpu_load_short", + ], + "name": "cpu_load_short", "columns": [ "value" ] From 06677056f92f1337a849e6fe30b0b9367c91e2e6 Mon Sep 17 00:00:00 2001 From: Kimmo Huoman Date: Mon, 1 Jun 2015 12:53:04 +0300 Subject: [PATCH 183/536] Remove dsn.lower() from influxdb08.InfluxDBClient.from_DSN() --- influxdb/influxdb08/client.py | 1 - 1 file changed, 1 deletion(-) diff --git a/influxdb/influxdb08/client.py b/influxdb/influxdb08/client.py index a9acf624..3fc0bb9e 100644 --- a/influxdb/influxdb08/client.py +++ b/influxdb/influxdb08/client.py @@ -134,7 +134,6 @@ def from_DSN(dsn, **kwargs): udp_port parameter (cf. examples). 
:raise ValueError: if the provided DSN has any unexpected value. """ - dsn = dsn.lower() init_args = {} conn_params = urlparse(dsn) From 45f4b412cc93d6658bc5b814c37c011fefeef881 Mon Sep 17 00:00:00 2001 From: Kimmo Huoman Date: Mon, 1 Jun 2015 14:48:30 +0300 Subject: [PATCH 184/536] Refactor tests. Use `DSN_STRING` and `CLUSTER_DSN_STRING` instead of rewriting the whole string. Makes it easier to change, if and when tests are developed. Change all `assert .* == .*`, `assert .* is True` and `assert .* is False` to use `assertEqual`, `assertTrue` and `assertFalse`. This is mainly for consistency, no "real" reason. --- tests/influxdb/client_test.py | 37 +++++----- tests/influxdb/dataframe_client_test.py | 4 +- tests/influxdb/influxdb08/client_test.py | 67 ++++++++++--------- .../influxdb08/dataframe_client_test.py | 9 ++- 4 files changed, 59 insertions(+), 58 deletions(-) diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index 05cde546..cdcda278 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -29,6 +29,9 @@ from influxdb import InfluxDBClient, InfluxDBClusterClient from influxdb.client import InfluxDBServerError +DSN_STRING = 'influxdb://uSr:pWd@host:1886/db' +CLUSTER_DSN_STRING = 'influxdb://uSr:pWd@host1:8086,uSr:pWd@host2:8086/db' + def _build_response_object(status_code=200, content=""): resp = requests.Response() @@ -105,20 +108,20 @@ def test_scheme(self): self.assertEqual('https://host:8086', cli._baseurl) def test_dsn(self): - cli = InfluxDBClient.from_DSN('influxdb://usr:pwd@host:1886/db') + cli = InfluxDBClient.from_DSN(DSN_STRING) self.assertEqual('http://host:1886', cli._baseurl) - self.assertEqual('usr', cli._username) - self.assertEqual('pwd', cli._password) + self.assertEqual('uSr', cli._username) + self.assertEqual('pWd', cli._password) self.assertEqual('db', cli._database) self.assertFalse(cli.use_udp) - cli = InfluxDBClient.from_DSN('udp+influxdb://usr:pwd@host:1886/db') + cli = 
InfluxDBClient.from_DSN('udp+' + DSN_STRING) self.assertTrue(cli.use_udp) - cli = InfluxDBClient.from_DSN('https+influxdb://usr:pwd@host:1886/db') + cli = InfluxDBClient.from_DSN('https+' + DSN_STRING) self.assertEqual('https://host:1886', cli._baseurl) - cli = InfluxDBClient.from_DSN('https+influxdb://usr:pwd@host:1886/db', + cli = InfluxDBClient.from_DSN('https+' + DSN_STRING, **{'ssl': False}) self.assertEqual('http://host:1886', cli._baseurl) @@ -808,33 +811,29 @@ def test_recovery(self): self.assertEqual(2, len(cluster.bad_clients)) def test_dsn(self): - cli = InfluxDBClusterClient.from_DSN( - 'influxdb://usr:pwd@host1:8086,usr:pwd@host2:8086/db') + cli = InfluxDBClusterClient.from_DSN(CLUSTER_DSN_STRING) self.assertEqual(2, len(cli.clients)) self.assertEqual('http://host1:8086', cli.clients[0]._baseurl) - self.assertEqual('usr', cli.clients[0]._username) - self.assertEqual('pwd', cli.clients[0]._password) + self.assertEqual('uSr', cli.clients[0]._username) + self.assertEqual('pWd', cli.clients[0]._password) self.assertEqual('db', cli.clients[0]._database) self.assertFalse(cli.clients[0].use_udp) self.assertEqual('http://host2:8086', cli.clients[1]._baseurl) - self.assertEqual('usr', cli.clients[1]._username) - self.assertEqual('pwd', cli.clients[1]._password) + self.assertEqual('uSr', cli.clients[1]._username) + self.assertEqual('pWd', cli.clients[1]._password) self.assertEqual('db', cli.clients[1]._database) self.assertFalse(cli.clients[1].use_udp) - cli = InfluxDBClusterClient.from_DSN( - 'udp+influxdb://usr:pwd@host1:8086,usr:pwd@host2:8086/db') + cli = InfluxDBClusterClient.from_DSN('udp+' + CLUSTER_DSN_STRING) self.assertTrue(cli.clients[0].use_udp) self.assertTrue(cli.clients[1].use_udp) - cli = InfluxDBClusterClient.from_DSN( - 'https+influxdb://usr:pwd@host1:8086,usr:pwd@host2:8086/db') + cli = InfluxDBClusterClient.from_DSN('https+' + CLUSTER_DSN_STRING) self.assertEqual('https://host1:8086', cli.clients[0]._baseurl) 
self.assertEqual('https://host2:8086', cli.clients[1]._baseurl) - cli = InfluxDBClusterClient.from_DSN( - 'https+influxdb://usr:pwd@host1:8086,usr:pwd@host2:8086/db', - **{'ssl': False}) + cli = InfluxDBClusterClient.from_DSN('https+' + CLUSTER_DSN_STRING, + **{'ssl': False}) self.assertEqual('http://host1:8086', cli.clients[0]._baseurl) self.assertEqual('http://host2:8086', cli.clients[1]._baseurl) diff --git a/tests/influxdb/dataframe_client_test.py b/tests/influxdb/dataframe_client_test.py index 5b517320..c73ad7af 100644 --- a/tests/influxdb/dataframe_client_test.py +++ b/tests/influxdb/dataframe_client_test.py @@ -75,7 +75,7 @@ def test_write_points_from_dataframe_in_batches(self): status_code=204) cli = DataFrameClient(database='db') - assert cli.write_points(dataframe, "foo", batch_size=1) is True + self.assertTrue(cli.write_points(dataframe, "foo", batch_size=1)) def test_write_points_from_dataframe_with_numeric_column_names(self): now = pd.Timestamp('1970-01-01 00:00+00:00') @@ -266,7 +266,7 @@ def test_query_with_empty_result(self): cli = DataFrameClient('host', 8086, 'username', 'password', 'db') with _mocked_session(cli, 'GET', 200, {"results": [{}]}): result = cli.query('select column_one from foo;') - assert result == {} + self.assertEqual(result, {}) def test_list_series(self): response = { diff --git a/tests/influxdb/influxdb08/client_test.py b/tests/influxdb/influxdb08/client_test.py index e9cdc0b5..23c3b996 100644 --- a/tests/influxdb/influxdb08/client_test.py +++ b/tests/influxdb/influxdb08/client_test.py @@ -27,6 +27,8 @@ def u(x): def u(x): return x +DSN_STRING = 'influxdb://uSr:pWd@host:1886/db' + def _build_response_object(status_code=200, content=""): resp = requests.Response() @@ -91,47 +93,47 @@ def setUp(self): def test_scheme(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'database') - assert cli._baseurl == 'http://host:8086' + self.assertEqual(cli._baseurl, 'http://host:8086') cli = InfluxDBClient( 'host', 8086, 
'username', 'password', 'database', ssl=True ) - assert cli._baseurl == 'https://host:8086' + self.assertEqual(cli._baseurl, 'https://host:8086') def test_dsn(self): - cli = InfluxDBClient.from_DSN('influxdb://usr:pwd@host:1886/db') - assert cli._baseurl == 'http://host:1886' - assert cli._username == 'usr' - assert cli._password == 'pwd' - assert cli._database == 'db' - assert cli.use_udp is False + cli = InfluxDBClient.from_DSN(DSN_STRING) + self.assertEqual('http://host:1886', cli._baseurl) + self.assertEqual('uSr', cli._username) + self.assertEqual('pWd', cli._password) + self.assertEqual('db', cli._database) + self.assertFalse(cli.use_udp) - cli = InfluxDBClient.from_DSN('udp+influxdb://usr:pwd@host:1886/db') - assert cli.use_udp is True + cli = InfluxDBClient.from_DSN('udp+' + DSN_STRING) + self.assertTrue(cli.use_udp) - cli = InfluxDBClient.from_DSN('https+influxdb://usr:pwd@host:1886/db') - assert cli._baseurl == 'https://host:1886' + cli = InfluxDBClient.from_DSN('https+' + DSN_STRING) + self.assertEqual('https://host:1886', cli._baseurl) - cli = InfluxDBClient.from_DSN('https+influxdb://usr:pwd@host:1886/db', + cli = InfluxDBClient.from_DSN('https+' + DSN_STRING, **{'ssl': False}) - assert cli._baseurl == 'http://host:1886' + self.assertEqual('http://host:1886', cli._baseurl) def test_switch_database(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'database') cli.switch_database('another_database') - assert cli._database == 'another_database' + self.assertEqual(cli._database, 'another_database') @raises(FutureWarning) def test_switch_db_deprecated(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'database') cli.switch_db('another_database') - assert cli._database == 'another_database' + self.assertEqual(cli._database, 'another_database') def test_switch_user(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'database') cli.switch_user('another_username', 'another_password') - assert cli._username == 
'another_username' - assert cli._password == 'another_password' + self.assertEqual(cli._username, 'another_username') + self.assertEqual(cli._password, 'another_password') def test_write(self): with requests_mock.Mocker() as m: @@ -250,8 +252,8 @@ def test_write_points_udp(self): received_data, addr = s.recvfrom(1024) - assert self.dummy_points == \ - json.loads(received_data.decode(), strict=True) + self.assertEqual(self.dummy_points, + json.loads(received_data.decode(), strict=True)) def test_write_bad_precision_udp(self): cli = InfluxDBClient( @@ -277,7 +279,7 @@ def test_write_points_fails(self): def test_write_points_with_precision(self): with _mocked_session('post', 200, self.dummy_points): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') - assert cli.write_points(self.dummy_points) is True + self.assertTrue(cli.write_points(self.dummy_points)) def test_write_points_bad_precision(self): cli = InfluxDBClient() @@ -299,13 +301,14 @@ def test_write_points_with_precision_fails(self): def test_delete_points(self): with _mocked_session('delete', 204) as mocked: cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') - assert cli.delete_points("foo") is True + self.assertTrue(cli.delete_points("foo")) - assert len(mocked.call_args_list) == 1 + self.assertEqual(len(mocked.call_args_list), 1) args, kwds = mocked.call_args_list[0] - assert kwds['params'] == {'u': 'username', 'p': 'password'} - assert kwds['url'] == 'http://host:8086/db/db/series/foo' + self.assertEqual(kwds['params'], + {'u': 'username', 'p': 'password'}) + self.assertEqual(kwds['url'], 'http://host:8086/db/db/series/foo') @raises(Exception) def test_delete_points_with_wrong_name(self): @@ -342,7 +345,7 @@ def test_query(self): with _mocked_session('get', 200, data): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') result = cli.query('select column_one from foo;') - assert len(result[0]['points']) == 4 + self.assertEqual(len(result[0]['points']), 4) def 
test_query_chunked(self): cli = InfluxDBClient(database='db') @@ -422,7 +425,7 @@ def test_query_bad_precision(self): def test_create_database(self): with _mocked_session('post', 201, {"name": "new_db"}): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') - assert cli.create_database('new_db') is True + self.assertTrue(cli.create_database('new_db')) @raises(Exception) def test_create_database_fails(self): @@ -433,7 +436,7 @@ def test_create_database_fails(self): def test_delete_database(self): with _mocked_session('delete', 204): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') - assert cli.delete_database('old_db') is True + self.assertTrue(cli.delete_database('old_db')) @raises(Exception) def test_delete_database_fails(self): @@ -447,8 +450,8 @@ def test_get_list_database(self): ] with _mocked_session('get', 200, data): cli = InfluxDBClient('host', 8086, 'username', 'password') - assert len(cli.get_list_database()) == 1 - assert cli.get_list_database()[0]['name'] == 'a_db' + self.assertEqual(len(cli.get_list_database()), 1) + self.assertEqual(cli.get_list_database()[0]['name'], 'a_db') @raises(Exception) def test_get_list_database_fails(self): @@ -463,8 +466,8 @@ def test_get_database_list_deprecated(self): ] with _mocked_session('get', 200, data): cli = InfluxDBClient('host', 8086, 'username', 'password') - assert len(cli.get_database_list()) == 1 - assert cli.get_database_list()[0]['name'] == 'a_db' + self.assertEqual(len(cli.get_database_list()), 1) + self.assertEqual(cli.get_database_list()[0]['name'], 'a_db') def test_delete_series(self): with _mocked_session('delete', 204): diff --git a/tests/influxdb/influxdb08/dataframe_client_test.py b/tests/influxdb/influxdb08/dataframe_client_test.py index 9fc54b9e..ebea3616 100644 --- a/tests/influxdb/influxdb08/dataframe_client_test.py +++ b/tests/influxdb/influxdb08/dataframe_client_test.py @@ -63,8 +63,7 @@ def test_write_points_from_dataframe_in_batches(self): 
"http://localhost:8086/db/db/series") cli = DataFrameClient(database='db') - assert cli.write_points({"foo": dataframe}, - batch_size=1) is True + self.assertTrue(cli.write_points({"foo": dataframe}, batch_size=1)) def test_write_points_from_dataframe_with_numeric_column_names(self): now = pd.Timestamp('1970-01-01 00:00+00:00') @@ -239,7 +238,7 @@ def test_query_multiple_time_series(self): cli = DataFrameClient('host', 8086, 'username', 'password', 'db') result = cli.query("""select mean(value), min(value), max(value), stddev(value) from series1, series2, series3""") - assert dataframes.keys() == result.keys() + self.assertEqual(dataframes.keys(), result.keys()) for key in dataframes.keys(): assert_frame_equal(dataframes[key], result[key]) @@ -247,7 +246,7 @@ def test_query_with_empty_result(self): with _mocked_session('get', 200, []): cli = DataFrameClient('host', 8086, 'username', 'password', 'db') result = cli.query('select column_one from foo;') - assert result == [] + self.assertEqual(result, []) def test_list_series(self): response = [ @@ -260,7 +259,7 @@ def test_list_series(self): with _mocked_session('get', 200, response): cli = DataFrameClient('host', 8086, 'username', 'password', 'db') series_list = cli.get_list_series() - assert series_list == ['seriesA', 'seriesB'] + self.assertEqual(series_list, ['seriesA', 'seriesB']) def test_datetime_to_epoch(self): timestamp = pd.Timestamp('2013-01-01 00:00:00.000+00:00') From 8a9b09cf81d1602f1c64f79eb0cd1237ffd15a9d Mon Sep 17 00:00:00 2001 From: Kimmo Huoman Date: Mon, 1 Jun 2015 20:37:33 +0300 Subject: [PATCH 185/536] Change DSN-constants to be initialized in setUp. 
--- tests/influxdb/client_test.py | 22 +++++++++++----------- tests/influxdb/influxdb08/client_test.py | 12 ++++++------ 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index cdcda278..e30cdf36 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -29,9 +29,6 @@ from influxdb import InfluxDBClient, InfluxDBClusterClient from influxdb.client import InfluxDBServerError -DSN_STRING = 'influxdb://uSr:pWd@host:1886/db' -CLUSTER_DSN_STRING = 'influxdb://uSr:pWd@host1:8086,uSr:pWd@host2:8086/db' - def _build_response_object(status_code=200, content=""): resp = requests.Response() @@ -98,6 +95,8 @@ def setUp(self): } ] + self.dsn_string = 'influxdb://uSr:pWd@host:1886/db' + def test_scheme(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'database') self.assertEqual('http://host:8086', cli._baseurl) @@ -108,20 +107,20 @@ def test_scheme(self): self.assertEqual('https://host:8086', cli._baseurl) def test_dsn(self): - cli = InfluxDBClient.from_DSN(DSN_STRING) + cli = InfluxDBClient.from_DSN(self.dsn_string) self.assertEqual('http://host:1886', cli._baseurl) self.assertEqual('uSr', cli._username) self.assertEqual('pWd', cli._password) self.assertEqual('db', cli._database) self.assertFalse(cli.use_udp) - cli = InfluxDBClient.from_DSN('udp+' + DSN_STRING) + cli = InfluxDBClient.from_DSN('udp+' + self.dsn_string) self.assertTrue(cli.use_udp) - cli = InfluxDBClient.from_DSN('https+' + DSN_STRING) + cli = InfluxDBClient.from_DSN('https+' + self.dsn_string) self.assertEqual('https://host:1886', cli._baseurl) - cli = InfluxDBClient.from_DSN('https+' + DSN_STRING, + cli = InfluxDBClient.from_DSN('https+' + self.dsn_string, **{'ssl': False}) self.assertEqual('http://host:1886', cli._baseurl) @@ -745,6 +744,7 @@ def setUp(self): warnings.simplefilter('error', FutureWarning) self.hosts = [('host1', 8086), ('host2', 8086), ('host3', 8086)] + self.dsn_string = 
'influxdb://uSr:pWd@host1:8086,uSr:pWd@host2:8086/db' def test_init(self): cluster = InfluxDBClusterClient(hosts=self.hosts, @@ -811,7 +811,7 @@ def test_recovery(self): self.assertEqual(2, len(cluster.bad_clients)) def test_dsn(self): - cli = InfluxDBClusterClient.from_DSN(CLUSTER_DSN_STRING) + cli = InfluxDBClusterClient.from_DSN(self.dsn_string) self.assertEqual(2, len(cli.clients)) self.assertEqual('http://host1:8086', cli.clients[0]._baseurl) self.assertEqual('uSr', cli.clients[0]._username) @@ -824,15 +824,15 @@ def test_dsn(self): self.assertEqual('db', cli.clients[1]._database) self.assertFalse(cli.clients[1].use_udp) - cli = InfluxDBClusterClient.from_DSN('udp+' + CLUSTER_DSN_STRING) + cli = InfluxDBClusterClient.from_DSN('udp+' + self.dsn_string) self.assertTrue(cli.clients[0].use_udp) self.assertTrue(cli.clients[1].use_udp) - cli = InfluxDBClusterClient.from_DSN('https+' + CLUSTER_DSN_STRING) + cli = InfluxDBClusterClient.from_DSN('https+' + self.dsn_string) self.assertEqual('https://host1:8086', cli.clients[0]._baseurl) self.assertEqual('https://host2:8086', cli.clients[1]._baseurl) - cli = InfluxDBClusterClient.from_DSN('https+' + CLUSTER_DSN_STRING, + cli = InfluxDBClusterClient.from_DSN('https+' + self.dsn_string, **{'ssl': False}) self.assertEqual('http://host1:8086', cli.clients[0]._baseurl) self.assertEqual('http://host2:8086', cli.clients[1]._baseurl) diff --git a/tests/influxdb/influxdb08/client_test.py b/tests/influxdb/influxdb08/client_test.py index 23c3b996..343f1d22 100644 --- a/tests/influxdb/influxdb08/client_test.py +++ b/tests/influxdb/influxdb08/client_test.py @@ -27,8 +27,6 @@ def u(x): def u(x): return x -DSN_STRING = 'influxdb://uSr:pWd@host:1886/db' - def _build_response_object(status_code=200, content=""): resp = requests.Response() @@ -91,6 +89,8 @@ def setUp(self): } ] + self.dsn_string = 'influxdb://uSr:pWd@host:1886/db' + def test_scheme(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'database') 
self.assertEqual(cli._baseurl, 'http://host:8086') @@ -101,20 +101,20 @@ def test_scheme(self): self.assertEqual(cli._baseurl, 'https://host:8086') def test_dsn(self): - cli = InfluxDBClient.from_DSN(DSN_STRING) + cli = InfluxDBClient.from_DSN(self.dsn_string) self.assertEqual('http://host:1886', cli._baseurl) self.assertEqual('uSr', cli._username) self.assertEqual('pWd', cli._password) self.assertEqual('db', cli._database) self.assertFalse(cli.use_udp) - cli = InfluxDBClient.from_DSN('udp+' + DSN_STRING) + cli = InfluxDBClient.from_DSN('udp+' + self.dsn_string) self.assertTrue(cli.use_udp) - cli = InfluxDBClient.from_DSN('https+' + DSN_STRING) + cli = InfluxDBClient.from_DSN('https+' + self.dsn_string) self.assertEqual('https://host:1886', cli._baseurl) - cli = InfluxDBClient.from_DSN('https+' + DSN_STRING, + cli = InfluxDBClient.from_DSN('https+' + self.dsn_string, **{'ssl': False}) self.assertEqual('http://host:1886', cli._baseurl) From 314b4bc9f31811d42de951da53d5d232ff4128bf Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 12 Jun 2015 14:34:25 -0400 Subject: [PATCH 186/536] Released 2.4.0 --- influxdb/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/__init__.py b/influxdb/__init__.py index 86c78d5c..d1898c93 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -13,4 +13,4 @@ ] -__version__ = '2.3.0' +__version__ = '2.4.0' From 784b1443a7344f96648d864d385aaf116e7fa914 Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 12 Jun 2015 22:12:57 -0400 Subject: [PATCH 187/536] Config sample: Added handoff_dir --- tests/influxdb/client_test_with_server.py | 1 + tests/influxdb/influxdb.conf.template | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index c196d164..85ae0d37 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -182,6 +182,7 @@ def __init__(self, 
conf_template, udp_enabled=False): meta_dir=os.path.join(tempdir, 'meta'), data_dir=os.path.join(tempdir, 'data'), cluster_dir=os.path.join(tempdir, 'state'), + handoff_dir=os.path.join(tempdir, 'handoff'), logs_file=os.path.join(self.temp_dir_base, 'logs.txt'), udp_enabled='true' if udp_enabled else 'false', ) diff --git a/tests/influxdb/influxdb.conf.template b/tests/influxdb/influxdb.conf.template index a7c0d838..92e381ee 100644 --- a/tests/influxdb/influxdb.conf.template +++ b/tests/influxdb/influxdb.conf.template @@ -74,7 +74,7 @@ [hinted-handoff] enabled = false - dir = "/home/reazem/.influxdb/hh" + dir = "{handoff_dir}" max-size = 1073741824 max-age = "168h0m0s" retry-rate-limit = 0 From 3e1a03caf5fa607efc6201be9514f5a42530e485 Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 12 Jun 2015 22:36:46 -0400 Subject: [PATCH 188/536] Removed useless unicode strings --- tests/influxdb/client_test_with_server.py | 34 +++++++++++------------ 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index 85ae0d37..8c0bdc2d 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -762,12 +762,12 @@ def test_get_list_series_and_delete(self): rsp = self.cli.get_list_series() self.assertEqual( [ - {'name': u'cpu_load_short', + {'name': 'cpu_load_short', 'tags': [ - {u'host': u'server01', - u'region': u'us-west', - u'_key': - u'cpu_load_short,host=server01,region=us-west'}]} + {'host': 'server01', + 'region': 'us-west', + '_key': + 'cpu_load_short,host=server01,region=us-west'}]} ], rsp ) @@ -828,18 +828,18 @@ def test_create_retention_policy_default(self): self.assertEqual( [ - {u'duration': u'0', - u'default': False, - u'replicaN': 1, - u'name': u'default'}, - {u'duration': u'24h0m0s', - u'default': True, - u'replicaN': 1, - u'name': u'somename'}, - {u'duration': u'48h0m0s', - u'default': False, - u'replicaN': 1, - u'name': 
u'another'} + {'duration': '0', + 'default': False, + 'replicaN': 1, + 'name': 'default'}, + {'duration': '24h0m0s', + 'default': True, + 'replicaN': 1, + 'name': 'somename'}, + {'duration': '48h0m0s', + 'default': False, + 'replicaN': 1, + 'name': 'another'} ], rsp ) From cf1ef9b974cf56f3dfcef625ae3c14c516dbb1d2 Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 12 Jun 2015 21:56:33 -0400 Subject: [PATCH 189/536] Refactored ResultSet --- docs/source/resultset.rst | 26 +++++++---- influxdb/client.py | 33 +++++-------- influxdb/exceptions.py | 22 +++++++++ influxdb/resultset.py | 29 +++++++----- tests/influxdb/client_test.py | 2 +- tests/influxdb/client_test_with_server.py | 52 +++++++++------------ tests/influxdb/resultset_test.py | 56 ++++++++++++++--------- 7 files changed, 125 insertions(+), 95 deletions(-) create mode 100644 influxdb/exceptions.py diff --git a/docs/source/resultset.rst b/docs/source/resultset.rst index 48ddf709..f498e288 100644 --- a/docs/source/resultset.rst +++ b/docs/source/resultset.rst @@ -7,33 +7,39 @@ Query response object: ResultSet Using the ``InfluxDBClient.query()`` function will return a ``ResultSet`` Object. -A ResultSet behaves like a dict. Its keys are series and values are points. However, it is a little bit smarter than a regular dict. Its ``__getitem__`` method can be used to query the ResultSet in several ways. +A ResultSet can be browsed in several ways. Its ``get_points`` method can be used to retrieve points generators that filter either by measurement, tags, or both. -Filtering by serie name ------------------------ +Getting all points +------------------ -Using ``rs['cpu']`` will return a generator for all the points that are in a serie named ``cpu``, no matter the tags. +Using ``rs.get_points()`` will return a generator for all the points in the ResultSet. 
+ + +Filtering by measurement +------------------------ + +Using ``rs.get_points('cpu')`` will return a generator for all the points that are in a serie with measurement name ``cpu``, no matter the tags. :: rs = cli.query("SELECT * from cpu") - cpu_points = list(rs['cpu']) + cpu_points = list(rs.get_points(measurement='cpu')]) Filtering by tags ----------------- -Using ``rs[{'host_name': 'influxdb.com'}]`` will return a generator for all the points that are tagged with the specified tags, no matter the serie name. +Using ``rs.get_points(tags={'host_name': 'influxdb.com'})`` will return a generator for all the points that are tagged with the specified tags, no matter the measurement name. :: rs = cli.query("SELECT * from cpu") cpu_influxdb_com_points = list(rs[{"host_name": "influxdb.com"}]) -Filtering by serie name and tags --------------------------------- +Filtering by measurement and tags +--------------------------------- -Using a tuple with a serie name and a dict will return a generator for all the points that are in a serie with the given name AND whose tags match the given tags. +Using measurement name and tags will return a generator for all the points that are in a serie with the specified measurement name AND whose tags match the given tags. :: rs = cli.query("SELECT * from cpu") - points = list(rs[('cpu', {'host_name': 'influxdb.com'})]) + points = list(rs.get_points(measurement='cpu', tags={'host_name': 'influxdb.com'})) See the :ref:`api-documentation` page for more information. 
diff --git a/influxdb/client.py b/influxdb/client.py index 007115fc..cb2084ed 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -12,6 +12,8 @@ from sys import version_info from influxdb.resultset import ResultSet +from .exceptions import InfluxDBClientError +from .exceptions import InfluxDBServerError try: xrange @@ -24,23 +26,6 @@ from urlparse import urlparse -class InfluxDBClientError(Exception): - """Raised when an error occurs in the request.""" - def __init__(self, content, code): - if isinstance(content, type(b'')): - content = content.decode('UTF-8', errors='replace') - super(InfluxDBClientError, self).__init__( - "{0}: {1}".format(code, content)) - self.content = content - self.code = code - - -class InfluxDBServerError(Exception): - """Raised when a server error occurs.""" - def __init__(self, content): - super(InfluxDBServerError, self).__init__(content) - - class InfluxDBClient(object): """The :class:`~.InfluxDBClient` object holds information necessary to connect to InfluxDB. Requests can be made to InfluxDB directly through @@ -310,7 +295,13 @@ def query(self, data = response.json() - return ResultSet(data) + results = [ResultSet(result) for result in data.get('results', [])] + + # TODO(aviau): Always return a list. (This would be a breaking change) + if len(results) == 1: + return results[0] + else: + return results def write_points(self, points, @@ -420,7 +411,7 @@ def get_list_database(self): >>> dbs [{u'name': u'db1'}, {u'name': u'db2'}, {u'name': u'db3'}] """ - return list(self.query("SHOW DATABASES")['databases']) + return list(self.query("SHOW DATABASES").get_points()) def create_database(self, dbname): """Create a new database in InfluxDB. @@ -527,7 +518,7 @@ def get_list_retention_policies(self, database=None): rsp = self.query( "SHOW RETENTION POLICIES %s" % (database or self._database) ) - return list(rsp['results']) + return list(rsp.get_points()) def get_list_series(self, database=None): """Get the list of series for a database. 
@@ -572,7 +563,7 @@ def get_list_users(self): {u'admin': False, u'user': u'user2'}, {u'admin': False, u'user': u'user3'}] """ - return list(self.query("SHOW USERS")["results"]) + return list(self.query("SHOW USERS").get_points()) def create_user(self, username, password): """Create a new user in InfluxDB diff --git a/influxdb/exceptions.py b/influxdb/exceptions.py new file mode 100644 index 00000000..33c909b6 --- /dev/null +++ b/influxdb/exceptions.py @@ -0,0 +1,22 @@ +class InfluxDBClientError(Exception): + """Raised when an error occurs in the request.""" + def __init__(self, content, code=None): + if isinstance(content, type(b'')): + content = content.decode('UTF-8', errors='replace') + + if code is not None: + message = "%s: %s" % (code, content) + else: + message = content + + super(InfluxDBClientError, self).__init__( + message + ) + self.content = content + self.code = code + + +class InfluxDBServerError(Exception): + """Raised when a server error occurs.""" + def __init__(self, content): + super(InfluxDBServerError, self).__init__(content) diff --git a/influxdb/resultset.py b/influxdb/resultset.py index c3c4c33f..f7d59f31 100644 --- a/influxdb/resultset.py +++ b/influxdb/resultset.py @@ -1,14 +1,19 @@ # -*- coding: utf-8 -*- +from influxdb.exceptions import InfluxDBClientError + _sentinel = object() class ResultSet(object): - """A wrapper around series results """ + """A wrapper around a single InfluxDB query result""" def __init__(self, series): self._raw = series + if 'error' in self.raw: + raise InfluxDBClientError(self.raw['error']) + @property def raw(self): """Raw JSON from InfluxDB""" @@ -46,8 +51,14 @@ def __getitem__(self, key): name = key tags = None - if not isinstance(name, (bytes, type(b''.decode()), type(None))): - raise TypeError('serie_name must be an str or None') + return self.get_points(name, tags) + + def get_points(self, measurement=None, tags=None): + + # Raise error if measurement is not str or bytes + if not isinstance(measurement, + 
(bytes, type(b''.decode()), type(None))): + raise TypeError('measurement must be an str or None') for serie in self._get_series(): serie_name = serie.get('measurement', serie.get('name', 'results')) @@ -55,14 +66,14 @@ def __getitem__(self, key): # this is a "system" query or a query which # doesn't return a name attribute. # like 'show retention policies' .. - if key is None: + if tags is None: for point in serie['values']: yield self.point_from_cols_vals( serie['columns'], point ) - elif name in (None, serie_name): + elif measurement in (None, serie_name): # by default if no tags was provided then # we will matches every returned serie serie_tags = serie.get('tags', {}) @@ -101,13 +112,7 @@ def _tag_matches(self, tags, filter): def _get_series(self): """Returns all series""" - series = [] - try: - for result in self.raw['results']: - series.extend(result['series']) - except KeyError: - pass - return series + return self.raw.get('series', []) def __len__(self): return len(self.keys()) diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index 605713fb..8e76d271 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -337,7 +337,7 @@ def test_query(self): rs = self.cli.query('select * from foo') self.assertListEqual( - list(rs['cpu_load_short']), + list(rs[0].get_points()), [{'value': 0.64, 'time': '2009-11-10T23:00:00Z'}] ) diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index 8c0bdc2d..f665067b 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -29,7 +29,7 @@ warnings.simplefilter('error', FutureWarning) from influxdb import InfluxDBClient -from influxdb.client import InfluxDBClientError +from influxdb.exceptions import InfluxDBClientError from tests.influxdb.misc import get_free_port, is_port_open from tests import skipIfPYpy, using_pypy @@ -337,13 +337,11 @@ def test_create_database(self): [{'name': 
'new_db_1'}, {'name': 'new_db_2'}] ) - @unittest.skip("Broken as of 0.9.0-rc30") def test_create_database_fails(self): self.assertIsNone(self.cli.create_database('new_db')) with self.assertRaises(InfluxDBClientError) as ctx: self.cli.create_database('new_db') - self.assertEqual(500, ctx.exception.code) - self.assertEqual('{"results":[{"error":"database exists"}]}', + self.assertEqual('database already exists', ctx.exception.content) def test_get_list_series_empty(self): @@ -360,20 +358,16 @@ def test_drop_database(self): self.assertIsNone(self.cli.drop_database('new_db_1')) self.assertEqual([{'name': 'new_db_2'}], self.cli.get_list_database()) - @unittest.skip("Broken as of 0.9.0-rc30") def test_drop_database_fails(self): with self.assertRaises(InfluxDBClientError) as ctx: self.cli.drop_database('db') - self.assertEqual(500, ctx.exception.code) - self.assertIn('{"results":[{"error":"database not found: db', + self.assertIn('database not found: db', ctx.exception.content) - @unittest.skip("Broken as of 0.9.0-rc30") def test_query_fail(self): with self.assertRaises(InfluxDBClientError) as ctx: self.cli.query('select column_one from foo') - self.assertEqual(500, ctx.exception.code) - self.assertIn('{"results":[{"error":"database not found: db', + self.assertIn('database not found: db', ctx.exception.content) def test_create_user(self): @@ -388,6 +382,19 @@ def test_create_user_blank_password(self): self.assertIn({'user': 'test_user', 'admin': False}, rsp) + def test_get_list_users_empty(self): + rsp = self.cli.get_list_users() + self.assertEqual([], rsp) + + def test_get_list_users(self): + self.cli.query("CREATE USER test WITH PASSWORD 'test'") + rsp = self.cli.get_list_users() + + self.assertEqual( + [{'user': 'test', 'admin': False}], + rsp + ) + def test_create_user_blank_username(self): with self.assertRaises(InfluxDBClientError) as ctx: self.cli.create_user('', 'secret_password') @@ -414,12 +421,10 @@ def test_drop_user(self): users = list(self.cli.query("SHOW 
USERS")['results']) self.assertEqual(users, []) - @unittest.skip("Broken as of 0.9.0-rc30") def test_drop_user_nonexisting(self): with self.assertRaises(InfluxDBClientError) as ctx: self.cli.drop_user('test') - self.assertEqual(500, ctx.exception.code) - self.assertIn('{"results":[{"error":"user not found"}]}', + self.assertIn('user not found', ctx.exception.content) def test_drop_user_invalid(self): @@ -550,7 +555,7 @@ def test_write_points_check_read(self): [[{'value': 0.64, 'time': '2009-11-10T23:00:00Z'}]] ) - rsp2 = list(rsp['cpu_load_short']) + rsp2 = list(rsp.get_points()) self.assertEqual(len(rsp2), 1) pt = rsp2[0] @@ -634,10 +639,10 @@ def test_write_points_batch(self): batch_size=2) time.sleep(5) net_in = self.cli.query("SELECT value FROM network " - "WHERE direction='in'").raw['results'][0] + "WHERE direction='in'").raw net_out = self.cli.query("SELECT value FROM network " - "WHERE direction='out'").raw['results'][0] - cpu = self.cli.query("SELECT value FROM cpu_usage").raw['results'][0] + "WHERE direction='out'").raw + cpu = self.cli.query("SELECT value FROM cpu_usage").raw self.assertIn(123, net_in['series'][0]['values'][0]) self.assertIn(12, net_out['series'][0]['values'][0]) self.assertIn(12.34, cpu['series'][0]['values'][0]) @@ -796,19 +801,6 @@ def test_get_list_series_DF(self): columns=['_id', 'host', 'region']) assert_frame_equal(rsp['cpu_load_short'], expected) - def test_get_list_users_empty(self): - rsp = self.cli.get_list_users() - self.assertEqual([], rsp) - - def test_get_list_users_non_empty(self): - self.cli.query("CREATE USER test WITH PASSWORD 'test'") - rsp = self.cli.get_list_users() - - self.assertEqual( - [{'user': 'test', 'admin': False}], - rsp - ) - def test_default_retention_policy(self): rsp = self.cli.get_list_retention_policies() self.assertEqual( diff --git a/tests/influxdb/resultset_test.py b/tests/influxdb/resultset_test.py index d585a241..ce5fd41a 100644 --- a/tests/influxdb/resultset_test.py +++ 
b/tests/influxdb/resultset_test.py @@ -2,6 +2,7 @@ import unittest +from influxdb.exceptions import InfluxDBClientError from influxdb.resultset import ResultSet @@ -33,24 +34,33 @@ def setUp(self): ]}]} ] } - self.rs = ResultSet(self.query_response) + self.rs = ResultSet(self.query_response['results'][0]) def test_filter_by_name(self): + expected = [ + {'value': 0.64, 'time': '2015-01-29T21:51:28.968422294Z'}, + {'value': 0.65, 'time': '2015-01-29T21:51:28.968422294Z'} + ] + + self.assertEqual(expected, list(self.rs['cpu_load_short'])) + self.assertEqual(expected, + list(self.rs.get_points( + measurement='cpu_load_short'))) + + def test_filter_by_tags(self): + expected = [ + {'time': '2015-01-29T21:51:28.968422294Z', 'value': 0.64}, + {'time': '2015-01-29T21:51:28.968422294Z', 'value': 0.66} + ] + self.assertEqual( - list(self.rs['cpu_load_short']), - [ - {'value': 0.64, 'time': '2015-01-29T21:51:28.968422294Z'}, - {'value': 0.65, 'time': '2015-01-29T21:51:28.968422294Z'} - ] + expected, + list(self.rs[{"host": "server01"}]) ) - def test_filter_by_tags(self): self.assertEqual( - list(self.rs[{"host": "server01"}]), - [ - {'time': '2015-01-29T21:51:28.968422294Z', 'value': 0.64}, - {'time': '2015-01-29T21:51:28.968422294Z', 'value': 0.66} - ] + expected, + list(self.rs.get_points(tags={'host': 'server01'})) ) def test_filter_by_name_and_tags(self): @@ -120,15 +130,12 @@ def test_point_from_cols_vals(self): def test_system_query(self): rs = ResultSet( - {'results': [ - {'series': [ - {'values': [['another', '48h0m0s', 3, False], - ['default', '0', 1, False], - ['somename', '24h0m0s', 4, True]], - 'columns': ['name', 'duration', - 'replicaN', 'default']}]} - ] - } + {'series': [ + {'values': [['another', '48h0m0s', 3, False], + ['default', '0', 1, False], + ['somename', '24h0m0s', 4, True]], + 'columns': ['name', 'duration', + 'replicaN', 'default']}]} ) self.assertEqual( @@ -147,3 +154,10 @@ def test_system_query(self): 'name': 'somename'} ] ) + + def 
test_resultset_error(self): + with self.assertRaises(InfluxDBClientError): + ResultSet({ + "series": [], + "error": "Big error, many problems." + }) From 0eab673b418142ba491495859695819785179e44 Mon Sep 17 00:00:00 2001 From: Marin Vintila Date: Mon, 15 Jun 2015 13:01:52 +0300 Subject: [PATCH 190/536] Change name to measurement in examples --- examples/tutorial.py | 2 +- examples/tutorial_server_data.py | 2 +- examples/tutorial_sine_wave.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/examples/tutorial.py b/examples/tutorial.py index fa1ebe9f..d144049a 100644 --- a/examples/tutorial.py +++ b/examples/tutorial.py @@ -12,7 +12,7 @@ def main(host='localhost', port=8086): query = 'select value from cpu_load_short;' json_body = [ { - "name": "cpu_load_short", + "measurement": "cpu_load_short", "tags": { "host": "server01", "region": "us-west" diff --git a/examples/tutorial_server_data.py b/examples/tutorial_server_data.py index aa518f95..f17def9b 100644 --- a/examples/tutorial_server_data.py +++ b/examples/tutorial_server_data.py @@ -29,7 +29,7 @@ def main(host='localhost', port=8086, nb_day=15): # pointValues = [int(past_date.strftime('%s')), value, hostName] pointValues = { "timestamp": int(past_date.strftime('%s')), - "name": metric, + "measurement": metric, 'fields': { 'value': value, }, diff --git a/examples/tutorial_sine_wave.py b/examples/tutorial_sine_wave.py index 49e8973b..aad6872d 100644 --- a/examples/tutorial_sine_wave.py +++ b/examples/tutorial_sine_wave.py @@ -22,7 +22,7 @@ def main(host='localhost', port=8086): y = 10 + math.sin(math.radians(angle)) * 10 point = { - "name": 'foobar', + "measurement": 'foobar', "timestamp": int(now.strftime('%s')) + angle, "fields": { "value": y From 5d3aaeb9b88bcfe86077c9ba789f66bbfed6018e Mon Sep 17 00:00:00 2001 From: Alexandre Viau Date: Mon, 15 Jun 2015 08:22:24 -0400 Subject: [PATCH 191/536] Updated InfluxDB 0.8.x notice --- README.rst | 8 +------- 1 file changed, 1 insertion(+), 7 
deletions(-) diff --git a/README.rst b/README.rst index c026f06a..b6c7e2b3 100644 --- a/README.rst +++ b/README.rst @@ -36,16 +36,10 @@ InfluxDB is an open-source distributed time series database, find more about Inf .. _installation: -InfluxDB > v0.9 support -======================= - -The 1.0.0 version of this library now supports InfluxDB 0.9. Please note that InfluxDB 0.9 is still pre-release software. For stability, you should use the ``influxdb.influxdb08`` module in conjunction with InfluxDB 0.8. - - InfluxDB v0.8.X users ===================== -Influxdb >=0.9.0 brings many breaking changes to the API. InfluxDB 0.8.X users may use the legacy client by using ``from influxdb.influxdb08 import InfluxDBClient`` instead. +InfluxDB 0.9 was released and it is the new recommended version. However, InfluxDB 0.8.x users may still use the legacy client by using ``from influxdb.influxdb08 import InfluxDBClient`` instead. Installation ============ From a23e46cdf064ed2a1f37a75c3420083ad512433e Mon Sep 17 00:00:00 2001 From: aviau Date: Mon, 15 Jun 2015 08:27:27 -0400 Subject: [PATCH 192/536] Added default error code to InfluxDBClientError (Closes #159) --- influxdb/influxdb08/client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/influxdb/influxdb08/client.py b/influxdb/influxdb08/client.py index c67fe46f..b5a02bd9 100644 --- a/influxdb/influxdb08/client.py +++ b/influxdb/influxdb08/client.py @@ -25,8 +25,8 @@ class InfluxDBClientError(Exception): - "Raised when an error occurs in the request" - def __init__(self, content, code): + """Raised when an error occurs in the request""" + def __init__(self, content, code=-1): super(InfluxDBClientError, self).__init__( "{0}: {1}".format(code, content)) self.content = content From 3a2a2a94c9f837957052d1c24a79eb9c376f41c3 Mon Sep 17 00:00:00 2001 From: aviau Date: Mon, 15 Jun 2015 08:32:45 -0400 Subject: [PATCH 193/536] Fixed wrong ResultSet example --- docs/source/resultset.rst | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/docs/source/resultset.rst b/docs/source/resultset.rst index f498e288..0a1b7541 100644 --- a/docs/source/resultset.rst +++ b/docs/source/resultset.rst @@ -31,7 +31,7 @@ Using ``rs.get_points(tags={'host_name': 'influxdb.com'})`` will return a genera :: rs = cli.query("SELECT * from cpu") - cpu_influxdb_com_points = list(rs[{"host_name": "influxdb.com"}]) + cpu_influxdb_com_points = list(rs.get_points(tags={"host_name": "influxdb.com"})) Filtering by measurement and tags --------------------------------- From 7d629ccea5feba0034478ada0deb05f72ed2504e Mon Sep 17 00:00:00 2001 From: aviau Date: Mon, 15 Jun 2015 08:38:15 -0400 Subject: [PATCH 194/536] Released 2.5.0 --- influxdb/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/__init__.py b/influxdb/__init__.py index d1898c93..6a86c185 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -13,4 +13,4 @@ ] -__version__ = '2.4.0' +__version__ = '2.5.0' From 99dc3e7c6354d662ba9742a199c84796b9063d2d Mon Sep 17 00:00:00 2001 From: aviau Date: Mon, 15 Jun 2015 08:54:11 -0400 Subject: [PATCH 195/536] Added warning on ResultSet's __getitem__ and documented its replacement --- influxdb/__init__.py | 2 +- influxdb/resultset.py | 20 ++++++++++++++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/influxdb/__init__.py b/influxdb/__init__.py index 6a86c185..774b87e4 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -13,4 +13,4 @@ ] -__version__ = '2.5.0' +__version__ = '2.5.1' diff --git a/influxdb/resultset.py b/influxdb/resultset.py index f7d59f31..8e5b058e 100644 --- a/influxdb/resultset.py +++ b/influxdb/resultset.py @@ -1,5 +1,7 @@ # -*- coding: utf-8 -*- +import warnings + from influxdb.exceptions import InfluxDBClientError _sentinel = object() @@ -37,6 +39,13 @@ def __getitem__(self, key): The order in which the points are yielded is actually undefined but it might change.. 
""" + + warnings.warn( + ("ResultSet's ``__getitem__`` method will be deprecated. Use" + "``get_points`` instead."), + DeprecationWarning + ) + if isinstance(key, tuple): if 2 != len(key): raise TypeError('only 2-tuples allowed') @@ -54,6 +63,17 @@ def __getitem__(self, key): return self.get_points(name, tags) def get_points(self, measurement=None, tags=None): + """ + Returns a generator for all the points that match the given filters. + + :param measurement: The measurement name + :type measurement: str + + :param tags: Tags to look for + :type tags: dict + + :return: Points generator + """ # Raise error if measurement is not str or bytes if not isinstance(measurement, From c3f10130ca11ec99fdd01b250b532b308ddf8aaf Mon Sep 17 00:00:00 2001 From: aviau Date: Mon, 15 Jun 2015 20:16:20 -0400 Subject: [PATCH 196/536] New query option: raise_errors (Closes #201) --- influxdb/client.py | 17 +++++++++++++++-- influxdb/resultset.py | 12 +++++++++--- tests/influxdb/client_test_with_server.py | 5 +++++ 3 files changed, 29 insertions(+), 5 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index cb2084ed..624cafb7 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -267,18 +267,27 @@ def query(self, query, params={}, expected_response_code=200, - database=None): + database=None, + raise_errors=True): """Send a query to InfluxDB. 
:param query: the actual query string :type query: str + :param params: additional parameters for the request, defaults to {} :type params: dict + :param expected_response_code: the expected status code of response, defaults to 200 :type expected_response_code: int + :param database: database to query, defaults to None :type database: str + + :param raise_errors: Whether or not to raise exceptions when InfluxDB + returns errors, defaults to True + :type raise_errors: bool + :returns: the queried data :rtype: :class:`~.ResultSet` """ @@ -295,7 +304,11 @@ def query(self, data = response.json() - results = [ResultSet(result) for result in data.get('results', [])] + results = [ + ResultSet(result, raise_errors=raise_errors) + for result + in data.get('results', []) + ] # TODO(aviau): Always return a list. (This would be a breaking change) if len(results) == 1: diff --git a/influxdb/resultset.py b/influxdb/resultset.py index 8e5b058e..de7b1a3a 100644 --- a/influxdb/resultset.py +++ b/influxdb/resultset.py @@ -10,11 +10,12 @@ class ResultSet(object): """A wrapper around a single InfluxDB query result""" - def __init__(self, series): + def __init__(self, series, raise_errors=True): self._raw = series + self._error = self.raw.get('error', None) - if 'error' in self.raw: - raise InfluxDBClientError(self.raw['error']) + if self.error is not None and raise_errors is True: + raise InfluxDBClientError(self.error) @property def raw(self): @@ -25,6 +26,11 @@ def raw(self): def raw(self, value): self._raw = value + @property + def error(self): + """Error returned by InfluxDB""" + return self._error + def __getitem__(self, key): """ :param key: Either a serie name, or a tags_dict, or diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index f665067b..def0289f 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -370,6 +370,11 @@ def test_query_fail(self): self.assertIn('database not 
found: db', ctx.exception.content) + def test_query_fail_ignore_errors(self): + result = self.cli.query('select column_one from foo', + raise_errors=False) + self.assertEqual(result.error, 'database not found: db') + def test_create_user(self): self.cli.create_user('test_user', 'secret_password') rsp = list(self.cli.query("SHOW USERS")['results']) From f12063c6816931b6544b820e1c63f0950f439d4a Mon Sep 17 00:00:00 2001 From: aviau Date: Tue, 16 Jun 2015 08:31:33 -0400 Subject: [PATCH 197/536] Released 2.6.0 --- influxdb/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/__init__.py b/influxdb/__init__.py index 774b87e4..7acb8439 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -13,4 +13,4 @@ ] -__version__ = '2.5.1' +__version__ = '2.6.0' From e62ce773a9bf7dc196e0b506ebd8067a1a720187 Mon Sep 17 00:00:00 2001 From: Jon Moses Date: Wed, 17 Jun 2015 07:27:42 -0400 Subject: [PATCH 198/536] Replace instances of `timestamp` with `time` for v0.9 release re: https://github.com/influxdb/influxdb/issues/2108 --- README.rst | 2 +- examples/tutorial.py | 2 +- examples/tutorial_server_data.py | 2 +- examples/tutorial_sine_wave.py | 2 +- tests/influxdb/client_test.py | 14 +++++++------- 5 files changed, 11 insertions(+), 11 deletions(-) diff --git a/README.rst b/README.rst index b6c7e2b3..8a363540 100644 --- a/README.rst +++ b/README.rst @@ -103,7 +103,7 @@ Here's a basic example (for more see the examples directory):: "host": "server01", "region": "us-west" }, - "timestamp": "2009-11-10T23:00:00Z", + "time": "2009-11-10T23:00:00Z", "fields": { "value": 0.64 } diff --git a/examples/tutorial.py b/examples/tutorial.py index d144049a..64e95778 100644 --- a/examples/tutorial.py +++ b/examples/tutorial.py @@ -17,7 +17,7 @@ def main(host='localhost', port=8086): "host": "server01", "region": "us-west" }, - "timestamp": "2009-11-10T23:00:00Z", + "time": "2009-11-10T23:00:00Z", "fields": { "value": 0.64 } diff --git 
a/examples/tutorial_server_data.py b/examples/tutorial_server_data.py index f17def9b..b5b26d1c 100644 --- a/examples/tutorial_server_data.py +++ b/examples/tutorial_server_data.py @@ -28,7 +28,7 @@ def main(host='localhost', port=8086, nb_day=15): hostName = "server-%d" % random.randint(1, 5) # pointValues = [int(past_date.strftime('%s')), value, hostName] pointValues = { - "timestamp": int(past_date.strftime('%s')), + "time": int(past_date.strftime('%s')), "measurement": metric, 'fields': { 'value': value, diff --git a/examples/tutorial_sine_wave.py b/examples/tutorial_sine_wave.py index aad6872d..414dd10d 100644 --- a/examples/tutorial_sine_wave.py +++ b/examples/tutorial_sine_wave.py @@ -23,7 +23,7 @@ def main(host='localhost', port=8086): point = { "measurement": 'foobar', - "timestamp": int(now.strftime('%s')) + angle, + "time": int(now.strftime('%s')) + angle, "fields": { "value": y } diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index 8e76d271..ebb6ad92 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -88,7 +88,7 @@ def setUp(self): "host": "server01", "region": "us-west" }, - "timestamp": "2009-11-10T23:00:00Z", + "time": "2009-11-10T23:00:00Z", "fields": { "value": 0.64 } @@ -149,7 +149,7 @@ def test_write(self): "points": [{"measurement": "cpu_load_short", "tags": {"host": "server01", "region": "us-west"}, - "timestamp": "2009-11-10T23:00:00Z", + "time": "2009-11-10T23:00:00Z", "fields": {"value": 0.64}}]} ) @@ -160,7 +160,7 @@ def test_write(self): "points": [{"measurement": "cpu_load_short", "tags": {"host": "server01", "region": "us-west"}, - "timestamp": "2009-11-10T23:00:00Z", + "time": "2009-11-10T23:00:00Z", "fields": {"value": 0.64}}]} ) @@ -212,18 +212,18 @@ def test_write_points_toplevel_attributes(self): def test_write_points_batch(self): dummy_points = [ {"measurement": "cpu_usage", "tags": {"unit": "percent"}, - "timestamp": "2009-11-10T23:00:00Z", "fields": {"value": 12.34}}, + 
"time": "2009-11-10T23:00:00Z", "fields": {"value": 12.34}}, {"measurement": "network", "tags": {"direction": "in"}, - "timestamp": "2009-11-10T23:00:00Z", "fields": {"value": 123.00}}, + "time": "2009-11-10T23:00:00Z", "fields": {"value": 123.00}}, {"measurement": "network", "tags": {"direction": "out"}, - "timestamp": "2009-11-10T23:00:00Z", "fields": {"value": 12.00}} + "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.00}} ] expected_last_body = {"tags": {"host": "server01", "region": "us-west"}, "database": "db", "points": [{"measurement": "network", "tags": {"direction": "out"}, - "timestamp": "2009-11-10T23:00:00Z", + "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.00}}]} with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, From 078350e35f43e63a4183b2c36cca9de1c54f09cf Mon Sep 17 00:00:00 2001 From: Torsten Rehn Date: Sun, 21 Jun 2015 12:08:32 +0200 Subject: [PATCH 199/536] initial implementation of line protocol as introduced in https://github.com/influxdb/influxdb/pull/2696 --- influxdb/client.py | 23 +++------ influxdb/line_protocol.py | 103 ++++++++++++++++++++++++++++++++++++++ requirements.txt | 4 +- 3 files changed, 114 insertions(+), 16 deletions(-) create mode 100644 influxdb/line_protocol.py diff --git a/influxdb/client.py b/influxdb/client.py index 007115fc..0187e617 100755 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -11,6 +11,7 @@ import requests.exceptions from sys import version_info +from influxdb.line_protocol import make_lines from influxdb.resultset import ResultSet try: @@ -221,14 +222,7 @@ def request(self, url, method='GET', params=None, data=None, if params is None: params = {} - auth = { - 'u': self._username, - 'p': self._password - } - - params.update(auth) - - if data is not None and not isinstance(data, str): + if isinstance(data, dict) or isinstance(data, list): data = json.dumps(data) # Try to send the request a maximum of three times. 
(see #103) @@ -238,6 +232,7 @@ def request(self, url, method='GET', params=None, data=None, response = self._session.request( method=method, url=url, + auth=(self._username, self._password), params=params, data=data, headers=self._headers, @@ -270,10 +265,10 @@ def write(self, data, params=None, expected_response_code=204): :rtype: bool """ self.request( - url="write", + url="write_points", method='POST', params=params, - data=data, + data=make_lines(data).encode('utf-8'), expected_response_code=expected_response_code ) return True @@ -396,13 +391,12 @@ def _write_points(self, if tags: data['tags'] = tags - data['database'] = database or self._database - if self.use_udp: self.send_packet(data) else: self.write( data=data, + params={'db': database or self._database}, expected_response_code=204 ) @@ -679,9 +673,8 @@ def send_packet(self, packet): :param packet: the packet to be sent :type packet: dict """ - data = json.dumps(packet) - byte = data.encode('utf-8') - self.udp_socket.sendto(byte, (self._host, self.udp_port)) + data = make_lines(packet).encode('utf-8') + self.udp_socket.sendto(data, (self._host, self.udp_port)) class InfluxDBClusterClient(object): diff --git a/influxdb/line_protocol.py b/influxdb/line_protocol.py new file mode 100644 index 00000000..11fb6d1e --- /dev/null +++ b/influxdb/line_protocol.py @@ -0,0 +1,103 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals + +from copy import copy +from datetime import datetime +from time import mktime + +from dateutil.parser import parse +from pytz import utc +from six import binary_type, text_type + + +def _convert_timestamp(timestamp): + if isinstance(timestamp, int): + return timestamp + if isinstance(_force_text(timestamp), text_type): + timestamp = parse(timestamp) + if isinstance(timestamp, datetime): + if timestamp.tzinfo: + timestamp = timestamp.astimezone(utc) + timestamp.replace(tzinfo=None) + return ( + mktime(timestamp.timetuple()) * 1e9 + + timestamp.microsecond * 1e3 + ) + 
raise ValueError(timestamp) + + +def _escape_tag(tag): + return tag.replace( + "\\", "\\\\" + ).replace( + " ", "\\ " + ).replace( + ",", "\\," + ).replace( + "=", "\\=" + ) + + +def _escape_value(value): + value = _force_text(value) + if isinstance(value, text_type): + return "\"{}\"".format(value.replace( + "\"", "\\\"" + )) + else: + return str(value) + + +def _force_text(data): + """ + Try to return a text aka unicode object from the given data. + """ + if isinstance(data, binary_type): + return data.decode('utf-8', 'replace') + else: + return data + + +def make_lines(data): + """ + Extracts the points from the given dict and returns a Unicode string + matching the line protocol introduced in InfluxDB 0.9.0. + """ + lines = "" + static_tags = data.get('tags', None) + for point in data['points']: + # add measurement name + lines += _escape_tag(_force_text( + point.get('measurement', data.get('measurement')) + )) + "," + + # add tags + if static_tags is None: + tags = point.get('tags', {}) + else: + tags = copy(static_tags) + tags.update(point.get('tags', {})) + # tags should be sorted client-side to take load off server + for tag_key in sorted(tags.keys()): + lines += "{key}={value},".format( + key=_escape_tag(tag_key), + value=_escape_tag(tags[tag_key]), + ) + lines = lines[:-1] + " " # strip the trailing comma + + # add fields + for field_key in sorted(point['fields'].keys()): + lines += "{key}={value},".format( + key=_escape_tag(field_key), + value=_escape_value(point['fields'][field_key]), + ) + lines = lines[:-1] # strip the trailing comma + + # add timestamp + if 'timestamp' in point: + lines += " " + _force_text(str(int( + _convert_timestamp(point['timestamp']) + ))) + + lines += "\n" + return lines diff --git a/requirements.txt b/requirements.txt index 45cc6284..3445ca42 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,4 @@ +python-dateutil>=2.0.0 +pytz requests>=1.0.3 -six==1.9.0 \ No newline at end of file +six==1.9.0 From 
9b753de079d3b46f8a3ada1f27f791d739b44f47 Mon Sep 17 00:00:00 2001 From: Torsten Rehn Date: Sun, 21 Jun 2015 12:09:28 +0200 Subject: [PATCH 200/536] update client tests for line protocol --- tests/influxdb/client_test.py | 29 ++++++++++------------------- 1 file changed, 10 insertions(+), 19 deletions(-) diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index 605713fb..e329eeb0 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -139,7 +139,7 @@ def test_write(self): with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, - "http://localhost:8086/write", + "http://localhost:8086/write_points", status_code=204 ) cli = InfluxDBClient(database='db') @@ -154,21 +154,15 @@ def test_write(self): ) self.assertEqual( - json.loads(m.last_request.body), - {"database": "mydb", - "retentionPolicy": "mypolicy", - "points": [{"measurement": "cpu_load_short", - "tags": {"host": "server01", - "region": "us-west"}, - "timestamp": "2009-11-10T23:00:00Z", - "fields": {"value": 0.64}}]} + m.last_request.body, + b"cpu_load_short,host=server01,region=us-west value=0.64 1257890400000000000\n", ) def test_write_points(self): with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, - "http://localhost:8086/write", + "http://localhost:8086/write_points", status_code=204 ) @@ -188,7 +182,7 @@ def test_write_points_toplevel_attributes(self): with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, - "http://localhost:8086/write", + "http://localhost:8086/write_points", status_code=204 ) @@ -227,7 +221,7 @@ def test_write_points_batch(self): "fields": {"value": 12.00}}]} with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, - "http://localhost:8086/write", + "http://localhost:8086/write_points", status_code=204) cli = InfluxDBClient(database='db') cli.write_points(points=dummy_points, @@ -284,7 +278,7 @@ def test_write_points_with_precision(self): with requests_mock.Mocker() as m: 
m.register_uri( requests_mock.POST, - "http://localhost:8086/write", + "http://localhost:8086/write_points", status_code=204 ) @@ -294,12 +288,9 @@ def test_write_points_with_precision(self): time_precision='n' ) - self.assertDictEqual( - {'points': self.dummy_points, - 'database': 'db', - 'precision': 'n', - }, - json.loads(m.last_request.body) + self.assertEqual( + b"cpu_load_short,host=server01,region=us-west value=0.64 1257890400000000000\n", + m.last_request.body, ) def test_write_points_bad_precision(self): From 99cc8caf8e599e86c8dd03bdc0ccbad2d8aa1a98 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Petr=20=C5=A0tetiar?= Date: Wed, 24 Jun 2015 17:19:14 +0200 Subject: [PATCH 201/536] Fixes for line-protocol feature MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Working again with https://github.com/influxdb/influxdb/commit/246ce61b85d3d7b1eb5da354c35e2c4b4b42020e Signed-off-by: Petr Štetiar --- influxdb/client.py | 16 ++++++++++++---- tests/influxdb/client_test.py | 10 +++++----- 2 files changed, 17 insertions(+), 9 deletions(-) mode change 100755 => 100644 influxdb/client.py diff --git a/influxdb/client.py b/influxdb/client.py old mode 100755 new mode 100644 index 0187e617..3e87b739 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -198,7 +198,7 @@ def switch_user(self, username, password): self._password = password def request(self, url, method='GET', params=None, data=None, - expected_response_code=200): + expected_response_code=200, headers=None): """Make a HTTP request to the InfluxDB API. :param url: the path of the HTTP request, e.g. write, query, etc. 
@@ -219,6 +219,9 @@ def request(self, url, method='GET', params=None, data=None, """ url = "{0}/{1}".format(self._baseurl, url) + if headers is None: + headers = self._headers + if params is None: params = {} @@ -235,7 +238,7 @@ def request(self, url, method='GET', params=None, data=None, auth=(self._username, self._password), params=params, data=data, - headers=self._headers, + headers=headers, verify=self._verify_ssl, timeout=self._timeout ) @@ -264,12 +267,17 @@ def write(self, data, params=None, expected_response_code=204): :returns: True, if the write operation is successful :rtype: bool """ + + headers = self._headers + headers['Content-type'] = 'application/octet-stream' + self.request( - url="write_points", + url="write", method='POST', params=params, data=make_lines(data).encode('utf-8'), - expected_response_code=expected_response_code + expected_response_code=expected_response_code, + headers=headers ) return True diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index e329eeb0..8e3ef5f8 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -139,7 +139,7 @@ def test_write(self): with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, - "http://localhost:8086/write_points", + "http://localhost:8086/write", status_code=204 ) cli = InfluxDBClient(database='db') @@ -162,7 +162,7 @@ def test_write_points(self): with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, - "http://localhost:8086/write_points", + "http://localhost:8086/write", status_code=204 ) @@ -182,7 +182,7 @@ def test_write_points_toplevel_attributes(self): with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, - "http://localhost:8086/write_points", + "http://localhost:8086/write", status_code=204 ) @@ -221,7 +221,7 @@ def test_write_points_batch(self): "fields": {"value": 12.00}}]} with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, - "http://localhost:8086/write_points", + 
"http://localhost:8086/write", status_code=204) cli = InfluxDBClient(database='db') cli.write_points(points=dummy_points, @@ -278,7 +278,7 @@ def test_write_points_with_precision(self): with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, - "http://localhost:8086/write_points", + "http://localhost:8086/write", status_code=204 ) From ca10f5e31bf4a01443f6847b04c5c8e3ae01f6a4 Mon Sep 17 00:00:00 2001 From: Torsten Rehn Date: Wed, 24 Jun 2015 22:47:48 +0200 Subject: [PATCH 202/536] line protocol: fix timestamp UTC conversion --- influxdb/line_protocol.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/influxdb/line_protocol.py b/influxdb/line_protocol.py index 11fb6d1e..27eafa0f 100644 --- a/influxdb/line_protocol.py +++ b/influxdb/line_protocol.py @@ -1,9 +1,9 @@ # -*- coding: utf-8 -*- from __future__ import unicode_literals +from calendar import timegm from copy import copy from datetime import datetime -from time import mktime from dateutil.parser import parse from pytz import utc @@ -20,7 +20,7 @@ def _convert_timestamp(timestamp): timestamp = timestamp.astimezone(utc) timestamp.replace(tzinfo=None) return ( - mktime(timestamp.timetuple()) * 1e9 + + timegm(timestamp.timetuple()) * 1e9 + timestamp.microsecond * 1e3 ) raise ValueError(timestamp) From f02967766a3f70e4efa6df2c8878ec27242cd01a Mon Sep 17 00:00:00 2001 From: Torsten Rehn Date: Wed, 24 Jun 2015 22:48:47 +0200 Subject: [PATCH 203/536] the timestamp field is called 'time' --- influxdb/line_protocol.py | 4 ++-- tests/influxdb/client_test.py | 12 ++++++------ 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/influxdb/line_protocol.py b/influxdb/line_protocol.py index 27eafa0f..2b99d469 100644 --- a/influxdb/line_protocol.py +++ b/influxdb/line_protocol.py @@ -94,9 +94,9 @@ def make_lines(data): lines = lines[:-1] # strip the trailing comma # add timestamp - if 'timestamp' in point: + if 'time' in point: lines += " " + _force_text(str(int( - 
_convert_timestamp(point['timestamp']) + _convert_timestamp(point['time']) ))) lines += "\n" diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index 8e3ef5f8..ca913860 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -88,7 +88,7 @@ def setUp(self): "host": "server01", "region": "us-west" }, - "timestamp": "2009-11-10T23:00:00Z", + "time": "2009-11-10T23:00:00Z", "fields": { "value": 0.64 } @@ -149,7 +149,7 @@ def test_write(self): "points": [{"measurement": "cpu_load_short", "tags": {"host": "server01", "region": "us-west"}, - "timestamp": "2009-11-10T23:00:00Z", + "time": "2009-11-10T23:00:00Z", "fields": {"value": 0.64}}]} ) @@ -206,18 +206,18 @@ def test_write_points_toplevel_attributes(self): def test_write_points_batch(self): dummy_points = [ {"measurement": "cpu_usage", "tags": {"unit": "percent"}, - "timestamp": "2009-11-10T23:00:00Z", "fields": {"value": 12.34}}, + "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.34}}, {"measurement": "network", "tags": {"direction": "in"}, - "timestamp": "2009-11-10T23:00:00Z", "fields": {"value": 123.00}}, + "time": "2009-11-10T23:00:00Z", "fields": {"value": 123.00}}, {"measurement": "network", "tags": {"direction": "out"}, - "timestamp": "2009-11-10T23:00:00Z", "fields": {"value": 12.00}} + "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.00}} ] expected_last_body = {"tags": {"host": "server01", "region": "us-west"}, "database": "db", "points": [{"measurement": "network", "tags": {"direction": "out"}, - "timestamp": "2009-11-10T23:00:00Z", + "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.00}}]} with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, From dae6a8a69a86d452f7ffe2a0603d6b581fd31d1f Mon Sep 17 00:00:00 2001 From: Torsten Rehn Date: Wed, 24 Jun 2015 22:49:15 +0200 Subject: [PATCH 204/536] fix some expected timestamps --- tests/influxdb/client_test.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index ca913860..45532907 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -155,7 +155,7 @@ def test_write(self): self.assertEqual( m.last_request.body, - b"cpu_load_short,host=server01,region=us-west value=0.64 1257890400000000000\n", + b"cpu_load_short,host=server01,region=us-west value=0.64 1257894000000000000\n", ) def test_write_points(self): @@ -289,7 +289,7 @@ def test_write_points_with_precision(self): ) self.assertEqual( - b"cpu_load_short,host=server01,region=us-west value=0.64 1257890400000000000\n", + b"cpu_load_short,host=server01,region=us-west value=0.64 1257894000000000000\n", m.last_request.body, ) From 8bf57cb84f85c0280e7a8404dc3201ea6d77e1c2 Mon Sep 17 00:00:00 2001 From: Georgi Dimitrov Date: Tue, 16 Jun 2015 23:35:24 +0100 Subject: [PATCH 205/536] Convert NaN values to None in numpy arrays. This allows the default JSON encoder to do serialize panda dataframes --- influxdb/influxdb08/dataframe_client.py | 28 ++++++++++++++----- .../influxdb08/dataframe_client_test.py | 26 +++++++++++++++++ 2 files changed, 47 insertions(+), 7 deletions(-) diff --git a/influxdb/influxdb08/dataframe_client.py b/influxdb/influxdb08/dataframe_client.py index f73fc269..197422a6 100644 --- a/influxdb/influxdb08/dataframe_client.py +++ b/influxdb/influxdb08/dataframe_client.py @@ -15,18 +15,17 @@ class DataFrameClient(InfluxDBClient): The client reads and writes from pandas DataFrames. 
""" - def __init__(self, *args, **kwargs): + def __init__(self, ignore_nan=True, *args, **kwargs): super(DataFrameClient, self).__init__(*args, **kwargs) + try: global pd import pandas as pd except ImportError as ex: - raise ImportError( - 'DataFrameClient requires Pandas, "{ex}" problem importing' - .format(ex=str(ex)) - ) - + raise ImportError('DataFrameClient requires Pandas, ' + '"{ex}" problem importing'.format(ex=str(ex))) self.EPOCH = pd.Timestamp('1970-01-01 00:00:00.000+00:00') + self.ignore_nan = ignore_nan def write_points(self, data, *args, **kwargs): """ @@ -135,9 +134,24 @@ def _convert_dataframe_to_json(self, dataframe, name, time_precision='s'): for dt in dataframe.index] data = {'name': name, 'columns': [str(column) for column in dataframe.columns], - 'points': list([list(x) for x in dataframe.values])} + 'points': [self._convert_array(x) for x in dataframe.values]} return data + def _convert_array(self, array): + try: + global np + import numpy as np + except ImportError as ex: + raise ImportError('DataFrameClient requires Numpy, ' + '"{ex}" problem importing'.format(ex=str(ex))) + if self.ignore_nan: + number_types = (int, float, np.number) + condition = (all(isinstance(el, number_types) for el in array) and + np.isnan(array)) + return list(np.where(condition, None, array)) + else: + return list(array) + def _datetime_to_epoch(self, datetime, time_precision='s'): seconds = (datetime - self.EPOCH).total_seconds() if time_precision == 's': diff --git a/tests/influxdb/influxdb08/dataframe_client_test.py b/tests/influxdb/influxdb08/dataframe_client_test.py index ebea3616..dd8955db 100644 --- a/tests/influxdb/influxdb08/dataframe_client_test.py +++ b/tests/influxdb/influxdb08/dataframe_client_test.py @@ -52,6 +52,32 @@ def test_write_points_from_dataframe(self): self.assertListEqual(json.loads(m.last_request.body), points) + def test_write_points_from_dataframe_with_float_nan(self): + now = pd.Timestamp('1970-01-01 00:00+00:00') + dataframe = 
pd.DataFrame(data=[[1, float("NaN"), 1.0], [2, 2, 2.0]], + index=[now, now + timedelta(hours=1)], + columns=["column_one", "column_two", + "column_three"]) + points = [ + { + "points": [ + [1, None, 1.0, 0], + [2, 2, 2.0, 3600] + ], + "name": "foo", + "columns": ["column_one", "column_two", "column_three", "time"] + } + ] + + with requests_mock.Mocker() as m: + m.register_uri(requests_mock.POST, + "http://localhost:8086/db/db/series") + + cli = DataFrameClient(database='db') + cli.write_points({"foo": dataframe}) + + self.assertListEqual(json.loads(m.last_request.body), points) + def test_write_points_from_dataframe_in_batches(self): now = pd.Timestamp('1970-01-01 00:00+00:00') dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], From 7fae65ff0fcabb998b02d22c136edb19ffd4da7f Mon Sep 17 00:00:00 2001 From: Torsten Rehn Date: Sat, 27 Jun 2015 01:32:31 +0200 Subject: [PATCH 206/536] set precision and retention policy in GET params --- influxdb/client.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 3e87b739..c295a424 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -390,21 +390,25 @@ def _write_points(self, 'points': points } + if tags: + data['tags'] = tags + + params = { + 'db': database or self._database + } + if time_precision: - data['precision'] = time_precision + params['precision'] = time_precision if retention_policy: - data['retentionPolicy'] = retention_policy - - if tags: - data['tags'] = tags + params['rp'] = retention_policy if self.use_udp: self.send_packet(data) else: self.write( data=data, - params={'db': database or self._database}, + params=params, expected_response_code=204 ) From 1350d05a6c60f9d916c991c3bfa1ab503cf35c3a Mon Sep 17 00:00:00 2001 From: Torsten Rehn Date: Sat, 27 Jun 2015 01:39:19 +0200 Subject: [PATCH 207/536] update some more test cases --- tests/influxdb/client_test.py | 20 ++---- tests/influxdb/client_test_with_server.py | 7 
++- tests/influxdb/dataframe_client_test.py | 74 +++++------------------ 3 files changed, 26 insertions(+), 75 deletions(-) diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index 45532907..3d40d9a6 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -170,12 +170,9 @@ def test_write_points(self): cli.write_points( self.dummy_points, ) - self.assertDictEqual( - { - "database": "db", - "points": self.dummy_points, - }, - json.loads(m.last_request.body) + self.assertEqual( + "cpu_load_short,host=server01,region=us-west value=0.64 1257894000000000000\n", + m.last_request.body.decode('utf-8'), ) def test_write_points_toplevel_attributes(self): @@ -193,14 +190,9 @@ def test_write_points_toplevel_attributes(self): tags={"tag": "hello"}, retention_policy="somepolicy" ) - self.assertDictEqual( - { - "database": "testdb", - "tags": {"tag": "hello"}, - "points": self.dummy_points, - "retentionPolicy": "somepolicy" - }, - json.loads(m.last_request.body) + self.assertEqual( + "cpu_load_short,host=server01,region=us-west,tag=hello value=0.64 1257894000000000000\n", + m.last_request.body.decode('utf-8'), ) def test_write_points_batch(self): diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index 8c0bdc2d..1238e5cb 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -504,9 +504,10 @@ class CommonTests(ManyTestCasesWithServerMixin, influxdb_template_conf = os.path.join(THIS_DIR, 'influxdb.conf.template') def test_write(self): - new_dummy_point = dummy_point[0].copy() - new_dummy_point['database'] = 'db' - self.assertIs(True, self.cli.write(new_dummy_point)) + self.assertIs(True, self.cli.write( + {'points': dummy_point}, + params={'db': 'db'}, + )) @unittest.skip("fail against real server instance, " "don't know if it should succeed actually..") diff --git a/tests/influxdb/dataframe_client_test.py 
b/tests/influxdb/dataframe_client_test.py index 61b411fd..8f7e8e81 100644 --- a/tests/influxdb/dataframe_client_test.py +++ b/tests/influxdb/dataframe_client_test.py @@ -31,24 +31,10 @@ def test_write_points_from_dataframe(self): index=[now, now + timedelta(hours=1)], columns=["column_one", "column_two", "column_three"]) - expected = { - 'database': 'db', - 'points': [ - {'time': '1970-01-01T00:00:00+00:00', - 'fields': { - 'column_two': 1, - 'column_three': 1.0, - 'column_one': '1'}, - 'tags': {}, - 'measurement': 'foo'}, - {'time': '1970-01-01T01:00:00+00:00', - 'fields': { - 'column_two': 2, - 'column_three': 2.0, - 'column_one': '2'}, - 'tags': {}, - 'measurement': 'foo'}] - } + expected = ( + b"foo column_one=\"1\",column_three=1.0,column_two=1 0\n" + b"foo column_one=\"2\",column_three=2.0,column_two=2 3600000000000\n" + ) with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, @@ -58,10 +44,10 @@ def test_write_points_from_dataframe(self): cli = DataFrameClient(database='db') cli.write_points(dataframe, 'foo') - self.assertEqual(json.loads(m.last_request.body), expected) + self.assertEqual(m.last_request.body, expected) cli.write_points(dataframe, 'foo', tags=None) - self.assertEqual(json.loads(m.last_request.body), expected) + self.assertEqual(m.last_request.body, expected) def test_write_points_from_dataframe_in_batches(self): now = pd.Timestamp('1970-01-01 00:00+00:00') @@ -83,24 +69,10 @@ def test_write_points_from_dataframe_with_numeric_column_names(self): dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], index=[now, now + timedelta(hours=1)]) - expected = { - 'database': 'db', - 'points': [ - {'fields': { - '0': '1', - '1': 1, - '2': 1.0}, - 'tags': {'hello': 'there'}, - 'time': '1970-01-01T00:00:00+00:00', - 'measurement': 'foo'}, - {'fields': { - '0': '2', - '1': 2, - '2': 2.0}, - 'tags': {'hello': 'there'}, - 'time': '1970-01-01T01:00:00+00:00', - 'measurement': 'foo'}], - } + expected = ( + b"foo,hello=there 
0=\"1\",1=1,2=1.0 0\n" + b"foo,hello=there 0=\"2\",1=2,2=2.0 3600000000000\n" + ) with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, @@ -110,7 +82,7 @@ def test_write_points_from_dataframe_with_numeric_column_names(self): cli = DataFrameClient(database='db') cli.write_points(dataframe, "foo", {"hello": "there"}) - self.assertEqual(json.loads(m.last_request.body), expected) + self.assertEqual(m.last_request.body, expected) def test_write_points_from_dataframe_with_period_index(self): dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], @@ -118,24 +90,10 @@ def test_write_points_from_dataframe_with_period_index(self): pd.Period('1970-01-02')], columns=["column_one", "column_two", "column_three"]) - expected = { - 'points': [ - {'measurement': 'foo', - 'tags': {}, - 'fields': { - 'column_one': '1', - 'column_two': 1, - 'column_three': 1.0}, - 'time': '1970-01-01T00:00:00+00:00'}, - {'measurement': 'foo', - 'tags': {}, - 'fields': { - 'column_one': '2', - 'column_two': 2, - 'column_three': 2.0}, - 'time': '1970-01-02T00:00:00+00:00'}], - 'database': 'db', - } + expected = ( + b"foo column_one=\"1\",column_three=1.0,column_two=1 0\n" + b"foo column_one=\"2\",column_three=2.0,column_two=2 86400000000000\n" + ) with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, @@ -145,7 +103,7 @@ def test_write_points_from_dataframe_with_period_index(self): cli = DataFrameClient(database='db') cli.write_points(dataframe, "foo") - self.assertEqual(json.loads(m.last_request.body), expected) + self.assertEqual(m.last_request.body, expected) def test_write_points_from_dataframe_with_time_precision(self): now = pd.Timestamp('1970-01-01 00:00+00:00') From 6627efac8540c65ae1d0c38ddcebefb0d9b6aaea Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 3 Jul 2015 10:40:44 -0400 Subject: [PATCH 208/536] Formatting improvements * ``if value`` can lead to errors --- influxdb/client.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git 
a/influxdb/client.py b/influxdb/client.py index 2fa4226a..def990c8 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -97,7 +97,8 @@ def __init__(self, self._headers = { 'Content-type': 'application/json', - 'Accept': 'text/plain'} + 'Accept': 'text/plain' + } @staticmethod def from_DSN(dsn, **kwargs): @@ -394,17 +395,17 @@ def _write_points(self, 'points': points } - if tags: + if tags is not None: data['tags'] = tags params = { 'db': database or self._database } - if time_precision: + if time_precision is not None: params['precision'] = time_precision - if retention_policy: + if retention_policy is not None: params['rp'] = retention_policy if self.use_udp: From 2ea2437a894a8b704ef60f9ef4731c2720b9d576 Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 3 Jul 2015 11:14:58 -0400 Subject: [PATCH 209/536] New epoch parameter + pep8 + enable udp tests * Added epoch parameter to ``query`` * Removed test_write_points_with_precision as queries now always return nanosecond precision See doc: > The format of the returned timestamps complies with RFC3339, and > has nanosecond precision. 
--- influxdb/client.py | 4 + tests/influxdb/client_test.py | 12 ++- tests/influxdb/client_test_with_server.py | 98 +---------------------- tests/influxdb/dataframe_client_test.py | 6 +- 4 files changed, 19 insertions(+), 101 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index def990c8..9ebf3c0f 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -270,6 +270,7 @@ def write(self, data, params=None, expected_response_code=204): def query(self, query, params={}, + epoch=None, expected_response_code=200, database=None, raise_errors=True): @@ -298,6 +299,9 @@ def query(self, params['q'] = query params['db'] = database or self._database + if epoch is not None: + params['epoch'] = epoch + response = self.request( url="query", method='GET', diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index 4da32d7b..e9185208 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -155,7 +155,8 @@ def test_write(self): self.assertEqual( m.last_request.body, - b"cpu_load_short,host=server01,region=us-west value=0.64 1257894000000000000\n", + b"cpu_load_short,host=server01,region=us-west " + b"value=0.64 1257894000000000000\n", ) def test_write_points(self): @@ -171,7 +172,8 @@ def test_write_points(self): self.dummy_points, ) self.assertEqual( - "cpu_load_short,host=server01,region=us-west value=0.64 1257894000000000000\n", + "cpu_load_short,host=server01,region=us-west " + "value=0.64 1257894000000000000\n", m.last_request.body.decode('utf-8'), ) @@ -191,7 +193,8 @@ def test_write_points_toplevel_attributes(self): retention_policy="somepolicy" ) self.assertEqual( - "cpu_load_short,host=server01,region=us-west,tag=hello value=0.64 1257894000000000000\n", + "cpu_load_short,host=server01,region=us-west,tag=hello " + "value=0.64 1257894000000000000\n", m.last_request.body.decode('utf-8'), ) @@ -281,7 +284,8 @@ def test_write_points_with_precision(self): ) self.assertEqual( - 
b"cpu_load_short,host=server01,region=us-west value=0.64 1257894000000000000\n", + b"cpu_load_short,host=server01,region=us-west " + b"value=0.64 1257894000000000000\n", m.last_request.body, ) diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index 99bd7a71..a298d3de 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -15,7 +15,6 @@ import distutils.spawn from functools import partial import os -import re import shutil import subprocess import sys @@ -653,96 +652,6 @@ def test_write_points_batch(self): self.assertIn(12, net_out['series'][0]['values'][0]) self.assertIn(12.34, cpu['series'][0]['values'][0]) - def test_write_points_with_precision(self): - """ check that points written with an explicit precision have - actually that precision used. - """ - # for that we'll check that - for each precision - the actual 'time' - # value returned by a select has the correct regex format.. - # n : u'2015-03-20T15:23:36.615654966Z' - # u : u'2015-03-20T15:24:10.542554Z' - # ms : u'2015-03-20T15:24:50.878Z' - # s : u'2015-03-20T15:20:24Z' - # m : u'2015-03-20T15:25:00Z' - # h : u'2015-03-20T15:00:00Z' - base_regex = '\d{4}-\d{2}-\d{2}T\d{2}:' # YYYY-MM-DD 'T' hh: - base_s_regex = base_regex + '\d{2}:\d{2}' # base_regex + mm:ss - - point = { - "measurement": "cpu_load_short", - "tags": { - "host": "server01", - "region": "us-west" - }, - "time": "2009-11-10T12:34:56.123456789Z", - "fields": { - "value": 0.64 - } - } - - # As far as we can see the values aren't directly available depending - # on the precision used. - # The less the precision, the more to wait for the value to be - # actually written/available. 
- for idx, (precision, expected_regex, sleep_time) in enumerate(( - ('n', base_s_regex + '\.\d{9}Z', 1), - ('u', base_s_regex + '\.\d{6}Z', 1), - ('ms', base_s_regex + '\.\d{3}Z', 1), - ('s', base_s_regex + 'Z', 1), - - # ('h', base_regex + '00:00Z', ), - # that would require a sleep of possibly up to 3600 secs (/ 2 ?).. - )): - db = 'db1' # to not shoot us in the foot/head, - # we work on a fresh db each time: - self.cli.create_database(db) - before = datetime.datetime.now() - self.assertIs( - True, - self.cli.write_points( - [point], - time_precision=precision, - database=db)) - - # sys.stderr.write('checking presision with %r : - # before=%s\n' % (precision, before)) - after = datetime.datetime.now() - - if sleep_time > 1: - sleep_time -= (after if before.min != after.min - else before).second - - start = time.time() - timeout = start + sleep_time - # sys.stderr.write('should sleep %s ..\n' % sleep_time) - while time.time() < timeout: - rsp = self.cli.query('SELECT * FROM cpu_load_short', - database=db) - if rsp != {'cpu_load_short': []}: - # sys.stderr.write('already ? 
only slept %s\n' % ( - # time.time() - start)) - break - time.sleep(1) - else: - pass - # sys.stderr.write('ok !\n') - - # sys.stderr.write('sleeping %s..\n' % sleep_time) - - if sleep_time: - time.sleep(sleep_time) - - rsp = self.cli.query('SELECT * FROM cpu_load_short', database=db) - # sys.stderr.write('precision=%s rsp_timestamp = %r\n' % ( - # precision, rsp['cpu_load_short'][0]['time'])) - - m = re.match( - expected_regex, - list(rsp['cpu_load_short'])[0]['time'] - ) - self.assertIsNotNone(m) - self.cli.drop_database(db) - def test_query(self): self.assertIs(True, self.cli.write_points(dummy_point)) @@ -973,13 +882,11 @@ def test_query_multiple_series(self): ############################################################################ -@unittest.skip("Broken as of 0.9.0") @unittest.skipIf(not is_influxdb_bin_ok, "could not find influxd binary") class UdpTests(ManyTestCasesWithServerMixin, unittest.TestCase): influxdb_udp_enabled = True - influxdb_template_conf = os.path.join(THIS_DIR, 'influxdb.conf.template') @@ -990,14 +897,15 @@ def test_write_points_udp(self): 'root', '', database='db', - use_udp=True, udp_port=self.influxd_inst.udp_port + use_udp=True, + udp_port=self.influxd_inst.udp_port ) cli.write_points(dummy_point) # The points are not immediately available after write_points. # This is to be expected because we are using udp (no response !). # So we have to wait some time, - time.sleep(1) # 1 sec seems to be a good choice. + time.sleep(3) # 3 sec seems to be a good choice. 
rsp = self.cli.query('SELECT * FROM cpu_load_short') self.assertEqual( diff --git a/tests/influxdb/dataframe_client_test.py b/tests/influxdb/dataframe_client_test.py index 8f7e8e81..7f54990a 100644 --- a/tests/influxdb/dataframe_client_test.py +++ b/tests/influxdb/dataframe_client_test.py @@ -33,7 +33,8 @@ def test_write_points_from_dataframe(self): "column_three"]) expected = ( b"foo column_one=\"1\",column_three=1.0,column_two=1 0\n" - b"foo column_one=\"2\",column_three=2.0,column_two=2 3600000000000\n" + b"foo column_one=\"2\",column_three=2.0,column_two=2 " + b"3600000000000\n" ) with requests_mock.Mocker() as m: @@ -92,7 +93,8 @@ def test_write_points_from_dataframe_with_period_index(self): "column_three"]) expected = ( b"foo column_one=\"1\",column_three=1.0,column_two=1 0\n" - b"foo column_one=\"2\",column_three=2.0,column_two=2 86400000000000\n" + b"foo column_one=\"2\",column_three=2.0,column_two=2 " + b"86400000000000\n" ) with requests_mock.Mocker() as m: From a17e743c0823db3508a08f196ff4568aa9e3edbe Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 3 Jul 2015 11:22:47 -0400 Subject: [PATCH 210/536] Fixed tests for new line protocol --- tests/influxdb/client_test.py | 24 ++++++++++-------------- tests/influxdb/dataframe_client_test.py | 7 +++---- 2 files changed, 13 insertions(+), 18 deletions(-) diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index e9185208..80fa49db 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -207,13 +207,11 @@ def test_write_points_batch(self): {"measurement": "network", "tags": {"direction": "out"}, "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.00}} ] - expected_last_body = {"tags": {"host": "server01", - "region": "us-west"}, - "database": "db", - "points": [{"measurement": "network", - "tags": {"direction": "out"}, - "time": "2009-11-10T23:00:00Z", - "fields": {"value": 12.00}}]} + expected_last_body = ( + "network,direction=out,host=server01,region=us-west " 
+ "value=12.0 1257894000000000000\n" + ) + with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, "http://localhost:8086/write", @@ -225,7 +223,7 @@ def test_write_points_batch(self): "region": "us-west"}, batch_size=2) self.assertEqual(m.call_count, 2) - self.assertEqual(expected_last_body, m.last_request.json()) + self.assertEqual(expected_last_body, m.last_request.body) def test_write_points_udp(self): s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) @@ -240,12 +238,10 @@ def test_write_points_udp(self): received_data, addr = s.recvfrom(1024) - self.assertDictEqual( - { - "points": self.dummy_points, - "database": "test" - }, - json.loads(received_data.decode(), strict=True) + self.assertEqual( + "cpu_load_short,host=server01,region=us-west " + "value=0.64 1257894000000000000\n", + received_data.decode() ) def test_write_bad_precision_udp(self): diff --git a/tests/influxdb/dataframe_client_test.py b/tests/influxdb/dataframe_client_test.py index 7f54990a..481eefbc 100644 --- a/tests/influxdb/dataframe_client_test.py +++ b/tests/influxdb/dataframe_client_test.py @@ -142,16 +142,15 @@ def test_write_points_from_dataframe_with_time_precision(self): measurement = "foo" cli.write_points(dataframe, measurement, time_precision='s') - points.update(precision='s') - self.assertEqual(json.loads(m.last_request.body), points) + self.assertEqual(m.last_request.qs['precision'], ['s']) cli.write_points(dataframe, measurement, time_precision='m') points.update(precision='m') - self.assertEqual(json.loads(m.last_request.body), points) + self.assertEqual(m.last_request.qs['precision'], ['m']) cli.write_points(dataframe, measurement, time_precision='u') points.update(precision='u') - self.assertEqual(json.loads(m.last_request.body), points) + self.assertEqual(m.last_request.qs['precision'], ['u']) @raises(TypeError) def test_write_points_from_dataframe_fails_without_time_index(self): From 439f46c2a7eba21207a28eabb7eaac68577b153d Mon Sep 17 00:00:00 2001 From: 
aviau Date: Fri, 3 Jul 2015 12:19:48 -0400 Subject: [PATCH 211/536] isinstance: Use tuple --- influxdb/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/client.py b/influxdb/client.py index 9ebf3c0f..8fde6103 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -211,7 +211,7 @@ def request(self, url, method='GET', params=None, data=None, if params is None: params = {} - if isinstance(data, dict) or isinstance(data, list): + if isinstance(data, (dict, list)): data = json.dumps(data) # Try to send the request a maximum of three times. (see #103) From 58dac7b1522ea6e4bbb82cdcd6c0e6a362ea61a6 Mon Sep 17 00:00:00 2001 From: Georgi Dimitrov Date: Fri, 3 Jul 2015 22:53:30 +0100 Subject: [PATCH 212/536] Fix write_points_batch test for python3. --- tests/influxdb/client_test.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index 80fa49db..d175b4f0 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -223,7 +223,8 @@ def test_write_points_batch(self): "region": "us-west"}, batch_size=2) self.assertEqual(m.call_count, 2) - self.assertEqual(expected_last_body, m.last_request.body) + self.assertEqual(expected_last_body, + m.last_request.body.decode('utf-8')) def test_write_points_udp(self): s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) From 126b39d42e4b179d14e59a3cbbbb46b7d0e35158 Mon Sep 17 00:00:00 2001 From: Georgi Dimitrov Date: Sat, 4 Jul 2015 01:30:38 +0100 Subject: [PATCH 213/536] Update queries and tests for InfluxDB v0.9.1. * Update delete_series query. Instead of an id, it has FROM and WHERE = clauses. Update tests. * Remove grant_admin_privileges method. Cluster administration privileges are granted upon user creation. Remove tests. * Update create_user method to accept an optional argument to determine whether the user should be granted cluster administration privileges. Update tests. 
* Fix the test_write_check_read test case in the tests against a real server. * Fix revoke_admin_privileges test. It was failing because of the grant_admin_privileges method malfunction. --- influxdb/client.py | 39 +++++++------ tests/influxdb/client_test.py | 22 -------- tests/influxdb/client_test_with_server.py | 68 ++++++++--------------- 3 files changed, 44 insertions(+), 85 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 8fde6103..409c9cc5 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -589,15 +589,20 @@ def get_list_users(self): """ return list(self.query("SHOW USERS").get_points()) - def create_user(self, username, password): + def create_user(self, username, password, admin=False): """Create a new user in InfluxDB :param username: the new username to create :type username: str :param password: the password for the new user :type password: str + :param admin: whether the user should have cluster administration + privileges or not + :type admin: boolean """ text = "CREATE USER {} WITH PASSWORD '{}'".format(username, password) + if admin: + text += ' WITH ALL PRIVILEGES' self.query(text) def drop_user(self, username): @@ -620,29 +625,27 @@ def set_user_password(self, username, password): text = "SET PASSWORD FOR {} = '{}'".format(username, password) self.query(text) - def delete_series(self, id, database=None): - """Delete series from a database. + def delete_series(self, database=None, measurement=None, tags=None): + """Delete series from a database. Series can be filtered by + measurement and tags. 
- :param id: the id of the series to be deleted - :type id: int + :param measurement: Delete all series from a measurement + :type id: string + :param tags: Delete all series that match given tags + :type id: dict :param database: the database from which the series should be deleted, defaults to client's current database :type database: str """ database = database or self._database - self.query('DROP SERIES %s' % id, database=database) - - def grant_admin_privileges(self, username): - """Grant cluster administration privileges to an user. - - :param username: the username to grant privileges to - :type username: str - - .. note:: Only a cluster administrator can create/ drop databases - and manage users. - """ - text = "GRANT ALL PRIVILEGES TO {}".format(username) - self.query(text) + query_str = 'DROP SERIES' + if measurement: + query_str += ' FROM "{}"'.format(measurement) + + if tags: + query_str += ' WHERE ' + ' and '.join(["{}='{}'".format(k, v) + for k, v in tags.items()]) + self.query(query_str, database=database) def revoke_admin_privileges(self, username): """Revoke cluster administration privileges from an user. 
diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index d175b4f0..43ecb68e 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -615,28 +615,6 @@ def test_get_list_users_empty(self): self.assertListEqual(self.cli.get_list_users(), []) - def test_grant_admin_privileges(self): - example_response = '{"results":[{}]}' - - with requests_mock.Mocker() as m: - m.register_uri( - requests_mock.GET, - "http://localhost:8086/query", - text=example_response - ) - self.cli.grant_admin_privileges('test') - - self.assertEqual( - m.last_request.qs['q'][0], - 'grant all privileges to test' - ) - - @raises(Exception) - def test_grant_admin_privileges_invalid(self): - cli = InfluxDBClient('host', 8086, 'username', 'password') - with _mocked_session(cli, 'get', 400): - self.cli.grant_admin_privileges('') - def test_revoke_admin_privileges(self): example_response = '{"results":[{}]}' diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index a298d3de..0d4f36f7 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/client_test_with_server.py @@ -109,7 +109,7 @@ def point(serie_name, timestamp=None, tags=None, **fields): }, "time": "2009-11-10T23:01:35Z", "fields": { - "value": 33 + "value": 33.0 } } ] @@ -380,6 +380,12 @@ def test_create_user(self): self.assertIn({'user': 'test_user', 'admin': False}, rsp) + def test_create_user_admin(self): + self.cli.create_user('test_user', 'secret_password', True) + rsp = list(self.cli.query("SHOW USERS")['results']) + self.assertIn({'user': 'test_user', 'admin': True}, + rsp) + def test_create_user_blank_password(self): self.cli.create_user('test_user', '') rsp = list(self.cli.query("SHOW USERS")['results']) @@ -439,26 +445,9 @@ def test_drop_user_invalid(self): 'found invalid, expected', ctx.exception.content) - @unittest.skip("Broken as of 0.9.0") - def test_grant_admin_privileges(self): - self.cli.create_user('test', 'test') - 
self.assertEqual([{'user': 'test', 'admin': False}], - self.cli.get_list_users()) - self.cli.grant_admin_privileges('test') - self.assertEqual([{'user': 'test', 'admin': True}], - self.cli.get_list_users()) - - def test_grant_admin_privileges_invalid(self): - with self.assertRaises(InfluxDBClientError) as ctx: - self.cli.grant_admin_privileges('') - self.assertEqual(400, ctx.exception.code) - self.assertIn('{"error":"error parsing query: ', - ctx.exception.content) - @unittest.skip("Broken as of 0.9.0") def test_revoke_admin_privileges(self): - self.cli.create_user('test', 'test') - self.cli.grant_admin_privileges('test') + self.cli.create_user('test', 'test', admin=True) self.assertEqual([{'user': 'test', 'admin': True}], self.cli.get_list_users()) self.cli.revoke_admin_privileges('test') @@ -518,23 +507,13 @@ def test_write(self): params={'db': 'db'}, )) - @unittest.skip("fail against real server instance, " - "don't know if it should succeed actually..") def test_write_check_read(self): self.test_write() - # hmmmm damn, - # after write has returned, if we directly query for the data it's not - # directly available.. 
(don't know if this is expected behavior ( - # but it maybe)) - # So we have to : - time.sleep(5) - # so that then the data is available through select : + time.sleep(1) rsp = self.cli.query('SELECT * FROM cpu_load_short', database='db') - self.assertEqual( - {'cpu_load_short': [ - {'value': 0.64, 'time': '2009-11-10T23:00:00Z'}]}, - rsp - ) + self.assertListEqual([{'value': 0.64, + 'time': '2009-11-10T23:00:00Z'}], + list(rsp.get_points())) def test_write_points(self): self.assertIs(True, self.cli.write_points(dummy_point)) @@ -692,19 +671,18 @@ def test_get_list_series_and_delete(self): rsp ) - @unittest.skip("broken on 0.9.0") + def test_delete_series_invalid(self): + with self.assertRaises(InfluxDBClientError): + self.cli.delete_series() + def test_delete_series(self): - self.assertEqual( - len(self.cli.get_list_series()), 0 - ) - self.cli.write_points(dummy_point) - self.assertEqual( - len(self.cli.get_list_series()), 1 - ) - self.cli.delete_series(1) - self.assertEqual( - len(self.cli.get_list_series()), 0 - ) + self.assertEqual(len(self.cli.get_list_series()), 0) + self.cli.write_points(dummy_points) + self.assertEqual(len(self.cli.get_list_series()), 2) + self.cli.delete_series(measurement='cpu_load_short') + self.assertEqual(len(self.cli.get_list_series()), 1) + self.cli.delete_series(tags={'region': 'us-west'}) + self.assertEqual(len(self.cli.get_list_series()), 0) @unittest.skip("Broken as of 0.9.0") def test_get_list_series_DF(self): From 9087455e794167b3c494e5466b64f9d865d9ab93 Mon Sep 17 00:00:00 2001 From: aviau Date: Sun, 5 Jul 2015 10:12:58 -0400 Subject: [PATCH 214/536] influxdb08: Requests - catch Timeouts --- influxdb/influxdb08/client.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/influxdb/influxdb08/client.py b/influxdb/influxdb08/client.py index b5a02bd9..0af499bc 100644 --- a/influxdb/influxdb08/client.py +++ b/influxdb/influxdb08/client.py @@ -242,7 +242,8 @@ def request(self, url, method='GET', params=None, 
data=None, timeout=self._timeout ) break - except requests.exceptions.ConnectionError as e: + except (requests.exceptions.ConnectionError, + requests.exceptions.Timeout) as e: if i < 2: continue else: From 34257806e6ecaa638ba9243320e4805727ffe5d9 Mon Sep 17 00:00:00 2001 From: Alexandre Viau Date: Sun, 5 Jul 2015 12:04:22 -0400 Subject: [PATCH 215/536] Travis - Enable caching --- .travis.yml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/.travis.yml b/.travis.yml index 564ce172..63ec221f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -19,3 +19,10 @@ after_success: - if [ "$TOX_ENV" == "coverage" ] ; then coveralls; fi notifications: email: false + +# Travis caching +cache: + directories: + - $HOME/.cache/pip +before_cache: + - rm -f $HOME/.cache/pip/log/debug.log From ab590e33d4eb0a02c6b7598fb2d727637fb2c86a Mon Sep 17 00:00:00 2001 From: aviau Date: Sun, 5 Jul 2015 10:21:28 -0400 Subject: [PATCH 216/536] Travis.yml: Updated to InfluxDB 0.9.1 --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 564ce172..70e210aa 100644 --- a/.travis.yml +++ b/.travis.yml @@ -12,7 +12,7 @@ env: install: - sudo pip install tox - sudo pip install coveralls - - wget http://get.influxdb.org/influxdb_0.9.0_amd64.deb && sudo dpkg -i influxdb_0.9.0_amd64.deb + - wget https://s3.amazonaws.com/influxdb/influxdb_0.9.1_amd64.deb && sudo dpkg -i influxdb_0.9.1_amd64.deb script: - travis_wait 30 tox -e $TOX_ENV after_success: From 32f5bed33c0ef5ac5cf0762b3bbc3a64498a1a1d Mon Sep 17 00:00:00 2001 From: aviau Date: Mon, 6 Jul 2015 09:06:13 -0400 Subject: [PATCH 217/536] 'time.sleep(0.1)' before returning free port --- tests/influxdb/misc.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tests/influxdb/misc.py b/tests/influxdb/misc.py index 4761d0e9..70e16f11 100644 --- a/tests/influxdb/misc.py +++ b/tests/influxdb/misc.py @@ -1,6 +1,7 @@ import socket +import time def get_free_port(ip='127.0.0.1'): @@ -12,6 +13,10 @@ 
def get_free_port(ip='127.0.0.1'): finally: sock.close() + # Is there a better way than a sleep? + # There were issues on Travis where the port was not yet free. + time.sleep(0.1) + def is_port_open(port, ip='127.0.0.1'): sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) From 1775b244206d678cc08cb7fd1bf4c1a632cde89b Mon Sep 17 00:00:00 2001 From: aviau Date: Tue, 7 Jul 2015 09:21:47 -0400 Subject: [PATCH 218/536] line_protocol: Don't replace when decoding utf-8 --- influxdb/line_protocol.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/line_protocol.py b/influxdb/line_protocol.py index 2b99d469..266cf59e 100644 --- a/influxdb/line_protocol.py +++ b/influxdb/line_protocol.py @@ -53,7 +53,7 @@ def _force_text(data): Try to return a text aka unicode object from the given data. """ if isinstance(data, binary_type): - return data.decode('utf-8', 'replace') + return data.decode('utf-8') else: return data From 28dee66da92bd94cfcff67d379da34c84b33ccb3 Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 17 Jul 2015 14:43:08 -0400 Subject: [PATCH 219/536] Dont include empty tags in lines (Closes: #215) --- influxdb/line_protocol.py | 8 ++++---- tests/influxdb/test_line_protocol.py | 27 +++++++++++++++++++++++++++ 2 files changed, 31 insertions(+), 4 deletions(-) create mode 100644 tests/influxdb/test_line_protocol.py diff --git a/influxdb/line_protocol.py b/influxdb/line_protocol.py index 266cf59e..403e463c 100644 --- a/influxdb/line_protocol.py +++ b/influxdb/line_protocol.py @@ -79,10 +79,10 @@ def make_lines(data): tags.update(point.get('tags', {})) # tags should be sorted client-side to take load off server for tag_key in sorted(tags.keys()): - lines += "{key}={value},".format( - key=_escape_tag(tag_key), - value=_escape_tag(tags[tag_key]), - ) + key = _escape_tag(tag_key) + value = _escape_tag(tags[tag_key]) + if key != '' and value != '': + lines += "{key}={value},".format(key=key, value=value) lines = lines[:-1] + " " # strip the 
trailing comma # add fields diff --git a/tests/influxdb/test_line_protocol.py b/tests/influxdb/test_line_protocol.py new file mode 100644 index 00000000..44c397eb --- /dev/null +++ b/tests/influxdb/test_line_protocol.py @@ -0,0 +1,27 @@ +# -*- coding: utf-8 -*- + +import unittest +from influxdb import line_protocol + + +class TestLineProtocol(unittest.TestCase): + + def test_empty_tag(self): + data = { + "tags": { + "my_tag": "" + }, + "points": [ + { + "measurement": "test", + "fields": { + "value": "hello!" + } + } + ] + } + + self.assertEqual( + line_protocol.make_lines(data), + 'test value="hello!"\n' + ) From d737c660567ef5847a2116e9f644455d8297bdd3 Mon Sep 17 00:00:00 2001 From: aviau Date: Thu, 23 Jul 2015 14:48:25 -0400 Subject: [PATCH 220/536] Released 2.7.0 --- influxdb/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/__init__.py b/influxdb/__init__.py index 7acb8439..e2982e62 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -13,4 +13,4 @@ ] -__version__ = '2.6.0' +__version__ = '2.7.0' From 9e40ef247df41f8f1783e7ec5effd55d4efd8b69 Mon Sep 17 00:00:00 2001 From: aviau Date: Sun, 26 Jul 2015 21:30:47 -0400 Subject: [PATCH 221/536] Fixed bad 'SHOW RETENTION POLICIES' syntax --- .travis.yml | 2 +- influxdb/client.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index b7e4db11..39bf1dce 100644 --- a/.travis.yml +++ b/.travis.yml @@ -12,7 +12,7 @@ env: install: - sudo pip install tox - sudo pip install coveralls - - wget https://s3.amazonaws.com/influxdb/influxdb_0.9.1_amd64.deb && sudo dpkg -i influxdb_0.9.1_amd64.deb + - wget https://s3.amazonaws.com/influxdb/influxdb_0.9.2_amd64.deb && sudo dpkg -i influxdb_0.9.2_amd64.deb script: - travis_wait 30 tox -e $TOX_ENV after_success: diff --git a/influxdb/client.py b/influxdb/client.py index 409c9cc5..a7c265a7 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -540,7 +540,7 @@ def 
get_list_retention_policies(self, database=None): u'replicaN': 1}] """ rsp = self.query( - "SHOW RETENTION POLICIES %s" % (database or self._database) + "SHOW RETENTION POLICIES ON %s" % (database or self._database) ) return list(rsp.get_points()) From cd8a2cef18ea627e673e00efa125471aa7d60b13 Mon Sep 17 00:00:00 2001 From: aviau Date: Sun, 26 Jul 2015 22:18:06 -0400 Subject: [PATCH 222/536] Ensure tests pass without influxdb bin --- tests/__init__.py | 4 + tests/influxdb/server_tests/__init__.py | 0 tests/influxdb/server_tests/base.py | 70 ++++++ .../client_test_with_server.py | 225 +----------------- .../{ => server_tests}/influxdb.conf.template | 0 .../server_tests/influxdb_instance.py | 153 ++++++++++++ tox.ini | 1 + 7 files changed, 235 insertions(+), 218 deletions(-) create mode 100644 tests/influxdb/server_tests/__init__.py create mode 100644 tests/influxdb/server_tests/base.py rename tests/influxdb/{ => server_tests}/client_test_with_server.py (74%) rename tests/influxdb/{ => server_tests}/influxdb.conf.template (100%) create mode 100644 tests/influxdb/server_tests/influxdb_instance.py diff --git a/tests/__init__.py b/tests/__init__.py index 3bbc06da..e87fdbc4 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,6 +1,10 @@ # -*- coding: utf-8 -*- import unittest import sys +import os using_pypy = hasattr(sys, "pypy_version_info") skipIfPYpy = unittest.skipIf(using_pypy, "Skipping this test on pypy.") + +_skip_server_tests = os.environ.get('INFLUXDB_PYTHON_SKIP_SERVER_TESTS', None) == 'True' +skipServerTests = unittest.skipIf(_skip_server_tests, "Skipping server tests...") diff --git a/tests/influxdb/server_tests/__init__.py b/tests/influxdb/server_tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/influxdb/server_tests/base.py b/tests/influxdb/server_tests/base.py new file mode 100644 index 00000000..db15dfd4 --- /dev/null +++ b/tests/influxdb/server_tests/base.py @@ -0,0 +1,70 @@ +# -*- coding: utf-8 -*- + +import sys 
+ +from tests import using_pypy +from tests.influxdb.server_tests.influxdb_instance import InfluxDbInstance + +from influxdb.client import InfluxDBClient + +if not using_pypy: + from influxdb.dataframe_client import DataFrameClient + + +def _setup_influxdb_server(inst): + inst.influxd_inst = InfluxDbInstance( + inst.influxdb_template_conf, + udp_enabled=getattr(inst, 'influxdb_udp_enabled', False), + ) + + inst.cli = InfluxDBClient('localhost', + inst.influxd_inst.http_port, + 'root', + '', + database='db') + if not using_pypy: + inst.cliDF = DataFrameClient('localhost', + inst.influxd_inst.http_port, + 'root', + '', + database='db') + + +def _teardown_influxdb_server(inst): + remove_tree = sys.exc_info() == (None, None, None) + inst.influxd_inst.close(remove_tree=remove_tree) + + +class SingleTestCaseWithServerMixin(object): + ''' A mixin for unittest.TestCase to start an influxdb server instance + in a temporary directory **for each test function/case** + ''' + + # 'influxdb_template_conf' attribute must be set + # on the TestCase class or instance. + + setUp = _setup_influxdb_server + tearDown = _teardown_influxdb_server + + +class ManyTestCasesWithServerMixin(object): + ''' Same than SingleTestCaseWithServerMixin + but creates a single instance for the whole class. + Also pre-creates a fresh database: 'db'. + ''' + + # 'influxdb_template_conf' attribute must be set on the class itself ! 
+ + @classmethod + def setUpClass(cls): + _setup_influxdb_server(cls) + + def setUp(self): + self.cli.create_database('db') + + @classmethod + def tearDownClass(cls): + _teardown_influxdb_server(cls) + + def tearDown(self): + self.cli.drop_database('db') diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/server_tests/client_test_with_server.py similarity index 74% rename from tests/influxdb/client_test_with_server.py rename to tests/influxdb/server_tests/client_test_with_server.py index 0d4f36f7..8b0274f6 100644 --- a/tests/influxdb/client_test_with_server.py +++ b/tests/influxdb/server_tests/client_test_with_server.py @@ -11,17 +11,11 @@ """ from __future__ import print_function -import datetime -import distutils.spawn + from functools import partial import os -import shutil -import subprocess -import sys -import tempfile import time import unittest - import warnings # By default, raise exceptions on warnings @@ -30,50 +24,17 @@ from influxdb import InfluxDBClient from influxdb.exceptions import InfluxDBClientError -from tests.influxdb.misc import get_free_port, is_port_open -from tests import skipIfPYpy, using_pypy +from tests import skipIfPYpy, using_pypy, skipServerTests +from tests.influxdb.server_tests.base import ManyTestCasesWithServerMixin +from tests.influxdb.server_tests.base import SingleTestCaseWithServerMixin if not using_pypy: import pandas as pd from pandas.util.testing import assert_frame_equal - from influxdb import DataFrameClient THIS_DIR = os.path.abspath(os.path.dirname(__file__)) -############################################################################# -# try to find where the 'influxd' binary is located: -# You can define 'InfluxDbPythonClientTest_SERVER_BIN_PATH' -# env var to force it : -influxdb_bin_path = influxdb_forced_bin = os.environ.get( - 'InfluxDbPythonClientTest_SERVER_BIN_PATH', '') -if not influxdb_bin_path: - try: - influxdb_bin_path = distutils.spawn.find_executable('influxd') - if not 
influxdb_bin_path: - raise Exception('not found via distutils') - except Exception as err: - try: - influxdb_bin_path = subprocess.check_output( - ['which', 'influxdb']).strip() - except subprocess.CalledProcessError as err: - # fallback on : - influxdb_bin_path = '/opt/influxdb/influxd' - -is_influxdb_bin_ok = ( - # if the env var is set then consider the influxdb_bin as OK.. - influxdb_forced_bin - or (os.path.isfile(influxdb_bin_path) - and os.access(influxdb_bin_path, os.X_OK)) -) - -if is_influxdb_bin_ok: - # read version : - version = subprocess.check_output([influxdb_bin_path, 'version']) - print(version, file=sys.stderr) - - -############################################################################# def point(serie_name, timestamp=None, tags=None, **fields): res = {'measurement': serie_name} @@ -152,174 +113,7 @@ def point(serie_name, timestamp=None, tags=None, **fields): } ] -############################################################################# - - -class InfluxDbInstance(object): - ''' A class to launch of fresh influxdb server instance - in a temporary place, using a config file template. 
- ''' - - def __init__(self, conf_template, udp_enabled=False): - # create a temporary dir to store all needed files - # for the influxdb server instance : - self.temp_dir_base = tempfile.mkdtemp() - # "temp_dir_base" will be used for conf file and logs, - # while "temp_dir_influxdb" is for the databases files/dirs : - tempdir = self.temp_dir_influxdb = tempfile.mkdtemp( - dir=self.temp_dir_base) - # we need some "free" ports : - - ports = dict( - http_port=get_free_port(), - admin_port=get_free_port(), - meta_port=get_free_port(), - udp_port=get_free_port() if udp_enabled else -1, - ) - - conf_data = dict( - meta_dir=os.path.join(tempdir, 'meta'), - data_dir=os.path.join(tempdir, 'data'), - cluster_dir=os.path.join(tempdir, 'state'), - handoff_dir=os.path.join(tempdir, 'handoff'), - logs_file=os.path.join(self.temp_dir_base, 'logs.txt'), - udp_enabled='true' if udp_enabled else 'false', - ) - conf_data.update(ports) - self.__dict__.update(conf_data) - - conf_file = os.path.join(self.temp_dir_base, 'influxdb.conf') - with open(conf_file, "w") as fh: - with open(conf_template) as fh_template: - fh.write(fh_template.read().format(**conf_data)) - - # now start the server instance: - proc = self.proc = subprocess.Popen( - [influxdb_bin_path, '-config', conf_file], - stdout=subprocess.PIPE, stderr=subprocess.PIPE) - - print("%s > Started influxdb bin in %r with ports %s and %s.." % ( - datetime.datetime.now(), - self.temp_dir_base, - self.admin_port, self.http_port)) - - # wait for it to listen on the broker and admin ports: - # usually a fresh instance is ready in less than 1 sec .. - timeout = time.time() + 10 # so 10 secs should be enough, - # otherwise either your system load is high, - # or you run a 286 @ 1Mhz ? - try: - while time.time() < timeout: - if (is_port_open(self.http_port) - and is_port_open(self.admin_port)): - # it's hard to check if a UDP port is open.. 
- if udp_enabled: - # so let's just sleep 0.5 sec in this case - # to be sure that the server has open the port - time.sleep(0.5) - break - time.sleep(0.5) - if proc.poll() is not None: - raise RuntimeError('influxdb prematurely exited') - else: - proc.terminate() - proc.wait() - raise RuntimeError('Timeout waiting for influxdb to listen' - ' on its ports (%s)' % ports) - except RuntimeError as err: - data = self.get_logs_and_output() - data['reason'] = str(err) - data['now'] = datetime.datetime.now() - raise RuntimeError("%(now)s > %(reason)s. RC=%(rc)s\n" - "stdout=%(out)s\nstderr=%(err)s\nlogs=%(logs)r" - % data) - - def get_logs_and_output(self): - proc = self.proc - try: - with open(self.logs_file) as fh: - logs = fh.read() - except IOError as err: - logs = "Couldn't read logs: %s" % err - return { - 'rc': proc.returncode, - 'out': proc.stdout.read(), - 'err': proc.stderr.read(), - 'logs': logs - } - - def close(self, remove_tree=True): - self.proc.terminate() - self.proc.wait() - if remove_tree: - shutil.rmtree(self.temp_dir_base) - -############################################################################ - - -def _setup_influxdb_server(inst): - inst.influxd_inst = InfluxDbInstance( - inst.influxdb_template_conf, - udp_enabled=getattr(inst, 'influxdb_udp_enabled', False)) - - inst.cli = InfluxDBClient('localhost', - inst.influxd_inst.http_port, - 'root', - '', - database='db') - if not using_pypy: - inst.cliDF = DataFrameClient('localhost', - inst.influxd_inst.http_port, - 'root', - '', - database='db') - - -def _unsetup_influxdb_server(inst): - remove_tree = sys.exc_info() == (None, None, None) - inst.influxd_inst.close(remove_tree=remove_tree) - -############################################################################ - - -class SingleTestCaseWithServerMixin(object): - ''' A mixin for unittest.TestCase to start an influxdb server instance - in a temporary directory **for each test function/case** - ''' - - # 'influxdb_template_conf' attribute must 
be set - # on the TestCase class or instance. - - setUp = _setup_influxdb_server - tearDown = _unsetup_influxdb_server - - -class ManyTestCasesWithServerMixin(object): - ''' Same than SingleTestCaseWithServerMixin - but creates a single instance for the whole class. - Also pre-creates a fresh database: 'db'. - ''' - - # 'influxdb_template_conf' attribute must be set on the class itself ! - - @classmethod - def setUpClass(cls): - _setup_influxdb_server(cls) - - def setUp(self): - self.cli.create_database('db') - - @classmethod - def tearDownClass(cls): - _unsetup_influxdb_server(cls) - - def tearDown(self): - self.cli.drop_database('db') - -############################################################################ - - -@unittest.skipIf(not is_influxdb_bin_ok, "could not find influxd binary") +@skipServerTests class SimpleTests(SingleTestCaseWithServerMixin, unittest.TestCase): @@ -492,10 +286,7 @@ def test_revoke_privilege_invalid(self): ctx.exception.content) -############################################################################ - - -@unittest.skipIf(not is_influxdb_bin_ok, "could not find influxd binary") +@skipServerTests class CommonTests(ManyTestCasesWithServerMixin, unittest.TestCase): @@ -858,9 +649,7 @@ def test_query_multiple_series(self): self.cli.write_points(pts) -############################################################################ - -@unittest.skipIf(not is_influxdb_bin_ok, "could not find influxd binary") +@skipServerTests class UdpTests(ManyTestCasesWithServerMixin, unittest.TestCase): diff --git a/tests/influxdb/influxdb.conf.template b/tests/influxdb/server_tests/influxdb.conf.template similarity index 100% rename from tests/influxdb/influxdb.conf.template rename to tests/influxdb/server_tests/influxdb.conf.template diff --git a/tests/influxdb/server_tests/influxdb_instance.py b/tests/influxdb/server_tests/influxdb_instance.py new file mode 100644 index 00000000..cbc77c58 --- /dev/null +++ 
b/tests/influxdb/server_tests/influxdb_instance.py @@ -0,0 +1,153 @@ +# -*- coding: utf-8 -*- + +from __future__ import print_function + +import datetime +import os +import tempfile +import distutils +import time +import shutil +import subprocess +import unittest +import sys + +from tests.influxdb.misc import get_free_port, is_port_open + + +class InfluxDbInstance(object): + """ A class to launch of fresh influxdb server instance + in a temporary place, using a config file template. + """ + + def __init__(self, + conf_template, + udp_enabled=False): + + self.influxd_path = self.find_influxd_path() + + + # create a temporary dir to store all needed files + # for the influxdb server instance : + self.temp_dir_base = tempfile.mkdtemp() + + # "temp_dir_base" will be used for conf file and logs, + # while "temp_dir_influxdb" is for the databases files/dirs : + tempdir = self.temp_dir_influxdb = tempfile.mkdtemp( + dir=self.temp_dir_base) + + # find a couple free ports : + ports = dict( + http_port=get_free_port(), + admin_port=get_free_port(), + meta_port=get_free_port(), + udp_port=get_free_port() if udp_enabled else -1, + ) + + conf_data = dict( + meta_dir=os.path.join(tempdir, 'meta'), + data_dir=os.path.join(tempdir, 'data'), + cluster_dir=os.path.join(tempdir, 'state'), + handoff_dir=os.path.join(tempdir, 'handoff'), + logs_file=os.path.join(self.temp_dir_base, 'logs.txt'), + udp_enabled='true' if udp_enabled else 'false', + ) + conf_data.update(ports) + self.__dict__.update(conf_data) + + conf_file = os.path.join(self.temp_dir_base, 'influxdb.conf') + with open(conf_file, "w") as fh: + with open(conf_template) as fh_template: + fh.write(fh_template.read().format(**conf_data)) + + # now start the server instance: + self.proc = subprocess.Popen( + [self.influxd_path, '-config', conf_file], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE + ) + + print( + "%s > Started influxdb bin in %r with ports %s and %s.." 
% ( + datetime.datetime.now(), + self.temp_dir_base, + self.admin_port, + self.http_port + ) + ) + + # wait for it to listen on the broker and admin ports: + # usually a fresh instance is ready in less than 1 sec .. + timeout = time.time() + 10 # so 10 secs should be enough, + # otherwise either your system load is high, + # or you run a 286 @ 1Mhz ? + try: + while time.time() < timeout: + if (is_port_open(self.http_port) + and is_port_open(self.admin_port)): + # it's hard to check if a UDP port is open.. + if udp_enabled: + # so let's just sleep 0.5 sec in this case + # to be sure that the server has open the port + time.sleep(0.5) + break + time.sleep(0.5) + if self.proc.poll() is not None: + raise RuntimeError('influxdb prematurely exited') + else: + self.proc.terminate() + self.proc.wait() + raise RuntimeError('Timeout waiting for influxdb to listen' + ' on its ports (%s)' % ports) + except RuntimeError as err: + data = self.get_logs_and_output() + data['reason'] = str(err) + data['now'] = datetime.datetime.now() + raise RuntimeError("%(now)s > %(reason)s. 
RC=%(rc)s\n" + "stdout=%(out)s\nstderr=%(err)s\nlogs=%(logs)r" + % data) + + def find_influxd_path(self): + influxdb_bin_path = os.environ.get( + 'INFLUXDB_PYTHON_INFLUXD_PATH', + None + ) + + if influxdb_bin_path is None: + influxdb_bin_path = distutils.spawn.find_executable('influxd') + if not influxdb_bin_path: + try: + influxdb_bin_path = subprocess.check_output( + ['which', 'influxdb'] + ).strip() + except subprocess.CalledProcessError: + # fallback on : + influxdb_bin_path = '/opt/influxdb/influxd' + + if not os.path.isfile(influxdb_bin_path): + raise unittest.SkipTest("Could not find influxd binary") + + version = subprocess.check_output([influxdb_bin_path, 'version']) + print("InfluxDB version: %s" % version, file=sys.stderr) + + return influxdb_bin_path + + def get_logs_and_output(self): + proc = self.proc + try: + with open(self.logs_file) as fh: + logs = fh.read() + except IOError as err: + logs = "Couldn't read logs: %s" % err + return { + 'rc': proc.returncode, + 'out': proc.stdout.read(), + 'err': proc.stderr.read(), + 'logs': logs + } + + def close(self, remove_tree=True): + self.proc.terminate() + self.proc.wait() + if remove_tree: + shutil.rmtree(self.temp_dir_base) \ No newline at end of file diff --git a/tox.ini b/tox.ini index 4ec36717..51cadfa5 100644 --- a/tox.ini +++ b/tox.ini @@ -9,6 +9,7 @@ deps = -r{toxinidir}/requirements.txt commands = nosetests -v {posargs} [testenv:flake8] +setenv = INFLUXDB_PYTHON_SKIP_SERVER_TESTS=False deps = flake8 pep8-naming From cfa40152ae9034552fc76312038de2815a5360a9 Mon Sep 17 00:00:00 2001 From: aviau Date: Sun, 26 Jul 2015 22:38:17 -0400 Subject: [PATCH 223/536] flake8 fixes --- tests/__init__.py | 7 +++++-- tests/influxdb/server_tests/client_test_with_server.py | 1 + tests/influxdb/server_tests/influxdb_instance.py | 3 +-- 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/tests/__init__.py b/tests/__init__.py index e87fdbc4..680c1eaf 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ 
-6,5 +6,8 @@ using_pypy = hasattr(sys, "pypy_version_info") skipIfPYpy = unittest.skipIf(using_pypy, "Skipping this test on pypy.") -_skip_server_tests = os.environ.get('INFLUXDB_PYTHON_SKIP_SERVER_TESTS', None) == 'True' -skipServerTests = unittest.skipIf(_skip_server_tests, "Skipping server tests...") +_skip_server_tests = os.environ.get( + 'INFLUXDB_PYTHON_SKIP_SERVER_TESTS', + None) == 'True' +skipServerTests = unittest.skipIf(_skip_server_tests, + "Skipping server tests...") diff --git a/tests/influxdb/server_tests/client_test_with_server.py b/tests/influxdb/server_tests/client_test_with_server.py index 8b0274f6..dfcb11d8 100644 --- a/tests/influxdb/server_tests/client_test_with_server.py +++ b/tests/influxdb/server_tests/client_test_with_server.py @@ -113,6 +113,7 @@ def point(serie_name, timestamp=None, tags=None, **fields): } ] + @skipServerTests class SimpleTests(SingleTestCaseWithServerMixin, unittest.TestCase): diff --git a/tests/influxdb/server_tests/influxdb_instance.py b/tests/influxdb/server_tests/influxdb_instance.py index cbc77c58..d1943e02 100644 --- a/tests/influxdb/server_tests/influxdb_instance.py +++ b/tests/influxdb/server_tests/influxdb_instance.py @@ -26,7 +26,6 @@ def __init__(self, self.influxd_path = self.find_influxd_path() - # create a temporary dir to store all needed files # for the influxdb server instance : self.temp_dir_base = tempfile.mkdtemp() @@ -150,4 +149,4 @@ def close(self, remove_tree=True): self.proc.terminate() self.proc.wait() if remove_tree: - shutil.rmtree(self.temp_dir_base) \ No newline at end of file + shutil.rmtree(self.temp_dir_base) From 7ca7e7b6ace1fd2f46eb65a554bbc40c2530042d Mon Sep 17 00:00:00 2001 From: aviau Date: Sun, 26 Jul 2015 22:24:10 -0400 Subject: [PATCH 224/536] Use new travis infrastructure --- .travis.yml | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 39bf1dce..868b913a 100644 --- a/.travis.yml +++ b/.travis.yml @@ -10,9 +10,12 @@ 
env: - TOX_ENV=flake8 - TOX_ENV=coverage install: - - sudo pip install tox - - sudo pip install coveralls - - wget https://s3.amazonaws.com/influxdb/influxdb_0.9.2_amd64.deb && sudo dpkg -i influxdb_0.9.2_amd64.deb + - pip install tox + - pip install coveralls + - mkdir influxdb_install + - wget https://s3.amazonaws.com/influxdb/influxdb_0.9.2_amd64.deb + - dpkg -x influxdb_*_amd64.deb influxdb_install + - export INFLUXDB_PYTHON_INFLUXD_PATH=$(pwd)/influxdb_install/opt/influxdb/versions/0.9.2/influxd script: - travis_wait 30 tox -e $TOX_ENV after_success: @@ -20,6 +23,8 @@ after_success: notifications: email: false +sudo: false + # Travis caching cache: directories: From 45f2fe2caccf26320a030217fcc810f5b5307f89 Mon Sep 17 00:00:00 2001 From: aviau Date: Sun, 26 Jul 2015 23:06:49 -0400 Subject: [PATCH 225/536] Released 2.7.1 --- influxdb/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/__init__.py b/influxdb/__init__.py index e2982e62..2266e476 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -13,4 +13,4 @@ ] -__version__ = '2.7.0' +__version__ = '2.7.1' From ddc9eb4832c6b0d343e49375100105eae39dfd61 Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 31 Jul 2015 09:52:48 -0400 Subject: [PATCH 226/536] Docs: Examples are not doc tests --- influxdb/client.py | 68 ++++++++++++++++++++--------------- influxdb/influxdb08/client.py | 8 ++--- tox.ini | 2 +- 3 files changed, 44 insertions(+), 34 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index a7c265a7..6dae571f 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -115,14 +115,16 @@ def from_DSN(dsn, **kwargs): :Example: - >>> cli = InfluxDBClient.from_DSN('influxdb://username:password@\ + :: + + >> cli = InfluxDBClient.from_DSN('influxdb://username:password@\ localhost:8086/databasename', timeout=5) - >>> type(cli) - - >>> cli = InfluxDBClient.from_DSN('udp+influxdb://username:pass@\ + >> type(cli) + + >> cli = 
InfluxDBClient.from_DSN('udp+influxdb://username:pass@\ localhost:8086/databasename', timeout=5, udp_port=159) - >>> print('{0._baseurl} - {0.use_udp} {0.udp_port}'.format(cli)) - http://localhost:8086 - True 159 + >> print('{0._baseurl} - {0.use_udp} {0.udp_port}'.format(cli)) + http://localhost:8086 - True 159 .. note:: parameters provided in `**kwargs` may override dsn parameters .. note:: when using "udp+influxdb" the specified port (if any) will @@ -431,9 +433,11 @@ def get_list_database(self): :Example: - >>> dbs = client.get_list_database() - >>> dbs - [{u'name': u'db1'}, {u'name': u'db2'}, {u'name': u'db3'}] + :: + + >> dbs = client.get_list_database() + >> dbs + [{u'name': u'db1'}, {u'name': u'db2'}, {u'name': u'db3'}] """ return list(self.query("SHOW DATABASES").get_points()) @@ -532,13 +536,15 @@ def get_list_retention_policies(self, database=None): :Example: - >>> ret_policies = client.get_list_retention_policies('my_db') - >>> ret_policies - [{u'default': True, - u'duration': u'0', - u'name': u'default', - u'replicaN': 1}] - """ + :: + + >> ret_policies = client.get_list_retention_policies('my_db') + >> ret_policies + [{u'default': True, + u'duration': u'0', + u'name': u'default', + u'replicaN': 1}] + """ rsp = self.query( "SHOW RETENTION POLICIES ON %s" % (database or self._database) ) @@ -555,8 +561,8 @@ def get_list_series(self, database=None): :Example: - >>> series = client.get_list_series('my_database') - >>> series + >> series = client.get_list_series('my_database') + >> series [{'name': u'cpu_usage', 'tags': [{u'_id': 1, u'host': u'server01', @@ -581,11 +587,13 @@ def get_list_users(self): :Example: - >>> users = client.get_list_users() - >>> users - [{u'admin': True, u'user': u'user1'}, - {u'admin': False, u'user': u'user2'}, - {u'admin': False, u'user': u'user3'}] + :: + + >> users = client.get_list_users() + >> users + [{u'admin': True, u'user': u'user1'}, + {u'admin': False, u'user': u'user2'}, + {u'admin': False, u'user': u'user3'}] """ 
return list(self.query("SHOW USERS").get_points()) @@ -768,13 +776,15 @@ def from_DSN(dsn, client_base_class=InfluxDBClient, :Example: - >>> cluster = InfluxDBClusterClient.from_DSN('influxdb://usr:pwd\ + :: + + >> cluster = InfluxDBClusterClient.from_DSN('influxdb://usr:pwd\ @host1:8086,usr:pwd@host2:8086/db_name', timeout=5) - >>> type(cluster) - - >>> cluster.clients - [, - ] + >> type(cluster) + + >> cluster.clients + [, + ] """ conn_params = urlparse(dsn) netlocs = conn_params.netloc.split(',') diff --git a/influxdb/influxdb08/client.py b/influxdb/influxdb08/client.py index 0af499bc..5d8f59c5 100644 --- a/influxdb/influxdb08/client.py +++ b/influxdb/influxdb08/client.py @@ -115,13 +115,13 @@ def from_DSN(dsn, **kwargs): also be passed to this function. Examples: - >>> cli = InfluxDBClient.from_DSN('influxdb://username:password@\ + >> cli = InfluxDBClient.from_DSN('influxdb://username:password@\ ... localhost:8086/databasename', timeout=5) - >>> type(cli) + >> type(cli) - >>> cli = InfluxDBClient.from_DSN('udp+influxdb://username:pass@\ + >> cli = InfluxDBClient.from_DSN('udp+influxdb://username:pass@\ ... 
localhost:8086/databasename', timeout=5, udp_port=159) - >>> print('{0._baseurl} - {0.use_udp} {0.udp_port}'.format(cli)) + >> print('{0._baseurl} - {0.use_udp} {0.udp_port}'.format(cli)) http://localhost:8086 - True 159 :param dsn: data source name diff --git a/tox.ini b/tox.ini index 51cadfa5..b3fb4152 100644 --- a/tox.ini +++ b/tox.ini @@ -6,7 +6,7 @@ deps = -r{toxinidir}/requirements.txt -r{toxinidir}/test-requirements.txt py27,py32,py33,py34: pandas # Only install pandas with non-pypy interpreters -commands = nosetests -v {posargs} +commands = nosetests -v --with-doctest {posargs} [testenv:flake8] setenv = INFLUXDB_PYTHON_SKIP_SERVER_TESTS=False From 35156797d82bef19f2d5ed1991d27e16de7648eb Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 31 Jul 2015 10:17:37 -0400 Subject: [PATCH 227/536] released 2.7.2 --- influxdb/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/__init__.py b/influxdb/__init__.py index 2266e476..86bf6cf3 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -13,4 +13,4 @@ ] -__version__ = '2.7.1' +__version__ = '2.7.2' From f6e24116fba30a85cc5286ea8c9f83425d5b236e Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 31 Jul 2015 10:56:15 -0400 Subject: [PATCH 228/536] Moved tests in influxdb package --- {tests => influxdb/tests}/__init__.py | 0 {tests/influxdb => influxdb/tests}/chunked_json_test.py | 0 {tests/influxdb => influxdb/tests}/client_test.py | 0 {tests/influxdb => influxdb/tests}/dataframe_client_test.py | 2 +- {tests/influxdb => influxdb/tests}/helper_test.py | 0 {tests/influxdb => influxdb/tests/influxdb08}/__init__.py | 0 .../influxdb => influxdb/tests}/influxdb08/client_test.py | 0 .../tests}/influxdb08/dataframe_client_test.py | 2 +- .../influxdb => influxdb/tests}/influxdb08/helper_test.py | 0 {tests/influxdb => influxdb/tests}/misc.py | 0 {tests/influxdb => influxdb/tests}/resultset_test.py | 0 {tests/influxdb => influxdb/tests}/server_tests/__init__.py | 0 {tests/influxdb => 
influxdb/tests}/server_tests/base.py | 4 ++-- .../tests}/server_tests/client_test_with_server.py | 6 +++--- .../tests}/server_tests/influxdb.conf.template | 0 .../tests}/server_tests/influxdb_instance.py | 2 +- {tests/influxdb => influxdb/tests}/test_line_protocol.py | 0 tests/influxdb/influxdb08/__init__.py | 1 - 18 files changed, 8 insertions(+), 9 deletions(-) rename {tests => influxdb/tests}/__init__.py (100%) rename {tests/influxdb => influxdb/tests}/chunked_json_test.py (100%) rename {tests/influxdb => influxdb/tests}/client_test.py (100%) rename {tests/influxdb => influxdb/tests}/dataframe_client_test.py (99%) rename {tests/influxdb => influxdb/tests}/helper_test.py (100%) rename {tests/influxdb => influxdb/tests/influxdb08}/__init__.py (100%) rename {tests/influxdb => influxdb/tests}/influxdb08/client_test.py (100%) rename {tests/influxdb => influxdb/tests}/influxdb08/dataframe_client_test.py (99%) rename {tests/influxdb => influxdb/tests}/influxdb08/helper_test.py (100%) rename {tests/influxdb => influxdb/tests}/misc.py (100%) rename {tests/influxdb => influxdb/tests}/resultset_test.py (100%) rename {tests/influxdb => influxdb/tests}/server_tests/__init__.py (100%) rename {tests/influxdb => influxdb/tests}/server_tests/base.py (94%) rename {tests/influxdb => influxdb/tests}/server_tests/client_test_with_server.py (99%) rename {tests/influxdb => influxdb/tests}/server_tests/influxdb.conf.template (100%) rename {tests/influxdb => influxdb/tests}/server_tests/influxdb_instance.py (98%) rename {tests/influxdb => influxdb/tests}/test_line_protocol.py (100%) delete mode 100644 tests/influxdb/influxdb08/__init__.py diff --git a/tests/__init__.py b/influxdb/tests/__init__.py similarity index 100% rename from tests/__init__.py rename to influxdb/tests/__init__.py diff --git a/tests/influxdb/chunked_json_test.py b/influxdb/tests/chunked_json_test.py similarity index 100% rename from tests/influxdb/chunked_json_test.py rename to influxdb/tests/chunked_json_test.py 
diff --git a/tests/influxdb/client_test.py b/influxdb/tests/client_test.py similarity index 100% rename from tests/influxdb/client_test.py rename to influxdb/tests/client_test.py diff --git a/tests/influxdb/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py similarity index 99% rename from tests/influxdb/dataframe_client_test.py rename to influxdb/tests/dataframe_client_test.py index 481eefbc..d4aac040 100644 --- a/tests/influxdb/dataframe_client_test.py +++ b/influxdb/tests/dataframe_client_test.py @@ -9,7 +9,7 @@ import requests_mock from nose.tools import raises from datetime import timedelta -from tests import skipIfPYpy, using_pypy +from influxdb.tests import skipIfPYpy, using_pypy import warnings if not using_pypy: diff --git a/tests/influxdb/helper_test.py b/influxdb/tests/helper_test.py similarity index 100% rename from tests/influxdb/helper_test.py rename to influxdb/tests/helper_test.py diff --git a/tests/influxdb/__init__.py b/influxdb/tests/influxdb08/__init__.py similarity index 100% rename from tests/influxdb/__init__.py rename to influxdb/tests/influxdb08/__init__.py diff --git a/tests/influxdb/influxdb08/client_test.py b/influxdb/tests/influxdb08/client_test.py similarity index 100% rename from tests/influxdb/influxdb08/client_test.py rename to influxdb/tests/influxdb08/client_test.py diff --git a/tests/influxdb/influxdb08/dataframe_client_test.py b/influxdb/tests/influxdb08/dataframe_client_test.py similarity index 99% rename from tests/influxdb/influxdb08/dataframe_client_test.py rename to influxdb/tests/influxdb08/dataframe_client_test.py index dd8955db..63a10c93 100644 --- a/tests/influxdb/influxdb08/dataframe_client_test.py +++ b/influxdb/tests/influxdb08/dataframe_client_test.py @@ -9,7 +9,7 @@ import requests_mock from nose.tools import raises from datetime import timedelta -from tests import skipIfPYpy, using_pypy +from influxdb.tests import skipIfPYpy, using_pypy import copy import warnings diff --git 
a/tests/influxdb/influxdb08/helper_test.py b/influxdb/tests/influxdb08/helper_test.py similarity index 100% rename from tests/influxdb/influxdb08/helper_test.py rename to influxdb/tests/influxdb08/helper_test.py diff --git a/tests/influxdb/misc.py b/influxdb/tests/misc.py similarity index 100% rename from tests/influxdb/misc.py rename to influxdb/tests/misc.py diff --git a/tests/influxdb/resultset_test.py b/influxdb/tests/resultset_test.py similarity index 100% rename from tests/influxdb/resultset_test.py rename to influxdb/tests/resultset_test.py diff --git a/tests/influxdb/server_tests/__init__.py b/influxdb/tests/server_tests/__init__.py similarity index 100% rename from tests/influxdb/server_tests/__init__.py rename to influxdb/tests/server_tests/__init__.py diff --git a/tests/influxdb/server_tests/base.py b/influxdb/tests/server_tests/base.py similarity index 94% rename from tests/influxdb/server_tests/base.py rename to influxdb/tests/server_tests/base.py index db15dfd4..7bd17eaf 100644 --- a/tests/influxdb/server_tests/base.py +++ b/influxdb/tests/server_tests/base.py @@ -2,8 +2,8 @@ import sys -from tests import using_pypy -from tests.influxdb.server_tests.influxdb_instance import InfluxDbInstance +from influxdb.tests import using_pypy +from influxdb.tests.server_tests.influxdb_instance import InfluxDbInstance from influxdb.client import InfluxDBClient diff --git a/tests/influxdb/server_tests/client_test_with_server.py b/influxdb/tests/server_tests/client_test_with_server.py similarity index 99% rename from tests/influxdb/server_tests/client_test_with_server.py rename to influxdb/tests/server_tests/client_test_with_server.py index dfcb11d8..2cc2d1c3 100644 --- a/tests/influxdb/server_tests/client_test_with_server.py +++ b/influxdb/tests/server_tests/client_test_with_server.py @@ -24,9 +24,9 @@ from influxdb import InfluxDBClient from influxdb.exceptions import InfluxDBClientError -from tests import skipIfPYpy, using_pypy, skipServerTests -from 
tests.influxdb.server_tests.base import ManyTestCasesWithServerMixin -from tests.influxdb.server_tests.base import SingleTestCaseWithServerMixin +from influxdb.tests import skipIfPYpy, using_pypy, skipServerTests +from influxdb.tests.server_tests.base import ManyTestCasesWithServerMixin +from influxdb.tests.server_tests.base import SingleTestCaseWithServerMixin if not using_pypy: import pandas as pd diff --git a/tests/influxdb/server_tests/influxdb.conf.template b/influxdb/tests/server_tests/influxdb.conf.template similarity index 100% rename from tests/influxdb/server_tests/influxdb.conf.template rename to influxdb/tests/server_tests/influxdb.conf.template diff --git a/tests/influxdb/server_tests/influxdb_instance.py b/influxdb/tests/server_tests/influxdb_instance.py similarity index 98% rename from tests/influxdb/server_tests/influxdb_instance.py rename to influxdb/tests/server_tests/influxdb_instance.py index d1943e02..f4d5171d 100644 --- a/tests/influxdb/server_tests/influxdb_instance.py +++ b/influxdb/tests/server_tests/influxdb_instance.py @@ -12,7 +12,7 @@ import unittest import sys -from tests.influxdb.misc import get_free_port, is_port_open +from influxdb.tests.misc import get_free_port, is_port_open class InfluxDbInstance(object): diff --git a/tests/influxdb/test_line_protocol.py b/influxdb/tests/test_line_protocol.py similarity index 100% rename from tests/influxdb/test_line_protocol.py rename to influxdb/tests/test_line_protocol.py diff --git a/tests/influxdb/influxdb08/__init__.py b/tests/influxdb/influxdb08/__init__.py deleted file mode 100644 index 40a96afc..00000000 --- a/tests/influxdb/influxdb08/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- From d3442997995a18a9f9cdd627315ecee22593ade0 Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 31 Jul 2015 11:05:46 -0400 Subject: [PATCH 229/536] Travis: export INFLUXDB_PYTHON_INFLUXD_PATH during install --- .travis.yml | 2 +- tox.ini | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) 
diff --git a/.travis.yml b/.travis.yml index 868b913a..e19ddf22 100644 --- a/.travis.yml +++ b/.travis.yml @@ -15,8 +15,8 @@ install: - mkdir influxdb_install - wget https://s3.amazonaws.com/influxdb/influxdb_0.9.2_amd64.deb - dpkg -x influxdb_*_amd64.deb influxdb_install - - export INFLUXDB_PYTHON_INFLUXD_PATH=$(pwd)/influxdb_install/opt/influxdb/versions/0.9.2/influxd script: + - export INFLUXDB_PYTHON_INFLUXD_PATH=$(pwd)/influxdb_install/opt/influxdb/versions/0.9.2/influxd - travis_wait 30 tox -e $TOX_ENV after_success: - if [ "$TOX_ENV" == "coverage" ] ; then coveralls; fi diff --git a/tox.ini b/tox.ini index b3fb4152..49b25bf3 100644 --- a/tox.ini +++ b/tox.ini @@ -2,6 +2,8 @@ envlist = py34, py27, pypy, flake8 [testenv] +passenv = INFLUXDB_PYTHON_INFLUXD_PATH +setenv = INFLUXDB_PYTHON_SKIP_SERVER_TESTS=False deps = -r{toxinidir}/requirements.txt -r{toxinidir}/test-requirements.txt py27,py32,py33,py34: pandas @@ -9,7 +11,6 @@ deps = -r{toxinidir}/requirements.txt commands = nosetests -v --with-doctest {posargs} [testenv:flake8] -setenv = INFLUXDB_PYTHON_SKIP_SERVER_TESTS=False deps = flake8 pep8-naming From aa33472f0c288b0844df2d9a00b32e5b14547a99 Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 31 Jul 2015 11:16:06 -0400 Subject: [PATCH 230/536] Added support for INFLUXDB_PYTHON_SKIP_SERVER_TESTS --- influxdb/tests/server_tests/influxdb_instance.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/influxdb/tests/server_tests/influxdb_instance.py b/influxdb/tests/server_tests/influxdb_instance.py index f4d5171d..ae32bab6 100644 --- a/influxdb/tests/server_tests/influxdb_instance.py +++ b/influxdb/tests/server_tests/influxdb_instance.py @@ -24,6 +24,11 @@ def __init__(self, conf_template, udp_enabled=False): + if os.environ.get("INFLUXDB_PYTHON_SKIP_SERVER_TESTS", None) == 'True': + raise unittest.SkipTest( + "Skipping server test (INFLUXDB_PYTHON_SKIP_SERVER_TESTS)" + ) + self.influxd_path = self.find_influxd_path() # create a temporary dir to store all 
needed files From d0a2e6f571e54102434a76d32e807ea1fc5ed4e1 Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 31 Jul 2015 11:17:05 -0400 Subject: [PATCH 231/536] Flake8: no more tests folder --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 49b25bf3..74f733d0 100644 --- a/tox.ini +++ b/tox.ini @@ -14,7 +14,7 @@ commands = nosetests -v --with-doctest {posargs} deps = flake8 pep8-naming -commands = flake8 influxdb tests +commands = flake8 influxdb [testenv:coverage] deps = -r{toxinidir}/requirements.txt From a7ba64f9ada8144eb21c3a529b0cc5a40b40cb2e Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 31 Jul 2015 11:20:17 -0400 Subject: [PATCH 232/536] Released 2.7.3 --- influxdb/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/__init__.py b/influxdb/__init__.py index 86bf6cf3..a686ec6a 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -13,4 +13,4 @@ ] -__version__ = '2.7.2' +__version__ = '2.7.3' From e34acc16279b68ea0301db9a13e86a2b0460f7c9 Mon Sep 17 00:00:00 2001 From: "tzonghao.chen" Date: Wed, 5 Aug 2015 22:18:03 -0400 Subject: [PATCH 233/536] Improve string concat peformance. --- influxdb/_dataframe_client.py | 10 +++++----- influxdb/line_protocol.py | 32 +++++++++++++++++++++----------- 2 files changed, 26 insertions(+), 16 deletions(-) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index 1d640256..4bb07248 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -39,7 +39,7 @@ def write_points(self, dataframe, measurement, tags=None, :param dataframe: data points in a DataFrame :param measurement: name of measurement :param tags: dictionary of tags, with string key-values - :param time_precision: [Optional, default 's'] Either 's', 'ms', 'u' + :param time_precision: [Optional, default None] Either 's', 'ms', 'u' or 'n'. :param batch_size: [Optional] Value to write the points in batches instead of all at one time. 
Useful for when doing data dumps from @@ -140,7 +140,7 @@ def _convert_dataframe_to_json(self, dataframe, measurement, tags=None): {'measurement': measurement, 'tags': tags if tags else {}, 'fields': rec, - 'time': ts.isoformat() + 'time': ts.value } for ts, rec in zip(dataframe.index, dataframe.to_dict('record'))] return points @@ -150,8 +150,8 @@ def _datetime_to_epoch(self, datetime, time_precision='s'): if time_precision == 's': return seconds elif time_precision == 'ms': - return seconds * 10 ** 3 + return seconds * 1e3 elif time_precision == 'u': - return seconds * 10 ** 6 + return seconds * 1e6 elif time_precision == 'n': - return seconds * 10 ** 9 + return seconds * 1e9 diff --git a/influxdb/line_protocol.py b/influxdb/line_protocol.py index 403e463c..12b42fc8 100644 --- a/influxdb/line_protocol.py +++ b/influxdb/line_protocol.py @@ -63,13 +63,16 @@ def make_lines(data): Extracts the points from the given dict and returns a Unicode string matching the line protocol introduced in InfluxDB 0.9.0. 
""" - lines = "" + lines = [] static_tags = data.get('tags', None) for point in data['points']: + elements = [] + # add measurement name - lines += _escape_tag(_force_text( + measurement = _escape_tag(_force_text( point.get('measurement', data.get('measurement')) - )) + "," + )) + key_values = [measurement] # add tags if static_tags is None: @@ -77,27 +80,34 @@ def make_lines(data): else: tags = copy(static_tags) tags.update(point.get('tags', {})) + # tags should be sorted client-side to take load off server for tag_key in sorted(tags.keys()): key = _escape_tag(tag_key) value = _escape_tag(tags[tag_key]) if key != '' and value != '': - lines += "{key}={value},".format(key=key, value=value) - lines = lines[:-1] + " " # strip the trailing comma + key_values.append("{key}={value}".format(key=key, value=value)) + key_values = ','.join(key_values) + elements.append(key_values) # add fields + field_values = [] for field_key in sorted(point['fields'].keys()): - lines += "{key}={value},".format( + field_values.append("{key}={value}".format( key=_escape_tag(field_key), value=_escape_value(point['fields'][field_key]), - ) - lines = lines[:-1] # strip the trailing comma + )) + field_values = ','.join(field_values) + elements.append(field_values) # add timestamp if 'time' in point: - lines += " " + _force_text(str(int( + timestamp = _force_text(str(int( _convert_timestamp(point['time']) ))) + elements.append(timestamp) - lines += "\n" - return lines + line = ' '.join(elements) + lines.append(line) + lines = '\n'.join(lines) + return lines + '\n' From 02ddf0e783d57762341fbf0662ac3b6c9696dcbc Mon Sep 17 00:00:00 2001 From: "tzonghao.chen" Date: Wed, 5 Aug 2015 22:54:43 -0400 Subject: [PATCH 234/536] Add time_precision support to line protocol. 
--- influxdb/_dataframe_client.py | 17 ++++++--- influxdb/client.py | 7 +++- influxdb/line_protocol.py | 25 +++++++------ influxdb/tests/client_test.py | 26 +++++++++++--- influxdb/tests/dataframe_client_test.py | 48 +++++++++++++------------ 5 files changed, 80 insertions(+), 43 deletions(-) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index 4bb07248..6d9989f6 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -55,15 +55,14 @@ def write_points(self, dataframe, measurement, tags=None, end_index = (batch + 1) * batch_size points = self._convert_dataframe_to_json( dataframe.ix[start_index:end_index].copy(), - measurement, - tags + measurement, tags, time_precision ) super(DataFrameClient, self).write_points( points, time_precision, database, retention_policy) return True else: points = self._convert_dataframe_to_json( - dataframe, measurement, tags + dataframe, measurement, tags, time_precision ) super(DataFrameClient, self).write_points( points, time_precision, database, retention_policy) @@ -116,7 +115,8 @@ def _to_dataframe(self, rs): result[key] = df return result - def _convert_dataframe_to_json(self, dataframe, measurement, tags=None): + def _convert_dataframe_to_json(self, dataframe, measurement, tags=None, + time_precision=None): if not isinstance(dataframe, pd.DataFrame): raise TypeError('Must be DataFrame, but type was: {}.' 
@@ -136,11 +136,18 @@ def _convert_dataframe_to_json(self, dataframe, measurement, tags=None): # Convert dtype for json serialization dataframe = dataframe.astype('object') + precision_factor = { + "n": 1, + "u": 1e3, + "ms": 1e6, + "s": 1e9 + }.get(time_precision, 1) + points = [ {'measurement': measurement, 'tags': tags if tags else {}, 'fields': rec, - 'time': ts.value + 'time': int(ts.value / precision_factor) } for ts, rec in zip(dataframe.index, dataframe.to_dict('record'))] return points diff --git a/influxdb/client.py b/influxdb/client.py index 6dae571f..7bab357d 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -259,11 +259,16 @@ def write(self, data, params=None, expected_response_code=204): headers = self._headers headers['Content-type'] = 'application/octet-stream' + if params: + precision = params.get('precision') + else: + precision = None + self.request( url="write", method='POST', params=params, - data=make_lines(data).encode('utf-8'), + data=make_lines(data, precision).encode('utf-8'), expected_response_code=expected_response_code, headers=headers ) diff --git a/influxdb/line_protocol.py b/influxdb/line_protocol.py index 12b42fc8..c47d4dc8 100644 --- a/influxdb/line_protocol.py +++ b/influxdb/line_protocol.py @@ -6,23 +6,28 @@ from datetime import datetime from dateutil.parser import parse -from pytz import utc from six import binary_type, text_type -def _convert_timestamp(timestamp): +def _convert_timestamp(timestamp, precision=None): if isinstance(timestamp, int): - return timestamp + return timestamp # assume precision is correct if timestamp is int if isinstance(_force_text(timestamp), text_type): timestamp = parse(timestamp) if isinstance(timestamp, datetime): - if timestamp.tzinfo: - timestamp = timestamp.astimezone(utc) - timestamp.replace(tzinfo=None) - return ( - timegm(timestamp.timetuple()) * 1e9 + + ns = ( + timegm(timestamp.utctimetuple()) * 1e9 + timestamp.microsecond * 1e3 ) + if precision is None or precision == 'n': + 
return ns + elif precision == 'u': + return ns / 1e3 + elif precision == 'ms': + return ns / 1e6 + elif precision == 's': + return ns / 1e9 + raise ValueError(timestamp) @@ -58,7 +63,7 @@ def _force_text(data): return data -def make_lines(data): +def make_lines(data, precision=None): """ Extracts the points from the given dict and returns a Unicode string matching the line protocol introduced in InfluxDB 0.9.0. @@ -103,7 +108,7 @@ def make_lines(data): # add timestamp if 'time' in point: timestamp = _force_text(str(int( - _convert_timestamp(point['time']) + _convert_timestamp(point['time'], precision) ))) elements.append(timestamp) diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index 43ecb68e..df73a66b 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -275,17 +275,35 @@ def test_write_points_with_precision(self): ) cli = InfluxDBClient(database='db') - cli.write_points( - self.dummy_points, - time_precision='n' - ) + cli.write_points(self.dummy_points, time_precision='n') self.assertEqual( b"cpu_load_short,host=server01,region=us-west " b"value=0.64 1257894000000000000\n", m.last_request.body, ) + cli.write_points(self.dummy_points, time_precision='u') + self.assertEqual( + b"cpu_load_short,host=server01,region=us-west " + b"value=0.64 1257894000000000\n", + m.last_request.body, + ) + + cli.write_points(self.dummy_points, time_precision='ms') + self.assertEqual( + b"cpu_load_short,host=server01,region=us-west " + b"value=0.64 1257894000000\n", + m.last_request.body, + ) + + cli.write_points(self.dummy_points, time_precision='s') + self.assertEqual( + b"cpu_load_short,host=server01,region=us-west " + b"value=0.64 1257894000\n", + m.last_request.body, + ) + def test_write_points_bad_precision(self): cli = InfluxDBClient() with self.assertRaisesRegexp( diff --git a/influxdb/tests/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py index d4aac040..939b28d0 100644 --- 
a/influxdb/tests/dataframe_client_test.py +++ b/influxdb/tests/dataframe_client_test.py @@ -119,38 +119,40 @@ def test_write_points_from_dataframe_with_time_precision(self): "http://localhost:8086/write", status_code=204) - points = { - 'database': 'db', - 'points': [ - {'time': '1970-01-01T00:00:00+00:00', - 'fields': { - 'column_one': '1', - 'column_three': 1.0, - 'column_two': 1}, - 'tags': {}, - 'measurement': 'foo'}, - {'time': '1970-01-01T01:00:00+00:00', - 'fields': { - 'column_one': '2', - 'column_three': 2.0, - 'column_two': 2}, - 'tags': {}, - 'measurement': 'foo'}] - } - cli = DataFrameClient(database='db') measurement = "foo" cli.write_points(dataframe, measurement, time_precision='s') self.assertEqual(m.last_request.qs['precision'], ['s']) + self.assertEqual( + b'foo column_one="1",column_three=1.0,column_two=1 0\nfoo ' + b'column_one="2",column_three=2.0,column_two=2 3600\n', + m.last_request.body, + ) - cli.write_points(dataframe, measurement, time_precision='m') - points.update(precision='m') - self.assertEqual(m.last_request.qs['precision'], ['m']) + cli.write_points(dataframe, measurement, time_precision='ms') + self.assertEqual(m.last_request.qs['precision'], ['ms']) + self.assertEqual( + b'foo column_one="1",column_three=1.0,column_two=1 0\nfoo ' + b'column_one="2",column_three=2.0,column_two=2 3600000\n', + m.last_request.body, + ) cli.write_points(dataframe, measurement, time_precision='u') - points.update(precision='u') self.assertEqual(m.last_request.qs['precision'], ['u']) + self.assertEqual( + b'foo column_one="1",column_three=1.0,column_two=1 0\nfoo ' + b'column_one="2",column_three=2.0,column_two=2 3600000000\n', + m.last_request.body, + ) + + cli.write_points(dataframe, measurement, time_precision='n') + self.assertEqual(m.last_request.qs['precision'], ['n']) + self.assertEqual( + b'foo column_one="1",column_three=1.0,column_two=1 0\nfoo ' + b'column_one="2",column_three=2.0,column_two=2 3600000000000\n', + m.last_request.body, + ) 
@raises(TypeError) def test_write_points_from_dataframe_fails_without_time_index(self): From 57f8f75fffd613f47b18906cfd1335f58d626fa5 Mon Sep 17 00:00:00 2001 From: "tzonghao.chen" Date: Wed, 5 Aug 2015 23:50:17 -0400 Subject: [PATCH 235/536] flake8 fixed: line too long --- influxdb/tests/dataframe_client_test.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/influxdb/tests/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py index 939b28d0..6ad42238 100644 --- a/influxdb/tests/dataframe_client_test.py +++ b/influxdb/tests/dataframe_client_test.py @@ -149,8 +149,9 @@ def test_write_points_from_dataframe_with_time_precision(self): cli.write_points(dataframe, measurement, time_precision='n') self.assertEqual(m.last_request.qs['precision'], ['n']) self.assertEqual( - b'foo column_one="1",column_three=1.0,column_two=1 0\nfoo ' - b'column_one="2",column_three=2.0,column_two=2 3600000000000\n', + b'foo column_one="1",column_three=1.0,column_two=1 0\n' + b'foo column_one="2",column_three=2.0,column_two=2 ' + b'3600000000000\n', m.last_request.body, ) From b65e4a46008181fa172495ac5284dea9b8d1c827 Mon Sep 17 00:00:00 2001 From: aviau Date: Thu, 6 Aug 2015 10:20:59 -0400 Subject: [PATCH 236/536] Line protocol: handle integer values (Closes: #225) --- influxdb/line_protocol.py | 16 ++++++++++------ influxdb/tests/test_line_protocol.py | 12 ++++++++---- 2 files changed, 18 insertions(+), 10 deletions(-) diff --git a/influxdb/line_protocol.py b/influxdb/line_protocol.py index c47d4dc8..205cd498 100644 --- a/influxdb/line_protocol.py +++ b/influxdb/line_protocol.py @@ -12,7 +12,7 @@ def _convert_timestamp(timestamp, precision=None): if isinstance(timestamp, int): return timestamp # assume precision is correct if timestamp is int - if isinstance(_force_text(timestamp), text_type): + if isinstance(_get_unicode(timestamp), text_type): timestamp = parse(timestamp) if isinstance(timestamp, datetime): ns = ( @@ -32,6 +32,7 @@ def 
_convert_timestamp(timestamp, precision=None): def _escape_tag(tag): + tag = _get_unicode(tag, force=True) return tag.replace( "\\", "\\\\" ).replace( @@ -44,8 +45,8 @@ def _escape_tag(tag): def _escape_value(value): - value = _force_text(value) - if isinstance(value, text_type): + value = _get_unicode(value) + if isinstance(value, text_type) and value != '': return "\"{}\"".format(value.replace( "\"", "\\\"" )) @@ -53,12 +54,14 @@ def _escape_value(value): return str(value) -def _force_text(data): +def _get_unicode(data, force=False): """ Try to return a text aka unicode object from the given data. """ if isinstance(data, binary_type): return data.decode('utf-8') + elif force: + return str(data) else: return data @@ -74,7 +77,7 @@ def make_lines(data, precision=None): elements = [] # add measurement name - measurement = _escape_tag(_force_text( + measurement = _escape_tag(_get_unicode( point.get('measurement', data.get('measurement')) )) key_values = [measurement] @@ -90,6 +93,7 @@ def make_lines(data, precision=None): for tag_key in sorted(tags.keys()): key = _escape_tag(tag_key) value = _escape_tag(tags[tag_key]) + if key != '' and value != '': key_values.append("{key}={value}".format(key=key, value=value)) key_values = ','.join(key_values) @@ -107,7 +111,7 @@ def make_lines(data, precision=None): # add timestamp if 'time' in point: - timestamp = _force_text(str(int( + timestamp = _get_unicode(str(int( _convert_timestamp(point['time'], precision) ))) elements.append(timestamp) diff --git a/influxdb/tests/test_line_protocol.py b/influxdb/tests/test_line_protocol.py index 44c397eb..37759e4f 100644 --- a/influxdb/tests/test_line_protocol.py +++ b/influxdb/tests/test_line_protocol.py @@ -6,16 +6,19 @@ class TestLineProtocol(unittest.TestCase): - def test_empty_tag(self): + def test_make_lines(self): data = { "tags": { - "my_tag": "" + "empty_tag": "", + "integer_tag": 2, + "string_tag": "hello" }, "points": [ { "measurement": "test", "fields": { - "value": "hello!" 
+ "string_val": "hello!", + "int_val": 1, } } ] @@ -23,5 +26,6 @@ def test_empty_tag(self): self.assertEqual( line_protocol.make_lines(data), - 'test value="hello!"\n' + 'test,integer_tag=2,string_tag=hello ' + 'int_val=1,string_val="hello!"\n' ) From 88aeedbebd5931b9ab8a978f18a38a71dd46a1b4 Mon Sep 17 00:00:00 2001 From: "tzonghao.chen" Date: Thu, 6 Aug 2015 11:47:03 -0400 Subject: [PATCH 237/536] Add hour and minute time_precision support. --- influxdb/_dataframe_client.py | 10 +++++-- influxdb/line_protocol.py | 4 +++ influxdb/tests/client_test.py | 40 +++++++++++++++++-------- influxdb/tests/dataframe_client_test.py | 28 +++++++++++++++-- 4 files changed, 65 insertions(+), 17 deletions(-) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index 6d9989f6..8b4b505a 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -140,7 +140,9 @@ def _convert_dataframe_to_json(self, dataframe, measurement, tags=None, "n": 1, "u": 1e3, "ms": 1e6, - "s": 1e9 + "s": 1e9, + "m": 1e9 * 60, + "h": 1e9 * 3600, }.get(time_precision, 1) points = [ @@ -154,7 +156,11 @@ def _convert_dataframe_to_json(self, dataframe, measurement, tags=None, def _datetime_to_epoch(self, datetime, time_precision='s'): seconds = (datetime - self.EPOCH).total_seconds() - if time_precision == 's': + if time_precision == 'h': + return seconds / 3600 + elif time_precision == 'm': + return seconds / 60 + elif time_precision == 's': return seconds elif time_precision == 'ms': return seconds * 1e3 diff --git a/influxdb/line_protocol.py b/influxdb/line_protocol.py index 205cd498..936a74b0 100644 --- a/influxdb/line_protocol.py +++ b/influxdb/line_protocol.py @@ -27,6 +27,10 @@ def _convert_timestamp(timestamp, precision=None): return ns / 1e6 elif precision == 's': return ns / 1e9 + elif precision == 'm': + return ns / 1e9 / 60 + elif precision == 'h': + return ns / 1e9 / 3600 raise ValueError(timestamp) diff --git a/influxdb/tests/client_test.py 
b/influxdb/tests/client_test.py index df73a66b..4f09970d 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -88,7 +88,7 @@ def setUp(self): "host": "server01", "region": "us-west" }, - "time": "2009-11-10T23:00:00Z", + "time": "2009-11-10T23:00:00.123456Z", "fields": { "value": 0.64 } @@ -172,8 +172,8 @@ def test_write_points(self): self.dummy_points, ) self.assertEqual( - "cpu_load_short,host=server01,region=us-west " - "value=0.64 1257894000000000000\n", + 'cpu_load_short,host=server01,region=us-west ' + 'value=0.64 1257894000123456000\n', m.last_request.body.decode('utf-8'), ) @@ -193,8 +193,8 @@ def test_write_points_toplevel_attributes(self): retention_policy="somepolicy" ) self.assertEqual( - "cpu_load_short,host=server01,region=us-west,tag=hello " - "value=0.64 1257894000000000000\n", + 'cpu_load_short,host=server01,region=us-west,tag=hello ' + 'value=0.64 1257894000123456000\n', m.last_request.body.decode('utf-8'), ) @@ -240,8 +240,8 @@ def test_write_points_udp(self): received_data, addr = s.recvfrom(1024) self.assertEqual( - "cpu_load_short,host=server01,region=us-west " - "value=0.64 1257894000000000000\n", + 'cpu_load_short,host=server01,region=us-west ' + 'value=0.64 1257894000123456000\n', received_data.decode() ) @@ -278,22 +278,22 @@ def test_write_points_with_precision(self): cli.write_points(self.dummy_points, time_precision='n') self.assertEqual( - b"cpu_load_short,host=server01,region=us-west " - b"value=0.64 1257894000000000000\n", + b'cpu_load_short,host=server01,region=us-west ' + b'value=0.64 1257894000123456000\n', m.last_request.body, ) cli.write_points(self.dummy_points, time_precision='u') self.assertEqual( - b"cpu_load_short,host=server01,region=us-west " - b"value=0.64 1257894000000000\n", + b'cpu_load_short,host=server01,region=us-west ' + b'value=0.64 1257894000123456\n', m.last_request.body, ) cli.write_points(self.dummy_points, time_precision='ms') self.assertEqual( - 
b"cpu_load_short,host=server01,region=us-west " - b"value=0.64 1257894000000\n", + b'cpu_load_short,host=server01,region=us-west ' + b'value=0.64 1257894000123\n', m.last_request.body, ) @@ -304,6 +304,20 @@ def test_write_points_with_precision(self): m.last_request.body, ) + cli.write_points(self.dummy_points, time_precision='m') + self.assertEqual( + b'cpu_load_short,host=server01,region=us-west ' + b'value=0.64 20964900\n', + m.last_request.body, + ) + + cli.write_points(self.dummy_points, time_precision='h') + self.assertEqual( + b'cpu_load_short,host=server01,region=us-west ' + b'value=0.64 349415\n', + m.last_request.body, + ) + def test_write_points_bad_precision(self): cli = InfluxDBClient() with self.assertRaisesRegexp( diff --git a/influxdb/tests/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py index 6ad42238..edeb2ed0 100644 --- a/influxdb/tests/dataframe_client_test.py +++ b/influxdb/tests/dataframe_client_test.py @@ -71,8 +71,8 @@ def test_write_points_from_dataframe_with_numeric_column_names(self): index=[now, now + timedelta(hours=1)]) expected = ( - b"foo,hello=there 0=\"1\",1=1,2=1.0 0\n" - b"foo,hello=there 0=\"2\",1=2,2=2.0 3600000000000\n" + b'foo,hello=there 0=\"1\",1=1,2=1.0 0\n' + b'foo,hello=there 0=\"2\",1=2,2=2.0 3600000000000\n' ) with requests_mock.Mocker() as m: @@ -122,6 +122,22 @@ def test_write_points_from_dataframe_with_time_precision(self): cli = DataFrameClient(database='db') measurement = "foo" + cli.write_points(dataframe, measurement, time_precision='h') + self.assertEqual(m.last_request.qs['precision'], ['h']) + self.assertEqual( + b'foo column_one="1",column_three=1.0,column_two=1 0\nfoo ' + b'column_one="2",column_three=2.0,column_two=2 1\n', + m.last_request.body, + ) + + cli.write_points(dataframe, measurement, time_precision='m') + self.assertEqual(m.last_request.qs['precision'], ['m']) + self.assertEqual( + b'foo column_one="1",column_three=1.0,column_two=1 0\nfoo ' + 
b'column_one="2",column_three=2.0,column_two=2 60\n', + m.last_request.body, + ) + cli.write_points(dataframe, measurement, time_precision='s') self.assertEqual(m.last_request.qs['precision'], ['s']) self.assertEqual( @@ -298,6 +314,14 @@ def test_datetime_to_epoch(self): cli._datetime_to_epoch(timestamp), 1356998400.0 ) + self.assertEqual( + cli._datetime_to_epoch(timestamp, time_precision='h'), + 1356998400.0 / 3600 + ) + self.assertEqual( + cli._datetime_to_epoch(timestamp, time_precision='m'), + 1356998400.0 / 60 + ) self.assertEqual( cli._datetime_to_epoch(timestamp, time_precision='s'), 1356998400.0 From 26683a4f22a1d6cede8923605550535ed125c88a Mon Sep 17 00:00:00 2001 From: aviau Date: Thu, 6 Aug 2015 12:11:48 -0400 Subject: [PATCH 238/536] Handle None tags and fields --- influxdb/line_protocol.py | 23 ++++++++++++++++------- influxdb/tests/test_line_protocol.py | 19 +++++++++++++++++++ 2 files changed, 35 insertions(+), 7 deletions(-) diff --git a/influxdb/line_protocol.py b/influxdb/line_protocol.py index 205cd498..25983b0b 100644 --- a/influxdb/line_protocol.py +++ b/influxdb/line_protocol.py @@ -47,9 +47,13 @@ def _escape_tag(tag): def _escape_value(value): value = _get_unicode(value) if isinstance(value, text_type) and value != '': - return "\"{}\"".format(value.replace( - "\"", "\\\"" - )) + return "\"{}\"".format( + value.replace( + "\"", "\\\"" + ).replace( + "\n", "\\n" + ) + ) else: return str(value) @@ -60,6 +64,8 @@ def _get_unicode(data, force=False): """ if isinstance(data, binary_type): return data.decode('utf-8') + elif data is None: + return '' elif force: return str(data) else: @@ -102,10 +108,13 @@ def make_lines(data, precision=None): # add fields field_values = [] for field_key in sorted(point['fields'].keys()): - field_values.append("{key}={value}".format( - key=_escape_tag(field_key), - value=_escape_value(point['fields'][field_key]), - )) + key = _escape_tag(field_key) + value = _escape_value(point['fields'][field_key]) + if key != '' 
and value != '': + field_values.append("{key}={value}".format( + key=key, + value=value + )) field_values = ','.join(field_values) elements.append(field_values) diff --git a/influxdb/tests/test_line_protocol.py b/influxdb/tests/test_line_protocol.py index 37759e4f..6596e7b0 100644 --- a/influxdb/tests/test_line_protocol.py +++ b/influxdb/tests/test_line_protocol.py @@ -10,6 +10,7 @@ def test_make_lines(self): data = { "tags": { "empty_tag": "", + "none_tag": None, "integer_tag": 2, "string_tag": "hello" }, @@ -19,6 +20,7 @@ def test_make_lines(self): "fields": { "string_val": "hello!", "int_val": 1, + "none_field": None, } } ] @@ -29,3 +31,20 @@ def test_make_lines(self): 'test,integer_tag=2,string_tag=hello ' 'int_val=1,string_val="hello!"\n' ) + + def test_string_val_newline(self): + data = { + "points": [ + { + "measurement": "m1", + "fields": { + "multi_line": "line1\nline1\nline3" + } + } + ] + } + + self.assertEqual( + line_protocol.make_lines(data), + 'm1 multi_line="line1\\nline1\\nline3"\n' + ) From 7e2dd29f9da81e7597b879512332e4115cfe130a Mon Sep 17 00:00:00 2001 From: aviau Date: Thu, 6 Aug 2015 14:00:09 -0400 Subject: [PATCH 239/536] Released 2.8.0 --- influxdb/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/__init__.py b/influxdb/__init__.py index a686ec6a..60a9f35f 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -13,4 +13,4 @@ ] -__version__ = '2.7.3' +__version__ = '2.8.0' From 7092ef2d6f5acc58ee80492e2b07288fbc262e1d Mon Sep 17 00:00:00 2001 From: Johan Forsberg Date: Thu, 6 Aug 2015 21:45:28 +0200 Subject: [PATCH 240/536] generalize integer timestamp handling --- influxdb/line_protocol.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/influxdb/line_protocol.py b/influxdb/line_protocol.py index 496511ab..20ae74b2 100644 --- a/influxdb/line_protocol.py +++ b/influxdb/line_protocol.py @@ -4,13 +4,14 @@ from calendar import timegm from copy import copy from datetime import 
datetime +from numbers import Integral from dateutil.parser import parse from six import binary_type, text_type def _convert_timestamp(timestamp, precision=None): - if isinstance(timestamp, int): + if isinstance(timestamp, Integral): return timestamp # assume precision is correct if timestamp is int if isinstance(_get_unicode(timestamp), text_type): timestamp = parse(timestamp) From f5bc155e0aecb9a31a17563a778b77fbcbf6ad46 Mon Sep 17 00:00:00 2001 From: Michael Perez Date: Thu, 27 Aug 2015 11:00:59 -0400 Subject: [PATCH 241/536] if value is int, type cast and add 'i' per changes in new influx --- influxdb/line_protocol.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/influxdb/line_protocol.py b/influxdb/line_protocol.py index 20ae74b2..2e21eb18 100644 --- a/influxdb/line_protocol.py +++ b/influxdb/line_protocol.py @@ -116,6 +116,8 @@ def make_lines(data, precision=None): key = _escape_tag(field_key) value = _escape_value(point['fields'][field_key]) if key != '' and value != '': + if isinstance(value, int): + value = str(value) + 'i' field_values.append("{key}={value}".format( key=key, value=value From f02d1a2d6d34af88da8f05d1e320713ab0cdb31d Mon Sep 17 00:00:00 2001 From: Joe Lombrozo Date: Thu, 27 Aug 2015 15:14:52 -0700 Subject: [PATCH 242/536] Moved 'i' flag where it should be added --- influxdb/line_protocol.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/influxdb/line_protocol.py b/influxdb/line_protocol.py index 2e21eb18..027c54da 100644 --- a/influxdb/line_protocol.py +++ b/influxdb/line_protocol.py @@ -59,6 +59,8 @@ def _escape_value(value): "\n", "\\n" ) ) + if isinstance(value, int): + return str(value) + 'i' else: return str(value) @@ -116,8 +118,6 @@ def make_lines(data, precision=None): key = _escape_tag(field_key) value = _escape_value(point['fields'][field_key]) if key != '' and value != '': - if isinstance(value, int): - value = str(value) + 'i' field_values.append("{key}={value}".format( key=key, value=value From 
f08bc34d5de83fe7e17aed6c2065b2a416798148 Mon Sep 17 00:00:00 2001 From: Joe Lombrozo Date: Thu, 27 Aug 2015 15:14:59 -0700 Subject: [PATCH 243/536] Fixed tests --- influxdb/tests/dataframe_client_test.py | 36 +++++++++---------- .../server_tests/client_test_with_server.py | 19 ++++++---- .../tests/server_tests/influxdb.conf.template | 3 +- .../tests/server_tests/influxdb_instance.py | 1 + influxdb/tests/test_line_protocol.py | 3 +- 5 files changed, 35 insertions(+), 27 deletions(-) diff --git a/influxdb/tests/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py index edeb2ed0..0583ffba 100644 --- a/influxdb/tests/dataframe_client_test.py +++ b/influxdb/tests/dataframe_client_test.py @@ -32,8 +32,8 @@ def test_write_points_from_dataframe(self): columns=["column_one", "column_two", "column_three"]) expected = ( - b"foo column_one=\"1\",column_three=1.0,column_two=1 0\n" - b"foo column_one=\"2\",column_three=2.0,column_two=2 " + b"foo column_one=\"1\",column_three=1.0,column_two=1i 0\n" + b"foo column_one=\"2\",column_three=2.0,column_two=2i " b"3600000000000\n" ) @@ -71,8 +71,8 @@ def test_write_points_from_dataframe_with_numeric_column_names(self): index=[now, now + timedelta(hours=1)]) expected = ( - b'foo,hello=there 0=\"1\",1=1,2=1.0 0\n' - b'foo,hello=there 0=\"2\",1=2,2=2.0 3600000000000\n' + b'foo,hello=there 0=\"1\",1=1i,2=1.0 0\n' + b'foo,hello=there 0=\"2\",1=2i,2=2.0 3600000000000\n' ) with requests_mock.Mocker() as m: @@ -92,8 +92,8 @@ def test_write_points_from_dataframe_with_period_index(self): columns=["column_one", "column_two", "column_three"]) expected = ( - b"foo column_one=\"1\",column_three=1.0,column_two=1 0\n" - b"foo column_one=\"2\",column_three=2.0,column_two=2 " + b"foo column_one=\"1\",column_three=1.0,column_two=1i 0\n" + b"foo column_one=\"2\",column_three=2.0,column_two=2i " b"86400000000000\n" ) @@ -125,48 +125,48 @@ def test_write_points_from_dataframe_with_time_precision(self): cli.write_points(dataframe, measurement, 
time_precision='h') self.assertEqual(m.last_request.qs['precision'], ['h']) self.assertEqual( - b'foo column_one="1",column_three=1.0,column_two=1 0\nfoo ' - b'column_one="2",column_three=2.0,column_two=2 1\n', + b'foo column_one="1",column_three=1.0,column_two=1i 0\nfoo ' + b'column_one="2",column_three=2.0,column_two=2i 1\n', m.last_request.body, ) cli.write_points(dataframe, measurement, time_precision='m') self.assertEqual(m.last_request.qs['precision'], ['m']) self.assertEqual( - b'foo column_one="1",column_three=1.0,column_two=1 0\nfoo ' - b'column_one="2",column_three=2.0,column_two=2 60\n', + b'foo column_one="1",column_three=1.0,column_two=1i 0\nfoo ' + b'column_one="2",column_three=2.0,column_two=2i 60\n', m.last_request.body, ) cli.write_points(dataframe, measurement, time_precision='s') self.assertEqual(m.last_request.qs['precision'], ['s']) self.assertEqual( - b'foo column_one="1",column_three=1.0,column_two=1 0\nfoo ' - b'column_one="2",column_three=2.0,column_two=2 3600\n', + b'foo column_one="1",column_three=1.0,column_two=1i 0\nfoo ' + b'column_one="2",column_three=2.0,column_two=2i 3600\n', m.last_request.body, ) cli.write_points(dataframe, measurement, time_precision='ms') self.assertEqual(m.last_request.qs['precision'], ['ms']) self.assertEqual( - b'foo column_one="1",column_three=1.0,column_two=1 0\nfoo ' - b'column_one="2",column_three=2.0,column_two=2 3600000\n', + b'foo column_one="1",column_three=1.0,column_two=1i 0\nfoo ' + b'column_one="2",column_three=2.0,column_two=2i 3600000\n', m.last_request.body, ) cli.write_points(dataframe, measurement, time_precision='u') self.assertEqual(m.last_request.qs['precision'], ['u']) self.assertEqual( - b'foo column_one="1",column_three=1.0,column_two=1 0\nfoo ' - b'column_one="2",column_three=2.0,column_two=2 3600000000\n', + b'foo column_one="1",column_three=1.0,column_two=1i 0\nfoo ' + b'column_one="2",column_three=2.0,column_two=2i 3600000000\n', m.last_request.body, ) cli.write_points(dataframe, 
measurement, time_precision='n') self.assertEqual(m.last_request.qs['precision'], ['n']) self.assertEqual( - b'foo column_one="1",column_three=1.0,column_two=1 0\n' - b'foo column_one="2",column_three=2.0,column_two=2 ' + b'foo column_one="1",column_three=1.0,column_two=1i 0\n' + b'foo column_one="2",column_three=2.0,column_two=2i ' b'3600000000000\n', m.last_request.body, ) diff --git a/influxdb/tests/server_tests/client_test_with_server.py b/influxdb/tests/server_tests/client_test_with_server.py index 2cc2d1c3..ee150fa5 100644 --- a/influxdb/tests/server_tests/client_test_with_server.py +++ b/influxdb/tests/server_tests/client_test_with_server.py @@ -303,8 +303,8 @@ def test_write_check_read(self): self.test_write() time.sleep(1) rsp = self.cli.query('SELECT * FROM cpu_load_short', database='db') - self.assertListEqual([{'value': 0.64, - 'time': '2009-11-10T23:00:00Z'}], + self.assertListEqual([{'value': 0.64, 'time': '2009-11-10T23:00:00Z', + "host": "server01", "region": "us-west"}], list(rsp.get_points())) def test_write_points(self): @@ -328,7 +328,8 @@ def test_write_points_check_read(self): self.assertEqual( list(rsp), - [[{'value': 0.64, 'time': '2009-11-10T23:00:00Z'}]] + [[{'value': 0.64, 'time': '2009-11-10T23:00:00Z', + "host": "server01", "region": "us-west"}]] ) rsp2 = list(rsp.get_points()) @@ -337,7 +338,8 @@ def test_write_points_check_read(self): self.assertEqual( pt, - {'time': '2009-11-10T23:00:00Z', 'value': 0.64} + {'time': '2009-11-10T23:00:00Z', 'value': 0.64, + "host": "server01", "region": "us-west"} ) @unittest.skip("Broken as of 0.9.0") @@ -367,7 +369,8 @@ def test_write_multiple_points_different_series(self): lrsp = list(rsp) self.assertEqual( - [[{'value': 0.64, 'time': '2009-11-10T23:00:00Z'}]], + [[{'value': 0.64, 'time': '2009-11-10T23:00:00Z', + "host": "server01", "region": "us-west"}]], lrsp ) @@ -375,7 +378,8 @@ def test_write_multiple_points_different_series(self): self.assertEqual( rsp, - [[{'value': 33, 'time': 
'2009-11-10T23:01:35Z'}]] + [[{'value': 33, 'time': '2009-11-10T23:01:35Z', + "host": "server01", "region": "us-west"}]] ) @unittest.skip("Broken as of 0.9.0") @@ -678,6 +682,7 @@ def test_write_points_udp(self): self.assertEqual( # this is dummy_points : - [{'value': 0.64, 'time': '2009-11-10T23:00:00Z'}], + [{'value': 0.64, 'time': '2009-11-10T23:00:00Z', + "host": "server01", "region": "us-west"}], list(rsp['cpu_load_short']) ) diff --git a/influxdb/tests/server_tests/influxdb.conf.template b/influxdb/tests/server_tests/influxdb.conf.template index 92e381ee..a55c4190 100644 --- a/influxdb/tests/server_tests/influxdb.conf.template +++ b/influxdb/tests/server_tests/influxdb.conf.template @@ -10,6 +10,7 @@ [data] dir = "{data_dir}" + wal-dir = "{wal_dir}" retention-auto-create = true retention-check-enabled = true retention-check-period = "10m0s" @@ -54,7 +55,7 @@ retention-policy = "" consistency-level = "one" -[udp] +[[udp]] enabled = {udp_enabled} bind-address = ":{udp_port}" database = "db" diff --git a/influxdb/tests/server_tests/influxdb_instance.py b/influxdb/tests/server_tests/influxdb_instance.py index ae32bab6..700f2467 100644 --- a/influxdb/tests/server_tests/influxdb_instance.py +++ b/influxdb/tests/server_tests/influxdb_instance.py @@ -51,6 +51,7 @@ def __init__(self, conf_data = dict( meta_dir=os.path.join(tempdir, 'meta'), data_dir=os.path.join(tempdir, 'data'), + wal_dir=os.path.join(tempdir, 'wal'), cluster_dir=os.path.join(tempdir, 'state'), handoff_dir=os.path.join(tempdir, 'handoff'), logs_file=os.path.join(self.temp_dir_base, 'logs.txt'), diff --git a/influxdb/tests/test_line_protocol.py b/influxdb/tests/test_line_protocol.py index 6596e7b0..b7fba730 100644 --- a/influxdb/tests/test_line_protocol.py +++ b/influxdb/tests/test_line_protocol.py @@ -20,6 +20,7 @@ def test_make_lines(self): "fields": { "string_val": "hello!", "int_val": 1, + "float_val": 1.1, "none_field": None, } } @@ -29,7 +30,7 @@ def test_make_lines(self): self.assertEqual( 
line_protocol.make_lines(data), 'test,integer_tag=2,string_tag=hello ' - 'int_val=1,string_val="hello!"\n' + 'float_val=1.1,int_val=1i,string_val="hello!"\n' ) def test_string_val_newline(self): From a5b9a79fd5f3339aae230e25b11c6e207ebea600 Mon Sep 17 00:00:00 2001 From: Joe Lombrozo Date: Thu, 27 Aug 2015 16:10:50 -0700 Subject: [PATCH 244/536] Used six for compatibility --- influxdb/line_protocol.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/influxdb/line_protocol.py b/influxdb/line_protocol.py index 027c54da..6097dbe1 100644 --- a/influxdb/line_protocol.py +++ b/influxdb/line_protocol.py @@ -7,7 +7,7 @@ from numbers import Integral from dateutil.parser import parse -from six import binary_type, text_type +from six import binary_type, text_type, integer_types def _convert_timestamp(timestamp, precision=None): @@ -59,7 +59,7 @@ def _escape_value(value): "\n", "\\n" ) ) - if isinstance(value, int): + elif isinstance(value, integer_types): return str(value) + 'i' else: return str(value) From b5f1311efb1722db6c58dab71b939093195190c0 Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 28 Aug 2015 09:18:16 -0400 Subject: [PATCH 245/536] Test with InfluxDB 0.9.3 --- .travis.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index e19ddf22..5de4454d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -13,10 +13,10 @@ install: - pip install tox - pip install coveralls - mkdir influxdb_install - - wget https://s3.amazonaws.com/influxdb/influxdb_0.9.2_amd64.deb + - wget https://s3.amazonaws.com/influxdb/influxdb_0.9.3_amd64.deb - dpkg -x influxdb_*_amd64.deb influxdb_install script: - - export INFLUXDB_PYTHON_INFLUXD_PATH=$(pwd)/influxdb_install/opt/influxdb/versions/0.9.2/influxd + - export INFLUXDB_PYTHON_INFLUXD_PATH=$(pwd)/influxdb_install/opt/influxdb/versions/0.9.3/influxd - travis_wait 30 tox -e $TOX_ENV after_success: - if [ "$TOX_ENV" == "coverage" ] ; then coveralls; fi From 
78e5adad441767361bcc1a85393140a984394f1a Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 28 Aug 2015 09:49:34 -0400 Subject: [PATCH 246/536] Catch RuntimeError when starting InfluxDB --- influxdb/tests/server_tests/influxdb_instance.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/influxdb/tests/server_tests/influxdb_instance.py b/influxdb/tests/server_tests/influxdb_instance.py index 700f2467..c16e5053 100644 --- a/influxdb/tests/server_tests/influxdb_instance.py +++ b/influxdb/tests/server_tests/influxdb_instance.py @@ -31,6 +31,18 @@ def __init__(self, self.influxd_path = self.find_influxd_path() + errors = 0 + while True: + try: + self._start_server(conf_template, udp_enabled) + break + except RuntimeError: # Happens when the ports are already in use. + errors += 1 + if errors > 2: + raise e + + def _start_server(self, conf_template, udp_enabled): + # create a temporary dir to store all needed files # for the influxdb server instance : self.temp_dir_base = tempfile.mkdtemp() From df8b201c2b67533543fd6e83d07c96c7f86dcef5 Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 28 Aug 2015 12:06:19 -0400 Subject: [PATCH 247/536] Release 2.9.0 --- influxdb/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/__init__.py b/influxdb/__init__.py index 60a9f35f..400f1d4c 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -13,4 +13,4 @@ ] -__version__ = '2.8.0' +__version__ = '2.9.0' From cec332f0c13c8b633ddce31774c1cb5eca7e0014 Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 28 Aug 2015 15:24:56 -0400 Subject: [PATCH 248/536] except RuntimeError -> except RuntimeError as e --- influxdb/tests/server_tests/influxdb_instance.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/influxdb/tests/server_tests/influxdb_instance.py b/influxdb/tests/server_tests/influxdb_instance.py index c16e5053..0972a63e 100644 --- a/influxdb/tests/server_tests/influxdb_instance.py +++ 
b/influxdb/tests/server_tests/influxdb_instance.py @@ -36,7 +36,8 @@ def __init__(self, try: self._start_server(conf_template, udp_enabled) break - except RuntimeError: # Happens when the ports are already in use. + # Happens when the ports are already in use. + except RuntimeError as e: errors += 1 if errors > 2: raise e From b081fa1c49289980fd45ecaabec55d0bd853ad30 Mon Sep 17 00:00:00 2001 From: aviau Date: Sun, 30 Aug 2015 21:17:40 -0400 Subject: [PATCH 249/536] Don't append 'i' to bools (Closes: #235) --- influxdb/line_protocol.py | 2 +- influxdb/tests/test_line_protocol.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/influxdb/line_protocol.py b/influxdb/line_protocol.py index 6097dbe1..74430a28 100644 --- a/influxdb/line_protocol.py +++ b/influxdb/line_protocol.py @@ -59,7 +59,7 @@ def _escape_value(value): "\n", "\\n" ) ) - elif isinstance(value, integer_types): + elif isinstance(value, integer_types) and not isinstance(value, bool): return str(value) + 'i' else: return str(value) diff --git a/influxdb/tests/test_line_protocol.py b/influxdb/tests/test_line_protocol.py index b7fba730..168323fb 100644 --- a/influxdb/tests/test_line_protocol.py +++ b/influxdb/tests/test_line_protocol.py @@ -22,6 +22,7 @@ def test_make_lines(self): "int_val": 1, "float_val": 1.1, "none_field": None, + "bool_val": True, } } ] @@ -30,7 +31,7 @@ def test_make_lines(self): self.assertEqual( line_protocol.make_lines(data), 'test,integer_tag=2,string_tag=hello ' - 'float_val=1.1,int_val=1i,string_val="hello!"\n' + 'bool_val=True,float_val=1.1,int_val=1i,string_val="hello!"\n' ) def test_string_val_newline(self): From 037a0b813c34fa47dc2c29a74dc969260de4a64b Mon Sep 17 00:00:00 2001 From: aviau Date: Sun, 30 Aug 2015 21:18:19 -0400 Subject: [PATCH 250/536] Release 2.9.1 --- influxdb/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/__init__.py b/influxdb/__init__.py index 400f1d4c..fdffc939 100644 --- a/influxdb/__init__.py 
+++ b/influxdb/__init__.py @@ -13,4 +13,4 @@ ] -__version__ = '2.9.0' +__version__ = '2.9.1' From 769963a817c8da49c1c09a9d61cd09c9a942aedc Mon Sep 17 00:00:00 2001 From: Pierre Fersing Date: Wed, 2 Sep 2015 16:19:01 +0200 Subject: [PATCH 251/536] Raise InfluxDBServerError for server error --- influxdb/client.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 7bab357d..6c180ce4 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -202,8 +202,10 @@ def request(self, url, method='GET', params=None, data=None, :type expected_response_code: int :returns: the response from the request :rtype: :class:`requests.Response` + :raises InfluxDBServerError: if the response code is any server error + code (5xx) :raises InfluxDBClientError: if the response code is not the - same as `expected_response_code` + same as `expected_response_code` and is not a server error code """ url = "{0}/{1}".format(self._baseurl, url) @@ -237,7 +239,9 @@ def request(self, url, method='GET', params=None, data=None, else: raise e - if response.status_code == expected_response_code: + if response.status_code >= 500 and response.status_code < 600: + raise InfluxDBServerError(response.content) + elif response.status_code == expected_response_code: return response else: raise InfluxDBClientError(response.content, response.status_code) From a1d33669733a451165f1f68ad6d45716603ee500 Mon Sep 17 00:00:00 2001 From: Alexandre Viau Date: Sun, 6 Sep 2015 20:19:27 -0400 Subject: [PATCH 252/536] The issues are the TODO ;) --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 8a363540..5b1ba3f5 100644 --- a/README.rst +++ b/README.rst @@ -167,7 +167,7 @@ problems or submit contributions. 
TODO ==== -The TODO/Roadmap can be found in Github bug tracker: https://github.com/influxdb/influxdb-python/issues/109 +The TODO/Roadmap can be found in Github bug tracker: https://github.com/influxdb/influxdb-python/issues Source code From 2b2feea016c034995575286a47571367e884d5b7 Mon Sep 17 00:00:00 2001 From: Benjamin Jorand Date: Wed, 9 Sep 2015 09:19:37 +0200 Subject: [PATCH 253/536] Fix typo in resultset.rst --- docs/source/resultset.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/resultset.rst b/docs/source/resultset.rst index 0a1b7541..6c36463f 100644 --- a/docs/source/resultset.rst +++ b/docs/source/resultset.rst @@ -22,7 +22,7 @@ Using ``rs.get_points('cpu')`` will return a generator for all the points that a :: rs = cli.query("SELECT * from cpu") - cpu_points = list(rs.get_points(measurement='cpu')]) + cpu_points = list(rs.get_points(measurement='cpu')) Filtering by tags ----------------- From ade6565fdd8a4942bbe8d0f29148e5c3afead612 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Fri, 11 Sep 2015 10:05:20 -0400 Subject: [PATCH 254/536] Enh/Fix: add a get_free_ports() "enhanced" helper function. This fix the problem with get_free_port() used multiple times which can lead to the same port being being reused. 
--- influxdb/tests/misc.py | 36 ++++++++++++------- .../tests/server_tests/influxdb_instance.py | 14 ++++---- 2 files changed, 31 insertions(+), 19 deletions(-) diff --git a/influxdb/tests/misc.py b/influxdb/tests/misc.py index 70e16f11..6a3857b0 100644 --- a/influxdb/tests/misc.py +++ b/influxdb/tests/misc.py @@ -1,21 +1,33 @@ - +# -*- coding: utf-8 -*- import socket -import time -def get_free_port(ip='127.0.0.1'): - sock = socket.socket() +def get_free_ports(num_ports, ip='127.0.0.1'): + """Get `num_ports` free/available ports on the interface linked to the `ip´ + :param int num_ports: The number of free ports to get + :param str ip: The ip on which the ports have to be taken + :return: a set of ports number + """ + sock_ports = [] + ports = set() try: - sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - sock.bind((ip, 0)) - return sock.getsockname()[1] + for _ in range(num_ports): + sock = socket.socket() + cur = [sock, -1] + # append the socket directly, + # so that it'll be also closed (no leaked resource) + # in the finally here after. + sock_ports.append(cur) + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + sock.bind((ip, 0)) + cur[1] = sock.getsockname()[1] finally: - sock.close() - - # Is there a better way than a sleep? - # There were issues on Travis where the port was not yet free. 
- time.sleep(0.1) + for sock, port in sock_ports: + sock.close() + ports.add(port) + assert num_ports == len(ports) + return ports def is_port_open(port, ip='127.0.0.1'): diff --git a/influxdb/tests/server_tests/influxdb_instance.py b/influxdb/tests/server_tests/influxdb_instance.py index 0972a63e..1f053956 100644 --- a/influxdb/tests/server_tests/influxdb_instance.py +++ b/influxdb/tests/server_tests/influxdb_instance.py @@ -12,7 +12,7 @@ import unittest import sys -from influxdb.tests.misc import get_free_port, is_port_open +from influxdb.tests.misc import is_port_open, get_free_ports class InfluxDbInstance(object): @@ -54,12 +54,12 @@ def _start_server(self, conf_template, udp_enabled): dir=self.temp_dir_base) # find a couple free ports : - ports = dict( - http_port=get_free_port(), - admin_port=get_free_port(), - meta_port=get_free_port(), - udp_port=get_free_port() if udp_enabled else -1, - ) + free_ports = get_free_ports(4) + ports = {} + for service in 'http', 'admin', 'meta', 'udp': + ports[service + '_port'] = free_ports.pop() + if not udp_enabled: + ports['udp_port'] = -1 conf_data = dict( meta_dir=os.path.join(tempdir, 'meta'), From 05a6b9f1f1e9ea759549d025fabfeceebadc9129 Mon Sep 17 00:00:00 2001 From: Alexandre Viau Date: Tue, 15 Sep 2015 12:59:28 -0400 Subject: [PATCH 255/536] Test with influxdb 0.9.4 --- .travis.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 5de4454d..4d3cac07 100644 --- a/.travis.yml +++ b/.travis.yml @@ -13,10 +13,10 @@ install: - pip install tox - pip install coveralls - mkdir influxdb_install - - wget https://s3.amazonaws.com/influxdb/influxdb_0.9.3_amd64.deb + - wget https://s3.amazonaws.com/influxdb/influxdb_0.9.4_amd64.deb - dpkg -x influxdb_*_amd64.deb influxdb_install script: - - export INFLUXDB_PYTHON_INFLUXD_PATH=$(pwd)/influxdb_install/opt/influxdb/versions/0.9.3/influxd + - export 
INFLUXDB_PYTHON_INFLUXD_PATH=$(pwd)/influxdb_install/opt/influxdb/versions/0.9.4/influxd - travis_wait 30 tox -e $TOX_ENV after_success: - if [ "$TOX_ENV" == "coverage" ] ; then coveralls; fi From 65292f23c257b895daccdba5814be45221bc0a45 Mon Sep 17 00:00:00 2001 From: aviau Date: Wed, 16 Sep 2015 10:18:02 -0400 Subject: [PATCH 256/536] Tests: get_list_database returns _internal --- .../tests/server_tests/client_test_with_server.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/influxdb/tests/server_tests/client_test_with_server.py b/influxdb/tests/server_tests/client_test_with_server.py index ee150fa5..4d6785b8 100644 --- a/influxdb/tests/server_tests/client_test_with_server.py +++ b/influxdb/tests/server_tests/client_test_with_server.py @@ -121,14 +121,18 @@ class SimpleTests(SingleTestCaseWithServerMixin, influxdb_template_conf = os.path.join(THIS_DIR, 'influxdb.conf.template') def test_fresh_server_no_db(self): - self.assertEqual([], self.cli.get_list_database()) + self.assertEqual([{"name": "_internal"}], + self.cli.get_list_database()) def test_create_database(self): self.assertIsNone(self.cli.create_database('new_db_1')) self.assertIsNone(self.cli.create_database('new_db_2')) self.assertEqual( - self.cli.get_list_database(), - [{'name': 'new_db_1'}, {'name': 'new_db_2'}] + sorted(self.cli.get_list_database(), key=lambda a: a['name']), + sorted([{'name': '_internal'}, + {'name': 'new_db_1'}, + {'name': 'new_db_2'}], + key=lambda a: a['name']) ) def test_create_database_fails(self): @@ -150,7 +154,8 @@ def test_get_list_series_empty_DF(self): def test_drop_database(self): self.test_create_database() self.assertIsNone(self.cli.drop_database('new_db_1')) - self.assertEqual([{'name': 'new_db_2'}], self.cli.get_list_database()) + self.assertEqual([{'name': '_internal'}, {'name': 'new_db_2'}], + self.cli.get_list_database()) def test_drop_database_fails(self): with self.assertRaises(InfluxDBClientError) as ctx: From 
8cbf463a217956e9814458e163226d48d487cba8 Mon Sep 17 00:00:00 2001 From: aviau Date: Wed, 16 Sep 2015 10:51:40 -0400 Subject: [PATCH 257/536] Test with influxdb 0.9.4.1 --- .travis.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 4d3cac07..5c2be658 100644 --- a/.travis.yml +++ b/.travis.yml @@ -13,10 +13,10 @@ install: - pip install tox - pip install coveralls - mkdir influxdb_install - - wget https://s3.amazonaws.com/influxdb/influxdb_0.9.4_amd64.deb + - wget https://s3.amazonaws.com/influxdb/influxdb_0.9.4.1_amd64.deb - dpkg -x influxdb_*_amd64.deb influxdb_install script: - - export INFLUXDB_PYTHON_INFLUXD_PATH=$(pwd)/influxdb_install/opt/influxdb/versions/0.9.4/influxd + - export INFLUXDB_PYTHON_INFLUXD_PATH=$(pwd)/influxdb_install/opt/influxdb/versions/0.9.4.1/influxd - travis_wait 30 tox -e $TOX_ENV after_success: - if [ "$TOX_ENV" == "coverage" ] ; then coveralls; fi From 6e5e6585ccb35d5c21adafe87d413640c63de331 Mon Sep 17 00:00:00 2001 From: aviau Date: Wed, 16 Sep 2015 11:34:25 -0400 Subject: [PATCH 258/536] Revert "Tests: get_list_database returns _internal" This reverts commit 65292f23c257b895daccdba5814be45221bc0a45. 
--- .../tests/server_tests/client_test_with_server.py | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/influxdb/tests/server_tests/client_test_with_server.py b/influxdb/tests/server_tests/client_test_with_server.py index 4d6785b8..ee150fa5 100644 --- a/influxdb/tests/server_tests/client_test_with_server.py +++ b/influxdb/tests/server_tests/client_test_with_server.py @@ -121,18 +121,14 @@ class SimpleTests(SingleTestCaseWithServerMixin, influxdb_template_conf = os.path.join(THIS_DIR, 'influxdb.conf.template') def test_fresh_server_no_db(self): - self.assertEqual([{"name": "_internal"}], - self.cli.get_list_database()) + self.assertEqual([], self.cli.get_list_database()) def test_create_database(self): self.assertIsNone(self.cli.create_database('new_db_1')) self.assertIsNone(self.cli.create_database('new_db_2')) self.assertEqual( - sorted(self.cli.get_list_database(), key=lambda a: a['name']), - sorted([{'name': '_internal'}, - {'name': 'new_db_1'}, - {'name': 'new_db_2'}], - key=lambda a: a['name']) + self.cli.get_list_database(), + [{'name': 'new_db_1'}, {'name': 'new_db_2'}] ) def test_create_database_fails(self): @@ -154,8 +150,7 @@ def test_get_list_series_empty_DF(self): def test_drop_database(self): self.test_create_database() self.assertIsNone(self.cli.drop_database('new_db_1')) - self.assertEqual([{'name': '_internal'}, {'name': 'new_db_2'}], - self.cli.get_list_database()) + self.assertEqual([{'name': 'new_db_2'}], self.cli.get_list_database()) def test_drop_database_fails(self): with self.assertRaises(InfluxDBClientError) as ctx: From f659ced81c89bc0949ec9d1d0ee5bf9d7acdd9f8 Mon Sep 17 00:00:00 2001 From: aviau Date: Wed, 16 Sep 2015 11:34:41 -0400 Subject: [PATCH 259/536] Disable monitoring --- influxdb/tests/server_tests/influxdb.conf.template | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/influxdb/tests/server_tests/influxdb.conf.template b/influxdb/tests/server_tests/influxdb.conf.template index 
a55c4190..1e9ddf31 100644 --- a/influxdb/tests/server_tests/influxdb.conf.template +++ b/influxdb/tests/server_tests/influxdb.conf.template @@ -62,9 +62,8 @@ batch-size = 0 batch-timeout = "0" -[monitoring] - enabled = false - write-interval = "1m0s" +[monitor] + store-enabled = false [continuous_queries] enabled = true From e25576f324c225ff37fd12b1e190b314b8514b45 Mon Sep 17 00:00:00 2001 From: Georgi Dimitrov Date: Fri, 18 Sep 2015 21:25:58 +0000 Subject: [PATCH 260/536] Refactor in InfluxDBClusterClient. Use a single client and maintain a list of hosts instead of list of clients. Also, to avoid code duplication, parsing DSNs is now moved to a separate function that is called from both InfluxDBClient.fromDSN and InfluxDBClusterClient.fromDSN classmethods --- influxdb/client.py | 182 +++++++++++++++++++--------------- influxdb/tests/client_test.py | 102 +++++++------------ 2 files changed, 138 insertions(+), 146 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 6c180ce4..0897e051 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -132,36 +132,10 @@ def from_DSN(dsn, **kwargs): additional `udp_port` parameter (cf. examples). 
""" - init_args = {} - conn_params = urlparse(dsn) - scheme_info = conn_params.scheme.split('+') - if len(scheme_info) == 1: - scheme = scheme_info[0] - modifier = None - else: - modifier, scheme = scheme_info - - if scheme != 'influxdb': - raise ValueError('Unknown scheme "{}".'.format(scheme)) - if modifier: - if modifier == 'udp': - init_args['use_udp'] = True - elif modifier == 'https': - init_args['ssl'] = True - else: - raise ValueError('Unknown modifier "{}".'.format(modifier)) - - if conn_params.hostname: - init_args['host'] = conn_params.hostname - if conn_params.port: - init_args['port'] = conn_params.port - if conn_params.username: - init_args['username'] = conn_params.username - if conn_params.password: - init_args['password'] = conn_params.password - if conn_params.path and len(conn_params.path) > 1: - init_args['database'] = conn_params.path[1:] - + init_args = parse_dsn(dsn) + host, port = init_args.pop('hosts')[0] + init_args['host'] = host + init_args['port'] = port init_args.update(kwargs) return InfluxDBClient(**init_args) @@ -720,8 +694,8 @@ def send_packet(self, packet): class InfluxDBClusterClient(object): """The :class:`~.InfluxDBClusterClient` is the client for connecting - to a cluster of InfluxDB servers. It's basically a proxy to multiple - InfluxDBClients. + to a cluster of InfluxDB servers. Each query hits different host from the + list of hosts. :param hosts: all hosts to be included in the cluster, each of which should be in the format (address, port), @@ -731,7 +705,7 @@ class InfluxDBClusterClient(object): :param shuffle: whether the queries should hit servers evenly(randomly), defaults to True :type shuffle: bool - :param client_base_class: the base class for all clients in the cluster. + :param client_base_class: the base class for the cluster client. This parameter is used to enable the support of different client types. 
Defaults to :class:`~.InfluxDBClient` """ @@ -749,26 +723,27 @@ def __init__(self, shuffle=True, client_base_class=InfluxDBClient, ): - self.clients = [] - self.bad_clients = [] # Corresponding server has failures in history + self.clients = [self] # Keep it backwards compatible + self.hosts = hosts + self.bad_hosts = [] # Corresponding server has failures in history self.shuffle = shuffle - for h in hosts: - self.clients.append(client_base_class(host=h[0], port=h[1], - username=username, - password=password, - database=database, - ssl=ssl, - verify_ssl=verify_ssl, - timeout=timeout, - use_udp=use_udp, - udp_port=udp_port)) + host, port = self.hosts[0] + self._client = client_base_class(host=host, + port=port, + username=username, + password=password, + database=database, + ssl=ssl, + verify_ssl=verify_ssl, + timeout=timeout, + use_udp=use_udp, + udp_port=udp_port) for method in dir(client_base_class): - if method.startswith('_'): - continue - orig_func = getattr(client_base_class, method) - if not callable(orig_func): + orig_attr = getattr(client_base_class, method, '') + if method.startswith('_') or not callable(orig_attr): continue - setattr(self, method, self._make_func(orig_func)) + + setattr(self, method, self._make_func(orig_attr)) @staticmethod def from_DSN(dsn, client_base_class=InfluxDBClient, @@ -791,53 +766,100 @@ def from_DSN(dsn, client_base_class=InfluxDBClient, @host1:8086,usr:pwd@host2:8086/db_name', timeout=5) >> type(cluster) - >> cluster.clients - [, + >> cluster.hosts + [('host1', 8086), ('host2', 8086)] + >> cluster._client ] """ - conn_params = urlparse(dsn) - netlocs = conn_params.netloc.split(',') - cluster_client = InfluxDBClusterClient( - hosts=[], - client_base_class=client_base_class, - shuffle=shuffle, - **kwargs) - for netloc in netlocs: - single_dsn = '%(scheme)s://%(netloc)s%(path)s' % ( - {'scheme': conn_params.scheme, - 'netloc': netloc, - 'path': conn_params.path} - ) - cluster_client.clients.append(client_base_class.from_DSN( - 
single_dsn, - **kwargs)) + init_args = parse_dsn(dsn) + init_args.update(**kwargs) + init_args['shuffle'] = shuffle + init_args['client_base_class'] = client_base_class + cluster_client = InfluxDBClusterClient(**init_args) return cluster_client + def _update_client_host(self, host): + self._client._host, self._client._port = host + self._client._baseurl = "{0}://{1}:{2}".format(self._client._scheme, + self._client._host, + self._client._port) + def _make_func(self, orig_func): @wraps(orig_func) def func(*args, **kwargs): if self.shuffle: - random.shuffle(self.clients) - clients = self.clients + self.bad_clients - for c in clients: - bad_client = False + random.shuffle(self.hosts) + + hosts = self.hosts + self.bad_hosts + for h in hosts: + bad_host = False try: - return orig_func(c, *args, **kwargs) + self._update_client_host(h) + return orig_func(self._client, *args, **kwargs) except InfluxDBClientError as e: # Errors caused by user's requests, re-raise raise e except Exception as e: # Errors that might caused by server failure, try another - bad_client = True - if c in self.clients: - self.clients.remove(c) - self.bad_clients.append(c) + bad_host = True + if h in self.hosts: + self.hosts.remove(h) + self.bad_hosts.append(h) finally: - if not bad_client and c in self.bad_clients: - self.bad_clients.remove(c) - self.clients.append(c) + if not bad_host and h in self.bad_hosts: + self.bad_hosts.remove(h) + self.hosts.append(h) raise InfluxDBServerError("InfluxDB: no viable server!") return func + + +def parse_dsn(dsn): + conn_params = urlparse(dsn) + init_args = {} + scheme_info = conn_params.scheme.split('+') + if len(scheme_info) == 1: + scheme = scheme_info[0] + modifier = None + else: + modifier, scheme = scheme_info + + if scheme != 'influxdb': + raise ValueError('Unknown scheme "{}".'.format(scheme)) + + if modifier: + if modifier == 'udp': + init_args['use_udp'] = True + elif modifier == 'https': + init_args['ssl'] = True + else: + raise ValueError('Unknown 
modifier "{}".'.format(modifier)) + + netlocs = conn_params.netloc.split(',') + + init_args['hosts'] = [] + for netloc in netlocs: + parsed = _parse_netloc(netloc) + init_args['hosts'].append((parsed['host'], int(parsed['port']))) + init_args['username'] = parsed['username'] + init_args['password'] = parsed['password'] + + if conn_params.path and len(conn_params.path) > 1: + init_args['database'] = conn_params.path[1:] + + return init_args + + +def _parse_netloc(netloc): + import re + parsed = re.findall(r'(\w*):(\w*)@(\w*):(\d*)', netloc) + if not parsed: + raise ValueError('Invalid netloc "{}".'.format(netloc)) + + info = parsed[0] + return {'username': info[0] or None, + 'password': info[1] or None, + 'host': info[2] or 'localhost', + 'port': info[3] or 8086} diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index 4f09970d..7fc912cb 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -715,7 +715,9 @@ def test_revoke_privilege_invalid(self): class FakeClient(InfluxDBClient): - fail = False + + def __init__(self, *args, **kwargs): + super(FakeClient, self).__init__(*args, **kwargs) def query(self, query, @@ -724,9 +726,10 @@ def query(self, database=None): if query == 'Fail': raise Exception("Fail") - - if self.fail: - raise Exception("Fail") + elif query == 'Fail once' and self._host == 'host1': + raise Exception("Fail Once") + elif query == 'Fail twice' and self._host in 'host1 host2': + raise Exception("Fail Twice") else: return "Success" @@ -747,32 +750,28 @@ def test_init(self): database='database', shuffle=False, client_base_class=FakeClient) - self.assertEqual(3, len(cluster.clients)) - self.assertEqual(0, len(cluster.bad_clients)) - for idx, client in enumerate(cluster.clients): - self.assertEqual(self.hosts[idx][0], client._host) - self.assertEqual(self.hosts[idx][1], client._port) + self.assertEqual(3, len(cluster.hosts)) + self.assertEqual(0, len(cluster.bad_hosts)) + 
self.assertIn((cluster._client._host, + cluster._client._port), cluster.hosts) def test_one_server_fails(self): cluster = InfluxDBClusterClient(hosts=self.hosts, database='database', shuffle=False, client_base_class=FakeClient) - cluster.clients[0].fail = True - self.assertEqual('Success', cluster.query('')) - self.assertEqual(2, len(cluster.clients)) - self.assertEqual(1, len(cluster.bad_clients)) + self.assertEqual('Success', cluster.query('Fail once')) + self.assertEqual(2, len(cluster.hosts)) + self.assertEqual(1, len(cluster.bad_hosts)) def test_two_servers_fail(self): cluster = InfluxDBClusterClient(hosts=self.hosts, database='database', shuffle=False, client_base_class=FakeClient) - cluster.clients[0].fail = True - cluster.clients[1].fail = True - self.assertEqual('Success', cluster.query('')) - self.assertEqual(1, len(cluster.clients)) - self.assertEqual(2, len(cluster.bad_clients)) + self.assertEqual('Success', cluster.query('Fail twice')) + self.assertEqual(1, len(cluster.hosts)) + self.assertEqual(2, len(cluster.bad_hosts)) def test_all_fail(self): cluster = InfluxDBClusterClient(hosts=self.hosts, @@ -781,8 +780,8 @@ def test_all_fail(self): client_base_class=FakeClient) with self.assertRaises(InfluxDBServerError): cluster.query('Fail') - self.assertEqual(0, len(cluster.clients)) - self.assertEqual(3, len(cluster.bad_clients)) + self.assertEqual(0, len(cluster.hosts)) + self.assertEqual(3, len(cluster.bad_hosts)) def test_all_good(self): cluster = InfluxDBClusterClient(hosts=self.hosts, @@ -790,8 +789,8 @@ def test_all_good(self): shuffle=True, client_base_class=FakeClient) self.assertEqual('Success', cluster.query('')) - self.assertEqual(3, len(cluster.clients)) - self.assertEqual(0, len(cluster.bad_clients)) + self.assertEqual(3, len(cluster.hosts)) + self.assertEqual(0, len(cluster.bad_hosts)) def test_recovery(self): cluster = InfluxDBClusterClient(hosts=self.hosts, @@ -801,68 +800,39 @@ def test_recovery(self): with 
self.assertRaises(InfluxDBServerError): cluster.query('Fail') self.assertEqual('Success', cluster.query('')) - self.assertEqual(1, len(cluster.clients)) - self.assertEqual(2, len(cluster.bad_clients)) + self.assertEqual(1, len(cluster.hosts)) + self.assertEqual(2, len(cluster.bad_hosts)) def test_dsn(self): cli = InfluxDBClusterClient.from_DSN(self.dsn_string) - self.assertEqual(2, len(cli.clients)) - self.assertEqual('http://host1:8086', cli.clients[0]._baseurl) - self.assertEqual('uSr', cli.clients[0]._username) - self.assertEqual('pWd', cli.clients[0]._password) - self.assertEqual('db', cli.clients[0]._database) - self.assertFalse(cli.clients[0].use_udp) - self.assertEqual('http://host2:8086', cli.clients[1]._baseurl) - self.assertEqual('uSr', cli.clients[1]._username) - self.assertEqual('pWd', cli.clients[1]._password) - self.assertEqual('db', cli.clients[1]._database) - self.assertFalse(cli.clients[1].use_udp) + self.assertEqual([('host1', 8086), ('host2', 8086)], cli.hosts) + self.assertEqual('http://host1:8086', cli._client._baseurl) + self.assertEqual('uSr', cli._client._username) + self.assertEqual('pWd', cli._client._password) + self.assertEqual('db', cli._client._database) + self.assertFalse(cli._client.use_udp) cli = InfluxDBClusterClient.from_DSN('udp+' + self.dsn_string) - self.assertTrue(cli.clients[0].use_udp) - self.assertTrue(cli.clients[1].use_udp) + self.assertTrue(cli._client.use_udp) cli = InfluxDBClusterClient.from_DSN('https+' + self.dsn_string) - self.assertEqual('https://host1:8086', cli.clients[0]._baseurl) - self.assertEqual('https://host2:8086', cli.clients[1]._baseurl) + self.assertEqual('https://host1:8086', cli._client._baseurl) cli = InfluxDBClusterClient.from_DSN('https+' + self.dsn_string, **{'ssl': False}) - self.assertEqual('http://host1:8086', cli.clients[0]._baseurl) - self.assertEqual('http://host2:8086', cli.clients[1]._baseurl) - - def test_dsn_single_client(self): - cli = 
InfluxDBClusterClient.from_DSN('influxdb://usr:pwd@host:8086/db') - self.assertEqual('http://host:8086', cli.clients[0]._baseurl) - self.assertEqual('usr', cli.clients[0]._username) - self.assertEqual('pwd', cli.clients[0]._password) - self.assertEqual('db', cli.clients[0]._database) - self.assertFalse(cli.clients[0].use_udp) - - cli = InfluxDBClusterClient.from_DSN( - 'udp+influxdb://usr:pwd@host:8086/db') - self.assertTrue(cli.clients[0].use_udp) - - cli = InfluxDBClusterClient.from_DSN( - 'https+influxdb://usr:pwd@host:8086/db') - self.assertEqual('https://host:8086', cli.clients[0]._baseurl) - - cli = InfluxDBClusterClient.from_DSN( - 'https+influxdb://usr:pwd@host:8086/db', - **{'ssl': False}) - self.assertEqual('http://host:8086', cli.clients[0]._baseurl) + self.assertEqual('http://host1:8086', cli._client._baseurl) def test_dsn_password_caps(self): cli = InfluxDBClusterClient.from_DSN( 'https+influxdb://usr:pWd@host:8086/db') - self.assertEqual('pWd', cli.clients[0]._password) + self.assertEqual('pWd', cli._client._password) def test_dsn_mixed_scheme_case(self): cli = InfluxDBClusterClient.from_DSN( 'hTTps+inFLUxdb://usr:pWd@host:8086/db') - self.assertEqual('pWd', cli.clients[0]._password) - self.assertEqual('https://host:8086', cli.clients[0]._baseurl) + self.assertEqual('pWd', cli._client._password) + self.assertEqual('https://host:8086', cli._client._baseurl) cli = InfluxDBClusterClient.from_DSN( 'uDP+influxdb://usr:pwd@host1:8086,usr:pwd@host2:8086/db') - self.assertTrue(cli.clients[0].use_udp) + self.assertTrue(cli._client.use_udp) From 2b427757ec0c0e63ee440fa67ac23ed6343f2806 Mon Sep 17 00:00:00 2001 From: Georgi Dimitrov Date: Sun, 13 Sep 2015 15:14:26 +0000 Subject: [PATCH 261/536] Fix PEP8 warning in client tests --- influxdb/tests/client_test.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index 7fc912cb..27f2c6f3 100644 --- 
a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -337,12 +337,13 @@ def test_write_points_with_precision_fails(self): cli.write_points_with_precision([]) def test_query(self): - example_response = \ - '{"results": [{"series": [{"measurement": "sdfsdfsdf", ' \ - '"columns": ["time", "value"], "values": ' \ - '[["2009-11-10T23:00:00Z", 0.64]]}]}, {"series": ' \ - '[{"measurement": "cpu_load_short", "columns": ["time", "value"], ' \ + example_response = ( + '{"results": [{"series": [{"measurement": "sdfsdfsdf", ' + '"columns": ["time", "value"], "values": ' + '[["2009-11-10T23:00:00Z", 0.64]]}]}, {"series": ' + '[{"measurement": "cpu_load_short", "columns": ["time", "value"], ' '"values": [["2009-11-10T23:00:00Z", 0.64]]}]}]}' + ) with requests_mock.Mocker() as m: m.register_uri( From 81cf0d9a073e9eed0824eced873e77e9b0a01329 Mon Sep 17 00:00:00 2001 From: Georgi Dimitrov Date: Sat, 19 Sep 2015 17:36:04 +0000 Subject: [PATCH 262/536] Add exceptions to the documentation page --- docs/source/exceptions.rst | 12 ++++++++++++ docs/source/index.rst | 1 + 2 files changed, 13 insertions(+) create mode 100644 docs/source/exceptions.rst diff --git a/docs/source/exceptions.rst b/docs/source/exceptions.rst new file mode 100644 index 00000000..178255b8 --- /dev/null +++ b/docs/source/exceptions.rst @@ -0,0 +1,12 @@ + +.. _exceptions: + +========== +Exceptions +========== + + +.. currentmodule:: influxdb.exceptions + +.. autoclass:: InfluxDBClientError +.. 
autoclass:: InfluxDBServerError diff --git a/docs/source/index.rst b/docs/source/index.rst index a110ad2a..6e5b2ef3 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -19,6 +19,7 @@ Contents include-readme api-documentation + exceptions resultset examples From 172099aa667cb88a055af41361686fdab7eff47b Mon Sep 17 00:00:00 2001 From: aviau Date: Wed, 7 Oct 2015 16:39:58 -0400 Subject: [PATCH 263/536] six>=1.9.0 (Closes: #252) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 3445ca42..43c09bd1 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ python-dateutil>=2.0.0 pytz requests>=1.0.3 -six==1.9.0 +six>=1.9.0 From e693e31b33bfc355e4a2490c3185d467fab4e8c1 Mon Sep 17 00:00:00 2001 From: aviau Date: Wed, 7 Oct 2015 16:43:08 -0400 Subject: [PATCH 264/536] release 2.9.2 --- influxdb/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/__init__.py b/influxdb/__init__.py index fdffc939..e85f0ce4 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -13,4 +13,4 @@ ] -__version__ = '2.9.1' +__version__ = '2.9.2' From 3281b64f9107690f2cf1b193485fb038697834bb Mon Sep 17 00:00:00 2001 From: Piotr Popieluch Date: Thu, 15 Oct 2015 11:40:22 +0200 Subject: [PATCH 265/536] Please add license file to pip package text from the mit license: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
--- MANIFEST.in | 1 + 1 file changed, 1 insertion(+) diff --git a/MANIFEST.in b/MANIFEST.in index 1395a993..18b23910 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,3 +1,4 @@ include requirements.txt include test-requirements.txt include dev-requirements.txt +include LICENSE From 0c41b0481f16ce3f3ef4f2755febbd7867238eec Mon Sep 17 00:00:00 2001 From: "tzonghao.chen" Date: Mon, 26 Oct 2015 14:20:50 -0400 Subject: [PATCH 266/536] Fix pandas FutureWarning of sort Issue #258 --- influxdb/influxdb08/dataframe_client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/influxdb/influxdb08/dataframe_client.py b/influxdb/influxdb08/dataframe_client.py index 197422a6..c1665b3e 100644 --- a/influxdb/influxdb08/dataframe_client.py +++ b/influxdb/influxdb08/dataframe_client.py @@ -105,9 +105,9 @@ def _to_dataframe(self, json_result, time_precision): dataframe = pd.DataFrame(data=json_result['points'], columns=json_result['columns']) if 'sequence_number' in dataframe.keys(): - dataframe.sort(['time', 'sequence_number'], inplace=True) + dataframe.sort_values(['time', 'sequence_number'], inplace=True) else: - dataframe.sort(['time'], inplace=True) + dataframe.sort_values(['time'], inplace=True) pandas_time_unit = time_precision if time_precision == 'm': pandas_time_unit = 'ms' From 00d864df19c4b0c599f25c66dd97532f7ba37de8 Mon Sep 17 00:00:00 2001 From: Pierre Fersing Date: Tue, 27 Oct 2015 16:02:59 +0100 Subject: [PATCH 267/536] Avoid mutable value for defaults of functions --- influxdb/client.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/influxdb/client.py b/influxdb/client.py index 6c180ce4..e4450441 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -280,7 +280,7 @@ def write(self, data, params=None, expected_response_code=204): def query(self, query, - params={}, + params=None, epoch=None, expected_response_code=200, database=None, @@ -307,6 +307,9 @@ def query(self, :returns: the queried data :rtype: 
:class:`~.ResultSet` """ + if params is None: + params = {} + params['q'] = query params['db'] = database or self._database From 9ccd8dd11d69384a3a50ebd4cd168099527de841 Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 30 Oct 2015 16:12:25 -0400 Subject: [PATCH 268/536] released 2.9.3 --- influxdb/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/__init__.py b/influxdb/__init__.py index e85f0ce4..348b12af 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -13,4 +13,4 @@ ] -__version__ = '2.9.2' +__version__ = '2.9.3' From 2f129c26fc1abccf821e6fb6185c47d02a6d4dab Mon Sep 17 00:00:00 2001 From: Robert Habermann Date: Sat, 7 Nov 2015 21:56:37 +0100 Subject: [PATCH 269/536] add option to use Requests http(s) proxy MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The influxdb-python client does not have support for a web proxy although the Requests library provides the possibility to use a web proxy for both http and https requests. This PR (optionally) adds the feature to use a http(s) proxy by adding an additional parameter to InfluxDBClient initialization called´"proxies". This parameter expects that a dict is provided. If this is not set it will default to { }. 
The syntax for the dict is the same as specified in Requests docs: http://docs.python-requests.org/en/latest/user/advanced/#proxies proxies = { "http": "http://10.10.1.10:3128", "https": "http://10.10.1.10:1080", } --- influxdb/client.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/influxdb/client.py b/influxdb/client.py index 6c180ce4..eb79a552 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -55,6 +55,8 @@ class InfluxDBClient(object): :type use_udp: int :param udp_port: UDP port to connect to InfluxDB, defaults to 4444 :type udp_port: int + :param proxies: HTTP(S) proxy to use for Requests, defaults to {} + :type proxies: dict """ def __init__(self, @@ -68,6 +70,7 @@ def __init__(self, timeout=None, use_udp=False, udp_port=4444, + proxies=None, ): """Construct a new InfluxDBClient object.""" self._host = host @@ -90,6 +93,11 @@ def __init__(self, if ssl is True: self._scheme = "https" + if proxies is None: + self._proxies = {} + else: + self._proxies = proxies + self._baseurl = "{0}://{1}:{2}".format( self._scheme, self._host, @@ -229,6 +237,7 @@ def request(self, url, method='GET', params=None, data=None, params=params, data=data, headers=headers, + proxies=self._proxies, verify=self._verify_ssl, timeout=self._timeout ) From bbe35d5e0aca8a8a9f33519ce06098c01d9b2ec5 Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 13 Nov 2015 15:02:59 -0500 Subject: [PATCH 270/536] Python 3.5 fix: replace __dict__ by getattr --- influxdb/helper.py | 4 ++-- influxdb/influxdb08/helper.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/influxdb/helper.py b/influxdb/helper.py index a5efb461..941aa4b8 100644 --- a/influxdb/helper.py +++ b/influxdb/helper.py @@ -143,10 +143,10 @@ def _json_body_(cls): } for field in cls._fields: - json_point['fields'][field] = point.__dict__[field] + json_point['fields'][field] = getattr(point, field) for tag in cls._tags: - json_point['tags'][tag] = point.__dict__[tag] + json_point['tags'][tag] = 
getattr(point, tag) json.append(json_point) return json diff --git a/influxdb/influxdb08/helper.py b/influxdb/influxdb08/helper.py index b2f8f8bb..391e9ccd 100644 --- a/influxdb/influxdb08/helper.py +++ b/influxdb/influxdb08/helper.py @@ -137,7 +137,7 @@ def _json_body_(cls): for series_name, data in six.iteritems(cls._datapoints): json.append({'name': series_name, 'columns': cls._fields, - 'points': [[point.__dict__[k] for k in cls._fields] + 'points': [[getattr(point, k) for k in cls._fields] for point in data] }) return json From 5971d766c8aacf6f5ce5021ea5228cfd97935410 Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 13 Nov 2015 15:03:41 -0500 Subject: [PATCH 271/536] Released 2.10.0 --- influxdb/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/__init__.py b/influxdb/__init__.py index 348b12af..dab1c004 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -13,4 +13,4 @@ ] -__version__ = '2.9.3' +__version__ = '2.10.0' From 4b1e87d491f791c24379e7a8d86b3c753feb8a1d Mon Sep 17 00:00:00 2001 From: Pierre Fersing Date: Tue, 17 Nov 2015 19:13:59 +0100 Subject: [PATCH 272/536] Make InfluxDBClusterClient thread-safe --- influxdb/client.py | 65 +++++++++++++++++++++++++++++++++++----------- 1 file changed, 50 insertions(+), 15 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index d16c4937..b8bda92d 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -6,6 +6,7 @@ from functools import wraps import json import socket +import threading import random import requests import requests.exceptions @@ -73,7 +74,7 @@ def __init__(self, proxies=None, ): """Construct a new InfluxDBClient object.""" - self._host = host + self.__host = host self._port = port self._username = username self._password = password @@ -98,7 +99,7 @@ def __init__(self, else: self._proxies = proxies - self._baseurl = "{0}://{1}:{2}".format( + self.__baseurl = "{0}://{1}:{2}".format( self._scheme, self._host, self._port) @@ -108,6 
+109,22 @@ def __init__(self, 'Accept': 'text/plain' } + # _baseurl and _host are properties to allow InfluxDBClusterClient + # to override them with thread-local variables + @property + def _baseurl(self): + return self._get_baseurl() + + def _get_baseurl(self): + return self.__baseurl + + @property + def _host(self): + return self._get_host() + + def _get_host(self): + return self.__host + @staticmethod def from_DSN(dsn, **kwargs): """Return an instance of :class:`~.InfluxDBClient` from the provided @@ -740,6 +757,8 @@ def __init__(self, self.bad_hosts = [] # Corresponding server has failures in history self.shuffle = shuffle host, port = self.hosts[0] + self._hosts_lock = threading.Lock() + self._thread_local = threading.local() self._client = client_base_class(host=host, port=port, username=username, @@ -757,6 +776,10 @@ def __init__(self, setattr(self, method, self._make_func(orig_attr)) + self._client._get_host = self._get_host + self._client._get_baseurl = self._get_baseurl + self._update_client_host(self.hosts[0]) + @staticmethod def from_DSN(dsn, client_base_class=InfluxDBClient, shuffle=True, **kwargs): @@ -791,19 +814,29 @@ def from_DSN(dsn, client_base_class=InfluxDBClient, return cluster_client def _update_client_host(self, host): - self._client._host, self._client._port = host - self._client._baseurl = "{0}://{1}:{2}".format(self._client._scheme, - self._client._host, - self._client._port) + self._thread_local.host, self._thread_local.port = host + self._thread_local.baseurl = "{0}://{1}:{2}".format( + self._client._scheme, + self._client._host, + self._client._port + ) + + def _get_baseurl(self): + return self._thread_local.baseurl + + def _get_host(self): + return self._thread_local.host def _make_func(self, orig_func): @wraps(orig_func) def func(*args, **kwargs): - if self.shuffle: - random.shuffle(self.hosts) + with self._hosts_lock: + if self.shuffle: + random.shuffle(self.hosts) + + hosts = self.hosts + self.bad_hosts - hosts = self.hosts + 
self.bad_hosts for h in hosts: bad_host = False try: @@ -815,13 +848,15 @@ def func(*args, **kwargs): except Exception as e: # Errors that might caused by server failure, try another bad_host = True - if h in self.hosts: - self.hosts.remove(h) - self.bad_hosts.append(h) + with self._hosts_lock: + if h in self.hosts: + self.hosts.remove(h) + self.bad_hosts.append(h) finally: - if not bad_host and h in self.bad_hosts: - self.bad_hosts.remove(h) - self.hosts.append(h) + with self._hosts_lock: + if not bad_host and h in self.bad_hosts: + self.bad_hosts.remove(h) + self.hosts.append(h) raise InfluxDBServerError("InfluxDB: no viable server!") From 6587076b11a1a183e15be67d8cce03b2b8f20208 Mon Sep 17 00:00:00 2001 From: William Pain Date: Wed, 18 Nov 2015 09:59:55 +0100 Subject: [PATCH 273/536] Allow netloc with "." --- influxdb/client.py | 2 +- influxdb/tests/client_test.py | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index d16c4937..51e59921 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -866,7 +866,7 @@ def parse_dsn(dsn): def _parse_netloc(netloc): import re - parsed = re.findall(r'(\w*):(\w*)@(\w*):(\d*)', netloc) + parsed = re.findall(r'(\w*):(\w*)@([a-zA-Z0-9_\.]*):(\d*)', netloc) if not parsed: raise ValueError('Invalid netloc "{}".'.format(netloc)) diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index 27f2c6f3..8df4bd64 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -95,7 +95,7 @@ def setUp(self): } ] - self.dsn_string = 'influxdb://uSr:pWd@host:1886/db' + self.dsn_string = 'influxdb://uSr:pWd@my.host.fr:1886/db' def test_scheme(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'database') @@ -108,7 +108,7 @@ def test_scheme(self): def test_dsn(self): cli = InfluxDBClient.from_DSN(self.dsn_string) - self.assertEqual('http://host:1886', cli._baseurl) + self.assertEqual('http://my.host.fr:1886', 
cli._baseurl) self.assertEqual('uSr', cli._username) self.assertEqual('pWd', cli._password) self.assertEqual('db', cli._database) @@ -118,11 +118,11 @@ def test_dsn(self): self.assertTrue(cli.use_udp) cli = InfluxDBClient.from_DSN('https+' + self.dsn_string) - self.assertEqual('https://host:1886', cli._baseurl) + self.assertEqual('https://my.host.fr:1886', cli._baseurl) cli = InfluxDBClient.from_DSN('https+' + self.dsn_string, **{'ssl': False}) - self.assertEqual('http://host:1886', cli._baseurl) + self.assertEqual('http://my.host.fr:1886', cli._baseurl) def test_switch_database(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'database') From 9f893b559e7a8f31500c6006e747116089ee9cfa Mon Sep 17 00:00:00 2001 From: Pierre Fersing Date: Thu, 19 Nov 2015 14:02:13 +0100 Subject: [PATCH 274/536] Added healing feature on InfluxClusterClient --- influxdb/client.py | 15 +++++++++++++++ influxdb/tests/client_test.py | 19 +++++++++++++++++++ 2 files changed, 34 insertions(+) diff --git a/influxdb/client.py b/influxdb/client.py index 83d04697..9f762fa8 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -6,6 +6,7 @@ from functools import wraps import json import socket +import time import threading import random import requests @@ -737,6 +738,9 @@ class InfluxDBClusterClient(object): :param client_base_class: the base class for the cluster client. This parameter is used to enable the support of different client types. Defaults to :class:`~.InfluxDBClient` + :param healing_delay: the delay in seconds, counting from last failure of + a server, before re-adding server to the list of working servers. 
+ Defaults to 15 minutes (900 seconds) """ def __init__(self, @@ -751,11 +755,14 @@ def __init__(self, udp_port=4444, shuffle=True, client_base_class=InfluxDBClient, + healing_delay=900, ): self.clients = [self] # Keep it backwards compatible self.hosts = hosts self.bad_hosts = [] # Corresponding server has failures in history self.shuffle = shuffle + self.healing_delay = healing_delay + self._last_healing = time.time() host, port = self.hosts[0] self._hosts_lock = threading.Lock() self._thread_local = threading.local() @@ -831,7 +838,14 @@ def _make_func(self, orig_func): @wraps(orig_func) def func(*args, **kwargs): + now = time.time() with self._hosts_lock: + if (self.bad_hosts + and self._last_healing + self.healing_delay < now): + h = self.bad_hosts.pop(0) + self.hosts.append(h) + self._last_healing = now + if self.shuffle: random.shuffle(self.hosts) @@ -852,6 +866,7 @@ def func(*args, **kwargs): if h in self.hosts: self.hosts.remove(h) self.bad_hosts.append(h) + self._last_healing = now finally: with self._hosts_lock: if not bad_host and h in self.bad_hosts: diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index 8df4bd64..621f3436 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -18,6 +18,7 @@ import requests import requests.exceptions import socket +import time import unittest import requests_mock import random @@ -804,6 +805,24 @@ def test_recovery(self): self.assertEqual(1, len(cluster.hosts)) self.assertEqual(2, len(cluster.bad_hosts)) + def test_healing(self): + cluster = InfluxDBClusterClient(hosts=self.hosts, + database='database', + shuffle=True, + healing_delay=1, + client_base_class=FakeClient) + with self.assertRaises(InfluxDBServerError): + cluster.query('Fail') + self.assertEqual('Success', cluster.query('')) + time.sleep(1.1) + self.assertEqual('Success', cluster.query('')) + self.assertEqual(2, len(cluster.hosts)) + self.assertEqual(1, len(cluster.bad_hosts)) + time.sleep(1.1) + 
self.assertEqual('Success', cluster.query('')) + self.assertEqual(3, len(cluster.hosts)) + self.assertEqual(0, len(cluster.bad_hosts)) + def test_dsn(self): cli = InfluxDBClusterClient.from_DSN(self.dsn_string) self.assertEqual([('host1', 8086), ('host2', 8086)], cli.hosts) From 55d479e61f0d67059076666068fb7457ade4509d Mon Sep 17 00:00:00 2001 From: Pawel Slomka Date: Fri, 27 Nov 2015 09:05:34 +0100 Subject: [PATCH 275/536] Fix create/drop numeric-named databases --- influxdb/client.py | 4 ++-- influxdb/tests/client_test.py | 30 ++++++++++++++++++++++++++++-- 2 files changed, 30 insertions(+), 4 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 9f762fa8..b9fcb0c3 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -460,7 +460,7 @@ def create_database(self, dbname): :param dbname: the name of the database to create :type dbname: str """ - self.query("CREATE DATABASE %s" % dbname) + self.query("CREATE DATABASE \"%s\"" % dbname) def drop_database(self, dbname): """Drop a database from InfluxDB. 
@@ -468,7 +468,7 @@ def drop_database(self, dbname): :param dbname: the name of the database to drop :type dbname: str """ - self.query("DROP DATABASE %s" % dbname) + self.query("DROP DATABASE \"%s\"" % dbname) def create_retention_policy(self, name, duration, replication, database=None, default=False): diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index 621f3436..24f5d0a4 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -407,7 +407,20 @@ def test_create_database(self): self.cli.create_database('new_db') self.assertEqual( m.last_request.qs['q'][0], - 'create database new_db' + 'create database "new_db"' + ) + + def test_create_numeric_named_database(self): + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.GET, + "http://localhost:8086/query", + text='{"results":[{}]}' + ) + self.cli.create_database('123') + self.assertEqual( + m.last_request.qs['q'][0], + 'create database "123"' ) @raises(Exception) @@ -425,7 +438,20 @@ def test_drop_database(self): self.cli.drop_database('new_db') self.assertEqual( m.last_request.qs['q'][0], - 'drop database new_db' + 'drop database "new_db"' + ) + + def test_drop_numeric_named_database(self): + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.GET, + "http://localhost:8086/query", + text='{"results":[{}]}' + ) + self.cli.drop_database('123') + self.assertEqual( + m.last_request.qs['q'][0], + 'drop database "123"' ) @raises(Exception) From 1bab21652510b01c0df07cc58ce029bc8cdf7f9c Mon Sep 17 00:00:00 2001 From: slomek Date: Sat, 5 Dec 2015 20:21:44 +0100 Subject: [PATCH 276/536] Create db only if doesn't exist yet --- influxdb/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/client.py b/influxdb/client.py index b9fcb0c3..99a88720 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -460,7 +460,7 @@ def create_database(self, dbname): :param dbname: the name of the database to create :type 
dbname: str """ - self.query("CREATE DATABASE \"%s\"" % dbname) + self.query("CREATE DATABASE IF NOT EXISTS \"%s\"" % dbname) def drop_database(self, dbname): """Drop a database from InfluxDB. From a2b30011030aac59c48792636c020cdd35ae22f0 Mon Sep 17 00:00:00 2001 From: Alexandre Viau Date: Sat, 5 Dec 2015 14:27:49 -0500 Subject: [PATCH 277/536] Revert "Create db only if doesn't exist yet" --- influxdb/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/client.py b/influxdb/client.py index 99a88720..b9fcb0c3 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -460,7 +460,7 @@ def create_database(self, dbname): :param dbname: the name of the database to create :type dbname: str """ - self.query("CREATE DATABASE IF NOT EXISTS \"%s\"" % dbname) + self.query("CREATE DATABASE \"%s\"" % dbname) def drop_database(self, dbname): """Drop a database from InfluxDB. From 0820328718556e4143d85b7787ae14874da5e2ea Mon Sep 17 00:00:00 2001 From: William Pain Date: Tue, 8 Dec 2015 11:02:47 +0100 Subject: [PATCH 278/536] Use urlparse for parse netloc --- influxdb/client.py | 15 +++++---------- influxdb/tests/client_test.py | 3 +++ 2 files changed, 8 insertions(+), 10 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index b9fcb0c3..683e0bcb 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -915,13 +915,8 @@ def parse_dsn(dsn): def _parse_netloc(netloc): - import re - parsed = re.findall(r'(\w*):(\w*)@([a-zA-Z0-9_\.]*):(\d*)', netloc) - if not parsed: - raise ValueError('Invalid netloc "{}".'.format(netloc)) - - info = parsed[0] - return {'username': info[0] or None, - 'password': info[1] or None, - 'host': info[2] or 'localhost', - 'port': info[3] or 8086} + info = urlparse("http://{}".format(netloc)) + return {'username': info.username or None, + 'password': info.password or None, + 'host': info.hostname or 'localhost', + 'port': info.port or 8086} diff --git a/influxdb/tests/client_test.py 
b/influxdb/tests/client_test.py index 24f5d0a4..dc4ab306 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -108,6 +108,9 @@ def test_scheme(self): self.assertEqual('https://host:8086', cli._baseurl) def test_dsn(self): + cli = InfluxDBClient.from_DSN('influxdb://192.168.0.1:1886') + self.assertEqual('http://192.168.0.1:1886', cli._baseurl) + cli = InfluxDBClient.from_DSN(self.dsn_string) self.assertEqual('http://my.host.fr:1886', cli._baseurl) self.assertEqual('uSr', cli._username) From 797070121d839e1a6faacc9b29e3c526b2425e9b Mon Sep 17 00:00:00 2001 From: Alexandr Skurikhin Date: Mon, 14 Dec 2015 00:06:40 +0300 Subject: [PATCH 279/536] Improve ResultSet.items() performance Use straightforward method of getting points for a serie: no need to try to filter by measurement/tag: it greatly decreases performance. --- influxdb/resultset.py | 29 ++++++++++++++++++----------- 1 file changed, 18 insertions(+), 11 deletions(-) diff --git a/influxdb/resultset.py b/influxdb/resultset.py index de7b1a3a..600274c6 100644 --- a/influxdb/resultset.py +++ b/influxdb/resultset.py @@ -88,27 +88,22 @@ def get_points(self, measurement=None, tags=None): for serie in self._get_series(): serie_name = serie.get('measurement', serie.get('name', 'results')) + print serie_name if serie_name is None: # this is a "system" query or a query which # doesn't return a name attribute. # like 'show retention policies' .. 
if tags is None: - for point in serie['values']: - yield self.point_from_cols_vals( - serie['columns'], - point - ) + for item in self._get_points_for_serie(serie): + yield item elif measurement in (None, serie_name): # by default if no tags was provided then # we will matches every returned serie serie_tags = serie.get('tags', {}) if tags is None or self._tag_matches(serie_tags, tags): - for point in serie.get('values', []): - yield self.point_from_cols_vals( - serie['columns'], - point - ) + for item in self._get_points_for_serie(serie): + yield item def __repr__(self): items = [] @@ -166,10 +161,22 @@ def items(self): serie.get('name', 'results')), serie.get('tags', None)) items.append( - (serie_key, self[serie_key]) + (serie_key, self._get_points_for_serie(serie)) ) return items + def _get_points_for_serie(self, serie): + """ Return generator of dict from columns and values of a serie + + :param serie: One serie + :return: Generator of dicts + """ + for point in serie.get('values', []): + yield self.point_from_cols_vals( + serie['columns'], + point + ) + @staticmethod def point_from_cols_vals(cols, vals): """ Creates a dict from columns and values lists From 2c55c71a226abefbfbfbd0688ebc0ef6ee5fc6cd Mon Sep 17 00:00:00 2001 From: Alexandr Skurikhin Date: Mon, 14 Dec 2015 01:18:52 +0300 Subject: [PATCH 280/536] Remove debug print --- influxdb/resultset.py | 1 - 1 file changed, 1 deletion(-) diff --git a/influxdb/resultset.py b/influxdb/resultset.py index 600274c6..8ec7091e 100644 --- a/influxdb/resultset.py +++ b/influxdb/resultset.py @@ -88,7 +88,6 @@ def get_points(self, measurement=None, tags=None): for serie in self._get_series(): serie_name = serie.get('measurement', serie.get('name', 'results')) - print serie_name if serie_name is None: # this is a "system" query or a query which # doesn't return a name attribute. 
From 8e099c070ed68bc18a86047f88e876ef27efa40b Mon Sep 17 00:00:00 2001 From: Alexander Mancevice Date: Fri, 1 Jan 2016 13:10:08 -0500 Subject: [PATCH 281/536] Added get_list_servers() method to client --- influxdb/client.py | 19 +++++++++++++++++++ influxdb/tests/client_test.py | 33 +++++++++++++++++++++++++++++++++ 2 files changed, 52 insertions(+) diff --git a/influxdb/client.py b/influxdb/client.py index 683e0bcb..e9e015df 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -592,6 +592,25 @@ def get_list_series(self, database=None): ) return series + def get_list_servers(self): + """Get the list of servers in InfluxDB cluster. + + :returns: all nodes in InfluxDB cluster + :rtype: list of dictionaries + + :Example: + + :: + + >> servers = client.get_list_servers() + >> servers + [{'cluster_addr': 'server01:8088', + 'id': 1, + 'raft': True, + 'raft-leader': True}] + """ + return list(self.query("SHOW SERVERS").get_points()) + def get_list_users(self): """Get the list of all users in InfluxDB. 
diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index dc4ab306..ebe140a7 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -485,6 +485,39 @@ def test_get_list_database_fails(self): with _mocked_session(cli, 'get', 401): cli.get_list_database() + def test_get_list_servers(self): + data = {'results': [ + {'series': [ + {'columns': ['id', 'cluster_addr', 'raft', 'raft-leader'], + 'values': [ + [1, 'server01:8088', True, True], + [2, 'server02:8088', True, False], + [3, 'server03:8088', True, False]]}]} + ]} + + with _mocked_session(self.cli, 'get', 200, json.dumps(data)): + self.assertListEqual( + self.cli.get_list_servers(), + [{'cluster_addr': 'server01:8088', + 'id': 1, + 'raft': True, + 'raft-leader': True}, + {'cluster_addr': 'server02:8088', + 'id': 2, + 'raft': True, + 'raft-leader': False}, + {'cluster_addr': 'server03:8088', + 'id': 3, + 'raft': True, + 'raft-leader': False}] + ) + + @raises(Exception) + def test_get_list_servers_fails(self): + cli = InfluxDBClient('host', 8086, 'username', 'password') + with _mocked_session(cli, 'get', 401): + cli.get_list_servers() + def test_get_list_series(self): example_response = \ '{"results": [{"series": [{"name": "cpu_load_short", "columns": ' \ From 528e69a1667c77d9e164e04982c88a12402c4d01 Mon Sep 17 00:00:00 2001 From: slomek Date: Sat, 5 Dec 2015 20:21:44 +0100 Subject: [PATCH 282/536] Create db only if doesn't exist yet --- influxdb/client.py | 7 +++++-- influxdb/tests/client_test.py | 13 +++++++++++++ .../tests/server_tests/client_test_with_server.py | 7 ++++++- 3 files changed, 24 insertions(+), 3 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index b9fcb0c3..a1a8d7b7 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -454,13 +454,16 @@ def get_list_database(self): """ return list(self.query("SHOW DATABASES").get_points()) - def create_database(self, dbname): + def create_database(self, dbname, if_not_exists=False): """Create 
a new database in InfluxDB. :param dbname: the name of the database to create :type dbname: str """ - self.query("CREATE DATABASE \"%s\"" % dbname) + if if_not_exists: + self.query("CREATE DATABASE IF NOT EXISTS \"%s\"" % dbname) + else: + self.query("CREATE DATABASE \"%s\"" % dbname) def drop_database(self, dbname): """Drop a database from InfluxDB. diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index 24f5d0a4..36a0aeb0 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -410,6 +410,19 @@ def test_create_database(self): 'create database "new_db"' ) + def test_create_database_with_exist_check(self): + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.GET, + "http://localhost:8086/query", + text='{"results":[{}]}' + ) + self.cli.create_database('new_db', if_not_exists=True) + self.assertEqual( + m.last_request.qs['q'][0], + 'create database if not exists "new_db"' + ) + def test_create_numeric_named_database(self): with requests_mock.Mocker() as m: m.register_uri( diff --git a/influxdb/tests/server_tests/client_test_with_server.py b/influxdb/tests/server_tests/client_test_with_server.py index ee150fa5..82f19d13 100644 --- a/influxdb/tests/server_tests/client_test_with_server.py +++ b/influxdb/tests/server_tests/client_test_with_server.py @@ -131,7 +131,12 @@ def test_create_database(self): [{'name': 'new_db_1'}, {'name': 'new_db_2'}] ) - def test_create_database_fails(self): + def test_create_database_twice_if_not_exist(self): + self.assertIsNone(self.cli.create_database('new_db')) + self.assertIsNone( + self.cli.create_database('new_db', if_not_exists=True)) + + def test_create_database_twice_fails(self): self.assertIsNone(self.cli.create_database('new_db')) with self.assertRaises(InfluxDBClientError) as ctx: self.cli.create_database('new_db') From 77bc4c752561fc38fec5a0902c8e936438e82b8d Mon Sep 17 00:00:00 2001 From: Adrian Lopez Date: Fri, 8 Jan 2016 14:56:01 +0100 Subject: [PATCH 
283/536] Python 2.6 does not support keyword arguments in decode http://docs.python.org/library/stdtypes.html#str.decode Changed in version 2.7: Support for keyword arguments added. --- influxdb/exceptions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/exceptions.py b/influxdb/exceptions.py index 33c909b6..cad325c6 100644 --- a/influxdb/exceptions.py +++ b/influxdb/exceptions.py @@ -2,7 +2,7 @@ class InfluxDBClientError(Exception): """Raised when an error occurs in the request.""" def __init__(self, content, code=None): if isinstance(content, type(b'')): - content = content.decode('UTF-8', errors='replace') + content = content.decode('UTF-8', 'replace') if code is not None: message = "%s: %s" % (code, content) From e035d33384660f4346e53f0369774c1b7daee595 Mon Sep 17 00:00:00 2001 From: aviau Date: Mon, 11 Jan 2016 19:46:51 -0500 Subject: [PATCH 284/536] Released 2.11.0 --- influxdb/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/__init__.py b/influxdb/__init__.py index dab1c004..16cf32e2 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -13,4 +13,4 @@ ] -__version__ = '2.10.0' +__version__ = '2.11.0' From fe7e1687442777c81f10017f5f64252a94bc1f44 Mon Sep 17 00:00:00 2001 From: Cameron Sparr Date: Wed, 20 Jan 2016 11:59:57 -0700 Subject: [PATCH 285/536] Change influxdb -> influxdata --- README.rst | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/README.rst b/README.rst index 5b1ba3f5..1b5c89ce 100644 --- a/README.rst +++ b/README.rst @@ -2,8 +2,8 @@ InfluxDB-Python is a client for interacting with InfluxDB_. -.. image:: https://travis-ci.org/influxdb/influxdb-python.svg?branch=master - :target: https://travis-ci.org/influxdb/influxdb-python +.. image:: https://travis-ci.org/influxdata/influxdb-python.svg?branch=master + :target: https://travis-ci.org/influxdata/influxdb-python .. 
image:: https://readthedocs.org/projects/influxdb-python/badge/?version=latest&style :target: http://influxdb-python.readthedocs.org/ @@ -14,7 +14,7 @@ InfluxDB-Python is a client for interacting with InfluxDB_. :alt: Coverage .. image:: https://pypip.in/download/influxdb/badge.svg - :target: https://pypi.python.org/pypi//influxdb/ + :target: https://pypi.python.org/pypi/influxdb/ :alt: Downloads .. image:: https://pypip.in/version/influxdb/badge.svg @@ -31,7 +31,7 @@ InfluxDB-Python is a client for interacting with InfluxDB_. .. _readme-about: -InfluxDB is an open-source distributed time series database, find more about InfluxDB_ at http://influxdb.com/ +InfluxDB is an open-source distributed time series database, find more about InfluxDB_ at http://influxdata.com/ .. _installation: @@ -167,15 +167,15 @@ problems or submit contributions. TODO ==== -The TODO/Roadmap can be found in Github bug tracker: https://github.com/influxdb/influxdb-python/issues +The TODO/Roadmap can be found in Github bug tracker: https://github.com/influxdata/influxdb-python/issues Source code =========== -The source code is currently available on Github: https://github.com/influxdb/influxdb-python +The source code is currently available on Github: https://github.com/influxdata/influxdb-python -.. _InfluxDB: http://influxdb.com/ +.. _InfluxDB: http://influxdata.com/ .. _Sphinx: http://sphinx.pocoo.org/ .. 
_Tox: https://tox.readthedocs.org From 9b4ea8400289d0e185c5bb45445294b98252534d Mon Sep 17 00:00:00 2001 From: Pierre Fersing Date: Tue, 26 Jan 2016 10:14:03 +0100 Subject: [PATCH 286/536] Fix issue with cluster on multiple ports --- influxdb/client.py | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index bca047a8..c602bde5 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -76,7 +76,7 @@ def __init__(self, ): """Construct a new InfluxDBClient object.""" self.__host = host - self._port = port + self.__port = port self._username = username self._password = password self._database = database @@ -110,7 +110,7 @@ def __init__(self, 'Accept': 'text/plain' } - # _baseurl and _host are properties to allow InfluxDBClusterClient + # _baseurl, _host and _port are properties to allow InfluxDBClusterClient # to override them with thread-local variables @property def _baseurl(self): @@ -126,6 +126,13 @@ def _host(self): def _get_host(self): return self.__host + @property + def _port(self): + return self._get_port() + + def _get_port(self): + return self.__port + @staticmethod def from_DSN(dsn, **kwargs): """Return an instance of :class:`~.InfluxDBClient` from the provided @@ -806,6 +813,7 @@ def __init__(self, setattr(self, method, self._make_func(orig_attr)) self._client._get_host = self._get_host + self._client._get_port = self._get_port self._client._get_baseurl = self._get_baseurl self._update_client_host(self.hosts[0]) @@ -856,6 +864,9 @@ def _get_baseurl(self): def _get_host(self): return self._thread_local.host + def _get_port(self): + return self._thread_local.port + def _make_func(self, orig_func): @wraps(orig_func) From e21de35f21f34f8c7a0b2773d20bf5866d5ca74e Mon Sep 17 00:00:00 2001 From: Andy Cobaugh Date: Wed, 27 Jan 2016 20:01:47 -0500 Subject: [PATCH 287/536] tests: update tox config and test scripts to support python 2.6 Define a new py26 env, and add unittest2 as a dependency 
Add in some simple code that imports unittest2 as unittest if python < 2.7 Fix some .format() calls to add the positional argument specifiers --- influxdb/tests/__init__.py | 5 ++++- influxdb/tests/chunked_json_test.py | 6 +++++- influxdb/tests/client_test.py | 6 +++++- influxdb/tests/dataframe_client_test.py | 6 +++++- influxdb/tests/helper_test.py | 14 ++++++++----- influxdb/tests/influxdb08/client_test.py | 6 +++++- .../tests/influxdb08/dataframe_client_test.py | 6 +++++- influxdb/tests/influxdb08/helper_test.py | 8 +++++-- influxdb/tests/resultset_test.py | 6 +++++- .../server_tests/client_test_with_server.py | 6 +++++- .../tests/server_tests/influxdb_instance.py | 21 ++++++++++++++++++- influxdb/tests/test_line_protocol.py | 7 ++++++- tox.ini | 5 +++-- 13 files changed, 83 insertions(+), 19 deletions(-) diff --git a/influxdb/tests/__init__.py b/influxdb/tests/__init__.py index 680c1eaf..31454368 100644 --- a/influxdb/tests/__init__.py +++ b/influxdb/tests/__init__.py @@ -1,6 +1,9 @@ # -*- coding: utf-8 -*- -import unittest import sys +if sys.version_info < (2, 7): + import unittest2 as unittest +else: + import unittest import os using_pypy = hasattr(sys, "pypy_version_info") diff --git a/influxdb/tests/chunked_json_test.py b/influxdb/tests/chunked_json_test.py index a9a11b69..f9e96ee6 100644 --- a/influxdb/tests/chunked_json_test.py +++ b/influxdb/tests/chunked_json_test.py @@ -1,6 +1,10 @@ # -*- coding: utf-8 -*- -import unittest +import sys +if sys.version_info < (2, 7): + import unittest2 as unittest +else: + import unittest from influxdb import chunked_json diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index 0f869cdf..c7f980f1 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -19,13 +19,17 @@ import requests.exceptions import socket import time -import unittest import requests_mock import random from nose.tools import raises from mock import patch import warnings import mock +import sys +if 
sys.version_info < (2, 7): + import unittest2 as unittest +else: + import unittest from influxdb import InfluxDBClient, InfluxDBClusterClient from influxdb.client import InfluxDBServerError diff --git a/influxdb/tests/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py index 0583ffba..07998803 100644 --- a/influxdb/tests/dataframe_client_test.py +++ b/influxdb/tests/dataframe_client_test.py @@ -4,7 +4,11 @@ """ from .client_test import _mocked_session -import unittest +import sys +if sys.version_info < (2, 7): + import unittest2 as unittest +else: + import unittest import json import requests_mock from nose.tools import raises diff --git a/influxdb/tests/helper_test.py b/influxdb/tests/helper_test.py index 444b36ef..9721a9c9 100644 --- a/influxdb/tests/helper_test.py +++ b/influxdb/tests/helper_test.py @@ -1,6 +1,10 @@ # -*- coding: utf-8 -*- -import unittest +import sys +if sys.version_info < (2, 7): + import unittest2 as unittest +else: + import unittest import warnings import mock @@ -117,7 +121,7 @@ def testSingleSeriesName(self): self.assertTrue(all([el in expectation for el in rcvd]) and all([el in rcvd for el in expectation]), 'Invalid JSON body of time series returned from ' - '_json_body_ for one series name: {}.'.format(rcvd)) + '_json_body_ for one series name: {0}.'.format(rcvd)) TestSeriesHelper.MySeriesHelper._reset_() self.assertEqual( TestSeriesHelper.MySeriesHelper._json_body_(), @@ -183,7 +187,7 @@ def testSeveralSeriesNames(self): self.assertTrue(all([el in expectation for el in rcvd]) and all([el in rcvd for el in expectation]), 'Invalid JSON body of time series returned from ' - '_json_body_ for several series names: {}.' + '_json_body_ for several series names: {0}.' .format(rcvd)) TestSeriesHelper.MySeriesHelper._reset_() self.assertEqual( @@ -245,7 +249,7 @@ class Meta: # the warning only. pass self.assertEqual(len(w), 1, - '{} call should have generated one warning.' + '{0} call should have generated one warning.' 
.format(WarnBulkSizeZero)) self.assertIn('forced to 1', str(w[-1].message), 'Warning message did not contain "forced to 1".') @@ -267,7 +271,7 @@ class Meta: warnings.simplefilter("always") WarnBulkSizeNoEffect(time=159, server_name='us.east-1') self.assertEqual(len(w), 1, - '{} call should have generated one warning.' + '{0} call should have generated one warning.' .format(WarnBulkSizeNoEffect)) self.assertIn('has no affect', str(w[-1].message), 'Warning message did not contain "has not affect".') diff --git a/influxdb/tests/influxdb08/client_test.py b/influxdb/tests/influxdb08/client_test.py index 343f1d22..87328c5a 100644 --- a/influxdb/tests/influxdb08/client_test.py +++ b/influxdb/tests/influxdb08/client_test.py @@ -6,7 +6,11 @@ import requests import requests.exceptions import socket -import unittest +import sys +if sys.version_info < (2, 7): + import unittest2 as unittest +else: + import unittest import requests_mock import random from nose.tools import raises diff --git a/influxdb/tests/influxdb08/dataframe_client_test.py b/influxdb/tests/influxdb08/dataframe_client_test.py index 63a10c93..6ff4724f 100644 --- a/influxdb/tests/influxdb08/dataframe_client_test.py +++ b/influxdb/tests/influxdb08/dataframe_client_test.py @@ -4,7 +4,11 @@ """ from .client_test import _mocked_session -import unittest +import sys +if sys.version_info < (2, 7): + import unittest2 as unittest +else: + import unittest import json import requests_mock from nose.tools import raises diff --git a/influxdb/tests/influxdb08/helper_test.py b/influxdb/tests/influxdb08/helper_test.py index b0a7ef21..1dbaf32f 100644 --- a/influxdb/tests/influxdb08/helper_test.py +++ b/influxdb/tests/influxdb08/helper_test.py @@ -1,6 +1,10 @@ # -*- coding: utf-8 -*- -import unittest +import sys +if sys.version_info < (2, 7): + import unittest2 as unittest +else: + import unittest import warnings import mock @@ -75,7 +79,7 @@ def testSingleSeriesName(self): self.assertTrue(all([el in expectation for el in rcvd]) 
and all([el in rcvd for el in expectation]), 'Invalid JSON body of time series returned from ' - '_json_body_ for one series name: {}.'.format(rcvd)) + '_json_body_ for one series name: {0}.'.format(rcvd)) TestSeriesHelper.MySeriesHelper._reset_() self.assertEqual( TestSeriesHelper.MySeriesHelper._json_body_(), diff --git a/influxdb/tests/resultset_test.py b/influxdb/tests/resultset_test.py index ce5fd41a..e65bad3c 100644 --- a/influxdb/tests/resultset_test.py +++ b/influxdb/tests/resultset_test.py @@ -1,6 +1,10 @@ # -*- coding: utf-8 -*- -import unittest +import sys +if sys.version_info < (2, 7): + import unittest2 as unittest +else: + import unittest from influxdb.exceptions import InfluxDBClientError from influxdb.resultset import ResultSet diff --git a/influxdb/tests/server_tests/client_test_with_server.py b/influxdb/tests/server_tests/client_test_with_server.py index 82f19d13..def193d2 100644 --- a/influxdb/tests/server_tests/client_test_with_server.py +++ b/influxdb/tests/server_tests/client_test_with_server.py @@ -15,7 +15,11 @@ from functools import partial import os import time -import unittest +import sys +if sys.version_info < (2, 7): + import unittest2 as unittest +else: + import unittest import warnings # By default, raise exceptions on warnings diff --git a/influxdb/tests/server_tests/influxdb_instance.py b/influxdb/tests/server_tests/influxdb_instance.py index 1f053956..40c66d2e 100644 --- a/influxdb/tests/server_tests/influxdb_instance.py +++ b/influxdb/tests/server_tests/influxdb_instance.py @@ -9,11 +9,30 @@ import time import shutil import subprocess -import unittest import sys +if sys.version_info < (2, 7): + import unittest2 as unittest +else: + import unittest from influxdb.tests.misc import is_port_open, get_free_ports +# hack in check_output if it's not defined, like for python 2.6 +if "check_output" not in dir( subprocess ): + def f(*popenargs, **kwargs): + if 'stdout' in kwargs: + raise ValueError('stdout argument not allowed, it will be 
overridden.') + process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs) + output, unused_err = process.communicate() + retcode = process.poll() + if retcode: + cmd = kwargs.get("args") + if cmd is None: + cmd = popenargs[0] + raise subprocess.CalledProcessError(retcode, cmd) + return output + subprocess.check_output = f + class InfluxDbInstance(object): """ A class to launch of fresh influxdb server instance diff --git a/influxdb/tests/test_line_protocol.py b/influxdb/tests/test_line_protocol.py index 168323fb..34654f50 100644 --- a/influxdb/tests/test_line_protocol.py +++ b/influxdb/tests/test_line_protocol.py @@ -1,6 +1,11 @@ # -*- coding: utf-8 -*- -import unittest +import sys +if sys.version_info < (2, 7): + import unittest2 as unittest +else: + import unittest + from influxdb import line_protocol diff --git a/tox.ini b/tox.ini index 74f733d0..644a7c55 100644 --- a/tox.ini +++ b/tox.ini @@ -1,12 +1,13 @@ [tox] -envlist = py34, py27, pypy, flake8 +envlist = py34, py27, py26, pypy, flake8 [testenv] passenv = INFLUXDB_PYTHON_INFLUXD_PATH setenv = INFLUXDB_PYTHON_SKIP_SERVER_TESTS=False deps = -r{toxinidir}/requirements.txt -r{toxinidir}/test-requirements.txt - py27,py32,py33,py34: pandas + py27,py32,py33,py34,py26: pandas + py26: unittest2 # Only install pandas with non-pypy interpreters commands = nosetests -v --with-doctest {posargs} From c730f05703543e5e9e12e656bef2e15115d8ee39 Mon Sep 17 00:00:00 2001 From: Andy Cobaugh Date: Wed, 27 Jan 2016 20:15:37 -0500 Subject: [PATCH 288/536] tests: found a few more places where we were lacking positional arguments --- influxdb/tests/influxdb08/helper_test.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/influxdb/tests/influxdb08/helper_test.py b/influxdb/tests/influxdb08/helper_test.py index 1dbaf32f..3a84c849 100644 --- a/influxdb/tests/influxdb08/helper_test.py +++ b/influxdb/tests/influxdb08/helper_test.py @@ -111,7 +111,7 @@ def testSeveralSeriesNames(self): 
self.assertTrue(all([el in expectation for el in rcvd]) and all([el in rcvd for el in expectation]), 'Invalid JSON body of time series returned from ' - '_json_body_ for several series names: {}.' + '_json_body_ for several series names: {0}.' .format(rcvd)) TestSeriesHelper.MySeriesHelper._reset_() self.assertEqual( @@ -171,8 +171,8 @@ class Meta: self.assertGreaterEqual( len(rec_warnings), 1, - '{} call should have generated one warning.' - 'Actual generated warnings: {}'.format( + '{0} call should have generated one warning.' + 'Actual generated warnings: {1}'.format( WarnBulkSizeZero, '\n'.join(map(str, rec_warnings)))) expected_msg = ( @@ -201,8 +201,8 @@ class Meta: self.assertGreaterEqual( len(rec_warnings), 1, - '{} call should have generated one warning.' - 'Actual generated warnings: {}'.format( + '{0} call should have generated one warning.' + 'Actual generated warnings: {1}'.format( WarnBulkSizeNoEffect, '\n'.join(map(str, rec_warnings)))) expected_msg = ( From 339098599ca72b065ac02ab992d0e23d6864676a Mon Sep 17 00:00:00 2001 From: Andy Cobaugh Date: Wed, 27 Jan 2016 20:26:50 -0500 Subject: [PATCH 289/536] python26: Update all instances of .format() to use positional arguments to support python 2.6 --- influxdb/_dataframe_client.py | 2 +- influxdb/client.py | 28 ++++++++++++------------- influxdb/helper.py | 10 ++++----- influxdb/influxdb08/client.py | 6 +++--- influxdb/influxdb08/dataframe_client.py | 2 +- influxdb/influxdb08/helper.py | 10 ++++----- influxdb/line_protocol.py | 2 +- 7 files changed, 30 insertions(+), 30 deletions(-) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index 8b4b505a..deae1beb 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -119,7 +119,7 @@ def _convert_dataframe_to_json(self, dataframe, measurement, tags=None, time_precision=None): if not isinstance(dataframe, pd.DataFrame): - raise TypeError('Must be DataFrame, but type was: {}.' 
+ raise TypeError('Must be DataFrame, but type was: {0}.' .format(type(dataframe))) if not (isinstance(dataframe.index, pd.tseries.period.PeriodIndex) or isinstance(dataframe.index, pd.tseries.index.DatetimeIndex)): diff --git a/influxdb/client.py b/influxdb/client.py index bca047a8..e823c785 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -530,12 +530,12 @@ def alter_retention_policy(self, name, database=None, should be set. Otherwise the operation will fail. """ query_string = ( - "ALTER RETENTION POLICY {} ON {}" + "ALTER RETENTION POLICY {0} ON {1}" ).format(name, database or self._database) if duration: - query_string += " DURATION {}".format(duration) + query_string += " DURATION {0}".format(duration) if replication: - query_string += " REPLICATION {}".format(replication) + query_string += " REPLICATION {0}".format(replication) if default is True: query_string += " DEFAULT" @@ -643,7 +643,7 @@ def create_user(self, username, password, admin=False): privileges or not :type admin: boolean """ - text = "CREATE USER {} WITH PASSWORD '{}'".format(username, password) + text = "CREATE USER {0} WITH PASSWORD '{1}'".format(username, password) if admin: text += ' WITH ALL PRIVILEGES' self.query(text) @@ -654,7 +654,7 @@ def drop_user(self, username): :param username: the username to drop :type username: str """ - text = "DROP USER {}".format(username) + text = "DROP USER {0}".format(username) self.query(text) def set_user_password(self, username, password): @@ -665,7 +665,7 @@ def set_user_password(self, username, password): :param password: the new password for the user :type password: str """ - text = "SET PASSWORD FOR {} = '{}'".format(username, password) + text = "SET PASSWORD FOR {0} = '{1}'".format(username, password) self.query(text) def delete_series(self, database=None, measurement=None, tags=None): @@ -683,10 +683,10 @@ def delete_series(self, database=None, measurement=None, tags=None): database = database or self._database query_str = 'DROP 
SERIES' if measurement: - query_str += ' FROM "{}"'.format(measurement) + query_str += ' FROM "{0}"'.format(measurement) if tags: - query_str += ' WHERE ' + ' and '.join(["{}='{}'".format(k, v) + query_str += ' WHERE ' + ' and '.join(["{0}='{1}'".format(k, v) for k, v in tags.items()]) self.query(query_str, database=database) @@ -699,7 +699,7 @@ def revoke_admin_privileges(self, username): .. note:: Only a cluster administrator can create/ drop databases and manage users. """ - text = "REVOKE ALL PRIVILEGES FROM {}".format(username) + text = "REVOKE ALL PRIVILEGES FROM {0}".format(username) self.query(text) def grant_privilege(self, privilege, database, username): @@ -713,7 +713,7 @@ def grant_privilege(self, privilege, database, username): :param username: the username to grant the privilege to :type username: str """ - text = "GRANT {} ON {} TO {}".format(privilege, + text = "GRANT {0} ON {1} TO {2}".format(privilege, database, username) self.query(text) @@ -729,7 +729,7 @@ def revoke_privilege(self, privilege, database, username): :param username: the username to revoke the privilege from :type username: str """ - text = "REVOKE {} ON {} FROM {}".format(privilege, + text = "REVOKE {0} ON {1} FROM {2}".format(privilege, database, username) self.query(text) @@ -911,7 +911,7 @@ def parse_dsn(dsn): modifier, scheme = scheme_info if scheme != 'influxdb': - raise ValueError('Unknown scheme "{}".'.format(scheme)) + raise ValueError('Unknown scheme "{0}".'.format(scheme)) if modifier: if modifier == 'udp': @@ -919,7 +919,7 @@ def parse_dsn(dsn): elif modifier == 'https': init_args['ssl'] = True else: - raise ValueError('Unknown modifier "{}".'.format(modifier)) + raise ValueError('Unknown modifier "{0}".'.format(modifier)) netlocs = conn_params.netloc.split(',') @@ -937,7 +937,7 @@ def parse_dsn(dsn): def _parse_netloc(netloc): - info = urlparse("http://{}".format(netloc)) + info = urlparse("http://{0}".format(netloc)) return {'username': info.username or None, 
'password': info.password or None, 'host': info.hostname or 'localhost', diff --git a/influxdb/helper.py b/influxdb/helper.py index 941aa4b8..803a9bdd 100644 --- a/influxdb/helper.py +++ b/influxdb/helper.py @@ -51,7 +51,7 @@ def __new__(cls, *args, **kwargs): _meta = getattr(cls, 'Meta') except AttributeError: raise AttributeError( - 'Missing Meta class in {}.'.format( + 'Missing Meta class in {0}.'.format( cls.__name__)) for attr in ['series_name', 'fields', 'tags']: @@ -59,7 +59,7 @@ def __new__(cls, *args, **kwargs): setattr(cls, '_' + attr, getattr(_meta, attr)) except AttributeError: raise AttributeError( - 'Missing {} in {} Meta class.'.format( + 'Missing {0} in {1} Meta class.'.format( attr, cls.__name__)) @@ -68,14 +68,14 @@ def __new__(cls, *args, **kwargs): cls._client = getattr(_meta, 'client', None) if cls._autocommit and not cls._client: raise AttributeError( - 'In {}, autocommit is set to True, but no client is set.' + 'In {0}, autocommit is set to True, but no client is set.' 
.format(cls.__name__)) try: cls._bulk_size = getattr(_meta, 'bulk_size') if cls._bulk_size < 1 and cls._autocommit: warn( - 'Definition of bulk_size in {} forced to 1, ' + 'Definition of bulk_size in {0} forced to 1, ' 'was less than 1.'.format(cls.__name__)) cls._bulk_size = 1 except AttributeError: @@ -83,7 +83,7 @@ def __new__(cls, *args, **kwargs): else: if not cls._autocommit: warn( - 'Definition of bulk_size in {} has no affect because' + 'Definition of bulk_size in {0} has no affect because' ' autocommit is false.'.format(cls.__name__)) cls._datapoints = defaultdict(list) diff --git a/influxdb/influxdb08/client.py b/influxdb/influxdb08/client.py index 5d8f59c5..a3b31639 100644 --- a/influxdb/influxdb08/client.py +++ b/influxdb/influxdb08/client.py @@ -145,14 +145,14 @@ def from_DSN(dsn, **kwargs): modifier, scheme = scheme_info if scheme != 'influxdb': - raise ValueError('Unknown scheme "{}".'.format(scheme)) + raise ValueError('Unknown scheme "{0}".'.format(scheme)) if modifier: if modifier == 'udp': init_args['use_udp'] = True elif modifier == 'https': init_args['ssl'] = True else: - raise ValueError('Unknown modifier "{}".'.format(modifier)) + raise ValueError('Unknown modifier "{0}".'.format(modifier)) if conn_params.hostname: init_args['host'] = conn_params.hostname @@ -838,7 +838,7 @@ def alter_database_user(self, username, password=None, permissions=None): url = "db/{0}/users/{1}".format(self._database, username) if not password and not permissions: - raise ValueError("Nothing to alter for user {}.".format(username)) + raise ValueError("Nothing to alter for user {0}.".format(username)) data = {} diff --git a/influxdb/influxdb08/dataframe_client.py b/influxdb/influxdb08/dataframe_client.py index c1665b3e..87ddba8c 100644 --- a/influxdb/influxdb08/dataframe_client.py +++ b/influxdb/influxdb08/dataframe_client.py @@ -121,7 +121,7 @@ def _to_dataframe(self, json_result, time_precision): def _convert_dataframe_to_json(self, dataframe, name, 
time_precision='s'): if not isinstance(dataframe, pd.DataFrame): - raise TypeError('Must be DataFrame, but type was: {}.' + raise TypeError('Must be DataFrame, but type was: {0}.' .format(type(dataframe))) if not (isinstance(dataframe.index, pd.tseries.period.PeriodIndex) or isinstance(dataframe.index, pd.tseries.index.DatetimeIndex)): diff --git a/influxdb/influxdb08/helper.py b/influxdb/influxdb08/helper.py index 391e9ccd..734ac960 100644 --- a/influxdb/influxdb08/helper.py +++ b/influxdb/influxdb08/helper.py @@ -51,7 +51,7 @@ def __new__(cls, *args, **kwargs): _meta = getattr(cls, 'Meta') except AttributeError: raise AttributeError( - 'Missing Meta class in {}.'.format( + 'Missing Meta class in {0}.'.format( cls.__name__)) for attr in ['series_name', 'fields']: @@ -59,7 +59,7 @@ def __new__(cls, *args, **kwargs): setattr(cls, '_' + attr, getattr(_meta, attr)) except AttributeError: raise AttributeError( - 'Missing {} in {} Meta class.'.format( + 'Missing {0} in {1} Meta class.'.format( attr, cls.__name__)) @@ -68,14 +68,14 @@ def __new__(cls, *args, **kwargs): cls._client = getattr(_meta, 'client', None) if cls._autocommit and not cls._client: raise AttributeError( - 'In {}, autocommit is set to True, but no client is set.' + 'In {0}, autocommit is set to True, but no client is set.' 
.format(cls.__name__)) try: cls._bulk_size = getattr(_meta, 'bulk_size') if cls._bulk_size < 1 and cls._autocommit: warn( - 'Definition of bulk_size in {} forced to 1, ' + 'Definition of bulk_size in {0} forced to 1, ' 'was less than 1.'.format(cls.__name__)) cls._bulk_size = 1 except AttributeError: @@ -83,7 +83,7 @@ def __new__(cls, *args, **kwargs): else: if not cls._autocommit: warn( - 'Definition of bulk_size in {} has no affect because' + 'Definition of bulk_size in {0} has no affect because' ' autocommit is false.'.format(cls.__name__)) cls._datapoints = defaultdict(list) diff --git a/influxdb/line_protocol.py b/influxdb/line_protocol.py index 74430a28..8825c7ac 100644 --- a/influxdb/line_protocol.py +++ b/influxdb/line_protocol.py @@ -52,7 +52,7 @@ def _escape_tag(tag): def _escape_value(value): value = _get_unicode(value) if isinstance(value, text_type) and value != '': - return "\"{}\"".format( + return "\"{0}\"".format( value.replace( "\"", "\\\"" ).replace( From 1b7994e0789a89ad8dac7f8f208b413d3e2a6113 Mon Sep 17 00:00:00 2001 From: Andy Cobaugh Date: Thu, 28 Jan 2016 08:32:45 -0500 Subject: [PATCH 290/536] Add TOX_ENV=py26 to travis-ci config --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index 5c2be658..809c3c90 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,5 +1,6 @@ language: python env: + - TOX_ENV=py26 - TOX_ENV=py27 - TOX_ENV=py32 - TOX_ENV=py33 From f0f0d47d32304fa826822277e3164324b7668cc2 Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 29 Jan 2016 08:37:28 -0500 Subject: [PATCH 291/536] Flake8 fixes --- influxdb/client.py | 12 ++++++------ influxdb/tests/__init__.py | 3 ++- influxdb/tests/chunked_json_test.py | 7 ++++--- influxdb/tests/server_tests/influxdb_instance.py | 14 +++++++++----- tox.ini | 3 ++- 5 files changed, 23 insertions(+), 16 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index e823c785..c217b09a 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -714,8 
+714,8 @@ def grant_privilege(self, privilege, database, username): :type username: str """ text = "GRANT {0} ON {1} TO {2}".format(privilege, - database, - username) + database, + username) self.query(text) def revoke_privilege(self, privilege, database, username): @@ -730,8 +730,8 @@ def revoke_privilege(self, privilege, database, username): :type username: str """ text = "REVOKE {0} ON {1} FROM {2}".format(privilege, - database, - username) + database, + username) self.query(text) def send_packet(self, packet): @@ -862,8 +862,8 @@ def _make_func(self, orig_func): def func(*args, **kwargs): now = time.time() with self._hosts_lock: - if (self.bad_hosts - and self._last_healing + self.healing_delay < now): + if (self.bad_hosts and + self._last_healing + self.healing_delay < now): h = self.bad_hosts.pop(0) self.hosts.append(h) self._last_healing = now diff --git a/influxdb/tests/__init__.py b/influxdb/tests/__init__.py index 31454368..dc1bb22c 100644 --- a/influxdb/tests/__init__.py +++ b/influxdb/tests/__init__.py @@ -1,10 +1,11 @@ # -*- coding: utf-8 -*- + import sys +import os if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest -import os using_pypy = hasattr(sys, "pypy_version_info") skipIfPYpy = unittest.skipIf(using_pypy, "Skipping this test on pypy.") diff --git a/influxdb/tests/chunked_json_test.py b/influxdb/tests/chunked_json_test.py index f9e96ee6..6b0f9e65 100644 --- a/influxdb/tests/chunked_json_test.py +++ b/influxdb/tests/chunked_json_test.py @@ -1,13 +1,14 @@ # -*- coding: utf-8 -*- import sys + +from influxdb import chunked_json + if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest -from influxdb import chunked_json - class TestChunkJson(unittest.TestCase): @@ -23,7 +24,7 @@ def test_load(self): '{"results": [{"series": [{"measurement": "sdfsdfsdf", ' \ '"columns": ["time", "value"], "values": ' \ '[["2009-11-10T23:00:00Z", 0.64]]}]}, {"series": ' \ - '[{"measurement": "cpu_load_short", 
"columns": ["time", "value"], ' \ + '[{"measurement": "cpu_load_short", "columns": ["time", "value"],'\ '"values": [["2009-11-10T23:00:00Z", 0.64]]}]}]}' res = list(chunked_json.loads(example_response)) diff --git a/influxdb/tests/server_tests/influxdb_instance.py b/influxdb/tests/server_tests/influxdb_instance.py index 40c66d2e..acb7cee6 100644 --- a/influxdb/tests/server_tests/influxdb_instance.py +++ b/influxdb/tests/server_tests/influxdb_instance.py @@ -18,11 +18,15 @@ from influxdb.tests.misc import is_port_open, get_free_ports # hack in check_output if it's not defined, like for python 2.6 -if "check_output" not in dir( subprocess ): +if "check_output" not in dir(subprocess): def f(*popenargs, **kwargs): if 'stdout' in kwargs: - raise ValueError('stdout argument not allowed, it will be overridden.') - process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs) + raise ValueError( + 'stdout argument not allowed, it will be overridden.' + ) + process = subprocess.Popen(stdout=subprocess.PIPE, + *popenargs, + **kwargs) output, unused_err = process.communicate() retcode = process.poll() if retcode: @@ -120,8 +124,8 @@ def _start_server(self, conf_template, udp_enabled): # or you run a 286 @ 1Mhz ? try: while time.time() < timeout: - if (is_port_open(self.http_port) - and is_port_open(self.admin_port)): + if (is_port_open(self.http_port) and + is_port_open(self.admin_port)): # it's hard to check if a UDP port is open.. 
if udp_enabled: # so let's just sleep 0.5 sec in this case diff --git a/tox.ini b/tox.ini index 644a7c55..e2cb83c9 100644 --- a/tox.ini +++ b/tox.ini @@ -32,6 +32,7 @@ deps = -r{toxinidir}/requirements.txt commands = sphinx-build -b html docs/source docs/build [flake8] -ignore = N802,F821 +ignore = N802,F821,E402 +# E402: module level import not at top of file # N802: nosetests's setUp function # F821: False positive in intluxdb/dataframe_client.py From 333b60b5d4e3ed04f9eadc40e119f84ae56abefb Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 29 Jan 2016 08:41:37 -0500 Subject: [PATCH 292/536] Disabled py32 tests --- .travis.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 809c3c90..32f73df5 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,7 +2,10 @@ language: python env: - TOX_ENV=py26 - TOX_ENV=py27 - - TOX_ENV=py32 +# - TOX_ENV=py32 +# Disabling py32 tests until the following issue is fixed: +# pip 8.x breaks python 3.2 compatibility +# https://github.com/pypa/pip/issues/3390 - TOX_ENV=py33 - TOX_ENV=py34 - TOX_ENV=pypy From e31b36e1b27051ce315cf0a7b6bd8bb9cbe38d64 Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 29 Jan 2016 08:43:03 -0500 Subject: [PATCH 293/536] Test with influxdb 0.9.6.1 --- .travis.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 32f73df5..e78fce96 100644 --- a/.travis.yml +++ b/.travis.yml @@ -17,10 +17,10 @@ install: - pip install tox - pip install coveralls - mkdir influxdb_install - - wget https://s3.amazonaws.com/influxdb/influxdb_0.9.4.1_amd64.deb + - wget https://s3.amazonaws.com/influxdb/influxdb_0.9.6.1_amd64.deb - dpkg -x influxdb_*_amd64.deb influxdb_install script: - - export INFLUXDB_PYTHON_INFLUXD_PATH=$(pwd)/influxdb_install/opt/influxdb/versions/0.9.4.1/influxd + - export INFLUXDB_PYTHON_INFLUXD_PATH=$(pwd)/influxdb_install/opt/influxdb/versions/0.9.6.1/influxd - travis_wait 30 tox -e $TOX_ENV after_success: - if [ 
"$TOX_ENV" == "coverage" ] ; then coveralls; fi From ee1f2a8c0b226c0e466b73cc0233b3951984da85 Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 29 Jan 2016 08:45:42 -0500 Subject: [PATCH 294/536] Mention python2.6 support --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 1b5c89ce..8c304b4f 100644 --- a/README.rst +++ b/README.rst @@ -57,7 +57,7 @@ On Debian/Ubuntu, you can install it with this command:: Dependencies ============ -The InfluxDB-Python distribution is supported and tested on Python 2.7, 3.2, 3.3, 3.4, PyPy and PyPy3. +The InfluxDB-Python distribution is supported and tested on Python 2.6, 2.7, 3.2, 3.3, 3.4, PyPy and PyPy3. Main dependency is: From c9275a38c468f2b0a97b5af1ce935a4f43a3ecca Mon Sep 17 00:00:00 2001 From: Alexandre Viau Date: Fri, 29 Jan 2016 08:49:18 -0500 Subject: [PATCH 295/536] Removed dead badges --- README.rst | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/README.rst b/README.rst index 8c304b4f..55ca80c3 100644 --- a/README.rst +++ b/README.rst @@ -13,22 +13,6 @@ InfluxDB-Python is a client for interacting with InfluxDB_. :target: https://coveralls.io/r/influxdb/influxdb-python :alt: Coverage -.. image:: https://pypip.in/download/influxdb/badge.svg - :target: https://pypi.python.org/pypi/influxdb/ - :alt: Downloads - -.. image:: https://pypip.in/version/influxdb/badge.svg - :target: https://pypi.python.org/pypi/influxdb/ - :alt: Latest Version - -.. image:: https://pypip.in/py_versions/influxdb/badge.svg - :target: https://pypi.python.org/pypi/influxdb/ - :alt: Supported Python versions - -.. image:: https://pypip.in/license/influxdb/badge.svg - :target: https://pypi.python.org/pypi/influxdb/ - :alt: License - .. 
_readme-about: InfluxDB is an open-source distributed time series database, find more about InfluxDB_ at http://influxdata.com/ From 0cb80fe40a7bc11871b15771fd5bdaa1a4f29795 Mon Sep 17 00:00:00 2001 From: Alexandre Viau Date: Fri, 29 Jan 2016 08:51:45 -0500 Subject: [PATCH 296/536] Mention python3.2 untested --- README.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.rst b/README.rst index 55ca80c3..78b79398 100644 --- a/README.rst +++ b/README.rst @@ -43,6 +43,8 @@ Dependencies The InfluxDB-Python distribution is supported and tested on Python 2.6, 2.7, 3.2, 3.3, 3.4, PyPy and PyPy3. +**Note:** Python 3.2 is currently untested. See ``.travis.yml``. + Main dependency is: - Requests: HTTP library for human beings (http://docs.python-requests.org/) From 6a2ff7fb613b3bc3794cf7fed4f488e4b90196dd Mon Sep 17 00:00:00 2001 From: Alexandre Viau Date: Fri, 29 Jan 2016 08:53:22 -0500 Subject: [PATCH 297/536] Updated InfluxDB link --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 78b79398..94a10ed8 100644 --- a/README.rst +++ b/README.rst @@ -162,6 +162,6 @@ Source code The source code is currently available on Github: https://github.com/influxdata/influxdb-python -.. _InfluxDB: http://influxdata.com/ +.. _InfluxDB: https://influxdata.com/time-series-platform/influxdb/ .. _Sphinx: http://sphinx.pocoo.org/ .. 
_Tox: https://tox.readthedocs.org From 0cc8d66260a7edc56a80732e5564375a8db22dab Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 29 Jan 2016 10:08:40 -0500 Subject: [PATCH 298/536] Released 2.12.0 --- influxdb/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/__init__.py b/influxdb/__init__.py index 16cf32e2..0f1c26d4 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -13,4 +13,4 @@ ] -__version__ = '2.11.0' +__version__ = '2.12.0' From ffa91103498c78536e0fa742f90191ed8b829801 Mon Sep 17 00:00:00 2001 From: Michal Kren Date: Sat, 30 Jan 2016 18:13:15 +0100 Subject: [PATCH 299/536] create user query - added quotes to username --- influxdb/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/client.py b/influxdb/client.py index efb9b761..dc1419b4 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -650,7 +650,7 @@ def create_user(self, username, password, admin=False): privileges or not :type admin: boolean """ - text = "CREATE USER {0} WITH PASSWORD '{1}'".format(username, password) + text = "CREATE USER \"{0}\" WITH PASSWORD '{1}'".format(username, password) if admin: text += ' WITH ALL PRIVILEGES' self.query(text) From fe30159b9e62044120ac3c269c2e153d88ba830b Mon Sep 17 00:00:00 2001 From: Michal Kren Date: Sat, 30 Jan 2016 18:31:36 +0100 Subject: [PATCH 300/536] line was too long > 79 chars - fixed --- influxdb/client.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/influxdb/client.py b/influxdb/client.py index dc1419b4..8146d68d 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -650,7 +650,8 @@ def create_user(self, username, password, admin=False): privileges or not :type admin: boolean """ - text = "CREATE USER \"{0}\" WITH PASSWORD '{1}'".format(username, password) + text = "CREATE USER \"{0}\" WITH PASSWORD '{1}'".format(username, + password) if admin: text += ' WITH ALL PRIVILEGES' self.query(text) From 
1dcda47dd2d8b6f9a5b057a839242ded2b911881 Mon Sep 17 00:00:00 2001 From: Sean Beckett Date: Wed, 3 Feb 2016 10:55:33 -0800 Subject: [PATCH 301/536] Update README.rst --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 94a10ed8..081c747c 100644 --- a/README.rst +++ b/README.rst @@ -1,6 +1,6 @@ -InfluxDB-Python is a client for interacting with InfluxDB_. +InfluxDB-Python is a client for interacting with InfluxDB_. Maintained by @aviau (https://github.com/aviau). .. image:: https://travis-ci.org/influxdata/influxdb-python.svg?branch=master :target: https://travis-ci.org/influxdata/influxdb-python From 743e4c961521b58f085e2167ac554c8aee7445bb Mon Sep 17 00:00:00 2001 From: Alexander Mancevice Date: Tue, 2 Feb 2016 16:08:52 -0500 Subject: [PATCH 302/536] Allow multiple statements in single query Allows users to pass in multiple ;-separated queries into the same call. Returns a list of ResultSet instances, as if the query() method had been mapped across the different queries. 
```python client.query("SELECT * FROM mymeas WHERE time>now()-1h; " \ "SELECT * FROM othermeas WHERE time>now-1h") ``` --- influxdb/_dataframe_client.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index deae1beb..73a01745 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -101,6 +101,8 @@ def get_list_series(self, database=None): def _to_dataframe(self, rs): result = {} + if isinstance(rs, list): + return map(self._to_dataframe, rs) for key, data in rs.items(): name, tags = key if tags is None: From e0384b9b15a66ead55b426f2512b6b1f26b4e30d Mon Sep 17 00:00:00 2001 From: Alexander Mancevice Date: Thu, 4 Feb 2016 15:12:10 -0500 Subject: [PATCH 303/536] Added test for multiple statements per query --- influxdb/tests/dataframe_client_test.py | 50 +++++++++++++++++++++++++ 1 file changed, 50 insertions(+) diff --git a/influxdb/tests/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py index 07998803..f76ca498 100644 --- a/influxdb/tests/dataframe_client_test.py +++ b/influxdb/tests/dataframe_client_test.py @@ -244,6 +244,56 @@ def test_query_into_dataframe(self): for k in expected: assert_frame_equal(expected[k], result[k]) + def test_multiquery_into_dataframe(self): + data = { + "results": [ + { + "series": [ + { + "name": "cpu_load_short", + "columns": ["time","value"], + "values": [ + ["2015-01-29T21:55:43.702900257Z", 0.55], + ["2015-01-29T21:55:43.702900257Z", 23422], + ["2015-06-11T20:46:02Z", 0.64] + ] + } + ] + }, { + "series": [ + { + "name": "cpu_load_short", + "columns": ["time","count"], + "values": [ + ["1970-01-01T00:00:00Z", 3] + ] + } + ] + } + ] + } + + pd1 = pd.DataFrame( + [[0.55], [23422.0], [0.64]], columns=['value'], + index=pd.to_datetime([ + "2015-01-29 21:55:43.702900257+0000", + "2015-01-29 21:55:43.702900257+0000", + "2015-06-11 20:46:02+0000"])).tz_localize('UTC') + pd2 = pd.DataFrame( + [[3]], columns=['count'], + 
index=pd.to_datetime(["1970-01-01 00:00:00+00:00"])).tz_localize('UTC') + expected = [{'cpu_load_short':pd1}, {'cpu_load_short':pd2}] + + + cli = DataFrameClient('host', 8086, 'username', 'password', 'db') + with _mocked_session(cli, 'GET', 200, data): + result = cli.query( + "SELECT value FROM cpu_load_short WHERE region='us-west';"\ + "SELECT count(value) FROM cpu_load_short WHERE region='us-west'") + for r,e in zip(result, expected): + for k in e: + assert_frame_equal(e[k], r[k]) + def test_query_with_empty_result(self): cli = DataFrameClient('host', 8086, 'username', 'password', 'db') with _mocked_session(cli, 'GET', 200, {"results": [{}]}): From 66169efcff04b24f94f9c39623ad1fb448e076e2 Mon Sep 17 00:00:00 2001 From: Alexander Mancevice Date: Thu, 4 Feb 2016 15:50:12 -0500 Subject: [PATCH 304/536] flake8 fixes --- influxdb/tests/dataframe_client_test.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/influxdb/tests/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py index f76ca498..721d1aeb 100644 --- a/influxdb/tests/dataframe_client_test.py +++ b/influxdb/tests/dataframe_client_test.py @@ -251,7 +251,7 @@ def test_multiquery_into_dataframe(self): "series": [ { "name": "cpu_load_short", - "columns": ["time","value"], + "columns": ["time", "value"], "values": [ ["2015-01-29T21:55:43.702900257Z", 0.55], ["2015-01-29T21:55:43.702900257Z", 23422], @@ -263,7 +263,7 @@ def test_multiquery_into_dataframe(self): "series": [ { "name": "cpu_load_short", - "columns": ["time","count"], + "columns": ["time", "count"], "values": [ ["1970-01-01T00:00:00Z", 3] ] @@ -281,16 +281,16 @@ def test_multiquery_into_dataframe(self): "2015-06-11 20:46:02+0000"])).tz_localize('UTC') pd2 = pd.DataFrame( [[3]], columns=['count'], - index=pd.to_datetime(["1970-01-01 00:00:00+00:00"])).tz_localize('UTC') - expected = [{'cpu_load_short':pd1}, {'cpu_load_short':pd2}] - + index=pd.to_datetime(["1970-01-01 00:00:00+00:00"]))\ + 
.tz_localize('UTC') + expected = [{'cpu_load_short': pd1}, {'cpu_load_short': pd2}] cli = DataFrameClient('host', 8086, 'username', 'password', 'db') + iql = "SELECT value FROM cpu_load_short WHERE region='us-west';"\ + "SELECT count(value) FROM cpu_load_short WHERE region='us-west'" with _mocked_session(cli, 'GET', 200, data): - result = cli.query( - "SELECT value FROM cpu_load_short WHERE region='us-west';"\ - "SELECT count(value) FROM cpu_load_short WHERE region='us-west'") - for r,e in zip(result, expected): + result = cli.query(iql) + for r, e in zip(result, expected): for k in e: assert_frame_equal(e[k], r[k]) From 5dfecd1c8628afde87700a43771e1e2efc649398 Mon Sep 17 00:00:00 2001 From: Roman Vynar Date: Wed, 10 Feb 2016 14:29:38 +0200 Subject: [PATCH 305/536] Added drop_retention_policy() --- influxdb/client.py | 14 +++++++++++++ influxdb/tests/client_test.py | 21 +++++++++++++++++++ .../server_tests/client_test_with_server.py | 21 +++++++++++++++++++ 3 files changed, 56 insertions(+) diff --git a/influxdb/client.py b/influxdb/client.py index 8146d68d..58a29c7b 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -548,6 +548,20 @@ def alter_retention_policy(self, name, database=None, self.query(query_string) + def drop_retention_policy(self, name, database=None): + """Drop an existing retention policy for a database. + + :param name: the name of the retention policy to drop + :type name: str + :param database: the database for which the retention policy is + dropped. Defaults to current client's database + :type database: str + """ + query_string = ( + "DROP RETENTION POLICY {0} ON {1}" + ).format(name, database or self._database) + self.query(query_string) + def get_list_retention_policies(self, database=None): """Get the list of retention policies for a database. 
diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index c7f980f1..2d2b7fae 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -632,6 +632,27 @@ def test_alter_retention_policy_invalid(self): with _mocked_session(cli, 'get', 400): self.cli.alter_retention_policy('somename', 'db') + def test_drop_retention_policy(self): + example_response = '{"results":[{}]}' + + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.GET, + "http://localhost:8086/query", + text=example_response + ) + self.cli.drop_retention_policy('somename', 'db') + self.assertEqual( + m.last_request.qs['q'][0], + 'drop retention policy somename on db' + ) + + @raises(Exception) + def test_drop_retention_policy_fails(self): + cli = InfluxDBClient('host', 8086, 'username', 'password') + with _mocked_session(cli, 'delete', 401): + cli.drop_retention_policy('default', 'db') + def test_get_list_retention_policies(self): example_response = \ '{"results": [{"series": [{"values": [["fsfdsdf", "24h0m0s", 2]],'\ diff --git a/influxdb/tests/server_tests/client_test_with_server.py b/influxdb/tests/server_tests/client_test_with_server.py index def193d2..2c6b408e 100644 --- a/influxdb/tests/server_tests/client_test_with_server.py +++ b/influxdb/tests/server_tests/client_test_with_server.py @@ -600,6 +600,27 @@ def test_alter_retention_policy_invalid(self): rsp ) + def test_drop_retention_policy(self): + self.cli.create_retention_policy('somename', '1d', 1) + + # Test drop retention + self.cli.drop_retention_policy('somename', 'db') + rsp = self.cli.get_list_retention_policies() + self.assertEqual( + [{'duration': '0', 'default': True, + 'replicaN': 1, 'name': 'default'}], + rsp + ) + + def test_drop_retention_policy_default(self): + # Test drop default retention + with self.assertRaises(InfluxDBClientError) as ctx: + self.cli.drop_retention_policy('default', 'db') + + self.assertEqual(400, ctx.exception.code) + self.assertIn('{"error":"error 
parsing query: found DEFAULT, expected POLICY', + ctx.exception.content) + def test_issue_143(self): pt = partial(point, 'a_serie_name', timestamp='2015-03-30T16:16:37Z') pts = [ From 64d03be47af9d82fe51a688f624cafd1a6a5e736 Mon Sep 17 00:00:00 2001 From: Mikhail Sokolov Date: Wed, 17 Feb 2016 09:43:43 +0700 Subject: [PATCH 306/536] tests: add unicode test for line_protocol.make_line. I'm discovered problems, while try to write data, with tags in unicode. --- influxdb/tests/test_line_protocol.py | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/influxdb/tests/test_line_protocol.py b/influxdb/tests/test_line_protocol.py index 34654f50..1100dc49 100644 --- a/influxdb/tests/test_line_protocol.py +++ b/influxdb/tests/test_line_protocol.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +from __future__ import unicode_literals import sys if sys.version_info < (2, 7): @@ -55,3 +56,23 @@ def test_string_val_newline(self): line_protocol.make_lines(data), 'm1 multi_line="line1\\nline1\\nline3"\n' ) + + def test_make_lines_unicode(self): + data = { + "tags": { + "unicode_tag": "\'Привет!\'" # Hello! in Russian + }, + "points": [ + { + "measurement": "test", + "fields": { + "unicode_val": "Привет!", # Hello! in Russian + } + } + ] + } + + self.assertEqual( + line_protocol.make_lines(data), + 'test,unicode_tag=\'Привет!\' unicode_val="Привет!"\n' + ) From f132aae9ce00cdd8120efdcc28f7eddc101ac5fc Mon Sep 17 00:00:00 2001 From: Mikhail Sokolov Date: Wed, 17 Feb 2016 10:34:01 +0700 Subject: [PATCH 307/536] line_protocol: handle py2/py3 difference in _get_unicode(). Fix test_make_lines_unicode() test. _get_unicode() with flag fails under py27 if tag contains unicode. 
--- influxdb/line_protocol.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/influxdb/line_protocol.py b/influxdb/line_protocol.py index 8825c7ac..da0e83ec 100644 --- a/influxdb/line_protocol.py +++ b/influxdb/line_protocol.py @@ -7,7 +7,7 @@ from numbers import Integral from dateutil.parser import parse -from six import binary_type, text_type, integer_types +from six import binary_type, text_type, integer_types, PY2 def _convert_timestamp(timestamp, precision=None): @@ -74,7 +74,10 @@ def _get_unicode(data, force=False): elif data is None: return '' elif force: - return str(data) + if PY2: + return unicode(data) + else: + return str(data) else: return data From 4428208be690ab5399c4e1150c8f2b4d11d65f7d Mon Sep 17 00:00:00 2001 From: dylan d Date: Sun, 28 Feb 2016 13:54:02 -0800 Subject: [PATCH 308/536] Force InfluxDBClient to force an integer as the port. - Includes unit tests - Does not make the change to influxdb08 - Also a small change that was needed to tox.ini that was needed for tox to pass, since setup.py reads README.rst --- influxdb/client.py | 2 +- influxdb/tests/client_test.py | 4 ++++ influxdb/tests/server_tests/client_test_with_server.py | 4 ++++ tox.ini | 1 + 4 files changed, 10 insertions(+), 1 deletion(-) diff --git a/influxdb/client.py b/influxdb/client.py index 8146d68d..d10fce78 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -76,7 +76,7 @@ def __init__(self, ): """Construct a new InfluxDBClient object.""" self.__host = host - self.__port = port + self.__port = int(port) self._username = username self._password = password self._database = database diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index c7f980f1..45c0ba0f 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -794,6 +794,10 @@ def test_revoke_privilege_invalid(self): with _mocked_session(cli, 'get', 400): self.cli.revoke_privilege('', 'testdb', 'test') + def test_invalid_port_fails(self): + 
with self.assertRaises(ValueError): + InfluxDBClient('host', '80/redir', 'username', 'password') + class FakeClient(InfluxDBClient): diff --git a/influxdb/tests/server_tests/client_test_with_server.py b/influxdb/tests/server_tests/client_test_with_server.py index def193d2..cfac4de9 100644 --- a/influxdb/tests/server_tests/client_test_with_server.py +++ b/influxdb/tests/server_tests/client_test_with_server.py @@ -295,6 +295,10 @@ def test_revoke_privilege_invalid(self): self.assertIn('{"error":"error parsing query: ', ctx.exception.content) + def test_invalid_port_fails(self): + with self.assertRaises(ValueError): + InfluxDBClient('host', '80/redir', 'username', 'password') + @skipServerTests class CommonTests(ManyTestCasesWithServerMixin, diff --git a/tox.ini b/tox.ini index e2cb83c9..c17c21d1 100644 --- a/tox.ini +++ b/tox.ini @@ -20,6 +20,7 @@ commands = flake8 influxdb [testenv:coverage] deps = -r{toxinidir}/requirements.txt -r{toxinidir}/test-requirements.txt + -r{toxinidir}/README.rst pandas coverage commands = nosetests -v --with-coverage --cover-html --cover-package=influxdb From aeb49669848668630b34ede008177e944dc9f361 Mon Sep 17 00:00:00 2001 From: dylan d Date: Sun, 28 Feb 2016 20:39:11 -0800 Subject: [PATCH 309/536] Tox mistake on last push. 
--- tox.ini | 1 - 1 file changed, 1 deletion(-) diff --git a/tox.ini b/tox.ini index c17c21d1..e2cb83c9 100644 --- a/tox.ini +++ b/tox.ini @@ -20,7 +20,6 @@ commands = flake8 influxdb [testenv:coverage] deps = -r{toxinidir}/requirements.txt -r{toxinidir}/test-requirements.txt - -r{toxinidir}/README.rst pandas coverage commands = nosetests -v --with-coverage --cover-html --cover-package=influxdb From 9282fb4992dce78b41e303869fb661d5c20ef820 Mon Sep 17 00:00:00 2001 From: mbagic Date: Mon, 29 Feb 2016 11:08:30 +0100 Subject: [PATCH 310/536] Added support for rpm making with 'python setup.py bdist_rpm' --- MANIFEST.in | 1 + setup.cfg | 6 ++++++ 2 files changed, 7 insertions(+) diff --git a/MANIFEST.in b/MANIFEST.in index 18b23910..58fdabcf 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -2,3 +2,4 @@ include requirements.txt include test-requirements.txt include dev-requirements.txt include LICENSE +include README.rst \ No newline at end of file diff --git a/setup.cfg b/setup.cfg index 5e409001..dc2cbf3f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,2 +1,8 @@ +[install] +install_lib=/usr/lib64/python2.6/site-packages + +[bdist_rpm] +requires=python-dateutil + [wheel] universal = 1 From 852b7720d70a6fbe98f726a8df17945c28c6bba1 Mon Sep 17 00:00:00 2001 From: mbagic Date: Mon, 29 Feb 2016 11:10:37 +0100 Subject: [PATCH 311/536] newline --- MANIFEST.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/MANIFEST.in b/MANIFEST.in index 58fdabcf..76466bf2 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -2,4 +2,4 @@ include requirements.txt include test-requirements.txt include dev-requirements.txt include LICENSE -include README.rst \ No newline at end of file +include README.rst From 79166df300ca38f309f0f650c564a9784f45524b Mon Sep 17 00:00:00 2001 From: mbagic Date: Mon, 29 Feb 2016 11:15:05 +0100 Subject: [PATCH 312/536] removed unneeded python directory tag --- setup.cfg | 3 --- 1 file changed, 3 deletions(-) diff --git a/setup.cfg b/setup.cfg index 
dc2cbf3f..217d437b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,3 @@ -[install] -install_lib=/usr/lib64/python2.6/site-packages - [bdist_rpm] requires=python-dateutil From c25ec08926e398a6e013f2d0bae34e3c40e92c7d Mon Sep 17 00:00:00 2001 From: George Goldberg Date: Tue, 8 Mar 2016 11:58:10 +0000 Subject: [PATCH 313/536] Allow setting the time of a point manually. Point can be specified as either a number of nanoseconds, a python datetime object (with or without timezone) or a string in ISO datetime format. If a time is not specified, the Helper sets the time at the time of assembling the point fields so that multiple unique points with the same tags can be committed simultaneously without them failing to add due to all being assigned the same automatic time by the InfluxDB server. This fix is based upon the discussion in #130 but also includes the outstanding items for it to be merged. I'm happy to receive suggestions for further ways to add test coverage to this change. This also fixes #264 and fixes #259. --- influxdb/helper.py | 28 ++++++++++++++++++- influxdb/tests/helper_test.py | 52 +++++++++++++++++++++++++++-------- test-requirements.txt | 3 +- 3 files changed, 69 insertions(+), 14 deletions(-) diff --git a/influxdb/helper.py b/influxdb/helper.py index 803a9bdd..900df8d7 100644 --- a/influxdb/helper.py +++ b/influxdb/helper.py @@ -3,6 +3,7 @@ Helper class for InfluxDB """ from collections import namedtuple, defaultdict +from datetime import datetime from warnings import warn import six @@ -16,6 +17,16 @@ class SeriesHelper(object): Each subclass can write to its own database. The time series names can also be based on one or more defined fields. + A field "time" can be used to write data points at a specific time, + rather than the default current time. The time field can take any of + the following forms: + * An integer unix timestamp in nanoseconds, assumed to be in UTC. + * A string in the ISO time format, including a timezone. 
+ * A naive python datetime, which will be treated as UTC. + * A localized python datetime, which will use the chosen timezone. + If no time field is provided, the current UTC system time in microseconds + at the time of assembling the point data will be used. + Annotated example:: class MySeriesHelper(SeriesHelper): @@ -142,8 +153,23 @@ def _json_body_(cls): "tags": {}, } + ts = getattr(point, 'time', None) + if not ts: + # No time provided. Use current UTC time. + ts = datetime.utcnow().isoformat() + "+00:00" + elif isinstance(ts, datetime): + if ts.tzinfo is None or ts.tzinfo.utcoffset(ts) is None: + # Assuming naive datetime provided. Format with UTC tz. + ts = ts.isoformat() + "+00:00" + else: + # Assuming localized datetime provided. + ts = ts.isoformat() + # Neither of the above match. Assuming correct string or int. + json_point['time'] = ts + for field in cls._fields: - json_point['fields'][field] = getattr(point, field) + if field != 'time': + json_point['fields'][field] = getattr(point, field) for tag in cls._tags: json_point['tags'][tag] = getattr(point, tag) diff --git a/influxdb/tests/helper_test.py b/influxdb/tests/helper_test.py index 9721a9c9..ac2872f1 100644 --- a/influxdb/tests/helper_test.py +++ b/influxdb/tests/helper_test.py @@ -1,5 +1,7 @@ # -*- coding: utf-8 -*- +import datetime +import pytz import sys if sys.version_info < (2, 7): import unittest2 as unittest @@ -38,6 +40,18 @@ class Meta: TestSeriesHelper.MySeriesHelper = MySeriesHelper + class MySeriesTimeHelper(SeriesHelper): + + class Meta: + client = TestSeriesHelper.client + series_name = 'events.stats.{server_name}' + fields = ['time', 'some_stat'] + tags = ['server_name', 'other_tag'] + bulk_size = 5 + autocommit = True + + TestSeriesHelper.MySeriesTimeHelper = MySeriesTimeHelper + def test_auto_commit(self): """ Tests that write_points is called after the right number of events @@ -66,14 +80,20 @@ def testSingleSeriesName(self): """ Tests JSON conversion when there is only one 
series name. """ - TestSeriesHelper.MySeriesHelper( - server_name='us.east-1', other_tag='ello', some_stat=159) - TestSeriesHelper.MySeriesHelper( - server_name='us.east-1', other_tag='ello', some_stat=158) - TestSeriesHelper.MySeriesHelper( - server_name='us.east-1', other_tag='ello', some_stat=157) - TestSeriesHelper.MySeriesHelper( - server_name='us.east-1', other_tag='ello', some_stat=156) + dt = datetime.datetime(2016, 1, 2, 3, 4, 5, 678912) + ts1 = dt + ts2 = "2016-10-11T01:02:03.123456789-04:00" + ts3 = 1234567890123456789 + ts4 = pytz.timezone("Europe/Berlin").localize(dt) + + TestSeriesHelper.MySeriesTimeHelper( + time=ts1, server_name='us.east-1', other_tag='ello', some_stat=159) + TestSeriesHelper.MySeriesTimeHelper( + time=ts2, server_name='us.east-1', other_tag='ello', some_stat=158) + TestSeriesHelper.MySeriesTimeHelper( + time=ts3, server_name='us.east-1', other_tag='ello', some_stat=157) + TestSeriesHelper.MySeriesTimeHelper( + time=ts4, server_name='us.east-1', other_tag='ello', some_stat=156) expectation = [ { "measurement": "events.stats.us.east-1", @@ -84,6 +104,7 @@ def testSingleSeriesName(self): "fields": { "some_stat": 159 }, + "time": "2016-01-02T03:04:05.678912+00:00", }, { "measurement": "events.stats.us.east-1", @@ -94,6 +115,7 @@ def testSingleSeriesName(self): "fields": { "some_stat": 158 }, + "time": "2016-10-11T01:02:03.123456789-04:00", }, { "measurement": "events.stats.us.east-1", @@ -104,6 +126,7 @@ def testSingleSeriesName(self): "fields": { "some_stat": 157 }, + "time": 1234567890123456789, }, { "measurement": "events.stats.us.east-1", @@ -114,23 +137,24 @@ def testSingleSeriesName(self): "fields": { "some_stat": 156 }, + "time": "2016-01-02T03:04:05.678912+01:00", } ] - rcvd = TestSeriesHelper.MySeriesHelper._json_body_() + rcvd = TestSeriesHelper.MySeriesTimeHelper._json_body_() self.assertTrue(all([el in expectation for el in rcvd]) and all([el in rcvd for el in expectation]), 'Invalid JSON body of time series returned from ' 
'_json_body_ for one series name: {0}.'.format(rcvd)) - TestSeriesHelper.MySeriesHelper._reset_() + TestSeriesHelper.MySeriesTimeHelper._reset_() self.assertEqual( - TestSeriesHelper.MySeriesHelper._json_body_(), + TestSeriesHelper.MySeriesTimeHelper._json_body_(), [], 'Resetting helper did not empty datapoints.') def testSeveralSeriesNames(self): ''' - Tests JSON conversion when there is only one series name. + Tests JSON conversion when there are multiple series names. ''' TestSeriesHelper.MySeriesHelper( server_name='us.east-1', some_stat=159, other_tag='ello') @@ -184,6 +208,10 @@ def testSeveralSeriesNames(self): ] rcvd = TestSeriesHelper.MySeriesHelper._json_body_() + for r in rcvd: + self.assertTrue(r.get('time'), + "No time field in received JSON body.") + del(r["time"]) self.assertTrue(all([el in expectation for el in rcvd]) and all([el in rcvd for el in expectation]), 'Invalid JSON body of time series returned from ' diff --git a/test-requirements.txt b/test-requirements.txt index cbc6add3..9e18b7d2 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,4 +1,5 @@ nose nose-cov mock -requests-mock \ No newline at end of file +requests-mock +pytz From 414b8bb771e5aa7a0c111d90b2a6124b3691a004 Mon Sep 17 00:00:00 2001 From: Francesco Uliana Date: Tue, 29 Mar 2016 15:18:35 +0200 Subject: [PATCH 314/536] type use_udp: bool --- influxdb/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/client.py b/influxdb/client.py index d10fce78..2f08d150 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -54,7 +54,7 @@ class InfluxDBClient(object): establish a connection, defaults to None :type timeout: int :param use_udp: use UDP to connect to InfluxDB, defaults to False - :type use_udp: int + :type use_udp: bool :param udp_port: UDP port to connect to InfluxDB, defaults to 4444 :type udp_port: int :param proxies: HTTP(S) proxy to use for Requests, defaults to {} From edaa4d458cc89170f1a4c0ba202eeb1bcab5962c Mon Sep 
17 00:00:00 2001 From: aviau Date: Fri, 8 Apr 2016 15:36:35 -0400 Subject: [PATCH 315/536] which influxdb -> influxd --- influxdb/tests/server_tests/influxdb_instance.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/tests/server_tests/influxdb_instance.py b/influxdb/tests/server_tests/influxdb_instance.py index acb7cee6..1b662589 100644 --- a/influxdb/tests/server_tests/influxdb_instance.py +++ b/influxdb/tests/server_tests/influxdb_instance.py @@ -159,7 +159,7 @@ def find_influxd_path(self): if not influxdb_bin_path: try: influxdb_bin_path = subprocess.check_output( - ['which', 'influxdb'] + ['which', 'influxd'] ).strip() except subprocess.CalledProcessError: # fallback on : From e89831779ce8e2b67f056ae1f74c9e5106c7323c Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 8 Apr 2016 23:02:12 -0400 Subject: [PATCH 316/536] flake8 fix --- influxdb/tests/server_tests/client_test_with_server.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/influxdb/tests/server_tests/client_test_with_server.py b/influxdb/tests/server_tests/client_test_with_server.py index e39f3f23..78bc1fc2 100644 --- a/influxdb/tests/server_tests/client_test_with_server.py +++ b/influxdb/tests/server_tests/client_test_with_server.py @@ -622,8 +622,9 @@ def test_drop_retention_policy_default(self): self.cli.drop_retention_policy('default', 'db') self.assertEqual(400, ctx.exception.code) - self.assertIn('{"error":"error parsing query: found DEFAULT, expected POLICY', - ctx.exception.content) + self.assertIn( + '{"error":"error parsing query: found DEFAULT, expected POLICY', + ctx.exception.content) def test_issue_143(self): pt = partial(point, 'a_serie_name', timestamp='2015-03-30T16:16:37Z') From 3b7039e79b422e02d602e74cb15b5a5f52c0977e Mon Sep 17 00:00:00 2001 From: Matt Snider Date: Thu, 21 Apr 2016 05:51:44 +0200 Subject: [PATCH 317/536] Add time parameter to SeriesHelper (#306) * Revert Allow setting the time of a point manually This reverts commit 
c25ec08926e398a6e013f2d0bae34e3c40e92c7d, which this commit is part of PR #304. * Allow time to be specified in SeriesHelper.__init__() * Extract SeriesHelper default timestamp into method for testability * Use datetime.utcnow() as default timestamp in SeriesHelper This is preferable to time.time() because _convert_timestamp() from influxdb.line_protocol will do precision handling and conversion if a datetime object is given. * Get existing tests working by mocking SeriesHelper._current_timestamp() * Add additional tests for SeriesHelper time field * Move _reset_() calls in TestSeriesHelper to tearDown() * Use mock.patch() instead of unittest.mock.patch() for py27 * Update SeriesHelper docstring --- influxdb/helper.py | 44 ++++------- influxdb/tests/helper_test.py | 144 +++++++++++++++++++++------------- test-requirements.txt | 3 +- 3 files changed, 107 insertions(+), 84 deletions(-) diff --git a/influxdb/helper.py b/influxdb/helper.py index 900df8d7..7f64de46 100644 --- a/influxdb/helper.py +++ b/influxdb/helper.py @@ -16,16 +16,9 @@ class SeriesHelper(object): All data points are immutable, insuring they do not get overwritten. Each subclass can write to its own database. The time series names can also be based on one or more defined fields. - - A field "time" can be used to write data points at a specific time, - rather than the default current time. The time field can take any of - the following forms: - * An integer unix timestamp in nanoseconds, assumed to be in UTC. - * A string in the ISO time format, including a timezone. - * A naive python datetime, which will be treated as UTC. - * A localized python datetime, which will use the chosen timezone. - If no time field is provided, the current UTC system time in microseconds - at the time of assembling the point data will be used. + The field "time" can be specified when creating a point, and may be any of + the time types supported by the client (i.e. str, datetime, int). 
+ If the time is not specified, the current system time (utc) will be used. Annotated example:: @@ -98,8 +91,11 @@ def __new__(cls, *args, **kwargs): ' autocommit is false.'.format(cls.__name__)) cls._datapoints = defaultdict(list) - cls._type = namedtuple(cls.__name__, cls._fields + cls._tags) + if 'time' in cls._fields: + cls._fields.remove('time') + cls._type = namedtuple(cls.__name__, + cls._fields + cls._tags + ['time']) return super(SeriesHelper, cls).__new__(cls) def __init__(self, **kw): @@ -110,6 +106,7 @@ def __init__(self, **kw): :warning: Data points are *immutable* (`namedtuples`). """ cls = self.__class__ + timestamp = kw.pop('time', self._current_timestamp()) if sorted(cls._fields + cls._tags) != sorted(kw.keys()): raise NameError( @@ -117,7 +114,9 @@ def __init__(self, **kw): sorted(cls._fields + cls._tags), kw.keys())) - cls._datapoints[cls._series_name.format(**kw)].append(cls._type(**kw)) + cls._datapoints[cls._series_name.format(**kw)].append( + cls._type(time=timestamp, **kw) + ) if cls._autocommit and \ sum(len(series) for series in cls._datapoints.values()) \ @@ -151,25 +150,11 @@ def _json_body_(cls): "measurement": series_name, "fields": {}, "tags": {}, + "time": getattr(point, "time") } - ts = getattr(point, 'time', None) - if not ts: - # No time provided. Use current UTC time. - ts = datetime.utcnow().isoformat() + "+00:00" - elif isinstance(ts, datetime): - if ts.tzinfo is None or ts.tzinfo.utcoffset(ts) is None: - # Assuming naive datetime provided. Format with UTC tz. - ts = ts.isoformat() + "+00:00" - else: - # Assuming localized datetime provided. - ts = ts.isoformat() - # Neither of the above match. Assuming correct string or int. 
- json_point['time'] = ts - for field in cls._fields: - if field != 'time': - json_point['fields'][field] = getattr(point, field) + json_point['fields'][field] = getattr(point, field) for tag in cls._tags: json_point['tags'][tag] = getattr(point, tag) @@ -183,3 +168,6 @@ def _reset_(cls): Reset data storage. """ cls._datapoints = defaultdict(list) + + def _current_timestamp(self): + return datetime.utcnow() diff --git a/influxdb/tests/helper_test.py b/influxdb/tests/helper_test.py index ac2872f1..405b5ed8 100644 --- a/influxdb/tests/helper_test.py +++ b/influxdb/tests/helper_test.py @@ -1,7 +1,5 @@ # -*- coding: utf-8 -*- -import datetime -import pytz import sys if sys.version_info < (2, 7): import unittest2 as unittest @@ -10,6 +8,7 @@ import warnings import mock +from datetime import datetime, timedelta from influxdb import SeriesHelper, InfluxDBClient from requests.exceptions import ConnectionError @@ -40,17 +39,13 @@ class Meta: TestSeriesHelper.MySeriesHelper = MySeriesHelper - class MySeriesTimeHelper(SeriesHelper): - - class Meta: - client = TestSeriesHelper.client - series_name = 'events.stats.{server_name}' - fields = ['time', 'some_stat'] - tags = ['server_name', 'other_tag'] - bulk_size = 5 - autocommit = True - - TestSeriesHelper.MySeriesTimeHelper = MySeriesTimeHelper + def tearDown(self): + super(TestSeriesHelper, self).tearDown() + TestSeriesHelper.MySeriesHelper._reset_() + self.assertEqual( + TestSeriesHelper.MySeriesHelper._json_body_(), + [], + 'Resetting helper did not empty datapoints.') def test_auto_commit(self): """ @@ -76,24 +71,20 @@ class Meta: AutoCommitTest(server_name='us.east-1', some_stat=3443, other_tag='gg') self.assertTrue(fake_write_points.called) - def testSingleSeriesName(self): + @mock.patch('influxdb.helper.SeriesHelper._current_timestamp') + def testSingleSeriesName(self, current_timestamp): """ Tests JSON conversion when there is only one series name. 
""" - dt = datetime.datetime(2016, 1, 2, 3, 4, 5, 678912) - ts1 = dt - ts2 = "2016-10-11T01:02:03.123456789-04:00" - ts3 = 1234567890123456789 - ts4 = pytz.timezone("Europe/Berlin").localize(dt) - - TestSeriesHelper.MySeriesTimeHelper( - time=ts1, server_name='us.east-1', other_tag='ello', some_stat=159) - TestSeriesHelper.MySeriesTimeHelper( - time=ts2, server_name='us.east-1', other_tag='ello', some_stat=158) - TestSeriesHelper.MySeriesTimeHelper( - time=ts3, server_name='us.east-1', other_tag='ello', some_stat=157) - TestSeriesHelper.MySeriesTimeHelper( - time=ts4, server_name='us.east-1', other_tag='ello', some_stat=156) + current_timestamp.return_value = current_date = datetime.today() + TestSeriesHelper.MySeriesHelper( + server_name='us.east-1', other_tag='ello', some_stat=159) + TestSeriesHelper.MySeriesHelper( + server_name='us.east-1', other_tag='ello', some_stat=158) + TestSeriesHelper.MySeriesHelper( + server_name='us.east-1', other_tag='ello', some_stat=157) + TestSeriesHelper.MySeriesHelper( + server_name='us.east-1', other_tag='ello', some_stat=156) expectation = [ { "measurement": "events.stats.us.east-1", @@ -104,7 +95,7 @@ def testSingleSeriesName(self): "fields": { "some_stat": 159 }, - "time": "2016-01-02T03:04:05.678912+00:00", + "time": current_date, }, { "measurement": "events.stats.us.east-1", @@ -115,7 +106,7 @@ def testSingleSeriesName(self): "fields": { "some_stat": 158 }, - "time": "2016-10-11T01:02:03.123456789-04:00", + "time": current_date, }, { "measurement": "events.stats.us.east-1", @@ -126,7 +117,7 @@ def testSingleSeriesName(self): "fields": { "some_stat": 157 }, - "time": 1234567890123456789, + "time": current_date, }, { "measurement": "events.stats.us.east-1", @@ -137,25 +128,22 @@ def testSingleSeriesName(self): "fields": { "some_stat": 156 }, - "time": "2016-01-02T03:04:05.678912+01:00", + "time": current_date, } ] - rcvd = TestSeriesHelper.MySeriesTimeHelper._json_body_() + rcvd = TestSeriesHelper.MySeriesHelper._json_body_() 
self.assertTrue(all([el in expectation for el in rcvd]) and all([el in rcvd for el in expectation]), 'Invalid JSON body of time series returned from ' '_json_body_ for one series name: {0}.'.format(rcvd)) - TestSeriesHelper.MySeriesTimeHelper._reset_() - self.assertEqual( - TestSeriesHelper.MySeriesTimeHelper._json_body_(), - [], - 'Resetting helper did not empty datapoints.') - def testSeveralSeriesNames(self): - ''' + @mock.patch('influxdb.helper.SeriesHelper._current_timestamp') + def testSeveralSeriesNames(self, current_timestamp): + """ Tests JSON conversion when there are multiple series names. - ''' + """ + current_timestamp.return_value = current_date = datetime.today() TestSeriesHelper.MySeriesHelper( server_name='us.east-1', some_stat=159, other_tag='ello') TestSeriesHelper.MySeriesHelper( @@ -173,7 +161,8 @@ def testSeveralSeriesNames(self): 'tags': { 'other_tag': 'ello', 'server_name': 'lu.lux' - } + }, + "time": current_date, }, { 'fields': { @@ -183,7 +172,8 @@ def testSeveralSeriesNames(self): 'tags': { 'other_tag': 'ello', 'server_name': 'uk.london' - } + }, + "time": current_date, }, { 'fields': { @@ -193,7 +183,8 @@ def testSeveralSeriesNames(self): 'tags': { 'other_tag': 'ello', 'server_name': 'fr.paris-10' - } + }, + "time": current_date, }, { 'fields': { @@ -203,25 +194,70 @@ def testSeveralSeriesNames(self): 'tags': { 'other_tag': 'ello', 'server_name': 'us.east-1' - } + }, + "time": current_date, } ] rcvd = TestSeriesHelper.MySeriesHelper._json_body_() - for r in rcvd: - self.assertTrue(r.get('time'), - "No time field in received JSON body.") - del(r["time"]) self.assertTrue(all([el in expectation for el in rcvd]) and all([el in rcvd for el in expectation]), 'Invalid JSON body of time series returned from ' '_json_body_ for several series names: {0}.' 
.format(rcvd)) - TestSeriesHelper.MySeriesHelper._reset_() - self.assertEqual( - TestSeriesHelper.MySeriesHelper._json_body_(), - [], - 'Resetting helper did not empty datapoints.') + + @mock.patch('influxdb.helper.SeriesHelper._current_timestamp') + def testSeriesWithoutTimeField(self, current_timestamp): + """ + Tests that time is optional on a series without a time field. + """ + current_date = datetime.today() + yesterday = current_date - timedelta(days=1) + current_timestamp.return_value = yesterday + TestSeriesHelper.MySeriesHelper( + server_name='us.east-1', other_tag='ello', + some_stat=159, time=current_date + ) + TestSeriesHelper.MySeriesHelper( + server_name='us.east-1', other_tag='ello', + some_stat=158, + ) + point1, point2 = TestSeriesHelper.MySeriesHelper._json_body_() + self.assertTrue('time' in point1 and 'time' in point2) + self.assertEqual(point1['time'], current_date) + self.assertEqual(point2['time'], yesterday) + + @mock.patch('influxdb.helper.SeriesHelper._current_timestamp') + def testSeriesWithTimeField(self, current_timestamp): + """ + Test that time is optional on a series with a time field. 
+ """ + current_date = datetime.today() + yesterday = current_date - timedelta(days=1) + current_timestamp.return_value = yesterday + + class MyTimeFieldSeriesHelper(SeriesHelper): + + class Meta: + client = TestSeriesHelper.client + series_name = 'events.stats.{server_name}' + fields = ['some_stat', 'time'] + tags = ['server_name', 'other_tag'] + bulk_size = 5 + autocommit = True + + MyTimeFieldSeriesHelper( + server_name='us.east-1', other_tag='ello', + some_stat=159, time=current_date + ) + MyTimeFieldSeriesHelper( + server_name='us.east-1', other_tag='ello', + some_stat=158, + ) + point1, point2 = MyTimeFieldSeriesHelper._json_body_() + self.assertTrue('time' in point1 and 'time' in point2) + self.assertEqual(point1['time'], current_date) + self.assertEqual(point2['time'], yesterday) def testInvalidHelpers(self): ''' diff --git a/test-requirements.txt b/test-requirements.txt index 9e18b7d2..cbc6add3 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,5 +1,4 @@ nose nose-cov mock -requests-mock -pytz +requests-mock \ No newline at end of file From f3acbd85207856c035de323d8833410d0658cda4 Mon Sep 17 00:00:00 2001 From: Alexandre Viau Date: Wed, 20 Apr 2016 23:53:38 -0400 Subject: [PATCH 318/536] disable py26 tests --- .travis.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index e78fce96..22c366ad 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,6 +1,9 @@ language: python env: - - TOX_ENV=py26 +# - TOX_ENV=py26 +# If someone opens a PR to re-enable py26 tests, I will merge it. +# However, that cannot be at the cost of disabling pandas tests in other +# environments. 
- TOX_ENV=py27 # - TOX_ENV=py32 # Disabling py32 tests until the following issue is fixed: From 103c7dc85607b3bb157af4cca4972c006bae7780 Mon Sep 17 00:00:00 2001 From: aviau Date: Wed, 20 Apr 2016 23:57:12 -0400 Subject: [PATCH 319/536] Updated influxdb version --- .travis.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 22c366ad..01600606 100644 --- a/.travis.yml +++ b/.travis.yml @@ -20,10 +20,10 @@ install: - pip install tox - pip install coveralls - mkdir influxdb_install - - wget https://s3.amazonaws.com/influxdb/influxdb_0.9.6.1_amd64.deb - - dpkg -x influxdb_*_amd64.deb influxdb_install + - wget https://s3.amazonaws.com/influxdb/influxdb_0.12.1-1_amd64.deb + - dpkg -x influxdb*.deb influxdb_install script: - - export INFLUXDB_PYTHON_INFLUXD_PATH=$(pwd)/influxdb_install/opt/influxdb/versions/0.9.6.1/influxd + - export INFLUXDB_PYTHON_INFLUXD_PATH=$(pwd)/influxdb_install/usr/bin/influxd - travis_wait 30 tox -e $TOX_ENV after_success: - if [ "$TOX_ENV" == "coverage" ] ; then coveralls; fi From 3322fde5ba5cd6b6ed02ff541588af29e47ed59b Mon Sep 17 00:00:00 2001 From: aviau Date: Thu, 21 Apr 2016 00:01:06 -0400 Subject: [PATCH 320/536] simplify config template --- .../tests/server_tests/influxdb.conf.template | 52 ------------------- 1 file changed, 52 deletions(-) diff --git a/influxdb/tests/server_tests/influxdb.conf.template b/influxdb/tests/server_tests/influxdb.conf.template index 1e9ddf31..9a289635 100644 --- a/influxdb/tests/server_tests/influxdb.conf.template +++ b/influxdb/tests/server_tests/influxdb.conf.template @@ -2,31 +2,10 @@ dir = "{meta_dir}" hostname = "localhost" bind-address = ":{meta_port}" - retention-autocreate = true - election-timeout = "1s" - heartbeat-timeout = "1s" - leader-lease-timeout = "500ms" - commit-timeout = "50ms" [data] dir = "{data_dir}" wal-dir = "{wal_dir}" - retention-auto-create = true - retention-check-enabled = true - retention-check-period = "10m0s" - 
retention-create-period = "45m0s" - -[cluster] - shard-writer-timeout = "5s" - -[retention] - enabled = true - check-interval = "10m0s" - -[shard-precreation] - enabled = true - check-interval = "10m0s" - advance-period = "30m0s" [admin] enabled = true @@ -36,24 +15,6 @@ enabled = true bind-address = ":{http_port}" auth-enabled = false - log-enabled = true - write-tracing = false - -[collectd] - enabled = false - bind-address = ":25826" - database = "collectd" - retention-policy = "" - batch-size = 5000 - batch-timeout = "10s" - typesdb = "/usr/share/collectd/types.db" - -[opentsdb] - enabled = false - bind-address = ":4242" - database = "opentsdb" - retention-policy = "" - consistency-level = "one" [[udp]] enabled = {udp_enabled} @@ -65,18 +26,5 @@ [monitor] store-enabled = false -[continuous_queries] - enabled = true - recompute-previous-n = 2 - recompute-no-older-than = "10m0s" - compute-runs-per-interval = 10 - compute-no-more-than = "2m0s" - [hinted-handoff] - enabled = false dir = "{handoff_dir}" - max-size = 1073741824 - max-age = "168h0m0s" - retry-rate-limit = 0 - retry-interval = "1s" - From b9beaeb89df12b985b46b1ab4a7f94c89593c931 Mon Sep 17 00:00:00 2001 From: afelle1 Date: Fri, 13 May 2016 09:32:05 -0400 Subject: [PATCH 321/536] Fixed code and updated tests for issue 320 (#321) Thanks @afelle1 ! --- influxdb/client.py | 8 ++++---- influxdb/tests/client_test.py | 16 ++++++++-------- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 391d1f85..be28a39e 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -502,7 +502,7 @@ def create_retention_policy(self, name, duration, replication, :type default: bool """ query_string = \ - "CREATE RETENTION POLICY %s ON %s " \ + "CREATE RETENTION POLICY \"%s\" ON \"%s\" " \ "DURATION %s REPLICATION %s" % \ (name, database or self._database, duration, replication) @@ -537,7 +537,7 @@ def alter_retention_policy(self, name, database=None, should be set. 
Otherwise the operation will fail. """ query_string = ( - "ALTER RETENTION POLICY {0} ON {1}" + "ALTER RETENTION POLICY \"{0}\" ON \"{1}\"" ).format(name, database or self._database) if duration: query_string += " DURATION {0}".format(duration) @@ -558,7 +558,7 @@ def drop_retention_policy(self, name, database=None): :type database: str """ query_string = ( - "DROP RETENTION POLICY {0} ON {1}" + "DROP RETENTION POLICY \"{0}\" ON \"{1}\"" ).format(name, database or self._database) self.query(query_string) @@ -583,7 +583,7 @@ def get_list_retention_policies(self, database=None): u'replicaN': 1}] """ rsp = self.query( - "SHOW RETENTION POLICIES ON %s" % (database or self._database) + "SHOW RETENTION POLICIES ON \"%s\"" % (database or self._database) ) return list(rsp.get_points()) diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index 39f7b68c..90f6964a 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -571,8 +571,8 @@ def test_create_retention_policy_default(self): self.assertEqual( m.last_request.qs['q'][0], - 'create retention policy somename on ' - 'db duration 1d replication 4 default' + 'create retention policy "somename" on ' + '"db" duration 1d replication 4 default' ) def test_create_retention_policy(self): @@ -590,8 +590,8 @@ def test_create_retention_policy(self): self.assertEqual( m.last_request.qs['q'][0], - 'create retention policy somename on ' - 'db duration 1d replication 4' + 'create retention policy "somename" on ' + '"db" duration 1d replication 4' ) def test_alter_retention_policy(self): @@ -608,14 +608,14 @@ def test_alter_retention_policy(self): duration='4d') self.assertEqual( m.last_request.qs['q'][0], - 'alter retention policy somename on db duration 4d' + 'alter retention policy "somename" on "db" duration 4d' ) # Test alter replication self.cli.alter_retention_policy('somename', 'db', replication=4) self.assertEqual( m.last_request.qs['q'][0], - 'alter retention policy somename on db 
replication 4' + 'alter retention policy "somename" on "db" replication 4' ) # Test alter default @@ -623,7 +623,7 @@ def test_alter_retention_policy(self): default=True) self.assertEqual( m.last_request.qs['q'][0], - 'alter retention policy somename on db default' + 'alter retention policy "somename" on "db" default' ) @raises(Exception) @@ -644,7 +644,7 @@ def test_drop_retention_policy(self): self.cli.drop_retention_policy('somename', 'db') self.assertEqual( m.last_request.qs['q'][0], - 'drop retention policy somename on db' + 'drop retention policy "somename" on "db"' ) @raises(Exception) From 3e93191a00d4b3d2e0601c416100c4012e158dec Mon Sep 17 00:00:00 2001 From: Alexandre Viau Date: Wed, 8 Jun 2016 11:39:14 -0400 Subject: [PATCH 322/536] README: help-needed section --- README.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.rst b/README.rst index 081c747c..ddffb92f 100644 --- a/README.rst +++ b/README.rst @@ -2,6 +2,8 @@ InfluxDB-Python is a client for interacting with InfluxDB_. Maintained by @aviau (https://github.com/aviau). +**Help needed:** Development of this library is made by the community and help is needed. To contribute, take a look at the issues list of simply contact @aviau. + .. image:: https://travis-ci.org/influxdata/influxdb-python.svg?branch=master :target: https://travis-ci.org/influxdata/influxdb-python From 3b0cdaadef5ed17e4d641bd7c897aa97ffdcdb6d Mon Sep 17 00:00:00 2001 From: Wolodja Wentland Date: Thu, 9 Jun 2016 15:46:15 +0200 Subject: [PATCH 323/536] Change 'an user' to 'a user' (#332) --- influxdb/client.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index be28a39e..9903ee17 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -671,7 +671,7 @@ def create_user(self, username, password, admin=False): self.query(text) def drop_user(self, username): - """Drop an user from InfluxDB. + """Drop a user from InfluxDB. 
:param username: the username to drop :type username: str @@ -713,7 +713,7 @@ def delete_series(self, database=None, measurement=None, tags=None): self.query(query_str, database=database) def revoke_admin_privileges(self, username): - """Revoke cluster administration privileges from an user. + """Revoke cluster administration privileges from a user. :param username: the username to revoke privileges from :type username: str @@ -725,7 +725,7 @@ def revoke_admin_privileges(self, username): self.query(text) def grant_privilege(self, privilege, database, username): - """Grant a privilege on a database to an user. + """Grant a privilege on a database to a user. :param privilege: the privilege to grant, one of 'read', 'write' or 'all'. The string is case-insensitive @@ -741,7 +741,7 @@ def grant_privilege(self, privilege, database, username): self.query(text) def revoke_privilege(self, privilege, database, username): - """Revoke a privilege on a database from an user. + """Revoke a privilege on a database from a user. :param privilege: the privilege to revoke, one of 'read', 'write' or 'all'. The string is case-insensitive From d9c5c1cb817247f147ec21790f4c85b91af9e0c5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adri=C3=A1n=20L=C3=B3pez?= Date: Thu, 9 Jun 2016 15:49:30 +0200 Subject: [PATCH 324/536] Some exceptions could be raised before sending data to server (#284) (thanks @adrianlzt!) 
--- influxdb/client.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/influxdb/client.py b/influxdb/client.py index 9903ee17..69209a7f 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -907,6 +907,8 @@ def func(*args, **kwargs): except InfluxDBClientError as e: # Errors caused by user's requests, re-raise raise e + except ValueError as e: + raise e except Exception as e: # Errors that might caused by server failure, try another bad_host = True From 81300de0cc1fb05589a0023e451737caebfb63d2 Mon Sep 17 00:00:00 2001 From: Alexandre Viau Date: Thu, 9 Jun 2016 10:54:04 -0400 Subject: [PATCH 325/536] mention a possible co-maintainer --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index ddffb92f..9d834be2 100644 --- a/README.rst +++ b/README.rst @@ -2,7 +2,7 @@ InfluxDB-Python is a client for interacting with InfluxDB_. Maintained by @aviau (https://github.com/aviau). -**Help needed:** Development of this library is made by the community and help is needed. To contribute, take a look at the issues list of simply contact @aviau. +**Help needed:** Development of this library is made by the community and help is needed. A co-maintainer would be welcome. To contribute, take a look at the issues list of simply contact @aviau. .. image:: https://travis-ci.org/influxdata/influxdb-python.svg?branch=master :target: https://travis-ci.org/influxdata/influxdb-python From 68c95e5c0df3a9fc7c5e9f32772ac6a06d6b344e Mon Sep 17 00:00:00 2001 From: Wolodja Wentland Date: Mon, 13 Jun 2016 15:50:45 +0200 Subject: [PATCH 326/536] Test suite improvements (#333) (Thanks @babilen!) * Import py3 compatibility future statements * Consistent formatting for dictionary literals This makes it easier to see changes to test data in diffs. * Add 'shardGroupDuration' in retention policy tests This information was introduced in InfluxDB 0.10.0 and tests fail without it. 
* Perform correct default retention policy switching There can only be a single default retention policy per database. * Update InfluxDB version to 0.13.0 This will run tests on travis using 0.13.0 * travis: upgrade wget * Drop Python 2.6 support This removes tests and compatibility code for Python 2.6. * Fix PEP8 errors --- .travis.yml | 10 +- influxdb/__init__.py | 6 + influxdb/_dataframe_client.py | 5 + influxdb/chunked_json.py | 5 + influxdb/client.py | 4 + influxdb/dataframe_client.py | 4 + influxdb/exceptions.py | 6 + influxdb/helper.py | 5 + influxdb/influxdb08/__init__.py | 6 + influxdb/influxdb08/chunked_json.py | 5 + influxdb/influxdb08/dataframe_client.py | 5 + influxdb/influxdb08/helper.py | 5 + influxdb/line_protocol.py | 4 + influxdb/resultset.py | 5 + influxdb/tests/__init__.py | 11 +- influxdb/tests/chunked_json_test.py | 10 +- influxdb/tests/client_test.py | 11 +- influxdb/tests/dataframe_client_test.py | 11 +- influxdb/tests/helper_test.py | 11 +- influxdb/tests/influxdb08/client_test.py | 6 +- .../tests/influxdb08/dataframe_client_test.py | 6 +- influxdb/tests/influxdb08/helper_test.py | 6 +- influxdb/tests/misc.py | 5 + influxdb/tests/resultset_test.py | 11 +- influxdb/tests/server_tests/base.py | 5 + .../server_tests/client_test_with_server.py | 141 +++++++++++++----- .../tests/server_tests/influxdb_instance.py | 8 +- influxdb/tests/test_line_protocol.py | 10 +- tox.ini | 3 +- 29 files changed, 232 insertions(+), 98 deletions(-) diff --git a/.travis.yml b/.travis.yml index 01600606..1aeb0949 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,9 +1,9 @@ language: python +addons: + apt: + packages: + - wget env: -# - TOX_ENV=py26 -# If someone opens a PR to re-enable py26 tests, I will merge it. -# However, that cannot be at the cost of disabling pandas tests in other -# environments. 
- TOX_ENV=py27 # - TOX_ENV=py32 # Disabling py32 tests until the following issue is fixed: @@ -20,7 +20,7 @@ install: - pip install tox - pip install coveralls - mkdir influxdb_install - - wget https://s3.amazonaws.com/influxdb/influxdb_0.12.1-1_amd64.deb + - wget https://dl.influxdata.com/influxdb/releases/influxdb_0.13.0_amd64.deb - dpkg -x influxdb*.deb influxdb_install script: - export INFLUXDB_PYTHON_INFLUXD_PATH=$(pwd)/influxdb_install/usr/bin/influxd diff --git a/influxdb/__init__.py b/influxdb/__init__.py index 0f1c26d4..2269f59d 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -1,4 +1,10 @@ # -*- coding: utf-8 -*- + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + from .client import InfluxDBClient from .client import InfluxDBClusterClient from .dataframe_client import DataFrameClient diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index 73a01745..2e129bae 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -2,6 +2,11 @@ """ DataFrame client for InfluxDB """ +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + import math import pandas as pd diff --git a/influxdb/chunked_json.py b/influxdb/chunked_json.py index 50d304f1..ae748e51 100644 --- a/influxdb/chunked_json.py +++ b/influxdb/chunked_json.py @@ -5,6 +5,11 @@ # Source: https://gist.github.com/sampsyo/920215 # +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + import json _decoder = json.JSONDecoder() diff --git a/influxdb/client.py b/influxdb/client.py index 69209a7f..bb1a1123 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -2,6 +2,10 @@ """ Python client for InfluxDB """ +from __future__ import absolute_import 
+from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals from functools import wraps import json diff --git a/influxdb/dataframe_client.py b/influxdb/dataframe_client.py index 41a6796c..2d90fda3 100644 --- a/influxdb/dataframe_client.py +++ b/influxdb/dataframe_client.py @@ -2,6 +2,10 @@ """ DataFrame client for InfluxDB """ +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals __all__ = ['DataFrameClient'] diff --git a/influxdb/exceptions.py b/influxdb/exceptions.py index cad325c6..6860f420 100644 --- a/influxdb/exceptions.py +++ b/influxdb/exceptions.py @@ -1,3 +1,9 @@ +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + + class InfluxDBClientError(Exception): """Raised when an error occurs in the request.""" def __init__(self, content, code=None): diff --git a/influxdb/helper.py b/influxdb/helper.py index 7f64de46..0bae6f92 100644 --- a/influxdb/helper.py +++ b/influxdb/helper.py @@ -2,6 +2,11 @@ """ Helper class for InfluxDB """ +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + from collections import namedtuple, defaultdict from datetime import datetime from warnings import warn diff --git a/influxdb/influxdb08/__init__.py b/influxdb/influxdb08/__init__.py index 6ba218e3..c718cdce 100644 --- a/influxdb/influxdb08/__init__.py +++ b/influxdb/influxdb08/__init__.py @@ -1,4 +1,10 @@ # -*- coding: utf-8 -*- + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + from .client import InfluxDBClient from .dataframe_client import DataFrameClient from .helper import SeriesHelper diff --git 
a/influxdb/influxdb08/chunked_json.py b/influxdb/influxdb08/chunked_json.py index 50d304f1..ae748e51 100644 --- a/influxdb/influxdb08/chunked_json.py +++ b/influxdb/influxdb08/chunked_json.py @@ -5,6 +5,11 @@ # Source: https://gist.github.com/sampsyo/920215 # +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + import json _decoder = json.JSONDecoder() diff --git a/influxdb/influxdb08/dataframe_client.py b/influxdb/influxdb08/dataframe_client.py index 87ddba8c..28173e6e 100644 --- a/influxdb/influxdb08/dataframe_client.py +++ b/influxdb/influxdb08/dataframe_client.py @@ -2,6 +2,11 @@ """ DataFrame client for InfluxDB """ +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + import math import warnings diff --git a/influxdb/influxdb08/helper.py b/influxdb/influxdb08/helper.py index 734ac960..e5827fa1 100644 --- a/influxdb/influxdb08/helper.py +++ b/influxdb/influxdb08/helper.py @@ -2,6 +2,11 @@ """ Helper class for InfluxDB """ +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + from collections import namedtuple, defaultdict from warnings import warn diff --git a/influxdb/line_protocol.py b/influxdb/line_protocol.py index da0e83ec..4d3dc544 100644 --- a/influxdb/line_protocol.py +++ b/influxdb/line_protocol.py @@ -1,4 +1,8 @@ # -*- coding: utf-8 -*- + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function from __future__ import unicode_literals from calendar import timegm diff --git a/influxdb/resultset.py b/influxdb/resultset.py index 8ec7091e..c44ac908 100644 --- a/influxdb/resultset.py +++ b/influxdb/resultset.py @@ -1,5 +1,10 @@ # -*- coding: utf-8 -*- +from __future__ import absolute_import +from 
__future__ import division +from __future__ import print_function +from __future__ import unicode_literals + import warnings from influxdb.exceptions import InfluxDBClientError diff --git a/influxdb/tests/__init__.py b/influxdb/tests/__init__.py index dc1bb22c..9dadbb05 100644 --- a/influxdb/tests/__init__.py +++ b/influxdb/tests/__init__.py @@ -1,11 +1,14 @@ # -*- coding: utf-8 -*- +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + import sys import os -if sys.version_info < (2, 7): - import unittest2 as unittest -else: - import unittest + +import unittest using_pypy = hasattr(sys, "pypy_version_info") skipIfPYpy = unittest.skipIf(using_pypy, "Skipping this test on pypy.") diff --git a/influxdb/tests/chunked_json_test.py b/influxdb/tests/chunked_json_test.py index 6b0f9e65..48e3a736 100644 --- a/influxdb/tests/chunked_json_test.py +++ b/influxdb/tests/chunked_json_test.py @@ -1,13 +1,13 @@ # -*- coding: utf-8 -*- -import sys +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals from influxdb import chunked_json -if sys.version_info < (2, 7): - import unittest2 as unittest -else: - import unittest +import unittest class TestChunkJson(unittest.TestCase): diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index 90f6964a..b8c106df 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -14,6 +14,11 @@ See client_test_with_server.py for tests against a running server instance. 
""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + import json import requests import requests.exceptions @@ -25,11 +30,7 @@ from mock import patch import warnings import mock -import sys -if sys.version_info < (2, 7): - import unittest2 as unittest -else: - import unittest +import unittest from influxdb import InfluxDBClient, InfluxDBClusterClient from influxdb.client import InfluxDBServerError diff --git a/influxdb/tests/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py index 721d1aeb..170617a2 100644 --- a/influxdb/tests/dataframe_client_test.py +++ b/influxdb/tests/dataframe_client_test.py @@ -2,13 +2,14 @@ """ unit tests for misc module """ +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + from .client_test import _mocked_session -import sys -if sys.version_info < (2, 7): - import unittest2 as unittest -else: - import unittest +import unittest import json import requests_mock from nose.tools import raises diff --git a/influxdb/tests/helper_test.py b/influxdb/tests/helper_test.py index 405b5ed8..10546286 100644 --- a/influxdb/tests/helper_test.py +++ b/influxdb/tests/helper_test.py @@ -1,10 +1,11 @@ # -*- coding: utf-8 -*- -import sys -if sys.version_info < (2, 7): - import unittest2 as unittest -else: - import unittest +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +import unittest import warnings import mock diff --git a/influxdb/tests/influxdb08/client_test.py b/influxdb/tests/influxdb08/client_test.py index 87328c5a..bcf1702d 100644 --- a/influxdb/tests/influxdb08/client_test.py +++ b/influxdb/tests/influxdb08/client_test.py @@ -7,10 +7,7 @@ import requests.exceptions import socket import sys -if sys.version_info < (2, 7): - 
import unittest2 as unittest -else: - import unittest +import unittest import requests_mock import random from nose.tools import raises @@ -21,7 +18,6 @@ from influxdb.influxdb08 import InfluxDBClient from influxdb.influxdb08.client import session -import sys if sys.version < '3': import codecs diff --git a/influxdb/tests/influxdb08/dataframe_client_test.py b/influxdb/tests/influxdb08/dataframe_client_test.py index 6ff4724f..63a10c93 100644 --- a/influxdb/tests/influxdb08/dataframe_client_test.py +++ b/influxdb/tests/influxdb08/dataframe_client_test.py @@ -4,11 +4,7 @@ """ from .client_test import _mocked_session -import sys -if sys.version_info < (2, 7): - import unittest2 as unittest -else: - import unittest +import unittest import json import requests_mock from nose.tools import raises diff --git a/influxdb/tests/influxdb08/helper_test.py b/influxdb/tests/influxdb08/helper_test.py index 3a84c849..e744d1e4 100644 --- a/influxdb/tests/influxdb08/helper_test.py +++ b/influxdb/tests/influxdb08/helper_test.py @@ -1,10 +1,6 @@ # -*- coding: utf-8 -*- -import sys -if sys.version_info < (2, 7): - import unittest2 as unittest -else: - import unittest +import unittest import warnings import mock diff --git a/influxdb/tests/misc.py b/influxdb/tests/misc.py index 6a3857b0..7dffc219 100644 --- a/influxdb/tests/misc.py +++ b/influxdb/tests/misc.py @@ -1,5 +1,10 @@ # -*- coding: utf-8 -*- +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + import socket diff --git a/influxdb/tests/resultset_test.py b/influxdb/tests/resultset_test.py index e65bad3c..4717b88c 100644 --- a/influxdb/tests/resultset_test.py +++ b/influxdb/tests/resultset_test.py @@ -1,10 +1,11 @@ # -*- coding: utf-8 -*- -import sys -if sys.version_info < (2, 7): - import unittest2 as unittest -else: - import unittest +from __future__ import absolute_import +from __future__ import division +from __future__ import 
print_function +from __future__ import unicode_literals + +import unittest from influxdb.exceptions import InfluxDBClientError from influxdb.resultset import ResultSet diff --git a/influxdb/tests/server_tests/base.py b/influxdb/tests/server_tests/base.py index 7bd17eaf..3566d7ba 100644 --- a/influxdb/tests/server_tests/base.py +++ b/influxdb/tests/server_tests/base.py @@ -1,5 +1,10 @@ # -*- coding: utf-8 -*- +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + import sys from influxdb.tests import using_pypy diff --git a/influxdb/tests/server_tests/client_test_with_server.py b/influxdb/tests/server_tests/client_test_with_server.py index 78bc1fc2..9bad5e1b 100644 --- a/influxdb/tests/server_tests/client_test_with_server.py +++ b/influxdb/tests/server_tests/client_test_with_server.py @@ -9,17 +9,15 @@ but without mocking around every call. """ - +from __future__ import absolute_import +from __future__ import division from __future__ import print_function +from __future__ import unicode_literals from functools import partial import os import time -import sys -if sys.version_info < (2, 7): - import unittest2 as unittest -else: - import unittest +import unittest import warnings # By default, raise exceptions on warnings @@ -341,8 +339,12 @@ def test_write_points_check_read(self): self.assertEqual( list(rsp), - [[{'value': 0.64, 'time': '2009-11-10T23:00:00Z', - "host": "server01", "region": "us-west"}]] + [[ + {'value': 0.64, + 'time': '2009-11-10T23:00:00Z', + "host": "server01", + "region": "us-west"} + ]] ) rsp2 = list(rsp.get_points()) @@ -351,8 +353,10 @@ def test_write_points_check_read(self): self.assertEqual( pt, - {'time': '2009-11-10T23:00:00Z', 'value': 0.64, - "host": "server01", "region": "us-west"} + {'time': '2009-11-10T23:00:00Z', + 'value': 0.64, + "host": "server01", + "region": "us-west"} ) @unittest.skip("Broken as of 0.9.0") @@ -382,8 +386,12 @@ def 
test_write_multiple_points_different_series(self): lrsp = list(rsp) self.assertEqual( - [[{'value': 0.64, 'time': '2009-11-10T23:00:00Z', - "host": "server01", "region": "us-west"}]], + [[ + {'value': 0.64, + 'time': '2009-11-10T23:00:00Z', + "host": "server01", + "region": "us-west"} + ]], lrsp ) @@ -391,8 +399,12 @@ def test_write_multiple_points_different_series(self): self.assertEqual( rsp, - [[{'value': 33, 'time': '2009-11-10T23:01:35Z', - "host": "server01", "region": "us-west"}]] + [[ + {'value': 33, + 'time': '2009-11-10T23:01:35Z', + "host": "server01", + "region": "us-west"} + ]] ) @unittest.skip("Broken as of 0.9.0") @@ -510,6 +522,7 @@ def test_default_retention_policy(self): {'name': 'default', 'duration': '0', 'replicaN': 1, + 'shardGroupDuration': u'168h0m0s', 'default': True} ], rsp @@ -525,14 +538,17 @@ def test_create_retention_policy_default(self): {'duration': '0', 'default': False, 'replicaN': 1, + 'shardGroupDuration': u'168h0m0s', 'name': 'default'}, {'duration': '24h0m0s', 'default': True, 'replicaN': 1, + 'shardGroupDuration': u'1h0m0s', 'name': 'somename'}, {'duration': '48h0m0s', 'default': False, 'replicaN': 1, + 'shardGroupDuration': u'24h0m0s', 'name': 'another'} ], rsp @@ -542,10 +558,18 @@ def test_create_retention_policy(self): self.cli.create_retention_policy('somename', '1d', 1) rsp = self.cli.get_list_retention_policies() self.assertEqual( - [{'duration': '0', 'default': True, - 'replicaN': 1, 'name': 'default'}, - {'duration': '24h0m0s', 'default': False, - 'replicaN': 1, 'name': 'somename'}], + [ + {'duration': '0', + 'default': True, + 'replicaN': 1, + 'shardGroupDuration': u'168h0m0s', + 'name': 'default'}, + {'duration': '24h0m0s', + 'default': False, + 'replicaN': 1, + 'shardGroupDuration': u'1h0m0s', + 'name': 'somename'} + ], rsp ) @@ -557,10 +581,18 @@ def test_alter_retention_policy(self): duration='4d') rsp = self.cli.get_list_retention_policies() self.assertEqual( - [{'duration': '0', 'default': True, - 'replicaN': 
1, 'name': 'default'}, - {'duration': '96h0m0s', 'default': False, - 'replicaN': 1, 'name': 'somename'}], + [ + {'duration': '0', + 'default': True, + 'replicaN': 1, + 'shardGroupDuration': u'168h0m0s', + 'name': 'default'}, + {'duration': '96h0m0s', + 'default': False, + 'replicaN': 1, + 'shardGroupDuration': u'24h0m0s', + 'name': 'somename'} + ], rsp ) @@ -569,10 +601,18 @@ def test_alter_retention_policy(self): replication=4) rsp = self.cli.get_list_retention_policies() self.assertEqual( - [{'duration': '0', 'default': True, - 'replicaN': 1, 'name': 'default'}, - {'duration': '96h0m0s', 'default': False, - 'replicaN': 4, 'name': 'somename'}], + [ + {'duration': '0', + 'default': True, + 'replicaN': 1, + 'shardGroupDuration': u'168h0m0s', + 'name': 'default'}, + {'duration': '96h0m0s', + 'default': False, + 'replicaN': 4, + 'shardGroupDuration': u'24h0m0s', + 'name': 'somename'} + ], rsp ) @@ -581,10 +621,18 @@ def test_alter_retention_policy(self): default=True) rsp = self.cli.get_list_retention_policies() self.assertEqual( - [{'duration': '0', 'default': False, - 'replicaN': 1, 'name': 'default'}, - {'duration': '96h0m0s', 'default': True, - 'replicaN': 4, 'name': 'somename'}], + [ + {'duration': '0', + 'default': False, + 'replicaN': 1, + 'shardGroupDuration': u'168h0m0s', + 'name': 'default'}, + {'duration': '96h0m0s', + 'default': True, + 'replicaN': 4, + 'shardGroupDuration': u'24h0m0s', + 'name': 'somename'} + ], rsp ) @@ -597,10 +645,18 @@ def test_alter_retention_policy_invalid(self): ctx.exception.content) rsp = self.cli.get_list_retention_policies() self.assertEqual( - [{'duration': '0', 'default': True, - 'replicaN': 1, 'name': 'default'}, - {'duration': '24h0m0s', 'default': False, - 'replicaN': 1, 'name': 'somename'}], + [ + {'duration': '0', + 'default': True, + 'replicaN': 1, + 'shardGroupDuration': u'168h0m0s', + 'name': 'default'}, + {'duration': '24h0m0s', + 'default': False, + 'replicaN': 1, + 'shardGroupDuration': u'1h0m0s', + 'name': 
'somename'} + ], rsp ) @@ -611,8 +667,13 @@ def test_drop_retention_policy(self): self.cli.drop_retention_policy('somename', 'db') rsp = self.cli.get_list_retention_policies() self.assertEqual( - [{'duration': '0', 'default': True, - 'replicaN': 1, 'name': 'default'}], + [ + {'duration': '0', + 'default': True, + 'replicaN': 1, + 'shardGroupDuration': u'168h0m0s', + 'name': 'default'} + ], rsp ) @@ -717,7 +778,11 @@ def test_write_points_udp(self): self.assertEqual( # this is dummy_points : - [{'value': 0.64, 'time': '2009-11-10T23:00:00Z', - "host": "server01", "region": "us-west"}], + [ + {'value': 0.64, + 'time': '2009-11-10T23:00:00Z', + "host": "server01", + "region": "us-west"} + ], list(rsp['cpu_load_short']) ) diff --git a/influxdb/tests/server_tests/influxdb_instance.py b/influxdb/tests/server_tests/influxdb_instance.py index 1b662589..59cbcd4f 100644 --- a/influxdb/tests/server_tests/influxdb_instance.py +++ b/influxdb/tests/server_tests/influxdb_instance.py @@ -1,6 +1,9 @@ # -*- coding: utf-8 -*- +from __future__ import absolute_import +from __future__ import division from __future__ import print_function +from __future__ import unicode_literals import datetime import os @@ -10,10 +13,7 @@ import shutil import subprocess import sys -if sys.version_info < (2, 7): - import unittest2 as unittest -else: - import unittest +import unittest from influxdb.tests.misc import is_port_open, get_free_ports diff --git a/influxdb/tests/test_line_protocol.py b/influxdb/tests/test_line_protocol.py index 1100dc49..2a95c4db 100644 --- a/influxdb/tests/test_line_protocol.py +++ b/influxdb/tests/test_line_protocol.py @@ -1,11 +1,11 @@ # -*- coding: utf-8 -*- + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function from __future__ import unicode_literals -import sys -if sys.version_info < (2, 7): - import unittest2 as unittest -else: - import unittest +import unittest from influxdb import line_protocol diff --git 
a/tox.ini b/tox.ini index e2cb83c9..ddc15514 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py34, py27, py26, pypy, flake8 +envlist = py34, py27, pypy, flake8 [testenv] passenv = INFLUXDB_PYTHON_INFLUXD_PATH @@ -7,7 +7,6 @@ setenv = INFLUXDB_PYTHON_SKIP_SERVER_TESTS=False deps = -r{toxinidir}/requirements.txt -r{toxinidir}/test-requirements.txt py27,py32,py33,py34,py26: pandas - py26: unittest2 # Only install pandas with non-pypy interpreters commands = nosetests -v --with-doctest {posargs} From 4e06277b203cd13433367b857e3f5f78d19d95b8 Mon Sep 17 00:00:00 2001 From: Wolodja Wentland Date: Mon, 13 Jun 2016 16:31:04 +0200 Subject: [PATCH 327/536] Creating a database multiple times is a no-op Creating a database multiple times does no longer throw an error and passing 'IF NOT EXISTS' to CREATE DATABASE does nothing at the moment and will be deprecated in 1.0. --- influxdb/client.py | 7 ++----- influxdb/tests/client_test.py | 13 ------------- .../tests/server_tests/client_test_with_server.py | 12 ------------ 3 files changed, 2 insertions(+), 30 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index bb1a1123..15957ce3 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -465,16 +465,13 @@ def get_list_database(self): """ return list(self.query("SHOW DATABASES").get_points()) - def create_database(self, dbname, if_not_exists=False): + def create_database(self, dbname): """Create a new database in InfluxDB. :param dbname: the name of the database to create :type dbname: str """ - if if_not_exists: - self.query("CREATE DATABASE IF NOT EXISTS \"%s\"" % dbname) - else: - self.query("CREATE DATABASE \"%s\"" % dbname) + self.query("CREATE DATABASE \"%s\"" % dbname) def drop_database(self, dbname): """Drop a database from InfluxDB. 
diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index b8c106df..a674f265 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -418,19 +418,6 @@ def test_create_database(self): 'create database "new_db"' ) - def test_create_database_with_exist_check(self): - with requests_mock.Mocker() as m: - m.register_uri( - requests_mock.GET, - "http://localhost:8086/query", - text='{"results":[{}]}' - ) - self.cli.create_database('new_db', if_not_exists=True) - self.assertEqual( - m.last_request.qs['q'][0], - 'create database if not exists "new_db"' - ) - def test_create_numeric_named_database(self): with requests_mock.Mocker() as m: m.register_uri( diff --git a/influxdb/tests/server_tests/client_test_with_server.py b/influxdb/tests/server_tests/client_test_with_server.py index 9bad5e1b..4af850e8 100644 --- a/influxdb/tests/server_tests/client_test_with_server.py +++ b/influxdb/tests/server_tests/client_test_with_server.py @@ -133,18 +133,6 @@ def test_create_database(self): [{'name': 'new_db_1'}, {'name': 'new_db_2'}] ) - def test_create_database_twice_if_not_exist(self): - self.assertIsNone(self.cli.create_database('new_db')) - self.assertIsNone( - self.cli.create_database('new_db', if_not_exists=True)) - - def test_create_database_twice_fails(self): - self.assertIsNone(self.cli.create_database('new_db')) - with self.assertRaises(InfluxDBClientError) as ctx: - self.cli.create_database('new_db') - self.assertEqual('database already exists', - ctx.exception.content) - def test_get_list_series_empty(self): rsp = self.cli.get_list_series() self.assertEqual([], rsp) From f7a5158d21402ecd63c3acd39e5d909e6b785697 Mon Sep 17 00:00:00 2001 From: Wolodja Wentland Date: Mon, 13 Jun 2016 16:34:33 +0200 Subject: [PATCH 328/536] Non-existent databases can be dropped InfluxDB no longer raises an error when dropping a non-existing database. 
--- influxdb/tests/client_test.py | 6 ------ influxdb/tests/server_tests/client_test_with_server.py | 6 ------ 2 files changed, 12 deletions(-) diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index a674f265..9598eff2 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -462,12 +462,6 @@ def test_drop_numeric_named_database(self): 'drop database "123"' ) - @raises(Exception) - def test_drop_database_fails(self): - cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') - with _mocked_session(cli, 'delete', 401): - cli.drop_database('old_db') - def test_get_list_database(self): data = {'results': [ {'series': [ diff --git a/influxdb/tests/server_tests/client_test_with_server.py b/influxdb/tests/server_tests/client_test_with_server.py index 4af850e8..ef1293f2 100644 --- a/influxdb/tests/server_tests/client_test_with_server.py +++ b/influxdb/tests/server_tests/client_test_with_server.py @@ -147,12 +147,6 @@ def test_drop_database(self): self.assertIsNone(self.cli.drop_database('new_db_1')) self.assertEqual([{'name': 'new_db_2'}], self.cli.get_list_database()) - def test_drop_database_fails(self): - with self.assertRaises(InfluxDBClientError) as ctx: - self.cli.drop_database('db') - self.assertIn('database not found: db', - ctx.exception.content) - def test_query_fail(self): with self.assertRaises(InfluxDBClientError) as ctx: self.cli.query('select column_one from foo') From bc456b38c647a7940eb91ae344833aee3c6651d2 Mon Sep 17 00:00:00 2001 From: Wolodja Wentland Date: Mon, 13 Jun 2016 16:40:44 +0200 Subject: [PATCH 329/536] Usernames with spaces no longer cause problems Apparently a username with spaces in name does no longer cause problems. 
--- influxdb/tests/server_tests/client_test_with_server.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/influxdb/tests/server_tests/client_test_with_server.py b/influxdb/tests/server_tests/client_test_with_server.py index ef1293f2..9ad38be2 100644 --- a/influxdb/tests/server_tests/client_test_with_server.py +++ b/influxdb/tests/server_tests/client_test_with_server.py @@ -199,16 +199,6 @@ def test_create_user_blank_username(self): rsp = list(self.cli.query("SHOW USERS")['results']) self.assertEqual(rsp, []) - def test_create_user_invalid_username(self): - with self.assertRaises(InfluxDBClientError) as ctx: - self.cli.create_user('very invalid', 'secret_password') - self.assertEqual(400, ctx.exception.code) - self.assertIn('{"error":"error parsing query: ' - 'found invalid, expected WITH', - ctx.exception.content) - rsp = list(self.cli.query("SHOW USERS")['results']) - self.assertEqual(rsp, []) - def test_drop_user(self): self.cli.query("CREATE USER test WITH PASSWORD 'test'") self.cli.drop_user('test') From 59e233b5853613e1111191cbbe0fec4bcfd77cd3 Mon Sep 17 00:00:00 2001 From: Wolodja Wentland Date: Mon, 13 Jun 2016 17:07:21 +0200 Subject: [PATCH 330/536] Remove get_list_series SHOW SERIES returns data in line format since 0.11 and the current functionality is broken. We have to implement a proper line format parser before we can reintroduce this functionality. 
--- influxdb/_dataframe_client.py | 14 ------ influxdb/client.py | 29 ------------- influxdb/tests/client_test.py | 21 --------- influxdb/tests/dataframe_client_test.py | 43 ------------------- .../server_tests/client_test_with_server.py | 43 ------------------- 5 files changed, 150 deletions(-) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index 2e129bae..2980be90 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -90,20 +90,6 @@ def query(self, query, chunked=False, database=None): else: return results - def get_list_series(self, database=None): - """ - Get the list of series, in DataFrame - - """ - results = super(DataFrameClient, self)\ - .query("SHOW SERIES", database=database) - if len(results): - return dict( - (key[0], pd.DataFrame(data)) for key, data in results.items() - ) - else: - return {} - def _to_dataframe(self, rs): result = {} if isinstance(rs, list): diff --git a/influxdb/client.py b/influxdb/client.py index 15957ce3..b1c7c976 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -588,35 +588,6 @@ def get_list_retention_policies(self, database=None): ) return list(rsp.get_points()) - def get_list_series(self, database=None): - """Get the list of series for a database. - - :param database: the name of the database, defaults to the client's - current database - :type database: str - :returns: all series in the specified database - :rtype: list of dictionaries - - :Example: - - >> series = client.get_list_series('my_database') - >> series - [{'name': u'cpu_usage', - 'tags': [{u'_id': 1, - u'host': u'server01', - u'region': u'us-west'}]}] - """ - rsp = self.query("SHOW SERIES", database=database) - series = [] - for serie in rsp.items(): - series.append( - { - "name": serie[0][0], - "tags": list(serie[1]) - } - ) - return series - def get_list_servers(self): """Get the list of servers in InfluxDB cluster. 
diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index 9598eff2..f1adaa9b 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -517,27 +517,6 @@ def test_get_list_servers_fails(self): with _mocked_session(cli, 'get', 401): cli.get_list_servers() - def test_get_list_series(self): - example_response = \ - '{"results": [{"series": [{"name": "cpu_load_short", "columns": ' \ - '["_id", "host", "region"], "values": ' \ - '[[1, "server01", "us-west"]]}]}]}' - - with requests_mock.Mocker() as m: - m.register_uri( - requests_mock.GET, - "http://localhost:8086/query", - text=example_response - ) - - self.assertListEqual( - self.cli.get_list_series(), - [{'name': 'cpu_load_short', - 'tags': [ - {'host': 'server01', '_id': 1, 'region': 'us-west'} - ]}] - ) - def test_create_retention_policy_default(self): example_response = '{"results":[{}]}' diff --git a/influxdb/tests/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py index 170617a2..5b868d14 100644 --- a/influxdb/tests/dataframe_client_test.py +++ b/influxdb/tests/dataframe_client_test.py @@ -301,49 +301,6 @@ def test_query_with_empty_result(self): result = cli.query('select column_one from foo;') self.assertEqual(result, {}) - def test_list_series(self): - response = { - 'results': [ - {'series': [ - { - 'columns': ['host'], - 'measurement': 'cpu', - 'values': [ - ['server01']] - }, - { - 'columns': [ - 'host', - 'region' - ], - 'measurement': 'network', - 'values': [ - [ - 'server01', - 'us-west' - ], - [ - 'server01', - 'us-east' - ] - ] - } - ]} - ] - } - - expected = { - 'cpu': pd.DataFrame([['server01']], columns=['host']), - 'network': pd.DataFrame( - [['server01', 'us-west'], ['server01', 'us-east']], - columns=['host', 'region'])} - - cli = DataFrameClient('host', 8086, 'username', 'password', 'db') - with _mocked_session(cli, 'GET', 200, response): - series = cli.get_list_series() - assert_frame_equal(series['cpu'], expected['cpu']) - 
assert_frame_equal(series['network'], expected['network']) - def test_get_list_database(self): data = {'results': [ {'series': [ diff --git a/influxdb/tests/server_tests/client_test_with_server.py b/influxdb/tests/server_tests/client_test_with_server.py index 9ad38be2..9d061bab 100644 --- a/influxdb/tests/server_tests/client_test_with_server.py +++ b/influxdb/tests/server_tests/client_test_with_server.py @@ -133,15 +133,6 @@ def test_create_database(self): [{'name': 'new_db_1'}, {'name': 'new_db_2'}] ) - def test_get_list_series_empty(self): - rsp = self.cli.get_list_series() - self.assertEqual([], rsp) - - @unittest.skip("Broken as of 0.9.0") - def test_get_list_series_empty_DF(self): - rsp = self.cliDF.get_list_series() - self.assertEqual({}, rsp) - def test_drop_database(self): self.test_create_database() self.assertIsNone(self.cli.drop_database('new_db_1')) @@ -449,44 +440,10 @@ def test_query_chunked(self): del example_object # TODO ? - def test_get_list_series_and_delete(self): - self.cli.write_points(dummy_point) - rsp = self.cli.get_list_series() - self.assertEqual( - [ - {'name': 'cpu_load_short', - 'tags': [ - {'host': 'server01', - 'region': 'us-west', - '_key': - 'cpu_load_short,host=server01,region=us-west'}]} - ], - rsp - ) - def test_delete_series_invalid(self): with self.assertRaises(InfluxDBClientError): self.cli.delete_series() - def test_delete_series(self): - self.assertEqual(len(self.cli.get_list_series()), 0) - self.cli.write_points(dummy_points) - self.assertEqual(len(self.cli.get_list_series()), 2) - self.cli.delete_series(measurement='cpu_load_short') - self.assertEqual(len(self.cli.get_list_series()), 1) - self.cli.delete_series(tags={'region': 'us-west'}) - self.assertEqual(len(self.cli.get_list_series()), 0) - - @unittest.skip("Broken as of 0.9.0") - def test_get_list_series_DF(self): - self.cli.write_points(dummy_point) - rsp = self.cliDF.get_list_series() - - expected = pd.DataFrame( - [[1, 'server01', 'us-west']], - columns=['_id', 
'host', 'region']) - assert_frame_equal(rsp['cpu_load_short'], expected) - def test_default_retention_policy(self): rsp = self.cli.get_list_retention_policies() self.assertEqual( From 60e4054e1ce1c4ff1e52931679ab21e8ac59a841 Mon Sep 17 00:00:00 2001 From: Wolodja Wentland Date: Mon, 13 Jun 2016 17:34:05 +0200 Subject: [PATCH 331/536] Fix test_create_user_blank_username Creating a user with an empty password now returns a more expressive error message. --- influxdb/tests/server_tests/client_test_with_server.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/influxdb/tests/server_tests/client_test_with_server.py b/influxdb/tests/server_tests/client_test_with_server.py index 9bad5e1b..51ce6a0d 100644 --- a/influxdb/tests/server_tests/client_test_with_server.py +++ b/influxdb/tests/server_tests/client_test_with_server.py @@ -210,9 +210,7 @@ def test_get_list_users(self): def test_create_user_blank_username(self): with self.assertRaises(InfluxDBClientError) as ctx: self.cli.create_user('', 'secret_password') - self.assertEqual(400, ctx.exception.code) - self.assertIn('{"error":"error parsing query: ' - 'found WITH, expected identifier', + self.assertIn('username required', ctx.exception.content) rsp = list(self.cli.query("SHOW USERS")['results']) self.assertEqual(rsp, []) From a99d800d6935d76fe29ce714c1b8b6890a01edec Mon Sep 17 00:00:00 2001 From: Wolodja Wentland Date: Mon, 13 Jun 2016 17:49:53 +0200 Subject: [PATCH 332/536] Allow for dropping of the default retention policy This no longer raises an error and simply results in no retention policy being set for the database. 
--- influxdb/tests/server_tests/client_test_with_server.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/influxdb/tests/server_tests/client_test_with_server.py b/influxdb/tests/server_tests/client_test_with_server.py index fba5aa1b..560c506a 100644 --- a/influxdb/tests/server_tests/client_test_with_server.py +++ b/influxdb/tests/server_tests/client_test_with_server.py @@ -604,16 +604,6 @@ def test_drop_retention_policy(self): rsp ) - def test_drop_retention_policy_default(self): - # Test drop default retention - with self.assertRaises(InfluxDBClientError) as ctx: - self.cli.drop_retention_policy('default', 'db') - - self.assertEqual(400, ctx.exception.code) - self.assertIn( - '{"error":"error parsing query: found DEFAULT, expected POLICY', - ctx.exception.content) - def test_issue_143(self): pt = partial(point, 'a_serie_name', timestamp='2015-03-30T16:16:37Z') pts = [ From f7231276b66c989d99e236da9bf4367c62c03c55 Mon Sep 17 00:00:00 2001 From: Wolodja Wentland Date: Thu, 9 Jun 2016 09:04:00 +0200 Subject: [PATCH 333/536] Add grant_admin_privileges() to InfluxDBClient This allows users to easily alter the 'admin' attribute of a user account and complements the already existing 'revove_admin_privileges' method. --- influxdb/client.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/influxdb/client.py b/influxdb/client.py index b1c7c976..4192614a 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -684,6 +684,18 @@ def delete_series(self, database=None, measurement=None, tags=None): for k, v in tags.items()]) self.query(query_str, database=database) + def grant_admin_privileges(self, username): + """Grant cluster administration privileges to a user. + + :param username: the username to grant privileges to + :type username: str + + .. note:: Only a cluster administrator can create/drop databases + and manage users. 
+ """ + text = "GRANT ALL PRIVILEGES TO {0}".format(username) + self.query(text) + def revoke_admin_privileges(self, username): """Revoke cluster administration privileges from a user. From df4ee82642b8bdc043b21682f12e992c34e40fed Mon Sep 17 00:00:00 2001 From: Wolodja Wentland Date: Thu, 9 Jun 2016 10:33:57 +0200 Subject: [PATCH 334/536] Add tests for grant_admin_privileges() These tests simply check for correct query generation and exception handling if used incorrectly by passing in an empty string as username. --- influxdb/tests/client_test.py | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index f1adaa9b..dabf8a8a 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -710,6 +710,28 @@ def test_get_list_users_empty(self): self.assertListEqual(self.cli.get_list_users(), []) + def test_grant_admin_privileges(self): + example_response = '{"results":[{}]}' + + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.GET, + "http://localhost:8086/query", + text=example_response + ) + self.cli.grant_admin_privileges('test') + + self.assertEqual( + m.last_request.qs['q'][0], + 'grant all privileges to test' + ) + + @raises(Exception) + def test_grant_admin_privileges_invalid(self): + cli = InfluxDBClient('host', 8086, 'username', 'password') + with _mocked_session(cli, 'get', 400): + self.cli.grant_admin_privileges('') + def test_revoke_admin_privileges(self): example_response = '{"results":[{}]}' From a1287be9b59c0a3addda8117ecb902631bcf1862 Mon Sep 17 00:00:00 2001 From: Wolodja Wentland Date: Wed, 8 Jun 2016 11:24:58 +0200 Subject: [PATCH 335/536] Add get_list_privileges() to InfluxDBClient This allows library users to get a list of all privileges an InfluxDB user has been granted. 
--- influxdb/client.py | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/influxdb/client.py b/influxdb/client.py index b1c7c976..e7dedd69 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -728,6 +728,28 @@ def revoke_privilege(self, privilege, database, username): username) self.query(text) + def get_list_privileges(self, username): + """Get the list of all privileges granted to given user. + + :param username: the username to get privileges of + :type username: str + + :returns: all privileges granted to given user + :rtype: list of dictionaries + + :Example: + + :: + + >> privileges = client.get_list_privileges('user1') + >> privileges + [{u'privilege': u'WRITE', u'database': u'db1'}, + {u'privilege': u'ALL PRIVILEGES', u'database': u'db2'}, + {u'privilege': u'NO PRIVILEGES', u'database': u'db3'}] + """ + text = "SHOW GRANTS FOR {0}".format(username) + return list(self.query(text).get_points()) + def send_packet(self, packet): """Send an UDP packet. From 3742a0ec68b0b4204697ceb36debd6f9dd9725fa Mon Sep 17 00:00:00 2001 From: Wolodja Wentland Date: Thu, 9 Jun 2016 10:48:28 +0200 Subject: [PATCH 336/536] Add tests for get_list_privileges() These test check for correct return value and exception handling in case of incorrect usage. 
--- influxdb/tests/client_test.py | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index f1adaa9b..997a0431 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -776,6 +776,31 @@ def test_revoke_privilege_invalid(self): with _mocked_session(cli, 'get', 400): self.cli.revoke_privilege('', 'testdb', 'test') + def test_get_list_privileges(self): + data = {'results': [ + {'series': [ + {'columns': ['database', 'privilege'], + 'values': [ + ['db1', 'READ'], + ['db2', 'ALL PRIVILEGES'], + ['db3', 'NO PRIVILEGES']]} + ]} + ]} + + with _mocked_session(self.cli, 'get', 200, json.dumps(data)): + self.assertListEqual( + self.cli.get_list_privileges('test'), + [{'database': 'db1', 'privilege': 'READ'}, + {'database': 'db2', 'privilege': 'ALL PRIVILEGES'}, + {'database': 'db3', 'privilege': 'NO PRIVILEGES'}] + ) + + @raises(Exception) + def test_get_list_privileges_fails(self): + cli = InfluxDBClient('host', 8086, 'username', 'password') + with _mocked_session(cli, 'get', 401): + cli.get_list_privileges('test') + def test_invalid_port_fails(self): with self.assertRaises(ValueError): InfluxDBClient('host', '80/redir', 'username', 'password') From 8c343b9c86353a7f0dac3002a84711ce7730f88f Mon Sep 17 00:00:00 2001 From: Alexandre Viau Date: Mon, 13 Jun 2016 13:36:26 -0400 Subject: [PATCH 337/536] coveralls badge: influxdb -> influxdata --- README.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.rst b/README.rst index 9d834be2..d8ba7938 100644 --- a/README.rst +++ b/README.rst @@ -11,8 +11,8 @@ InfluxDB-Python is a client for interacting with InfluxDB_. Maintained by @aviau :target: http://influxdb-python.readthedocs.org/ :alt: Documentation Status -.. image:: https://img.shields.io/coveralls/influxdb/influxdb-python.svg - :target: https://coveralls.io/r/influxdb/influxdb-python +.. 
image:: https://img.shields.io/coveralls/influxdata/influxdb-python.svg + :target: https://coveralls.io/r/influxdata/influxdb-python :alt: Coverage .. _readme-about: From 62e9bb8faf80097e1a89c112801a35aa6959e3b3 Mon Sep 17 00:00:00 2001 From: Alexandre Viau Date: Mon, 13 Jun 2016 13:41:55 -0400 Subject: [PATCH 338/536] Updated list of supported python versions --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index d8ba7938..10f8317b 100644 --- a/README.rst +++ b/README.rst @@ -43,7 +43,7 @@ On Debian/Ubuntu, you can install it with this command:: Dependencies ============ -The InfluxDB-Python distribution is supported and tested on Python 2.6, 2.7, 3.2, 3.3, 3.4, PyPy and PyPy3. +The InfluxDB-Python distribution is supported and tested on Python 2.7, 3.3, 3.4, PyPy and PyPy3. **Note:** Python 3.2 is currently untested. See ``.travis.yml``. From 17c1e1e8f14ab3ed43ba3121b58dac49e0f7d677 Mon Sep 17 00:00:00 2001 From: aviau Date: Sun, 26 Jun 2016 21:46:57 +0200 Subject: [PATCH 339/536] 3.0.0 release --- influxdb/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/__init__.py b/influxdb/__init__.py index 2269f59d..ad0f6052 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -19,4 +19,4 @@ ] -__version__ = '2.12.0' +__version__ = '3.0.0' From 38afc0389367d2db7f80da569c45bd8b8e88a079 Mon Sep 17 00:00:00 2001 From: Jin Wook Kim Date: Thu, 28 Jul 2016 11:29:36 +0900 Subject: [PATCH 340/536] Data types I wanted to know supported data types of influxDB. I know it's in docs, but this might help too!! 
--- examples/tutorial.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/examples/tutorial.py b/examples/tutorial.py index 64e95778..e790ece5 100644 --- a/examples/tutorial.py +++ b/examples/tutorial.py @@ -19,7 +19,10 @@ def main(host='localhost', port=8086): }, "time": "2009-11-10T23:00:00Z", "fields": { - "value": 0.64 + "Float_value": 0.64, + "Int_value": 3, + "String_value": "Text", + "Bool_value": True } } ] From 6bd549b3589706d96421f51ccea24806ce8b834a Mon Sep 17 00:00:00 2001 From: Greg Aker Date: Fri, 29 Jul 2016 13:08:24 -0500 Subject: [PATCH 341/536] Fix tutorial server data error. Fixes influxdata/influxdb-python#213 --- examples/tutorial_server_data.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/examples/tutorial_server_data.py b/examples/tutorial_server_data.py index b5b26d1c..f9ed3f5a 100644 --- a/examples/tutorial_server_data.py +++ b/examples/tutorial_server_data.py @@ -1,3 +1,5 @@ + +from __future__ import print_function import argparse from influxdb import InfluxDBClient @@ -38,7 +40,8 @@ def main(host='localhost', port=8086, nb_day=15): }, } series.append(pointValues) - print series + + print(series) client = InfluxDBClient(host, port, USER, PASSWORD, DBNAME) @@ -51,7 +54,7 @@ def main(host='localhost', port=8086, nb_day=15): client.create_database(DBNAME) print("Create a retention policy") - retention_policy = 'awesome_policy' + retention_policy = 'server_data' client.create_retention_policy(retention_policy, '3d', 3, default=True) print("Write points #: {0}".format(total_records)) @@ -59,12 +62,12 @@ def main(host='localhost', port=8086, nb_day=15): time.sleep(2) - query = "SELECT MEAN(value) FROM %s WHERE time > now() - 10d GROUP BY time(500m)" % (metric) - result = client.query(query, database=DBNAME, raw=False) - print (result) + query = "SELECT MEAN(value) FROM {} WHERE time > now() - 10d GROUP BY time(500m)".format(metric) + result = client.query(query, database=DBNAME) + 
print(result) print("Result: {0}".format(result)) - print("Drop database: " + DBNAME) + print("Drop database: {}".format(DBNAME)) client.drop_database(DBNAME) From ff342dd0d1dc6d5077008aecc47b1d0062aaebdb Mon Sep 17 00:00:00 2001 From: Greg Aker Date: Sat, 30 Jul 2016 00:05:04 -0500 Subject: [PATCH 342/536] Remove get_list_servers() Fix influxdata/influxdb-python#342 ``SHOW_SERVERS`` was removed in influxdata/influxdb#6470 --- influxdb/client.py | 19 ------------------- influxdb/tests/client_test.py | 33 --------------------------------- 2 files changed, 52 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 5e60011c..e65254a3 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -588,25 +588,6 @@ def get_list_retention_policies(self, database=None): ) return list(rsp.get_points()) - def get_list_servers(self): - """Get the list of servers in InfluxDB cluster. - - :returns: all nodes in InfluxDB cluster - :rtype: list of dictionaries - - :Example: - - :: - - >> servers = client.get_list_servers() - >> servers - [{'cluster_addr': 'server01:8088', - 'id': 1, - 'raft': True, - 'raft-leader': True}] - """ - return list(self.query("SHOW SERVERS").get_points()) - def get_list_users(self): """Get the list of all users in InfluxDB. 
diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index e602cdff..a50d93ce 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -484,39 +484,6 @@ def test_get_list_database_fails(self): with _mocked_session(cli, 'get', 401): cli.get_list_database() - def test_get_list_servers(self): - data = {'results': [ - {'series': [ - {'columns': ['id', 'cluster_addr', 'raft', 'raft-leader'], - 'values': [ - [1, 'server01:8088', True, True], - [2, 'server02:8088', True, False], - [3, 'server03:8088', True, False]]}]} - ]} - - with _mocked_session(self.cli, 'get', 200, json.dumps(data)): - self.assertListEqual( - self.cli.get_list_servers(), - [{'cluster_addr': 'server01:8088', - 'id': 1, - 'raft': True, - 'raft-leader': True}, - {'cluster_addr': 'server02:8088', - 'id': 2, - 'raft': True, - 'raft-leader': False}, - {'cluster_addr': 'server03:8088', - 'id': 3, - 'raft': True, - 'raft-leader': False}] - ) - - @raises(Exception) - def test_get_list_servers_fails(self): - cli = InfluxDBClient('host', 8086, 'username', 'password') - with _mocked_session(cli, 'get', 401): - cli.get_list_servers() - def test_create_retention_policy_default(self): example_response = '{"results":[{}]}' From 7debaca04591ad34b6d0cf9545a82b40d9abc6b5 Mon Sep 17 00:00:00 2001 From: Greg Aker Date: Wed, 3 Aug 2016 09:10:30 -0500 Subject: [PATCH 343/536] Merge pull request #354 from gaker/remove-cluster-client (Thanks @gaker!) 
Remove cluster client --- README.rst | 14 --- docs/source/api-documentation.rst | 10 -- influxdb/__init__.py | 2 - influxdb/client.py | 168 ------------------------------ influxdb/tests/client_test.py | 126 +--------------------- 5 files changed, 1 insertion(+), 319 deletions(-) diff --git a/README.rst b/README.rst index 10f8317b..d3a8d775 100644 --- a/README.rst +++ b/README.rst @@ -108,20 +108,6 @@ Here's a basic example (for more see the examples directory):: >>> print("Result: {0}".format(result)) -If you want to connect to a cluster, you could initialize a ``InfluxDBClusterClient``:: - - $ python - - >>> from influxdb import InfluxDBClusterClient - - >>> cc = InfluxDBClusterClient(hosts = [('192.168.0.1', 8086), - ('192.168.0.2', 8086), - ('192.168.0.3', 8086)], - username='root', - password='root', - database='example') - -``InfluxDBClusterClient`` has the same methods as ``InfluxDBClient``, it basically is a proxy to multiple InfluxDBClients. Testing ======= diff --git a/docs/source/api-documentation.rst b/docs/source/api-documentation.rst index c6178fed..d00600e6 100644 --- a/docs/source/api-documentation.rst +++ b/docs/source/api-documentation.rst @@ -45,16 +45,6 @@ These clients are initiated in the same way as the :members: :undoc-members: ------------------------------- -:class:`InfluxDBClusterClient` ------------------------------- - - -.. currentmodule:: influxdb.InfluxDBClusterClient -.. 
autoclass:: influxdb.InfluxDBClusterClient - :members: - :undoc-members: - ------------------------ :class:`DataFrameClient` ------------------------ diff --git a/influxdb/__init__.py b/influxdb/__init__.py index ad0f6052..fc2e4261 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -6,14 +6,12 @@ from __future__ import unicode_literals from .client import InfluxDBClient -from .client import InfluxDBClusterClient from .dataframe_client import DataFrameClient from .helper import SeriesHelper __all__ = [ 'InfluxDBClient', - 'InfluxDBClusterClient', 'DataFrameClient', 'SeriesHelper', ] diff --git a/influxdb/client.py b/influxdb/client.py index e65254a3..b6bbc72f 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -7,12 +7,8 @@ from __future__ import print_function from __future__ import unicode_literals -from functools import wraps import json import socket -import time -import threading -import random import requests import requests.exceptions from sys import version_info @@ -114,8 +110,6 @@ def __init__(self, 'Accept': 'text/plain' } - # _baseurl, _host and _port are properties to allow InfluxDBClusterClient - # to override them with thread-local variables @property def _baseurl(self): return self._get_baseurl() @@ -753,168 +747,6 @@ def send_packet(self, packet): self.udp_socket.sendto(data, (self._host, self.udp_port)) -class InfluxDBClusterClient(object): - """The :class:`~.InfluxDBClusterClient` is the client for connecting - to a cluster of InfluxDB servers. Each query hits different host from the - list of hosts. - - :param hosts: all hosts to be included in the cluster, each of which - should be in the format (address, port), - e.g. [('127.0.0.1', 8086), ('127.0.0.1', 9096)]. Defaults to - [('localhost', 8086)] - :type hosts: list of tuples - :param shuffle: whether the queries should hit servers evenly(randomly), - defaults to True - :type shuffle: bool - :param client_base_class: the base class for the cluster client. 
- This parameter is used to enable the support of different client - types. Defaults to :class:`~.InfluxDBClient` - :param healing_delay: the delay in seconds, counting from last failure of - a server, before re-adding server to the list of working servers. - Defaults to 15 minutes (900 seconds) - """ - - def __init__(self, - hosts=[('localhost', 8086)], - username='root', - password='root', - database=None, - ssl=False, - verify_ssl=False, - timeout=None, - use_udp=False, - udp_port=4444, - shuffle=True, - client_base_class=InfluxDBClient, - healing_delay=900, - ): - self.clients = [self] # Keep it backwards compatible - self.hosts = hosts - self.bad_hosts = [] # Corresponding server has failures in history - self.shuffle = shuffle - self.healing_delay = healing_delay - self._last_healing = time.time() - host, port = self.hosts[0] - self._hosts_lock = threading.Lock() - self._thread_local = threading.local() - self._client = client_base_class(host=host, - port=port, - username=username, - password=password, - database=database, - ssl=ssl, - verify_ssl=verify_ssl, - timeout=timeout, - use_udp=use_udp, - udp_port=udp_port) - for method in dir(client_base_class): - orig_attr = getattr(client_base_class, method, '') - if method.startswith('_') or not callable(orig_attr): - continue - - setattr(self, method, self._make_func(orig_attr)) - - self._client._get_host = self._get_host - self._client._get_port = self._get_port - self._client._get_baseurl = self._get_baseurl - self._update_client_host(self.hosts[0]) - - @staticmethod - def from_DSN(dsn, client_base_class=InfluxDBClient, - shuffle=True, **kwargs): - """Same as :meth:`~.InfluxDBClient.from_DSN`, but supports - multiple servers. - - :param shuffle: whether the queries should hit servers - evenly(randomly), defaults to True - :type shuffle: bool - :param client_base_class: the base class for all clients in the - cluster. This parameter is used to enable the support of - different client types. 
Defaults to :class:`~.InfluxDBClient` - - :Example: - - :: - - >> cluster = InfluxDBClusterClient.from_DSN('influxdb://usr:pwd\ -@host1:8086,usr:pwd@host2:8086/db_name', timeout=5) - >> type(cluster) - - >> cluster.hosts - [('host1', 8086), ('host2', 8086)] - >> cluster._client - ] - """ - init_args = parse_dsn(dsn) - init_args.update(**kwargs) - init_args['shuffle'] = shuffle - init_args['client_base_class'] = client_base_class - cluster_client = InfluxDBClusterClient(**init_args) - return cluster_client - - def _update_client_host(self, host): - self._thread_local.host, self._thread_local.port = host - self._thread_local.baseurl = "{0}://{1}:{2}".format( - self._client._scheme, - self._client._host, - self._client._port - ) - - def _get_baseurl(self): - return self._thread_local.baseurl - - def _get_host(self): - return self._thread_local.host - - def _get_port(self): - return self._thread_local.port - - def _make_func(self, orig_func): - - @wraps(orig_func) - def func(*args, **kwargs): - now = time.time() - with self._hosts_lock: - if (self.bad_hosts and - self._last_healing + self.healing_delay < now): - h = self.bad_hosts.pop(0) - self.hosts.append(h) - self._last_healing = now - - if self.shuffle: - random.shuffle(self.hosts) - - hosts = self.hosts + self.bad_hosts - - for h in hosts: - bad_host = False - try: - self._update_client_host(h) - return orig_func(self._client, *args, **kwargs) - except InfluxDBClientError as e: - # Errors caused by user's requests, re-raise - raise e - except ValueError as e: - raise e - except Exception as e: - # Errors that might caused by server failure, try another - bad_host = True - with self._hosts_lock: - if h in self.hosts: - self.hosts.remove(h) - self.bad_hosts.append(h) - self._last_healing = now - finally: - with self._hosts_lock: - if not bad_host and h in self.bad_hosts: - self.bad_hosts.remove(h) - self.hosts.append(h) - - raise InfluxDBServerError("InfluxDB: no viable server!") - - return func - - def 
parse_dsn(dsn): conn_params = urlparse(dsn) init_args = {} diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index a50d93ce..18de20f8 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -23,7 +23,6 @@ import requests import requests.exceptions import socket -import time import requests_mock import random from nose.tools import raises @@ -32,8 +31,7 @@ import mock import unittest -from influxdb import InfluxDBClient, InfluxDBClusterClient -from influxdb.client import InfluxDBServerError +from influxdb import InfluxDBClient def _build_response_object(status_code=200, content=""): @@ -813,125 +811,3 @@ def query(self, raise Exception("Fail Twice") else: return "Success" - - -class TestInfluxDBClusterClient(unittest.TestCase): - - def setUp(self): - # By default, raise exceptions on warnings - warnings.simplefilter('error', FutureWarning) - - self.hosts = [('host1', 8086), ('host2', 8086), ('host3', 8086)] - self.dsn_string = 'influxdb://uSr:pWd@host1:8086,uSr:pWd@host2:8086/db' - - def test_init(self): - cluster = InfluxDBClusterClient(hosts=self.hosts, - username='username', - password='password', - database='database', - shuffle=False, - client_base_class=FakeClient) - self.assertEqual(3, len(cluster.hosts)) - self.assertEqual(0, len(cluster.bad_hosts)) - self.assertIn((cluster._client._host, - cluster._client._port), cluster.hosts) - - def test_one_server_fails(self): - cluster = InfluxDBClusterClient(hosts=self.hosts, - database='database', - shuffle=False, - client_base_class=FakeClient) - self.assertEqual('Success', cluster.query('Fail once')) - self.assertEqual(2, len(cluster.hosts)) - self.assertEqual(1, len(cluster.bad_hosts)) - - def test_two_servers_fail(self): - cluster = InfluxDBClusterClient(hosts=self.hosts, - database='database', - shuffle=False, - client_base_class=FakeClient) - self.assertEqual('Success', cluster.query('Fail twice')) - self.assertEqual(1, len(cluster.hosts)) - self.assertEqual(2, 
len(cluster.bad_hosts)) - - def test_all_fail(self): - cluster = InfluxDBClusterClient(hosts=self.hosts, - database='database', - shuffle=True, - client_base_class=FakeClient) - with self.assertRaises(InfluxDBServerError): - cluster.query('Fail') - self.assertEqual(0, len(cluster.hosts)) - self.assertEqual(3, len(cluster.bad_hosts)) - - def test_all_good(self): - cluster = InfluxDBClusterClient(hosts=self.hosts, - database='database', - shuffle=True, - client_base_class=FakeClient) - self.assertEqual('Success', cluster.query('')) - self.assertEqual(3, len(cluster.hosts)) - self.assertEqual(0, len(cluster.bad_hosts)) - - def test_recovery(self): - cluster = InfluxDBClusterClient(hosts=self.hosts, - database='database', - shuffle=True, - client_base_class=FakeClient) - with self.assertRaises(InfluxDBServerError): - cluster.query('Fail') - self.assertEqual('Success', cluster.query('')) - self.assertEqual(1, len(cluster.hosts)) - self.assertEqual(2, len(cluster.bad_hosts)) - - def test_healing(self): - cluster = InfluxDBClusterClient(hosts=self.hosts, - database='database', - shuffle=True, - healing_delay=1, - client_base_class=FakeClient) - with self.assertRaises(InfluxDBServerError): - cluster.query('Fail') - self.assertEqual('Success', cluster.query('')) - time.sleep(1.1) - self.assertEqual('Success', cluster.query('')) - self.assertEqual(2, len(cluster.hosts)) - self.assertEqual(1, len(cluster.bad_hosts)) - time.sleep(1.1) - self.assertEqual('Success', cluster.query('')) - self.assertEqual(3, len(cluster.hosts)) - self.assertEqual(0, len(cluster.bad_hosts)) - - def test_dsn(self): - cli = InfluxDBClusterClient.from_DSN(self.dsn_string) - self.assertEqual([('host1', 8086), ('host2', 8086)], cli.hosts) - self.assertEqual('http://host1:8086', cli._client._baseurl) - self.assertEqual('uSr', cli._client._username) - self.assertEqual('pWd', cli._client._password) - self.assertEqual('db', cli._client._database) - self.assertFalse(cli._client.use_udp) - - cli = 
InfluxDBClusterClient.from_DSN('udp+' + self.dsn_string) - self.assertTrue(cli._client.use_udp) - - cli = InfluxDBClusterClient.from_DSN('https+' + self.dsn_string) - self.assertEqual('https://host1:8086', cli._client._baseurl) - - cli = InfluxDBClusterClient.from_DSN('https+' + self.dsn_string, - **{'ssl': False}) - self.assertEqual('http://host1:8086', cli._client._baseurl) - - def test_dsn_password_caps(self): - cli = InfluxDBClusterClient.from_DSN( - 'https+influxdb://usr:pWd@host:8086/db') - self.assertEqual('pWd', cli._client._password) - - def test_dsn_mixed_scheme_case(self): - cli = InfluxDBClusterClient.from_DSN( - 'hTTps+inFLUxdb://usr:pWd@host:8086/db') - self.assertEqual('pWd', cli._client._password) - self.assertEqual('https://host:8086', cli._client._baseurl) - - cli = InfluxDBClusterClient.from_DSN( - 'uDP+influxdb://usr:pwd@host1:8086,usr:pwd@host2:8086/db') - self.assertTrue(cli._client.use_udp) From 97952ba1bb0858774a2c10c073db9377adfe57a3 Mon Sep 17 00:00:00 2001 From: Oskari Saarenmaa Date: Mon, 15 Aug 2016 13:41:59 +0300 Subject: [PATCH 344/536] Properly quote all identifiers and literals in InfluxDBClient Previously InfluxDBClient tried to quote some of the identifiers (database and usernames, etc), but a number of API calls didn't have any quoting, and the ones that had quoting didn't account for special characters inside the identifiers. Add new `quote_ident()` and `quote_literal()` functions to line_protocol.py and use them consistently in the client. 
--- influxdb/client.py | 40 ++++++++++--------- influxdb/line_protocol.py | 30 ++++++++++---- influxdb/tests/client_test.py | 8 ++-- .../server_tests/client_test_with_server.py | 15 ------- influxdb/tests/test_line_protocol.py | 12 ++++++ 5 files changed, 60 insertions(+), 45 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index b6bbc72f..81e1c8f8 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -13,7 +13,7 @@ import requests.exceptions from sys import version_info -from influxdb.line_protocol import make_lines +from influxdb.line_protocol import make_lines, quote_ident, quote_literal from influxdb.resultset import ResultSet from .exceptions import InfluxDBClientError from .exceptions import InfluxDBServerError @@ -532,8 +532,8 @@ def alter_retention_policy(self, name, database=None, should be set. Otherwise the operation will fail. """ query_string = ( - "ALTER RETENTION POLICY \"{0}\" ON \"{1}\"" - ).format(name, database or self._database) + "ALTER RETENTION POLICY {0} ON {1}" + ).format(quote_ident(name), quote_ident(database or self._database)) if duration: query_string += " DURATION {0}".format(duration) if replication: @@ -553,8 +553,8 @@ def drop_retention_policy(self, name, database=None): :type database: str """ query_string = ( - "DROP RETENTION POLICY \"{0}\" ON \"{1}\"" - ).format(name, database or self._database) + "DROP RETENTION POLICY {0} ON {1}" + ).format(quote_ident(name), quote_ident(database or self._database)) self.query(query_string) def get_list_retention_policies(self, database=None): @@ -611,8 +611,8 @@ def create_user(self, username, password, admin=False): privileges or not :type admin: boolean """ - text = "CREATE USER \"{0}\" WITH PASSWORD '{1}'".format(username, - password) + text = "CREATE USER {0} WITH PASSWORD {1}".format( + quote_ident(username), quote_literal(password)) if admin: text += ' WITH ALL PRIVILEGES' self.query(text) @@ -623,7 +623,7 @@ def drop_user(self, username): :param username: the 
username to drop :type username: str """ - text = "DROP USER {0}".format(username) + text = "DROP USER {0}".format(quote_ident(username)) self.query(text) def set_user_password(self, username, password): @@ -634,7 +634,8 @@ def set_user_password(self, username, password): :param password: the new password for the user :type password: str """ - text = "SET PASSWORD FOR {0} = '{1}'".format(username, password) + text = "SET PASSWORD FOR {0} = {1}".format( + quote_ident(username), quote_literal(password)) self.query(text) def delete_series(self, database=None, measurement=None, tags=None): @@ -652,11 +653,12 @@ def delete_series(self, database=None, measurement=None, tags=None): database = database or self._database query_str = 'DROP SERIES' if measurement: - query_str += ' FROM "{0}"'.format(measurement) + query_str += ' FROM {0}'.format(quote_ident(measurement)) if tags: - query_str += ' WHERE ' + ' and '.join(["{0}='{1}'".format(k, v) - for k, v in tags.items()]) + tag_eq_list = ["{0}={1}".format(quote_ident(k), quote_literal(v)) + for k, v in tags.items()] + query_str += ' WHERE ' + ' AND '.join(tag_eq_list) self.query(query_str, database=database) def grant_admin_privileges(self, username): @@ -668,7 +670,7 @@ def grant_admin_privileges(self, username): .. note:: Only a cluster administrator can create/drop databases and manage users. """ - text = "GRANT ALL PRIVILEGES TO {0}".format(username) + text = "GRANT ALL PRIVILEGES TO {0}".format(quote_ident(username)) self.query(text) def revoke_admin_privileges(self, username): @@ -680,7 +682,7 @@ def revoke_admin_privileges(self, username): .. note:: Only a cluster administrator can create/ drop databases and manage users. 
""" - text = "REVOKE ALL PRIVILEGES FROM {0}".format(username) + text = "REVOKE ALL PRIVILEGES FROM {0}".format(quote_ident(username)) self.query(text) def grant_privilege(self, privilege, database, username): @@ -695,8 +697,8 @@ def grant_privilege(self, privilege, database, username): :type username: str """ text = "GRANT {0} ON {1} TO {2}".format(privilege, - database, - username) + quote_ident(database), + quote_ident(username)) self.query(text) def revoke_privilege(self, privilege, database, username): @@ -711,8 +713,8 @@ def revoke_privilege(self, privilege, database, username): :type username: str """ text = "REVOKE {0} ON {1} FROM {2}".format(privilege, - database, - username) + quote_ident(database), + quote_ident(username)) self.query(text) def get_list_privileges(self, username): @@ -734,7 +736,7 @@ def get_list_privileges(self, username): {u'privilege': u'ALL PRIVILEGES', u'database': u'db2'}, {u'privilege': u'NO PRIVILEGES', u'database': u'db3'}] """ - text = "SHOW GRANTS FOR {0}".format(username) + text = "SHOW GRANTS FOR {0}".format(quote_ident(username)) return list(self.query(text).get_points()) def send_packet(self, packet): diff --git a/influxdb/line_protocol.py b/influxdb/line_protocol.py index 4d3dc544..9e0f0743 100644 --- a/influxdb/line_protocol.py +++ b/influxdb/line_protocol.py @@ -53,16 +53,32 @@ def _escape_tag(tag): ) +def quote_ident(value): + return "\"{0}\"".format( + value.replace( + "\\", "\\\\" + ).replace( + "\"", "\\\"" + ).replace( + "\n", "\\n" + ) + ) + + +def quote_literal(value): + return "'{0}'".format( + value.replace( + "\\", "\\\\" + ).replace( + "'", "\\'" + ) + ) + + def _escape_value(value): value = _get_unicode(value) if isinstance(value, text_type) and value != '': - return "\"{0}\"".format( - value.replace( - "\"", "\\\"" - ).replace( - "\n", "\\n" - ) - ) + return quote_ident(value) elif isinstance(value, integer_types) and not isinstance(value, bool): return str(value) + 'i' else: diff --git 
a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index 18de20f8..84b7eb77 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -688,7 +688,7 @@ def test_grant_admin_privileges(self): self.assertEqual( m.last_request.qs['q'][0], - 'grant all privileges to test' + 'grant all privileges to "test"' ) @raises(Exception) @@ -710,7 +710,7 @@ def test_revoke_admin_privileges(self): self.assertEqual( m.last_request.qs['q'][0], - 'revoke all privileges from test' + 'revoke all privileges from "test"' ) @raises(Exception) @@ -732,7 +732,7 @@ def test_grant_privilege(self): self.assertEqual( m.last_request.qs['q'][0], - 'grant read on testdb to test' + 'grant read on "testdb" to "test"' ) @raises(Exception) @@ -754,7 +754,7 @@ def test_revoke_privilege(self): self.assertEqual( m.last_request.qs['q'][0], - 'revoke read on testdb from test' + 'revoke read on "testdb" from "test"' ) @raises(Exception) diff --git a/influxdb/tests/server_tests/client_test_with_server.py b/influxdb/tests/server_tests/client_test_with_server.py index 560c506a..86faf204 100644 --- a/influxdb/tests/server_tests/client_test_with_server.py +++ b/influxdb/tests/server_tests/client_test_with_server.py @@ -200,14 +200,6 @@ def test_drop_user_nonexisting(self): self.assertIn('user not found', ctx.exception.content) - def test_drop_user_invalid(self): - with self.assertRaises(InfluxDBClientError) as ctx: - self.cli.drop_user('very invalid') - self.assertEqual(400, ctx.exception.code) - self.assertIn('{"error":"error parsing query: ' - 'found invalid, expected', - ctx.exception.content) - @unittest.skip("Broken as of 0.9.0") def test_revoke_admin_privileges(self): self.cli.create_user('test', 'test', admin=True) @@ -217,13 +209,6 @@ def test_revoke_admin_privileges(self): self.assertEqual([{'user': 'test', 'admin': False}], self.cli.get_list_users()) - def test_revoke_admin_privileges_invalid(self): - with self.assertRaises(InfluxDBClientError) as ctx: - 
self.cli.revoke_admin_privileges('') - self.assertEqual(400, ctx.exception.code) - self.assertIn('{"error":"error parsing query: ', - ctx.exception.content) - def test_grant_privilege(self): self.cli.create_user('test', 'test') self.cli.create_database('testdb') diff --git a/influxdb/tests/test_line_protocol.py b/influxdb/tests/test_line_protocol.py index 2a95c4db..1d0b377c 100644 --- a/influxdb/tests/test_line_protocol.py +++ b/influxdb/tests/test_line_protocol.py @@ -76,3 +76,15 @@ def test_make_lines_unicode(self): line_protocol.make_lines(data), 'test,unicode_tag=\'Привет!\' unicode_val="Привет!"\n' ) + + def test_quote_ident(self): + self.assertEqual( + line_protocol.quote_ident(r"""\foo ' bar " Örf"""), + r'''"\\foo ' bar \" Örf"''' + ) + + def test_quote_literal(self): + self.assertEqual( + line_protocol.quote_literal(r"""\foo ' bar " Örf"""), + r"""'\\foo \' bar " Örf'""" + ) From 1343ae9590fbe501eea57ba4d44f052effe8efa6 Mon Sep 17 00:00:00 2001 From: Matthew Bartos Date: Tue, 6 Sep 2016 10:09:29 -0400 Subject: [PATCH 345/536] Dataframe client support for (i) tag columns and (ii) direct conversion to line protocol (#364) * Addressed issues 362 and 363 * Added unit tests for tag columns. All tests working. * Added more comments and docstrings * Rolled back changes to retention policy duration. * Added comments to _dataframe_client. Re-pushing to try and fix travis build. * Try rebuilding without cache * Minor changes to _stringify_dataframe. Added test for numeric precision. * Incorporated fixes from @tzonghao. Fixed docstrings. 
--- .travis.yml | 10 +- influxdb/_dataframe_client.py | 239 ++++++++++++++++++++++-- influxdb/client.py | 58 ++++-- influxdb/tests/dataframe_client_test.py | 206 ++++++++++++++++++-- 4 files changed, 457 insertions(+), 56 deletions(-) diff --git a/.travis.yml b/.travis.yml index 1aeb0949..868fe5cc 100644 --- a/.travis.yml +++ b/.travis.yml @@ -33,8 +33,8 @@ notifications: sudo: false # Travis caching -cache: - directories: - - $HOME/.cache/pip -before_cache: - - rm -f $HOME/.cache/pip/log/debug.log +cache: false +# directories: +# - $HOME/.cache/pip +#before_cache: +# - rm -f $HOME/.cache/pip/log/debug.log diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index 2980be90..ddae0862 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -35,9 +35,18 @@ class DataFrameClient(InfluxDBClient): EPOCH = pd.Timestamp('1970-01-01 00:00:00.000+00:00') - def write_points(self, dataframe, measurement, tags=None, - time_precision=None, database=None, retention_policy=None, - batch_size=None): + def write_points(self, + dataframe, + measurement, + tags=None, + tag_columns=[], + field_columns=[], + time_precision=None, + database=None, + retention_policy=None, + batch_size=None, + protocol='line', + numeric_precision=None): """ Write to multiple time series names. @@ -50,27 +59,67 @@ def write_points(self, dataframe, measurement, tags=None, instead of all at one time. Useful for when doing data dumps from one database to another or when doing a massive write operation :type batch_size: int + :param protocol: Protocol for writing data. Either 'line' or 'json'. + :param numeric_precision: Precision for floating point values. + Either None, 'full' or some int, where int is the desired decimal + precision. 'full' preserves full precision for int and float + datatypes. Defaults to None, which preserves 14-15 significant + figures for float and all significant figures for int datatypes. 
""" if batch_size: - number_batches = int(math.ceil( - len(dataframe) / float(batch_size))) + number_batches = int(math.ceil(len(dataframe) / float(batch_size))) for batch in range(number_batches): start_index = batch * batch_size end_index = (batch + 1) * batch_size - points = self._convert_dataframe_to_json( - dataframe.ix[start_index:end_index].copy(), - measurement, tags, time_precision - ) + if protocol == 'line': + points = self._convert_dataframe_to_lines( + dataframe.ix[start_index:end_index].copy(), + measurement=measurement, + global_tags=tags, + time_precision=time_precision, + tag_columns=tag_columns, + field_columns=field_columns, + numeric_precision=numeric_precision) + else: + points = self._convert_dataframe_to_json( + dataframe.ix[start_index:end_index].copy(), + measurement=measurement, + tags=tags, + time_precision=time_precision, + tag_columns=tag_columns, + field_columns=field_columns) super(DataFrameClient, self).write_points( - points, time_precision, database, retention_policy) + points, + time_precision, + database, + retention_policy, + protocol=protocol) return True else: - points = self._convert_dataframe_to_json( - dataframe, measurement, tags, time_precision - ) + if protocol == 'line': + points = self._convert_dataframe_to_lines( + dataframe, + measurement=measurement, + global_tags=tags, + tag_columns=tag_columns, + field_columns=field_columns, + time_precision=time_precision, + numeric_precision=numeric_precision) + else: + points = self._convert_dataframe_to_json( + dataframe, + measurement=measurement, + tags=tags, + time_precision=time_precision, + tag_columns=tag_columns, + field_columns=field_columns) super(DataFrameClient, self).write_points( - points, time_precision, database, retention_policy) + points, + time_precision, + database, + retention_policy, + protocol=protocol) return True def query(self, query, chunked=False, database=None): @@ -108,7 +157,12 @@ def _to_dataframe(self, rs): result[key] = df return result - def 
_convert_dataframe_to_json(self, dataframe, measurement, tags=None, + def _convert_dataframe_to_json(self, + dataframe, + measurement, + tags=None, + tag_columns=[], + field_columns=[], time_precision=None): if not isinstance(dataframe, pd.DataFrame): @@ -119,6 +173,15 @@ def _convert_dataframe_to_json(self, dataframe, measurement, tags=None, raise TypeError('Must be DataFrame with DatetimeIndex or \ PeriodIndex.') + # Make sure tags and tag columns are correctly typed + tag_columns = tag_columns if tag_columns else [] + field_columns = field_columns if field_columns else [] + tags = tags if tags else {} + # Assume field columns are all columns not included in tag columns + if not field_columns: + field_columns = list( + set(dataframe.columns).difference(set(tag_columns))) + dataframe.index = dataframe.index.to_datetime() if dataframe.index.tzinfo is None: dataframe.index = dataframe.index.tz_localize('UTC') @@ -140,13 +203,151 @@ def _convert_dataframe_to_json(self, dataframe, measurement, tags=None, points = [ {'measurement': measurement, - 'tags': tags if tags else {}, + 'tags': dict(list(tag.items()) + list(tags.items())), 'fields': rec, - 'time': int(ts.value / precision_factor) - } - for ts, rec in zip(dataframe.index, dataframe.to_dict('record'))] + 'time': int(ts.value / precision_factor)} + for ts, tag, rec in zip(dataframe.index, + dataframe[tag_columns].to_dict('record'), + dataframe[field_columns].to_dict('record')) + ] + + return points + + def _convert_dataframe_to_lines(self, + dataframe, + measurement, + field_columns=[], + tag_columns=[], + global_tags={}, + time_precision=None, + numeric_precision=None): + + if not isinstance(dataframe, pd.DataFrame): + raise TypeError('Must be DataFrame, but type was: {0}.' 
+ .format(type(dataframe))) + if not (isinstance(dataframe.index, pd.tseries.period.PeriodIndex) or + isinstance(dataframe.index, pd.tseries.index.DatetimeIndex)): + raise TypeError('Must be DataFrame with DatetimeIndex or \ + PeriodIndex.') + + # Create a Series of columns for easier indexing + column_series = pd.Series(dataframe.columns) + + if field_columns is None: + field_columns = [] + if tag_columns is None: + tag_columns = [] + + # Make sure field_columns and tag_columns are lists + field_columns = list(field_columns) if list(field_columns) else [] + tag_columns = list(tag_columns) if list(tag_columns) else [] + + # If field columns but no tag columns, assume rest of columns are tags + if field_columns and (not tag_columns): + tag_columns = list(column_series[~column_series.isin( + field_columns)]) + + # If no field columns, assume non-tag columns are fields + if not field_columns: + field_columns = list(column_series[~column_series.isin( + tag_columns)]) + + precision_factor = { + "n": 1, + "u": 1e3, + "ms": 1e6, + "s": 1e9, + "m": 1e9 * 60, + "h": 1e9 * 3600, + }.get(time_precision, 1) + + # Make array of timestamp ints + time = ((dataframe.index.to_datetime().values.astype(int) / + precision_factor).astype(int).astype(str)) + + # If tag columns exist, make an array of formatted tag keys and values + if tag_columns: + tag_df = dataframe[tag_columns] + tag_df = self._stringify_dataframe( + tag_df, numeric_precision, datatype='tag') + tags = (',' + ( + (tag_df.columns.values + '=').tolist() + tag_df)).sum(axis=1) + del tag_df + + else: + tags = '' + + # Make an array of formatted field keys and values + field_df = dataframe[field_columns] + field_df = self._stringify_dataframe( + field_df, numeric_precision, datatype='field') + field_df = (field_df.columns.values + '=').tolist() + field_df + field_df[field_df.columns[1:]] = ',' + field_df[field_df.columns[1:]] + fields = field_df.sum(axis=1) + del field_df + + # Add any global tags to formatted tag strings 
+ if global_tags: + global_tags = ','.join(['='.join([tag, global_tags[tag]]) + for tag in global_tags]) + if tag_columns: + tags = tags + ',' + global_tags + else: + tags = ',' + global_tags + + # Generate line protocol string + points = (measurement + tags + ' ' + fields + ' ' + time).tolist() return points + def _stringify_dataframe(self, + dataframe, + numeric_precision, + datatype='field'): + + # Find int and string columns for field-type data + int_columns = dataframe.select_dtypes(include=['integer']).columns + string_columns = dataframe.select_dtypes(include=['object']).columns + + # Convert dataframe to string + if numeric_precision is None: + # If no precision specified, convert directly to string (fast) + dataframe = dataframe.astype(str) + elif numeric_precision == 'full': + # If full precision, use repr to get full float precision + float_columns = (dataframe.select_dtypes(include=['floating']) + .columns) + nonfloat_columns = dataframe.columns[~dataframe.columns.isin( + float_columns)] + dataframe[float_columns] = dataframe[float_columns].applymap(repr) + dataframe[nonfloat_columns] = (dataframe[nonfloat_columns] + .astype(str)) + elif isinstance(numeric_precision, int): + # If precision is specified, round to appropriate precision + float_columns = (dataframe.select_dtypes(include=['floating']) + .columns) + nonfloat_columns = dataframe.columns[~dataframe.columns.isin( + float_columns)] + dataframe[float_columns] = (dataframe[float_columns] + .round(numeric_precision)) + # If desired precision is > 10 decimal places, need to use repr + if numeric_precision > 10: + dataframe[float_columns] = (dataframe[float_columns] + .applymap(repr)) + dataframe[nonfloat_columns] = (dataframe[nonfloat_columns] + .astype(str)) + else: + dataframe = dataframe.astype(str) + else: + raise ValueError('Invalid numeric precision.') + + if datatype == 'field': + # If dealing with fields, format ints and strings correctly + dataframe[int_columns] = dataframe[int_columns] + 
'i' + dataframe[string_columns] = '"' + dataframe[string_columns] + '"' + + dataframe.columns = dataframe.columns.astype(str) + return dataframe + def _datetime_to_epoch(self, datetime, time_precision='s'): seconds = (datetime - self.EPOCH).total_seconds() if time_precision == 'h': diff --git a/influxdb/client.py b/influxdb/client.py index b6bbc72f..40978dd3 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -252,16 +252,20 @@ def request(self, url, method='GET', params=None, data=None, else: raise InfluxDBClientError(response.content, response.status_code) - def write(self, data, params=None, expected_response_code=204): + def write(self, data, params=None, expected_response_code=204, + protocol='json'): """Write data to InfluxDB. :param data: the data to be written - :type data: dict + :type data: (if protocol is 'json') dict + (if protocol is 'line') sequence of line protocol strings :param params: additional parameters for the request, defaults to None :type params: dict :param expected_response_code: the expected response code of the write operation, defaults to 204 :type expected_response_code: int + :param protocol: protocol of input data, either 'json' or 'line' + :type protocol: str :returns: True, if the write operation is successful :rtype: bool """ @@ -274,11 +278,16 @@ def write(self, data, params=None, expected_response_code=204): else: precision = None + if protocol == 'json': + data = make_lines(data, precision).encode('utf-8') + elif protocol == 'line': + data = ('\n'.join(data) + '\n').encode('utf-8') + self.request( url="write", method='POST', params=params, - data=make_lines(data, precision).encode('utf-8'), + data=data, expected_response_code=expected_response_code, headers=headers ) @@ -351,11 +360,15 @@ def write_points(self, retention_policy=None, tags=None, batch_size=None, + protocol='json' ): """Write to multiple time series names. 
:param points: the list of points to be written in the database :type points: list of dictionaries, each dictionary represents a point + :type data: (if protocol is 'json') list of dicts, where each dict + represents a point. + (if protocol is 'line') sequence of line protocol strings. :param time_precision: Either 's', 'm', 'ms' or 'u', defaults to None :type time_precision: str :param database: the database to write the points to. Defaults to @@ -373,6 +386,8 @@ def write_points(self, one database to another or when doing a massive write operation, defaults to None :type batch_size: int + :param protocol: Protocol for writing data. Either 'line' or 'json'. + :type protocol: str :returns: True, if the operation is successful :rtype: bool @@ -386,14 +401,14 @@ def write_points(self, time_precision=time_precision, database=database, retention_policy=retention_policy, - tags=tags) + tags=tags, protocol=protocol) return True else: return self._write_points(points=points, time_precision=time_precision, database=database, retention_policy=retention_policy, - tags=tags) + tags=tags, protocol=protocol) def _batches(self, iterable, size): for i in xrange(0, len(iterable), size): @@ -404,7 +419,8 @@ def _write_points(self, time_precision, database, retention_policy, - tags): + tags, + protocol='json'): if time_precision not in ['n', 'u', 'ms', 's', 'm', 'h', None]: raise ValueError( "Invalid time precision is given. 
" @@ -415,12 +431,15 @@ def _write_points(self, "InfluxDB only supports seconds precision for udp writes" ) - data = { - 'points': points - } + if protocol == 'json': + data = { + 'points': points + } - if tags is not None: - data['tags'] = tags + if tags is not None: + data['tags'] = tags + else: + data = points params = { 'db': database or self._database @@ -433,12 +452,13 @@ def _write_points(self, params['rp'] = retention_policy if self.use_udp: - self.send_packet(data) + self.send_packet(data, protocol=protocol) else: self.write( data=data, params=params, - expected_response_code=204 + expected_response_code=204, + protocol=protocol ) return True @@ -737,13 +757,19 @@ def get_list_privileges(self, username): text = "SHOW GRANTS FOR {0}".format(username) return list(self.query(text).get_points()) - def send_packet(self, packet): + def send_packet(self, packet, protocol='json'): """Send an UDP packet. :param packet: the packet to be sent - :type packet: dict + :type packet: (if protocol is 'json') dict + (if protocol is 'line') sequence of line protocol strings + :param protocol: protocol of input data, either 'json' or 'line' + :type protocol: str """ - data = make_lines(packet).encode('utf-8') + if protocol == 'json': + data = make_lines(packet).encode('utf-8') + elif protocol == 'line': + data = ('\n'.join(data) + '\n').encode('utf-8') self.udp_socket.sendto(data, (self._host, self.udp_port)) diff --git a/influxdb/tests/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py index 5b868d14..0b3b9b90 100644 --- a/influxdb/tests/dataframe_client_test.py +++ b/influxdb/tests/dataframe_client_test.py @@ -37,8 +37,8 @@ def test_write_points_from_dataframe(self): columns=["column_one", "column_two", "column_three"]) expected = ( - b"foo column_one=\"1\",column_three=1.0,column_two=1i 0\n" - b"foo column_one=\"2\",column_three=2.0,column_two=2i " + b"foo column_one=\"1\",column_two=1i,column_three=1.0 0\n" + b"foo 
column_one=\"2\",column_two=2i,column_three=2.0 " b"3600000000000\n" ) @@ -69,6 +69,135 @@ def test_write_points_from_dataframe_in_batches(self): cli = DataFrameClient(database='db') self.assertTrue(cli.write_points(dataframe, "foo", batch_size=1)) + def test_write_points_from_dataframe_with_tag_columns(self): + now = pd.Timestamp('1970-01-01 00:00+00:00') + dataframe = pd.DataFrame(data=[['blue', 1, "1", 1, 1.0], + ['red', 0, "2", 2, 2.0]], + index=[now, now + timedelta(hours=1)], + columns=["tag_one", "tag_two", "column_one", + "column_two", "column_three"]) + expected = ( + b"foo,tag_one=blue,tag_two=1 " + b"column_one=\"1\",column_two=1i,column_three=1.0 " + b"0\n" + b"foo,tag_one=red,tag_two=0 " + b"column_one=\"2\",column_two=2i,column_three=2.0 " + b"3600000000000\n" + ) + + with requests_mock.Mocker() as m: + m.register_uri(requests_mock.POST, + "http://localhost:8086/write", + status_code=204) + + cli = DataFrameClient(database='db') + + cli.write_points(dataframe, 'foo', + tag_columns=['tag_one', 'tag_two']) + self.assertEqual(m.last_request.body, expected) + + cli.write_points(dataframe, 'foo', + tag_columns=['tag_one', 'tag_two'], tags=None) + self.assertEqual(m.last_request.body, expected) + + def test_write_points_from_dataframe_with_tag_cols_and_global_tags(self): + now = pd.Timestamp('1970-01-01 00:00+00:00') + dataframe = pd.DataFrame(data=[['blue', 1, "1", 1, 1.0], + ['red', 0, "2", 2, 2.0]], + index=[now, now + timedelta(hours=1)], + columns=["tag_one", "tag_two", "column_one", + "column_two", "column_three"]) + expected = ( + b"foo,tag_one=blue,tag_two=1,global_tag=value " + b"column_one=\"1\",column_two=1i,column_three=1.0 " + b"0\n" + b"foo,tag_one=red,tag_two=0,global_tag=value " + b"column_one=\"2\",column_two=2i,column_three=2.0 " + b"3600000000000\n" + ) + + with requests_mock.Mocker() as m: + m.register_uri(requests_mock.POST, + "http://localhost:8086/write", + status_code=204) + + cli = DataFrameClient(database='db') + + 
cli.write_points(dataframe, 'foo', + tag_columns=['tag_one', 'tag_two'], + tags={'global_tag': 'value'}) + self.assertEqual(m.last_request.body, expected) + + def test_write_points_from_dataframe_with_tag_cols_and_defaults(self): + now = pd.Timestamp('1970-01-01 00:00+00:00') + dataframe = pd.DataFrame(data=[['blue', 1, "1", 1, 1.0, 'hot'], + ['red', 0, "2", 2, 2.0, 'cold']], + index=[now, now + timedelta(hours=1)], + columns=["tag_one", "tag_two", "column_one", + "column_two", "column_three", + "tag_three"]) + expected_tags_and_fields = ( + b"foo,tag_one=blue " + b"column_one=\"1\",column_two=1i " + b"0\n" + b"foo,tag_one=red " + b"column_one=\"2\",column_two=2i " + b"3600000000000\n" + ) + + expected_tags_no_fields = ( + b"foo,tag_one=blue,tag_two=1 " + b"column_one=\"1\",column_two=1i,column_three=1.0," + b"tag_three=\"hot\" 0\n" + b"foo,tag_one=red,tag_two=0 " + b"column_one=\"2\",column_two=2i,column_three=2.0," + b"tag_three=\"cold\" 3600000000000\n" + ) + + expected_fields_no_tags = ( + b"foo,tag_one=blue,tag_two=1,tag_three=hot " + b"column_one=\"1\",column_two=1i,column_three=1.0 " + b"0\n" + b"foo,tag_one=red,tag_two=0,tag_three=cold " + b"column_one=\"2\",column_two=2i,column_three=2.0 " + b"3600000000000\n" + ) + + expected_no_tags_no_fields = ( + b"foo " + b"tag_one=\"blue\",tag_two=1i,column_one=\"1\"," + b"column_two=1i,column_three=1.0,tag_three=\"hot\" " + b"0\n" + b"foo " + b"tag_one=\"red\",tag_two=0i,column_one=\"2\"," + b"column_two=2i,column_three=2.0,tag_three=\"cold\" " + b"3600000000000\n" + ) + + with requests_mock.Mocker() as m: + m.register_uri(requests_mock.POST, + "http://localhost:8086/write", + status_code=204) + + cli = DataFrameClient(database='db') + + cli.write_points(dataframe, 'foo', + field_columns=['column_one', 'column_two'], + tag_columns=['tag_one']) + self.assertEqual(m.last_request.body, expected_tags_and_fields) + + cli.write_points(dataframe, 'foo', + tag_columns=['tag_one', 'tag_two']) + 
self.assertEqual(m.last_request.body, expected_tags_no_fields) + + cli.write_points(dataframe, 'foo', + field_columns=['column_one', 'column_two', + 'column_three']) + self.assertEqual(m.last_request.body, expected_fields_no_tags) + + cli.write_points(dataframe, 'foo') + self.assertEqual(m.last_request.body, expected_no_tags_no_fields) + def test_write_points_from_dataframe_with_numeric_column_names(self): now = pd.Timestamp('1970-01-01 00:00+00:00') # df with numeric column names @@ -90,15 +219,60 @@ def test_write_points_from_dataframe_with_numeric_column_names(self): self.assertEqual(m.last_request.body, expected) + def test_write_points_from_dataframe_with_numeric_precision(self): + now = pd.Timestamp('1970-01-01 00:00+00:00') + # df with numeric column names + dataframe = pd.DataFrame(data=[["1", 1, 1.1111111111111], + ["2", 2, 2.2222222222222]], + index=[now, now + timedelta(hours=1)]) + + expected_default_precision = ( + b'foo,hello=there 0=\"1\",1=1i,2=1.11111111111 0\n' + b'foo,hello=there 0=\"2\",1=2i,2=2.22222222222 3600000000000\n' + ) + + expected_specified_precision = ( + b'foo,hello=there 0=\"1\",1=1i,2=1.1111 0\n' + b'foo,hello=there 0=\"2\",1=2i,2=2.2222 3600000000000\n' + ) + + expected_full_precision = ( + b'foo,hello=there 0=\"1\",1=1i,2=1.1111111111111 0\n' + b'foo,hello=there 0=\"2\",1=2i,2=2.2222222222222 3600000000000\n' + ) + + with requests_mock.Mocker() as m: + m.register_uri(requests_mock.POST, + "http://localhost:8086/write", + status_code=204) + + cli = DataFrameClient(database='db') + cli.write_points(dataframe, "foo", {"hello": "there"}) + + self.assertEqual(m.last_request.body, expected_default_precision) + + cli = DataFrameClient(database='db') + cli.write_points(dataframe, "foo", {"hello": "there"}, + numeric_precision=4) + + self.assertEqual(m.last_request.body, expected_specified_precision) + + cli = DataFrameClient(database='db') + cli.write_points(dataframe, "foo", {"hello": "there"}, + numeric_precision='full') + + 
self.assertEqual(m.last_request.body, expected_full_precision) + def test_write_points_from_dataframe_with_period_index(self): dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], index=[pd.Period('1970-01-01'), pd.Period('1970-01-02')], columns=["column_one", "column_two", "column_three"]) + expected = ( - b"foo column_one=\"1\",column_three=1.0,column_two=1i 0\n" - b"foo column_one=\"2\",column_three=2.0,column_two=2i " + b"foo column_one=\"1\",column_two=1i,column_three=1.0 0\n" + b"foo column_one=\"2\",column_two=2i,column_three=2.0 " b"86400000000000\n" ) @@ -130,48 +304,48 @@ def test_write_points_from_dataframe_with_time_precision(self): cli.write_points(dataframe, measurement, time_precision='h') self.assertEqual(m.last_request.qs['precision'], ['h']) self.assertEqual( - b'foo column_one="1",column_three=1.0,column_two=1i 0\nfoo ' - b'column_one="2",column_three=2.0,column_two=2i 1\n', + b'foo column_one="1",column_two=1i,column_three=1.0 0\nfoo ' + b'column_one="2",column_two=2i,column_three=2.0 1\n', m.last_request.body, ) cli.write_points(dataframe, measurement, time_precision='m') self.assertEqual(m.last_request.qs['precision'], ['m']) self.assertEqual( - b'foo column_one="1",column_three=1.0,column_two=1i 0\nfoo ' - b'column_one="2",column_three=2.0,column_two=2i 60\n', + b'foo column_one="1",column_two=1i,column_three=1.0 0\nfoo ' + b'column_one="2",column_two=2i,column_three=2.0 60\n', m.last_request.body, ) cli.write_points(dataframe, measurement, time_precision='s') self.assertEqual(m.last_request.qs['precision'], ['s']) self.assertEqual( - b'foo column_one="1",column_three=1.0,column_two=1i 0\nfoo ' - b'column_one="2",column_three=2.0,column_two=2i 3600\n', + b'foo column_one="1",column_two=1i,column_three=1.0 0\nfoo ' + b'column_one="2",column_two=2i,column_three=2.0 3600\n', m.last_request.body, ) cli.write_points(dataframe, measurement, time_precision='ms') self.assertEqual(m.last_request.qs['precision'], ['ms']) self.assertEqual( - 
b'foo column_one="1",column_three=1.0,column_two=1i 0\nfoo ' - b'column_one="2",column_three=2.0,column_two=2i 3600000\n', + b'foo column_one="1",column_two=1i,column_three=1.0 0\nfoo ' + b'column_one="2",column_two=2i,column_three=2.0 3600000\n', m.last_request.body, ) cli.write_points(dataframe, measurement, time_precision='u') self.assertEqual(m.last_request.qs['precision'], ['u']) self.assertEqual( - b'foo column_one="1",column_three=1.0,column_two=1i 0\nfoo ' - b'column_one="2",column_three=2.0,column_two=2i 3600000000\n', + b'foo column_one="1",column_two=1i,column_three=1.0 0\nfoo ' + b'column_one="2",column_two=2i,column_three=2.0 3600000000\n', m.last_request.body, ) cli.write_points(dataframe, measurement, time_precision='n') self.assertEqual(m.last_request.qs['precision'], ['n']) self.assertEqual( - b'foo column_one="1",column_three=1.0,column_two=1i 0\n' - b'foo column_one="2",column_three=2.0,column_two=2i ' + b'foo column_one="1",column_two=1i,column_three=1.0 0\n' + b'foo column_one="2",column_two=2i,column_three=2.0 ' b'3600000000000\n', m.last_request.body, ) From a1ffa0a5ab8bd377d5efabee7d899dc96c3000a1 Mon Sep 17 00:00:00 2001 From: "tzonghao.chen" Date: Thu, 8 Sep 2016 12:49:09 -0400 Subject: [PATCH 346/536] Fix DataFrameClient tag processing - tags should be sorted, with both global_tags and column_tags together - tags values should be escaped --- influxdb/_dataframe_client.py | 23 +++++++++------ influxdb/tests/dataframe_client_test.py | 37 ++++++++++++++++++++++--- 2 files changed, 47 insertions(+), 13 deletions(-) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index ddae0862..90b5a69f 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -8,6 +8,7 @@ from __future__ import unicode_literals import math +import re import pandas as pd @@ -26,6 +27,10 @@ def _pandas_time_unit(time_precision): return unit +def _escape_pandas_series(s): + return s.apply(lambda v: re.escape(v)) + + class 
DataFrameClient(InfluxDBClient): """ The ``DataFrameClient`` object holds information necessary to connect @@ -242,6 +247,12 @@ def _convert_dataframe_to_lines(self, field_columns = list(field_columns) if list(field_columns) else [] tag_columns = list(tag_columns) if list(tag_columns) else [] + # Make global_tags as tag_columns + if global_tags: + for tag in global_tags: + dataframe[tag] = global_tags[tag] + tag_columns.append(tag) + # If field columns but no tag columns, assume rest of columns are tags if field_columns and (not tag_columns): tag_columns = list(column_series[~column_series.isin( @@ -268,6 +279,7 @@ def _convert_dataframe_to_lines(self, # If tag columns exist, make an array of formatted tag keys and values if tag_columns: tag_df = dataframe[tag_columns] + tag_df = tag_df.sort_index(axis=1) tag_df = self._stringify_dataframe( tag_df, numeric_precision, datatype='tag') tags = (',' + ( @@ -286,15 +298,6 @@ def _convert_dataframe_to_lines(self, fields = field_df.sum(axis=1) del field_df - # Add any global tags to formatted tag strings - if global_tags: - global_tags = ','.join(['='.join([tag, global_tags[tag]]) - for tag in global_tags]) - if tag_columns: - tags = tags + ',' + global_tags - else: - tags = ',' + global_tags - # Generate line protocol string points = (measurement + tags + ' ' + fields + ' ' + time).tolist() return points @@ -344,6 +347,8 @@ def _stringify_dataframe(self, # If dealing with fields, format ints and strings correctly dataframe[int_columns] = dataframe[int_columns] + 'i' dataframe[string_columns] = '"' + dataframe[string_columns] + '"' + elif datatype == 'tag': + dataframe = dataframe.apply(_escape_pandas_series) dataframe.columns = dataframe.columns.astype(str) return dataframe diff --git a/influxdb/tests/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py index 0b3b9b90..782e5c82 100644 --- a/influxdb/tests/dataframe_client_test.py +++ b/influxdb/tests/dataframe_client_test.py @@ -108,10 +108,10 @@ def 
test_write_points_from_dataframe_with_tag_cols_and_global_tags(self): columns=["tag_one", "tag_two", "column_one", "column_two", "column_three"]) expected = ( - b"foo,tag_one=blue,tag_two=1,global_tag=value " + b"foo,global_tag=value,tag_one=blue,tag_two=1 " b"column_one=\"1\",column_two=1i,column_three=1.0 " b"0\n" - b"foo,tag_one=red,tag_two=0,global_tag=value " + b"foo,global_tag=value,tag_one=red,tag_two=0 " b"column_one=\"2\",column_two=2i,column_three=2.0 " b"3600000000000\n" ) @@ -155,10 +155,10 @@ def test_write_points_from_dataframe_with_tag_cols_and_defaults(self): ) expected_fields_no_tags = ( - b"foo,tag_one=blue,tag_two=1,tag_three=hot " + b"foo,tag_one=blue,tag_three=hot,tag_two=1 " b"column_one=\"1\",column_two=1i,column_three=1.0 " b"0\n" - b"foo,tag_one=red,tag_two=0,tag_three=cold " + b"foo,tag_one=red,tag_three=cold,tag_two=0 " b"column_one=\"2\",column_two=2i,column_three=2.0 " b"3600000000000\n" ) @@ -198,6 +198,35 @@ def test_write_points_from_dataframe_with_tag_cols_and_defaults(self): cli.write_points(dataframe, 'foo') self.assertEqual(m.last_request.body, expected_no_tags_no_fields) + def test_write_points_from_dataframe_with_tag_escaped(self): + now = pd.Timestamp('1970-01-01 00:00+00:00') + dataframe = pd.DataFrame( + data=[['blue', 1, "1", 1, 1.0, 'hot'], + ['red,green=orange', 0, "2", 2, 2.0, 'cold']], + index=[now, now + timedelta(hours=1)], + columns=["tag_one", "tag_two", "column_one", + "column_two", "column_three", + "tag_three"]) + + expected_escaped_tags = ( + b"foo,tag_one=blue " + b"column_one=\"1\",column_two=1i " + b"0\n" + b"foo,tag_one=red\\,green\\=orange " + b"column_one=\"2\",column_two=2i " + b"3600000000000\n" + ) + + with requests_mock.Mocker() as m: + m.register_uri(requests_mock.POST, + "http://localhost:8086/write", + status_code=204) + cli = DataFrameClient(database='db') + cli.write_points(dataframe, 'foo', + field_columns=['column_one', 'column_two'], + tag_columns=['tag_one']) + 
self.assertEqual(m.last_request.body, expected_escaped_tags) + def test_write_points_from_dataframe_with_numeric_column_names(self): now = pd.Timestamp('1970-01-01 00:00+00:00') # df with numeric column names From 978b080e959403ab121f5c736eab6d5d5ea82e5c Mon Sep 17 00:00:00 2001 From: "tzonghao.chen" Date: Thu, 8 Sep 2016 17:29:21 -0400 Subject: [PATCH 347/536] Preserve previous tag escaping behavior `re.escape` affects all non-alphanumerical characters, as opposed to `line_protocol._escape_tag` which only escapes specific characters [\ ,=] --- influxdb/_dataframe_client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index 90b5a69f..0341a41d 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -8,11 +8,11 @@ from __future__ import unicode_literals import math -import re import pandas as pd from .client import InfluxDBClient +from .line_protocol import _escape_tag def _pandas_time_unit(time_precision): @@ -28,7 +28,7 @@ def _pandas_time_unit(time_precision): def _escape_pandas_series(s): - return s.apply(lambda v: re.escape(v)) + return s.apply(lambda v: _escape_tag(v)) class DataFrameClient(InfluxDBClient): From c6334f8cbb13586d0aa53135f340e0d3edf162c8 Mon Sep 17 00:00:00 2001 From: "tzonghao.chen" Date: Fri, 9 Sep 2016 11:54:14 -0400 Subject: [PATCH 348/536] Fix DataFrameClient empty tag processing Empty or None tags should be left out, otherwise it would break line protocol. 
--- influxdb/_dataframe_client.py | 14 ++++++++++--- influxdb/tests/dataframe_client_test.py | 27 ++++++++++++++++--------- 2 files changed, 29 insertions(+), 12 deletions(-) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index 0341a41d..9eec9254 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -279,13 +279,21 @@ def _convert_dataframe_to_lines(self, # If tag columns exist, make an array of formatted tag keys and values if tag_columns: tag_df = dataframe[tag_columns] + tag_df = tag_df.fillna('') # replace NA with empty string tag_df = tag_df.sort_index(axis=1) tag_df = self._stringify_dataframe( tag_df, numeric_precision, datatype='tag') - tags = (',' + ( - (tag_df.columns.values + '=').tolist() + tag_df)).sum(axis=1) - del tag_df + # prepend tag keys + tag_df = tag_df.apply( + lambda s: s.apply( + lambda v, l: l + '=' + v if v else None, l=s.name)) + + # join tags, but leave out None values + tags = tag_df.apply( + lambda r: ',' + ','.join(r.dropna()), axis=1) + + del tag_df else: tags = '' diff --git a/influxdb/tests/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py index 782e5c82..eb2e292c 100644 --- a/influxdb/tests/dataframe_client_test.py +++ b/influxdb/tests/dataframe_client_test.py @@ -201,20 +201,29 @@ def test_write_points_from_dataframe_with_tag_cols_and_defaults(self): def test_write_points_from_dataframe_with_tag_escaped(self): now = pd.Timestamp('1970-01-01 00:00+00:00') dataframe = pd.DataFrame( - data=[['blue', 1, "1", 1, 1.0, 'hot'], - ['red,green=orange', 0, "2", 2, 2.0, 'cold']], - index=[now, now + timedelta(hours=1)], - columns=["tag_one", "tag_two", "column_one", - "column_two", "column_three", - "tag_three"]) + data=[ + ['blue orange', "1", 1, 'hot=cold'], # space, equal + ['red,green', "2", 2, r'cold\fire'], # comma, backslash + ['some', "2", 2, ''], # skip empty + ['some', "2", 2, None], # skip None + ], + index=pd.period_range(now, freq='H', periods=4), + 
columns=["tag_one", "column_one", "column_two", "tag_three"] + ) expected_escaped_tags = ( - b"foo,tag_one=blue " + b"foo,tag_one=blue\\ orange,tag_three=hot\\=cold " b"column_one=\"1\",column_two=1i " b"0\n" - b"foo,tag_one=red\\,green\\=orange " + b"foo,tag_one=red\\,green,tag_three=cold\\\\fire " b"column_one=\"2\",column_two=2i " b"3600000000000\n" + b"foo,tag_one=some " + b"column_one=\"2\",column_two=2i " + b"7200000000000\n" + b"foo,tag_one=some " + b"column_one=\"2\",column_two=2i " + b"10800000000000\n" ) with requests_mock.Mocker() as m: @@ -224,7 +233,7 @@ def test_write_points_from_dataframe_with_tag_escaped(self): cli = DataFrameClient(database='db') cli.write_points(dataframe, 'foo', field_columns=['column_one', 'column_two'], - tag_columns=['tag_one']) + tag_columns=['tag_one', 'tag_three']) self.assertEqual(m.last_request.body, expected_escaped_tags) def test_write_points_from_dataframe_with_numeric_column_names(self): From a7c64405af3e819f6d4b864d5eb7b38a1395bea0 Mon Sep 17 00:00:00 2001 From: "tzonghao.chen" Date: Fri, 9 Sep 2016 12:12:08 -0400 Subject: [PATCH 349/536] Handle all-empty tags --- influxdb/_dataframe_client.py | 5 ++++- influxdb/tests/dataframe_client_test.py | 6 +++++- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index 9eec9254..f854f82c 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -291,7 +291,10 @@ def _convert_dataframe_to_lines(self, # join tags, but leave out None values tags = tag_df.apply( - lambda r: ',' + ','.join(r.dropna()), axis=1) + lambda r: ','.join(r.dropna()), axis=1) + + # prepend comma + tags = tags.apply(lambda v: ',' + v if v else '') del tag_df else: diff --git a/influxdb/tests/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py index eb2e292c..3667b0fc 100644 --- a/influxdb/tests/dataframe_client_test.py +++ b/influxdb/tests/dataframe_client_test.py @@ -206,8 +206,9 @@ def 
test_write_points_from_dataframe_with_tag_escaped(self): ['red,green', "2", 2, r'cold\fire'], # comma, backslash ['some', "2", 2, ''], # skip empty ['some', "2", 2, None], # skip None + ['', "2", 2, None], # all tags empty ], - index=pd.period_range(now, freq='H', periods=4), + index=pd.period_range(now, freq='H', periods=5), columns=["tag_one", "column_one", "column_two", "tag_three"] ) @@ -224,6 +225,9 @@ def test_write_points_from_dataframe_with_tag_escaped(self): b"foo,tag_one=some " b"column_one=\"2\",column_two=2i " b"10800000000000\n" + b"foo " + b"column_one=\"2\",column_two=2i " + b"14400000000000\n" ) with requests_mock.Mocker() as m: From cbde490024e439ab780af0e4fb1832ef5a4f0f00 Mon Sep 17 00:00:00 2001 From: "tzonghao.chen" Date: Fri, 9 Sep 2016 16:25:10 -0400 Subject: [PATCH 350/536] Add proper timestamp timezone handling Fix bug #250 --- influxdb/line_protocol.py | 12 +++++------ influxdb/tests/test_line_protocol.py | 31 ++++++++++++++++++++++++++++ 2 files changed, 37 insertions(+), 6 deletions(-) diff --git a/influxdb/line_protocol.py b/influxdb/line_protocol.py index 9e0f0743..7c8c8f24 100644 --- a/influxdb/line_protocol.py +++ b/influxdb/line_protocol.py @@ -5,14 +5,16 @@ from __future__ import print_function from __future__ import unicode_literals -from calendar import timegm from copy import copy from datetime import datetime from numbers import Integral +from pytz import UTC from dateutil.parser import parse from six import binary_type, text_type, integer_types, PY2 +EPOCH = UTC.localize(datetime.utcfromtimestamp(0)) + def _convert_timestamp(timestamp, precision=None): if isinstance(timestamp, Integral): @@ -20,10 +22,9 @@ def _convert_timestamp(timestamp, precision=None): if isinstance(_get_unicode(timestamp), text_type): timestamp = parse(timestamp) if isinstance(timestamp, datetime): - ns = ( - timegm(timestamp.utctimetuple()) * 1e9 + - timestamp.microsecond * 1e3 - ) + if not timestamp.tzinfo: + timestamp = UTC.localize(timestamp) + ns = 
(timestamp - EPOCH).total_seconds() * 1e9 if precision is None or precision == 'n': return ns elif precision == 'u': @@ -36,7 +37,6 @@ def _convert_timestamp(timestamp, precision=None): return ns / 1e9 / 60 elif precision == 'h': return ns / 1e9 / 3600 - raise ValueError(timestamp) diff --git a/influxdb/tests/test_line_protocol.py b/influxdb/tests/test_line_protocol.py index 1d0b377c..726f8705 100644 --- a/influxdb/tests/test_line_protocol.py +++ b/influxdb/tests/test_line_protocol.py @@ -5,7 +5,9 @@ from __future__ import print_function from __future__ import unicode_literals +from datetime import datetime import unittest +from pytz import UTC, timezone from influxdb import line_protocol @@ -40,6 +42,35 @@ def test_make_lines(self): 'bool_val=True,float_val=1.1,int_val=1i,string_val="hello!"\n' ) + def test_timezone(self): + dt = datetime(2009, 11, 10, 23, 0, 0, 123456) + utc = UTC.localize(dt) + berlin = timezone('Europe/Berlin').localize(dt) + eastern = berlin.astimezone(timezone('US/Eastern')) + data = { + "points": [ + {"measurement": "A", "fields": {"val": 1}, + "time": 0}, + {"measurement": "A", "fields": {"val": 1}, + "time": "2009-11-10T23:00:00.123456Z"}, + {"measurement": "A", "fields": {"val": 1}, "time": dt}, + {"measurement": "A", "fields": {"val": 1}, "time": utc}, + {"measurement": "A", "fields": {"val": 1}, "time": berlin}, + {"measurement": "A", "fields": {"val": 1}, "time": eastern}, + ] + } + self.assertEqual( + line_protocol.make_lines(data), + '\n'.join([ + 'A val=1i 0', + 'A val=1i 1257894000123456000', + 'A val=1i 1257894000123456000', + 'A val=1i 1257894000123456000', + 'A val=1i 1257890400123456000', + 'A val=1i 1257890400123456000', + ]) + '\n' + ) + def test_string_val_newline(self): data = { "points": [ From 355b1a91edf2dfcff66c2a02e034977f65d0690c Mon Sep 17 00:00:00 2001 From: Simon Gomizelj Date: Thu, 22 Sep 2016 13:08:36 -0400 Subject: [PATCH 351/536] Fix DataFrameClient import error on python3.5 --- influxdb/dataframe_client.py | 4 
+++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/influxdb/dataframe_client.py b/influxdb/dataframe_client.py index 2d90fda3..1841633b 100644 --- a/influxdb/dataframe_client.py +++ b/influxdb/dataframe_client.py @@ -16,8 +16,10 @@ from .client import InfluxDBClient class DataFrameClient(InfluxDBClient): + err = err + def __init__(self, *a, **kw): raise ImportError("DataFrameClient requires Pandas " - "which couldn't be imported: %s" % err) + "which couldn't be imported: %s" % self.err) else: from ._dataframe_client import DataFrameClient From 499bcde7f3a56835426593759650bfeb7646de20 Mon Sep 17 00:00:00 2001 From: Phuong Nguyen Date: Fri, 23 Sep 2016 15:20:38 -0500 Subject: [PATCH 352/536] Fix order of input parameters of delete_series in documentation --- influxdb/client.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 44a08bdc..b9faa4f0 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -662,13 +662,13 @@ def delete_series(self, database=None, measurement=None, tags=None): """Delete series from a database. Series can be filtered by measurement and tags. 
- :param measurement: Delete all series from a measurement - :type id: string - :param tags: Delete all series that match given tags - :type id: dict :param database: the database from which the series should be deleted, defaults to client's current database :type database: str + :param measurement: Delete all series from a measurement + :type id: str + :param tags: Delete all series that match given tags + :type id: dict """ database = database or self._database query_str = 'DROP SERIES' From a71ebcb1f2a855be0c5675c38a275a0835887c88 Mon Sep 17 00:00:00 2001 From: NotSqrt Date: Mon, 28 Nov 2016 13:26:27 +0100 Subject: [PATCH 353/536] Fix method name in example --- examples/tutorial_pandas.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/tutorial_pandas.py b/examples/tutorial_pandas.py index c9a09fde..9b3a6da5 100644 --- a/examples/tutorial_pandas.py +++ b/examples/tutorial_pandas.py @@ -29,7 +29,7 @@ def main(host='localhost', port=8086): client.query("select * from demo") print("Delete database: " + dbname) - client.delete_database(dbname) + client.drop_database(dbname) def parse_args(): From f9bd6cc19728d4578717acf752268e9a884a602d Mon Sep 17 00:00:00 2001 From: Guillaume Ansanay-Alex Date: Thu, 1 Dec 2016 20:23:05 +0100 Subject: [PATCH 354/536] Fix pandas calls + New test suite + Test against InfluxDB v1.1.0 (#388) (Thanks @gansanay!) 
* pandas .to_datetime() is deprecated as of 0.19.0 See http://pandas.pydata.org/pandas-docs/version/0.19.1/whatsnew.html#deprecations * pandas 0.19.x breaks python 3.3 compatibility * pypy3 - pip requires Python >= 2.6 or >= 3.3 Using Python 3.4 for Travis build * Use 'language' syntax for Travis CI configuration * Fix silly error * Define new test matrix * Fix build language environments for pypy, pypy3 * Display error logs for pypy builds * Test newer versions of pypy and pypy3 * Try to fix calls to coverage/docs/flake8 * Test setting VERSION * Acknowledging the issue in Travis-CI https://github.com/travis-ci/travis-ci/issues/6304 Commenting out pypy3 test * Get InfluxDB 1.1.0 * Default retetion policy name and duration unit have changed in InfluxDB * Default retention name and policy duration, again * Altering retention policy doesn't change shard group duration * Fix styling * Fix coverage link in README * Put initial coverage link back * Update README: v1.1.0 is the recommended version of InfluxDB --- .travis.yml | 38 +++++++++------- README.rst | 6 +-- influxdb/_dataframe_client.py | 8 +++- influxdb/influxdb08/dataframe_client.py | 7 ++- .../server_tests/client_test_with_server.py | 44 +++++++++++-------- tox.ini | 4 +- 6 files changed, 65 insertions(+), 42 deletions(-) diff --git a/.travis.yml b/.travis.yml index 868fe5cc..1dd2c78d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,30 +1,38 @@ language: python + addons: apt: packages: - wget -env: - - TOX_ENV=py27 -# - TOX_ENV=py32 -# Disabling py32 tests until the following issue is fixed: -# pip 8.x breaks python 3.2 compatibility -# https://github.com/pypa/pip/issues/3390 - - TOX_ENV=py33 - - TOX_ENV=py34 - - TOX_ENV=pypy - - TOX_ENV=pypy3 - - TOX_ENV=docs - - TOX_ENV=flake8 - - TOX_ENV=coverage + +matrix: + include: + - python: 2.7 + env: TOX_ENV=py27 + - python: pypy-5.3.1 + env: TOX_ENV=pypy + - python: 3.4 + env: TOX_ENV=py34 +# An issue in travis-ci prevents this case from running +# Link to issue: 
https://github.com/travis-ci/travis-ci/issues/6304 +# - python: pypy3.3-5.2-alpha1 +# env: TOX_ENV=pypy3 + - python: 3.4 + env: TOX_ENV=docs + - python: 3.4 + env: TOX_ENV=flake8 + - python: 3.4 + env: TOX_ENV=coverage + install: - pip install tox - pip install coveralls - mkdir influxdb_install - - wget https://dl.influxdata.com/influxdb/releases/influxdb_0.13.0_amd64.deb + - wget https://dl.influxdata.com/influxdb/releases/influxdb_1.1.0_amd64.deb - dpkg -x influxdb*.deb influxdb_install script: - export INFLUXDB_PYTHON_INFLUXD_PATH=$(pwd)/influxdb_install/usr/bin/influxd - - travis_wait 30 tox -e $TOX_ENV + - tox -e $TOX_ENV after_success: - if [ "$TOX_ENV" == "coverage" ] ; then coveralls; fi notifications: diff --git a/README.rst b/README.rst index d3a8d775..a88ce0f2 100644 --- a/README.rst +++ b/README.rst @@ -22,10 +22,10 @@ InfluxDB is an open-source distributed time series database, find more about Inf .. _installation: -InfluxDB v0.8.X users -===================== +InfluxDB pre v1.1.0 users +========================= -InfluxDB 0.9 was released and it is the new recommended version. However, InfluxDB 0.8.x users may still use the legacy client by using ``from influxdb.influxdb08 import InfluxDBClient`` instead. +InfluxDB 1.1.0 was released and it is the new recommended version. InfluxDB 0.8.x users may still use the legacy client by using ``from influxdb.influxdb08 import InfluxDBClient`` instead. 
Installation ============ diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index 0341a41d..5f81e4c8 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -273,8 +273,12 @@ def _convert_dataframe_to_lines(self, }.get(time_precision, 1) # Make array of timestamp ints - time = ((dataframe.index.to_datetime().values.astype(int) / - precision_factor).astype(int).astype(str)) + if isinstance(dataframe.index, pd.tseries.period.PeriodIndex): + time = ((dataframe.index.to_timestamp().values.astype(int) / + precision_factor).astype(int).astype(str)) + else: + time = ((pd.to_datetime(dataframe.index).values.astype(int) / + precision_factor).astype(int).astype(str)) # If tag columns exist, make an array of formatted tag keys and values if tag_columns: diff --git a/influxdb/influxdb08/dataframe_client.py b/influxdb/influxdb08/dataframe_client.py index 28173e6e..ba302af8 100644 --- a/influxdb/influxdb08/dataframe_client.py +++ b/influxdb/influxdb08/dataframe_client.py @@ -132,7 +132,12 @@ def _convert_dataframe_to_json(self, dataframe, name, time_precision='s'): isinstance(dataframe.index, pd.tseries.index.DatetimeIndex)): raise TypeError('Must be DataFrame with DatetimeIndex or \ PeriodIndex.') - dataframe.index = dataframe.index.to_datetime() + + if isinstance(dataframe.index, pd.tseries.period.PeriodIndex): + dataframe.index = dataframe.index.to_timestamp() + else: + dataframe.index = pd.to_datetime(dataframe.index) + if dataframe.index.tzinfo is None: dataframe.index = dataframe.index.tz_localize('UTC') dataframe['time'] = [self._datetime_to_epoch(dt, time_precision) diff --git a/influxdb/tests/server_tests/client_test_with_server.py b/influxdb/tests/server_tests/client_test_with_server.py index 86faf204..d81054c9 100644 --- a/influxdb/tests/server_tests/client_test_with_server.py +++ b/influxdb/tests/server_tests/client_test_with_server.py @@ -431,8 +431,8 @@ def test_default_retention_policy(self): rsp = 
self.cli.get_list_retention_policies() self.assertEqual( [ - {'name': 'default', - 'duration': '0', + {'name': 'autogen', + 'duration': '0s', 'replicaN': 1, 'shardGroupDuration': u'168h0m0s', 'default': True} @@ -447,11 +447,11 @@ def test_create_retention_policy_default(self): self.assertEqual( [ - {'duration': '0', + {'duration': '0s', 'default': False, 'replicaN': 1, 'shardGroupDuration': u'168h0m0s', - 'name': 'default'}, + 'name': 'autogen'}, {'duration': '24h0m0s', 'default': True, 'replicaN': 1, @@ -468,14 +468,17 @@ def test_create_retention_policy_default(self): def test_create_retention_policy(self): self.cli.create_retention_policy('somename', '1d', 1) + # NB: creating a retention policy without specifying + # shard group duration + # leads to a shard group duration of 1 hour rsp = self.cli.get_list_retention_policies() self.assertEqual( [ - {'duration': '0', + {'duration': '0s', 'default': True, 'replicaN': 1, 'shardGroupDuration': u'168h0m0s', - 'name': 'default'}, + 'name': 'autogen'}, {'duration': '24h0m0s', 'default': False, 'replicaN': 1, @@ -491,18 +494,19 @@ def test_alter_retention_policy(self): # Test alter duration self.cli.alter_retention_policy('somename', 'db', duration='4d') + # NB: altering retention policy doesn't change shard group duration rsp = self.cli.get_list_retention_policies() self.assertEqual( [ - {'duration': '0', + {'duration': '0s', 'default': True, 'replicaN': 1, 'shardGroupDuration': u'168h0m0s', - 'name': 'default'}, + 'name': 'autogen'}, {'duration': '96h0m0s', 'default': False, 'replicaN': 1, - 'shardGroupDuration': u'24h0m0s', + 'shardGroupDuration': u'1h0m0s', 'name': 'somename'} ], rsp @@ -511,18 +515,19 @@ def test_alter_retention_policy(self): # Test alter replication self.cli.alter_retention_policy('somename', 'db', replication=4) + # NB: altering retention policy doesn't change shard group duration rsp = self.cli.get_list_retention_policies() self.assertEqual( [ - {'duration': '0', + {'duration': '0s', 'default': 
True, 'replicaN': 1, 'shardGroupDuration': u'168h0m0s', - 'name': 'default'}, + 'name': 'autogen'}, {'duration': '96h0m0s', 'default': False, 'replicaN': 4, - 'shardGroupDuration': u'24h0m0s', + 'shardGroupDuration': u'1h0m0s', 'name': 'somename'} ], rsp @@ -531,18 +536,19 @@ def test_alter_retention_policy(self): # Test alter default self.cli.alter_retention_policy('somename', 'db', default=True) + # NB: altering retention policy doesn't change shard group duration rsp = self.cli.get_list_retention_policies() self.assertEqual( [ - {'duration': '0', + {'duration': '0s', 'default': False, 'replicaN': 1, 'shardGroupDuration': u'168h0m0s', - 'name': 'default'}, + 'name': 'autogen'}, {'duration': '96h0m0s', 'default': True, 'replicaN': 4, - 'shardGroupDuration': u'24h0m0s', + 'shardGroupDuration': u'1h0m0s', 'name': 'somename'} ], rsp @@ -558,11 +564,11 @@ def test_alter_retention_policy_invalid(self): rsp = self.cli.get_list_retention_policies() self.assertEqual( [ - {'duration': '0', + {'duration': '0s', 'default': True, 'replicaN': 1, 'shardGroupDuration': u'168h0m0s', - 'name': 'default'}, + 'name': 'autogen'}, {'duration': '24h0m0s', 'default': False, 'replicaN': 1, @@ -580,11 +586,11 @@ def test_drop_retention_policy(self): rsp = self.cli.get_list_retention_policies() self.assertEqual( [ - {'duration': '0', + {'duration': '0s', 'default': True, 'replicaN': 1, 'shardGroupDuration': u'168h0m0s', - 'name': 'default'} + 'name': 'autogen'} ], rsp ) diff --git a/tox.ini b/tox.ini index ddc15514..856a4717 100644 --- a/tox.ini +++ b/tox.ini @@ -1,12 +1,12 @@ [tox] -envlist = py34, py27, pypy, flake8 +envlist = py27, py34, pypy, pypy3, flake8, coverage, docs [testenv] passenv = INFLUXDB_PYTHON_INFLUXD_PATH setenv = INFLUXDB_PYTHON_SKIP_SERVER_TESTS=False deps = -r{toxinidir}/requirements.txt -r{toxinidir}/test-requirements.txt - py27,py32,py33,py34,py26: pandas + py27,py34: pandas # Only install pandas with non-pypy interpreters commands = nosetests -v --with-doctest 
{posargs} From b32807115bc7cc9d8390cfe44a982a59b84b4175 Mon Sep 17 00:00:00 2001 From: Emmanuel Levijarvi Date: Sat, 3 Dec 2016 10:15:58 -0800 Subject: [PATCH 355/536] fixes for mutable default parameters in dataframe --- influxdb/_dataframe_client.py | 24 +++++++++++++++--------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index 5f81e4c8..4fc5de2b 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -44,8 +44,8 @@ def write_points(self, dataframe, measurement, tags=None, - tag_columns=[], - field_columns=[], + tag_columns=None, + field_columns=None, time_precision=None, database=None, retention_policy=None, @@ -72,6 +72,10 @@ def write_points(self, figures for float and all significant figures for int datatypes. """ + if tag_columns is None: + tag_columns = [] + if field_columns is None: + field_columns = [] if batch_size: number_batches = int(math.ceil(len(dataframe) / float(batch_size))) for batch in range(number_batches): @@ -166,8 +170,8 @@ def _convert_dataframe_to_json(self, dataframe, measurement, tags=None, - tag_columns=[], - field_columns=[], + tag_columns=None, + field_columns=None, time_precision=None): if not isinstance(dataframe, pd.DataFrame): @@ -179,9 +183,9 @@ def _convert_dataframe_to_json(self, PeriodIndex.') # Make sure tags and tag columns are correctly typed - tag_columns = tag_columns if tag_columns else [] - field_columns = field_columns if field_columns else [] - tags = tags if tags else {} + tag_columns = tag_columns if tag_columns is not None else [] + field_columns = field_columns if field_columns is not None else [] + tags = tags if tags is not None else {} # Assume field columns are all columns not included in tag columns if not field_columns: field_columns = list( @@ -221,8 +225,8 @@ def _convert_dataframe_to_json(self, def _convert_dataframe_to_lines(self, dataframe, measurement, - field_columns=[], - tag_columns=[], + 
field_columns=None, + tag_columns=None, global_tags={}, time_precision=None, numeric_precision=None): @@ -242,6 +246,8 @@ def _convert_dataframe_to_lines(self, field_columns = [] if tag_columns is None: tag_columns = [] + if global_tags is None: + global_tags = {} # Make sure field_columns and tag_columns are lists field_columns = list(field_columns) if list(field_columns) else [] From 2e8c58867008227989742f9949d9e23048c4144b Mon Sep 17 00:00:00 2001 From: Emmanuel Levijarvi Date: Sat, 3 Dec 2016 10:19:26 -0800 Subject: [PATCH 356/536] fix for using from_DSN constructor with dataframe --- influxdb/client.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index b9faa4f0..1a0a4821 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -131,8 +131,8 @@ def _port(self): def _get_port(self): return self.__port - @staticmethod - def from_DSN(dsn, **kwargs): + @classmethod + def from_DSN(cls, dsn, **kwargs): """Return an instance of :class:`~.InfluxDBClient` from the provided data source name. Supported schemes are "influxdb", "https+influxdb" and "udp+influxdb". Parameters for the :class:`~.InfluxDBClient` @@ -169,7 +169,7 @@ def from_DSN(dsn, **kwargs): init_args['port'] = port init_args.update(kwargs) - return InfluxDBClient(**init_args) + return cls(**init_args) def switch_database(self, database): """Change the client's database. From 15210d15504aa317660caf65b8fb07415c878330 Mon Sep 17 00:00:00 2001 From: Matt Date: Tue, 6 Dec 2016 22:00:12 +0000 Subject: [PATCH 357/536] Use quote_ident() to allow databases with spaces in the names * Added an exception when the paramters will create a malformed request. * Updated the tests around this to use a named databse instead. 
--- influxdb/client.py | 20 ++++++++++++++------ influxdb/tests/client_test.py | 2 +- 2 files changed, 15 insertions(+), 7 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index b9faa4f0..f0fbd94c 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -485,7 +485,7 @@ def create_database(self, dbname): :param dbname: the name of the database to create :type dbname: str """ - self.query("CREATE DATABASE \"%s\"" % dbname) + self.query("CREATE DATABASE {0}".format(quote_ident(dbname))) def drop_database(self, dbname): """Drop a database from InfluxDB. @@ -493,7 +493,7 @@ def drop_database(self, dbname): :param dbname: the name of the database to drop :type dbname: str """ - self.query("DROP DATABASE \"%s\"" % dbname) + self.query("DROP DATABASE {0}".format(quote_ident(dbname))) def create_retention_policy(self, name, duration, replication, database=None, default=False): @@ -517,9 +517,10 @@ def create_retention_policy(self, name, duration, replication, :type default: bool """ query_string = \ - "CREATE RETENTION POLICY \"%s\" ON \"%s\" " \ - "DURATION %s REPLICATION %s" % \ - (name, database or self._database, duration, replication) + "CREATE RETENTION POLICY {0} ON {1} " \ + "DURATION {2} REPLICATION {3}".format( + quote_ident(name), quote_ident(database or self._database), + duration, replication) if default is True: query_string += " DEFAULT" @@ -597,8 +598,15 @@ def get_list_retention_policies(self, database=None): u'name': u'default', u'replicaN': 1}] """ + + if not (database or self._database): + raise InfluxDBClientError( + "get_list_retention_policies() requires a database as a " + "parameter or the client to be using a database") + rsp = self.query( - "SHOW RETENTION POLICIES ON \"%s\"" % (database or self._database) + "SHOW RETENTION POLICIES ON {0}".format( + quote_ident(database or self._database)) ) return list(rsp.get_points()) diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index 84b7eb77..f586df3f 100644 
--- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -591,7 +591,7 @@ def test_get_list_retention_policies(self): text=example_response ) self.assertListEqual( - self.cli.get_list_retention_policies(), + self.cli.get_list_retention_policies("db"), [{'duration': '24h0m0s', 'name': 'fsfdsdf', 'replicaN': 2}] ) From 6ded47ba04f594cc8be301e4e2997f0a3969c5bf Mon Sep 17 00:00:00 2001 From: aviau Date: Wed, 7 Dec 2016 10:48:44 -0500 Subject: [PATCH 358/536] 4.0.0 --- influxdb/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/__init__.py b/influxdb/__init__.py index fc2e4261..00739c49 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -17,4 +17,4 @@ ] -__version__ = '3.0.0' +__version__ = '4.0.0' From 9812d0fd39315136f5f5f5d15725ca73d9a5606a Mon Sep 17 00:00:00 2001 From: Emmanuel Levijarvi Date: Wed, 7 Dec 2016 09:29:01 -0800 Subject: [PATCH 359/536] add DataFrameClient test for DSN constructor --- influxdb/tests/dataframe_client_test.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/influxdb/tests/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py index 782e5c82..dfff0df2 100644 --- a/influxdb/tests/dataframe_client_test.py +++ b/influxdb/tests/dataframe_client_test.py @@ -553,3 +553,8 @@ def test_datetime_to_epoch(self): cli._datetime_to_epoch(timestamp, time_precision='n'), 1356998400000000000.0 ) + + def test_dsn_constructor(self): + client = DataFrameClient.from_DSN('influxdb://localhost:8086') + self.assertIsInstance(client, DataFrameClient) + self.assertEqual('http://localhost:8086', client._baseurl) From 21d025e913e8e187496c67cfb878c40465ad3bd0 Mon Sep 17 00:00:00 2001 From: h0bot Date: Wed, 14 Dec 2016 21:43:59 +0000 Subject: [PATCH 360/536] Sending via UDP line protocol throws ... due to an uninitialized data variable. 
--- influxdb/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/client.py b/influxdb/client.py index ab9aa409..2dfd5f38 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -779,7 +779,7 @@ def send_packet(self, packet, protocol='json'): if protocol == 'json': data = make_lines(packet).encode('utf-8') elif protocol == 'line': - data = ('\n'.join(data) + '\n').encode('utf-8') + data = ('\n'.join(packet) + '\n').encode('utf-8') self.udp_socket.sendto(data, (self._host, self.udp_port)) From e334519cca403ce7bedf0e96a99543671219e742 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Christoffer=20Andersen?= Date: Mon, 2 Jan 2017 12:52:15 +0100 Subject: [PATCH 361/536] Space padded select-queries now return DataFrames Fixed an issue where space padded select-queries are interpreted as being non-select-queries thus falling back to returning a ResultSet instead of a dictionary of panda DataFrames. --- influxdb/_dataframe_client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index ef1fa78f..4b96250f 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -140,7 +140,7 @@ def query(self, query, chunked=False, database=None): """ results = super(DataFrameClient, self).query(query, database=database) - if query.upper().startswith("SELECT"): + if query.strip().upper().startswith("SELECT"): if len(results) > 0: return self._to_dataframe(results) else: From 1df262cd63204e881f00f942d6aab919e47275ff Mon Sep 17 00:00:00 2001 From: Panos Date: Thu, 16 Feb 2017 14:55:32 +0000 Subject: [PATCH 362/536] Added chunked query responses implementation and test. Added chunked parameter to client query function. 
--- influxdb/client.py | 16 +++++++++++++++- influxdb/tests/client_test.py | 33 ++++++++++++++++++++++++++++++++- 2 files changed, 47 insertions(+), 2 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index ab9aa409..c4f70272 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -293,13 +293,23 @@ def write(self, data, params=None, expected_response_code=204, ) return True + def _read_chunked_response(self, response, raise_errors=True): + for line in response.iter_lines(): + # import ipdb; ipdb.set_trace() + if isinstance(line, bytes): + line = line.decode('utf-8') + data = json.loads(line) + for result in data.get('results', []): + yield ResultSet(result, raise_errors=raise_errors) + def query(self, query, params=None, epoch=None, expected_response_code=200, database=None, - raise_errors=True): + raise_errors=True, + chunked=False): """Send a query to InfluxDB. :param query: the actual query string @@ -339,6 +349,10 @@ def query(self, expected_response_code=expected_response_code ) + if chunked or 'chunked' in params: + params['chunked'] = 'true' + return self._read_chunked_response(response) + data = response.json() results = [ diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index f586df3f..def6ccb2 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -32,7 +32,7 @@ import unittest from influxdb import InfluxDBClient - +from influxdb.resultset import ResultSet def _build_response_object(status_code=200, content=""): resp = requests.Response() @@ -792,6 +792,37 @@ def test_invalid_port_fails(self): with self.assertRaises(ValueError): InfluxDBClient('host', '80/redir', 'username', 'password') + def test_chunked_response(self): + example_response = u'{"results":[{"statement_id":0,"series": ' \ + '[{"name":"cpu","columns":["fieldKey","fieldType"],"values":' \ + '[["value","integer"]]}],"partial":true}]}\n{"results":' \ + '[{"statement_id":0,"series":[{"name":"iops","columns":' \ + 
'["fieldKey","fieldType"],"values":[["value","integer"]]}],' \ + '"partial":true}]}\n{"results":[{"statement_id":0,"series":' \ + '[{"name":"load","columns":["fieldKey","fieldType"],"values":' \ + '[["value","integer"]]}],"partial":true}]}\n{"results":' \ + '[{"statement_id":0,"series":[{"name":"memory","columns":' \ + '["fieldKey","fieldType"],"values":[["value","integer"]]}]}]}\n' + + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.GET, + "http://localhost:8086/query", + text=example_response + ) + response = list(self.cli.query('show series limit 4 offset 0', chunked=True)) + self.assertTrue(len(response) == 4) + self.assertEqual(response[0].raw, ResultSet( + {"statement_id":0, + "series": [{"name":"cpu","columns":["fieldKey","fieldType"], + "values": [["value","integer"]]}],"partial":True} + ).raw) + self.assertEqual(response[3].raw, ResultSet( + {"statement_id":0, + "series":[{"name":"memory","columns": + ["fieldKey","fieldType"], + "values":[["value","integer"]]}]} + ).raw) class FakeClient(InfluxDBClient): From a7c963c779c00b47fd8edf7846b5a2d5d763ecb2 Mon Sep 17 00:00:00 2001 From: Panos Date: Thu, 16 Feb 2017 15:26:37 +0000 Subject: [PATCH 363/536] Pep8 changes --- influxdb/tests/client_test.py | 46 ++++++++++++++++++++--------------- 1 file changed, 26 insertions(+), 20 deletions(-) diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index def6ccb2..6886652c 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -34,6 +34,7 @@ from influxdb import InfluxDBClient from influxdb.resultset import ResultSet + def _build_response_object(status_code=200, content=""): resp = requests.Response() resp.status_code = status_code @@ -793,16 +794,17 @@ def test_invalid_port_fails(self): InfluxDBClient('host', '80/redir', 'username', 'password') def test_chunked_response(self): - example_response = u'{"results":[{"statement_id":0,"series": ' \ - 
'[{"name":"cpu","columns":["fieldKey","fieldType"],"values":' \ - '[["value","integer"]]}],"partial":true}]}\n{"results":' \ - '[{"statement_id":0,"series":[{"name":"iops","columns":' \ - '["fieldKey","fieldType"],"values":[["value","integer"]]}],' \ - '"partial":true}]}\n{"results":[{"statement_id":0,"series":' \ - '[{"name":"load","columns":["fieldKey","fieldType"],"values":' \ - '[["value","integer"]]}],"partial":true}]}\n{"results":' \ - '[{"statement_id":0,"series":[{"name":"memory","columns":' \ - '["fieldKey","fieldType"],"values":[["value","integer"]]}]}]}\n' + example_response = \ + u'{"results":[{"statement_id":0,"series":' \ + '[{"name":"cpu","columns":["fieldKey","fieldType"],"values":' \ + '[["value","integer"]]}],"partial":true}]}\n{"results":' \ + '[{"statement_id":0,"series":[{"name":"iops","columns":' \ + '["fieldKey","fieldType"],"values":[["value","integer"]]}],' \ + '"partial":true}]}\n{"results":[{"statement_id":0,"series":' \ + '[{"name":"load","columns":["fieldKey","fieldType"],"values":' \ + '[["value","integer"]]}],"partial":true}]}\n{"results":' \ + '[{"statement_id":0,"series":[{"name":"memory","columns":' \ + '["fieldKey","fieldType"],"values":[["value","integer"]]}]}]}\n' with requests_mock.Mocker() as m: m.register_uri( @@ -810,19 +812,23 @@ def test_chunked_response(self): "http://localhost:8086/query", text=example_response ) - response = list(self.cli.query('show series limit 4 offset 0', chunked=True)) + response = list(self.cli.query('show series limit 4 offset 0', + chunked=True)) self.assertTrue(len(response) == 4) self.assertEqual(response[0].raw, ResultSet( - {"statement_id":0, - "series": [{"name":"cpu","columns":["fieldKey","fieldType"], - "values": [["value","integer"]]}],"partial":True} - ).raw) + {"statement_id": 0, + "series": [{"name": "cpu", + "columns": ["fieldKey", "fieldType"], + "values": [["value", "integer"]]}], + "partial": True} + ).raw) self.assertEqual(response[3].raw, ResultSet( - {"statement_id":0, - 
"series":[{"name":"memory","columns": - ["fieldKey","fieldType"], - "values":[["value","integer"]]}]} - ).raw) + {"statement_id": 0, + "series": [{"name": "memory", + "columns": ["fieldKey", "fieldType"], + "values": [["value", "integer"]]}]} + ).raw) + class FakeClient(InfluxDBClient): From d08be8523ad9459d3692fe029f51a1abb24ebbae Mon Sep 17 00:00:00 2001 From: Panos Date: Thu, 16 Feb 2017 15:26:54 +0000 Subject: [PATCH 364/536] Ignore docs failures on travis --- .travis.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.travis.yml b/.travis.yml index 1dd2c78d..2259dc06 100644 --- a/.travis.yml +++ b/.travis.yml @@ -6,6 +6,9 @@ addons: - wget matrix: + allow_failures: + - python: 3.4 + env: TOX_ENV=docs include: - python: 2.7 env: TOX_ENV=py27 From 49698fc2572ed1c0e3b6bb196f66e90de9e683df Mon Sep 17 00:00:00 2001 From: Panos Date: Thu, 16 Feb 2017 15:30:12 +0000 Subject: [PATCH 365/536] Updated docstrings --- influxdb/client.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/influxdb/client.py b/influxdb/client.py index c4f70272..55656068 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -295,7 +295,6 @@ def write(self, data, params=None, expected_response_code=204, def _read_chunked_response(self, response, raise_errors=True): for line in response.iter_lines(): - # import ipdb; ipdb.set_trace() if isinstance(line, bytes): line = line.decode('utf-8') data = json.loads(line) @@ -329,6 +328,11 @@ def query(self, returns errors, defaults to True :type raise_errors: bool + :param chunked: Enable to use chunked responses from InfluxDB. + With ``chunked`` enabled, a _generator_ of ResultSet objects + is returned as opposed to a list. 
+ :type chunked: bool + :returns: the queried data :rtype: :class:`~.ResultSet` """ From 7dddb0baed32bf41bf0518bcfc5415849e3033da Mon Sep 17 00:00:00 2001 From: Panos Date: Thu, 16 Feb 2017 15:40:51 +0000 Subject: [PATCH 366/536] Added chunk size parameter --- influxdb/client.py | 8 +++++++- influxdb/tests/client_test.py | 2 +- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 55656068..bd85d96c 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -308,7 +308,8 @@ def query(self, expected_response_code=200, database=None, raise_errors=True, - chunked=False): + chunked=False, + chunk_size=0): """Send a query to InfluxDB. :param query: the actual query string @@ -333,6 +334,9 @@ def query(self, is returned as opposed to a list. :type chunked: bool + :param chunk_size: Size of each chunk to tell InfluxDB to use. + :type chunk_size: int + :returns: the queried data :rtype: :class:`~.ResultSet` """ @@ -355,6 +359,8 @@ def query(self, if chunked or 'chunked' in params: params['chunked'] = 'true' + if chunk_size > 0: + params['chunk_size'] = chunk_size return self._read_chunked_response(response) data = response.json() diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index 6886652c..c5a87ae8 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -813,7 +813,7 @@ def test_chunked_response(self): text=example_response ) response = list(self.cli.query('show series limit 4 offset 0', - chunked=True)) + chunked=True, chunk_size=4)) self.assertTrue(len(response) == 4) self.assertEqual(response[0].raw, ResultSet( {"statement_id": 0, From ffd1af91025c18d162713b40be800b88609b16f9 Mon Sep 17 00:00:00 2001 From: Panos Date: Thu, 16 Feb 2017 16:14:04 +0000 Subject: [PATCH 367/536] Fix chunked and chunk size parameter set --- influxdb/client.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 
bd85d96c..28f6ae11 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -349,6 +349,11 @@ def query(self, if epoch is not None: params['epoch'] = epoch + if chunked: + params['chunked'] = 'true' + if chunk_size > 0: + params['chunk_size'] = chunk_size + response = self.request( url="query", method='GET', @@ -357,10 +362,7 @@ def query(self, expected_response_code=expected_response_code ) - if chunked or 'chunked' in params: - params['chunked'] = 'true' - if chunk_size > 0: - params['chunk_size'] = chunk_size + if chunked: return self._read_chunked_response(response) data = response.json() From 3625e4ce605ac2cbf049ae52602475de20b7e24b Mon Sep 17 00:00:00 2001 From: Panos Date: Tue, 21 Feb 2017 16:52:38 +0000 Subject: [PATCH 368/536] Make one result set per chunk for improved performance and API compatibility with non-chunked responses --- influxdb/client.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 28f6ae11..0698c871 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -294,12 +294,16 @@ def write(self, data, params=None, expected_response_code=204, return True def _read_chunked_response(self, response, raise_errors=True): + result_set = {} for line in response.iter_lines(): if isinstance(line, bytes): line = line.decode('utf-8') data = json.loads(line) for result in data.get('results', []): - yield ResultSet(result, raise_errors=raise_errors) + for _key in result: + if type(result[_key]) == list: + result_set.setdefault(_key, []).extend(result[_key]) + return ResultSet(result_set, raise_errors=raise_errors) def query(self, query, @@ -330,8 +334,8 @@ def query(self, :type raise_errors: bool :param chunked: Enable to use chunked responses from InfluxDB. - With ``chunked`` enabled, a _generator_ of ResultSet objects - is returned as opposed to a list. 
+ With ``chunked`` enabled, one ResultSet is returned per chunk + containing all results within that chunk :type chunked: bool :param chunk_size: Size of each chunk to tell InfluxDB to use. From 0c2e1718c99f2246c5d49f10f28154f0bf61a485 Mon Sep 17 00:00:00 2001 From: Panos Date: Tue, 21 Feb 2017 17:16:35 +0000 Subject: [PATCH 369/536] Updated chunked responses test for API change --- influxdb/tests/client_test.py | 31 ++++++++++++++++--------------- 1 file changed, 16 insertions(+), 15 deletions(-) diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index c5a87ae8..0ba04f4a 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -812,22 +812,23 @@ def test_chunked_response(self): "http://localhost:8086/query", text=example_response ) - response = list(self.cli.query('show series limit 4 offset 0', - chunked=True, chunk_size=4)) + response = self.cli.query('show series limit 4 offset 0', + chunked=True, chunk_size=4) self.assertTrue(len(response) == 4) - self.assertEqual(response[0].raw, ResultSet( - {"statement_id": 0, - "series": [{"name": "cpu", - "columns": ["fieldKey", "fieldType"], - "values": [["value", "integer"]]}], - "partial": True} - ).raw) - self.assertEqual(response[3].raw, ResultSet( - {"statement_id": 0, - "series": [{"name": "memory", - "columns": ["fieldKey", "fieldType"], - "values": [["value", "integer"]]}]} - ).raw) + self.assertEqual(response.__repr__(), ResultSet( + {'series': [{'values': [['value', 'integer']], + 'name': 'cpu', + 'columns': ['fieldKey', 'fieldType']}, + {'values': [['value', 'integer']], + 'name': 'iops', + 'columns': ['fieldKey', 'fieldType']}, + {'values': [['value', 'integer']], + 'name': 'load', + 'columns': ['fieldKey', 'fieldType']}, + {'values': [['value', 'integer']], + 'name': 'memory', + 'columns': ['fieldKey', 'fieldType']}]} + ).__repr__()) class FakeClient(InfluxDBClient): From 3da299af900167d508534a30716b60788df921b4 Mon Sep 17 00:00:00 2001 From: Matt Date: Wed, 8 
Mar 2017 00:25:42 +0000 Subject: [PATCH 370/536] Adding/updating docstring parameters --- influxdb/client.py | 15 +++++++++++---- influxdb/tests/helper_test.py | 4 ++-- influxdb/tests/influxdb08/helper_test.py | 8 ++++---- influxdb/tests/server_tests/base.py | 8 ++++---- 4 files changed, 21 insertions(+), 14 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 0698c871..7124fe6a 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -205,6 +205,8 @@ def request(self, url, method='GET', params=None, data=None, :param expected_response_code: the expected response code of the request, defaults to 200 :type expected_response_code: int + :param headers: headers to add to the request + :type headers: dict :returns: the response from the request :rtype: :class:`requests.Response` :raises InfluxDBServerError: if the response code is any server error @@ -322,6 +324,11 @@ def query(self, :param params: additional parameters for the request, defaults to {} :type params: dict + :param epoch: response timestamps to be in epoch format either 'h', + 'm', 's', 'ms', 'u', or 'ns',defaults to `None` which is + RFC3339 UTC format with nanosecond precision + :type epoch: str + :param expected_response_code: the expected status code of response, defaults to 200 :type expected_response_code: int @@ -396,7 +403,7 @@ def write_points(self, :param points: the list of points to be written in the database :type points: list of dictionaries, each dictionary represents a point - :type data: (if protocol is 'json') list of dicts, where each dict + :type points: (if protocol is 'json') list of dicts, where each dict represents a point. (if protocol is 'line') sequence of line protocol strings. 
:param time_precision: Either 's', 'm', 'ms' or 'u', defaults to None @@ -575,7 +582,7 @@ def alter_retention_policy(self, name, database=None, :type duration: str :param replication: the new replication of the existing retention policy - :type replication: str + :type replication: int :param default: whether or not to set the modified policy as default :type default: bool @@ -704,9 +711,9 @@ def delete_series(self, database=None, measurement=None, tags=None): deleted, defaults to client's current database :type database: str :param measurement: Delete all series from a measurement - :type id: str + :type measurement: str :param tags: Delete all series that match given tags - :type id: dict + :type tags: dict """ database = database or self._database query_str = 'DROP SERIES' diff --git a/influxdb/tests/helper_test.py b/influxdb/tests/helper_test.py index 10546286..44392f80 100644 --- a/influxdb/tests/helper_test.py +++ b/influxdb/tests/helper_test.py @@ -261,9 +261,9 @@ class Meta: self.assertEqual(point2['time'], yesterday) def testInvalidHelpers(self): - ''' + """ Tests errors in invalid helpers. - ''' + """ class MissingMeta(SeriesHelper): pass diff --git a/influxdb/tests/influxdb08/helper_test.py b/influxdb/tests/influxdb08/helper_test.py index e744d1e4..c9ce311f 100644 --- a/influxdb/tests/influxdb08/helper_test.py +++ b/influxdb/tests/influxdb08/helper_test.py @@ -83,9 +83,9 @@ def testSingleSeriesName(self): 'Resetting helper did not empty datapoints.') def testSeveralSeriesNames(self): - ''' + """ Tests JSON conversion when there is only one series name. - ''' + """ TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=159) TestSeriesHelper.MySeriesHelper(server_name='fr.paris-10', time=158) TestSeriesHelper.MySeriesHelper(server_name='lu.lux', time=157) @@ -116,9 +116,9 @@ def testSeveralSeriesNames(self): 'Resetting helper did not empty datapoints.') def testInvalidHelpers(self): - ''' + """ Tests errors in invalid helpers. 
- ''' + """ class MissingMeta(SeriesHelper): pass diff --git a/influxdb/tests/server_tests/base.py b/influxdb/tests/server_tests/base.py index 3566d7ba..f217fce1 100644 --- a/influxdb/tests/server_tests/base.py +++ b/influxdb/tests/server_tests/base.py @@ -41,9 +41,9 @@ def _teardown_influxdb_server(inst): class SingleTestCaseWithServerMixin(object): - ''' A mixin for unittest.TestCase to start an influxdb server instance + """ A mixin for unittest.TestCase to start an influxdb server instance in a temporary directory **for each test function/case** - ''' + """ # 'influxdb_template_conf' attribute must be set # on the TestCase class or instance. @@ -53,10 +53,10 @@ class SingleTestCaseWithServerMixin(object): class ManyTestCasesWithServerMixin(object): - ''' Same than SingleTestCaseWithServerMixin + """ Same than SingleTestCaseWithServerMixin but creates a single instance for the whole class. Also pre-creates a fresh database: 'db'. - ''' + """ # 'influxdb_template_conf' attribute must be set on the class itself ! 
From 1a7e0fa54c54540fbd0e55108f82074a9a8b6b73 Mon Sep 17 00:00:00 2001 From: Matt Date: Wed, 8 Mar 2017 00:33:53 +0000 Subject: [PATCH 371/536] Removed mutable default arguments --- influxdb/_dataframe_client.py | 4 ++-- influxdb/tests/client_test.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index ef1fa78f..f8fcc949 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -227,7 +227,7 @@ def _convert_dataframe_to_lines(self, measurement, field_columns=None, tag_columns=None, - global_tags={}, + global_tags=None, time_precision=None, numeric_precision=None): @@ -366,7 +366,7 @@ def _stringify_dataframe(self, if datatype == 'field': # If dealing with fields, format ints and strings correctly - dataframe[int_columns] = dataframe[int_columns] + 'i' + dataframe[int_columns] += 'i' dataframe[string_columns] = '"' + dataframe[string_columns] + '"' elif datatype == 'tag': dataframe = dataframe.apply(_escape_pandas_series) diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index 0ba04f4a..e92aabdc 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -838,7 +838,7 @@ def __init__(self, *args, **kwargs): def query(self, query, - params={}, + params=None, expected_response_code=200, database=None): if query == 'Fail': From 268f2a78f8e3ec1f4fb7a1e57599b6812a148d7e Mon Sep 17 00:00:00 2001 From: Matt Date: Wed, 8 Mar 2017 01:15:26 +0000 Subject: [PATCH 372/536] Simplified comparison in client --- influxdb/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/client.py b/influxdb/client.py index 7124fe6a..f72d6e7e 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -247,7 +247,7 @@ def request(self, url, method='GET', params=None, data=None, else: raise e - if response.status_code >= 500 and response.status_code < 600: + if 500 <= response.status_code < 600: raise 
InfluxDBServerError(response.content) elif response.status_code == expected_response_code: return response From da39921f94a90e330c796e0ad9ff861e9fc5a8af Mon Sep 17 00:00:00 2001 From: Jack Zampolin Date: Tue, 21 Mar 2017 12:05:48 -0700 Subject: [PATCH 373/536] Add correct protocol to the Pandas client @nicolajkirchhof Is this what you were talking about? --- examples/tutorial_pandas.py | 1 + 1 file changed, 1 insertion(+) diff --git a/examples/tutorial_pandas.py b/examples/tutorial_pandas.py index c9a09fde..91a5634e 100644 --- a/examples/tutorial_pandas.py +++ b/examples/tutorial_pandas.py @@ -8,6 +8,7 @@ def main(host='localhost', port=8086): user = 'root' password = 'root' dbname = 'example' + protocol = 'json' client = DataFrameClient(host, port, user, password, dbname) From b322ca275b4da3e316ad64f8f91560e1584ea7bb Mon Sep 17 00:00:00 2001 From: Isil Demir Date: Tue, 21 Mar 2017 17:58:06 -0700 Subject: [PATCH 374/536] fix for lost precision on float field values --- influxdb/line_protocol.py | 10 ++++++++++ influxdb/tests/test_line_protocol.py | 16 ++++++++++++++++ 2 files changed, 26 insertions(+) diff --git a/influxdb/line_protocol.py b/influxdb/line_protocol.py index 7c8c8f24..180aea31 100644 --- a/influxdb/line_protocol.py +++ b/influxdb/line_protocol.py @@ -75,12 +75,22 @@ def quote_literal(value): ) +def _is_float(value): + try: + float(value) + except ValueError: + return False + return True + + def _escape_value(value): value = _get_unicode(value) if isinstance(value, text_type) and value != '': return quote_ident(value) elif isinstance(value, integer_types) and not isinstance(value, bool): return str(value) + 'i' + elif _is_float(value): + return repr(value) else: return str(value) diff --git a/influxdb/tests/test_line_protocol.py b/influxdb/tests/test_line_protocol.py index 726f8705..fc45971e 100644 --- a/influxdb/tests/test_line_protocol.py +++ b/influxdb/tests/test_line_protocol.py @@ -119,3 +119,19 @@ def test_quote_literal(self): 
line_protocol.quote_literal(r"""\foo ' bar " Örf"""), r"""'\\foo \' bar " Örf'""" ) + + def test_float_with_long_decimal_fraction(self): + data = { + "points": [ + { + "measurement": "test", + "fields": { + "float_val": 1.0000000000000009, + } + } + ] + } + self.assertEqual( + line_protocol.make_lines(data), + 'test float_val=1.0000000000000009\n' + ) From 889ef09125a5f1b2dc39e6e42f5532c98d44a746 Mon Sep 17 00:00:00 2001 From: nicolajkirchhof Date: Wed, 22 Mar 2017 19:17:37 +0100 Subject: [PATCH 375/536] Corrected example Tested against influxdb-python version 4.0.0 --- examples/tutorial_pandas.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/examples/tutorial_pandas.py b/examples/tutorial_pandas.py index 91a5634e..86dc48f7 100644 --- a/examples/tutorial_pandas.py +++ b/examples/tutorial_pandas.py @@ -5,9 +5,12 @@ def main(host='localhost', port=8086): + host='localhost' + port=8086 user = 'root' password = 'root' - dbname = 'example' + dbname = 'demo' + # Temporarily used to avoid line protocol time conversion issues #412, #426, #431. 
protocol = 'json' client = DataFrameClient(host, port, user, password, dbname) @@ -21,16 +24,16 @@ def main(host='localhost', port=8086): client.create_database(dbname) print("Write DataFrame") - client.write_points(df, 'demo') + client.write_points(df, 'demo', protocol=protocol) print("Write DataFrame with Tags") - client.write_points(df, 'demo', {'k1': 'v1', 'k2': 'v2'}) + client.write_points(df, 'demo', {'k1': 'v1', 'k2': 'v2'}, protocol=protocol) print("Read DataFrame") client.query("select * from demo") print("Delete database: " + dbname) - client.delete_database(dbname) + client.drop_database(dbname) def parse_args(): From 449d02eb1b5e2a532d3c323f09195242cf44f2b8 Mon Sep 17 00:00:00 2001 From: nicolajkirchhof Date: Wed, 22 Mar 2017 19:23:58 +0100 Subject: [PATCH 376/536] Removed obsolete code --- examples/tutorial_pandas.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/examples/tutorial_pandas.py b/examples/tutorial_pandas.py index 86dc48f7..855f5740 100644 --- a/examples/tutorial_pandas.py +++ b/examples/tutorial_pandas.py @@ -5,8 +5,6 @@ def main(host='localhost', port=8086): - host='localhost' - port=8086 user = 'root' password = 'root' dbname = 'demo' From 99a5a65cc82f908a84a9b8780a40e449259e4c06 Mon Sep 17 00:00:00 2001 From: Ron Rothman Date: Tue, 28 Mar 2017 12:26:31 -0400 Subject: [PATCH 377/536] reduced the number of objects allocated in make_lines. no change in functionality; only efficiency. 
--- influxdb/line_protocol.py | 28 ++++++++++++---------------- 1 file changed, 12 insertions(+), 16 deletions(-) diff --git a/influxdb/line_protocol.py b/influxdb/line_protocol.py index 7c8c8f24..2042add4 100644 --- a/influxdb/line_protocol.py +++ b/influxdb/line_protocol.py @@ -5,13 +5,12 @@ from __future__ import print_function from __future__ import unicode_literals -from copy import copy from datetime import datetime from numbers import Integral from pytz import UTC from dateutil.parser import parse -from six import binary_type, text_type, integer_types, PY2 +from six import iteritems, binary_type, text_type, integer_types, PY2 EPOCH = UTC.localize(datetime.utcfromtimestamp(0)) @@ -108,7 +107,7 @@ def make_lines(data, precision=None): matching the line protocol introduced in InfluxDB 0.9.0. """ lines = [] - static_tags = data.get('tags', None) + static_tags = data.get('tags') for point in data['points']: elements = [] @@ -119,32 +118,29 @@ def make_lines(data, precision=None): key_values = [measurement] # add tags - if static_tags is None: - tags = point.get('tags', {}) + if static_tags: + tags = dict(static_tags) # make a copy, since we'll modify + tags.update(point.get('tags') or {}) else: - tags = copy(static_tags) - tags.update(point.get('tags', {})) + tags = point.get('tags') or {} # tags should be sorted client-side to take load off server - for tag_key in sorted(tags.keys()): + for tag_key, tag_value in sorted(iteritems(tags)): key = _escape_tag(tag_key) - value = _escape_tag(tags[tag_key]) + value = _escape_tag(tag_value) if key != '' and value != '': - key_values.append("{key}={value}".format(key=key, value=value)) + key_values.append(key + "=" + value) key_values = ','.join(key_values) elements.append(key_values) # add fields field_values = [] - for field_key in sorted(point['fields'].keys()): + for field_key, field_value in sorted(iteritems(point['fields'])): key = _escape_tag(field_key) - value = _escape_value(point['fields'][field_key]) + value = 
_escape_value(field_value) if key != '' and value != '': - field_values.append("{key}={value}".format( - key=key, - value=value - )) + field_values.append(key + "=" + value) field_values = ','.join(field_values) elements.append(field_values) From eb049f197e76575e53ff883ecd2eff05c519da00 Mon Sep 17 00:00:00 2001 From: Isil Demir Date: Wed, 5 Apr 2017 17:14:46 -0700 Subject: [PATCH 378/536] docstring for pep257 compliance --- influxdb/tests/test_line_protocol.py | 1 + 1 file changed, 1 insertion(+) diff --git a/influxdb/tests/test_line_protocol.py b/influxdb/tests/test_line_protocol.py index fc45971e..61f54557 100644 --- a/influxdb/tests/test_line_protocol.py +++ b/influxdb/tests/test_line_protocol.py @@ -121,6 +121,7 @@ def test_quote_literal(self): ) def test_float_with_long_decimal_fraction(self): + """Ensure precision is preserved when casting floats into strings.""" data = { "points": [ { From 194ef13cf802854eab4c6d2accaa6c81c45239d1 Mon Sep 17 00:00:00 2001 From: Matthew McGinn Date: Sun, 9 Apr 2017 21:51:53 -0400 Subject: [PATCH 379/536] Allow user-configurable number of retries when submitting request to database --- influxdb/client.py | 19 ++++++++++++------- influxdb/influxdb08/client.py | 21 +++++++++++++-------- 2 files changed, 25 insertions(+), 15 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index f72d6e7e..05245e7b 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -53,6 +53,9 @@ class InfluxDBClient(object): :param timeout: number of seconds Requests will wait for your client to establish a connection, defaults to None :type timeout: int + :param retries: number of retries your client will try before aborting, + defaults to 3. 
0 indicates try until success + :type retries: int :param use_udp: use UDP to connect to InfluxDB, defaults to False :type use_udp: bool :param udp_port: UDP port to connect to InfluxDB, defaults to 4444 @@ -70,6 +73,7 @@ def __init__(self, ssl=False, verify_ssl=False, timeout=None, + retries=3, use_udp=False, udp_port=4444, proxies=None, @@ -81,6 +85,7 @@ def __init__(self, self._password = password self._database = database self._timeout = timeout + self._retries = retries self._verify_ssl = verify_ssl @@ -225,9 +230,10 @@ def request(self, url, method='GET', params=None, data=None, if isinstance(data, (dict, list)): data = json.dumps(data) - # Try to send the request a maximum of three times. (see #103) - # TODO (aviau): Make this configurable. - for i in range(0, 3): + # Try to send the request more than once by default (see #103) + retry = True + _try = 0 + while retry: try: response = self._session.request( method=method, @@ -242,10 +248,9 @@ def request(self, url, method='GET', params=None, data=None, ) break except requests.exceptions.ConnectionError as e: - if i < 2: - continue - else: - raise e + _try += 1 + if self._retries != 0: + retry = _try < self._retries if 500 <= response.status_code < 600: raise InfluxDBServerError(response.content) diff --git a/influxdb/influxdb08/client.py b/influxdb/influxdb08/client.py index a3b31639..7797884b 100644 --- a/influxdb/influxdb08/client.py +++ b/influxdb/influxdb08/client.py @@ -55,6 +55,9 @@ class InfluxDBClient(object): :param verify_ssl: verify SSL certificates for HTTPS requests, defaults is False :type verify_ssl: boolean + :param retries: number of retries your client will try before aborting, + defaults to 3. 
0 indicates try until success + :type retries: int :param timeout: number of seconds Requests will wait for your client to establish a connection, defaults to None :type timeout: int @@ -73,6 +76,7 @@ def __init__(self, ssl=False, verify_ssl=False, timeout=None, + retries=3, use_udp=False, udp_port=4444): """ @@ -84,6 +88,7 @@ def __init__(self, self._password = password self._database = database self._timeout = timeout + self._retries = retries self._verify_ssl = verify_ssl @@ -228,10 +233,11 @@ def request(self, url, method='GET', params=None, data=None, if data is not None and not isinstance(data, str): data = json.dumps(data) - # Try to send the request a maximum of three times. (see #103) - # TODO (aviau): Make this configurable. - for i in range(0, 3): - try: + retry = True + _try = 0 + # Try to send the request more than once by default (see #103) + while retry: + try: response = session.request( method=method, url=url, @@ -244,10 +250,9 @@ def request(self, url, method='GET', params=None, data=None, break except (requests.exceptions.ConnectionError, requests.exceptions.Timeout) as e: - if i < 2: - continue - else: - raise e + _try += 1 + if self._retries != 0: + retry = _try < self._retries if response.status_code == expected_response_code: return response From 93b91e461114f87744c4b0998b30736998ff2b39 Mon Sep 17 00:00:00 2001 From: Matthew McGinn Date: Tue, 11 Apr 2017 23:39:10 -0400 Subject: [PATCH 380/536] fixing indentation, adding tests to confirm connection behavior --- influxdb/client.py | 3 ++ influxdb/influxdb08/client.py | 6 ++-- influxdb/tests/client_test.py | 54 +++++++++++++++++++++++++++++++++++ 3 files changed, 61 insertions(+), 2 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 05245e7b..6c9ed1b9 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -252,6 +252,9 @@ def request(self, url, method='GET', params=None, data=None, if self._retries != 0: retry = _try < self._retries + else: + raise 
requests.exceptions.ConnectionError + if 500 <= response.status_code < 600: raise InfluxDBServerError(response.content) elif response.status_code == expected_response_code: diff --git a/influxdb/influxdb08/client.py b/influxdb/influxdb08/client.py index 7797884b..00cf4fe5 100644 --- a/influxdb/influxdb08/client.py +++ b/influxdb/influxdb08/client.py @@ -248,11 +248,13 @@ def request(self, url, method='GET', params=None, data=None, timeout=self._timeout ) break - except (requests.exceptions.ConnectionError, - requests.exceptions.Timeout) as e: + except (requests.exceptions.ConnectionError, + requests.exceptions.Timeout) as e: _try += 1 if self._retries != 0: retry = _try < self._retries + else: + raise requests.exceptions.ConnectionError if response.status_code == expected_response_code: return response diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index e92aabdc..f998c4e9 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -645,6 +645,60 @@ def connection_error(self, *args, **kwargs): with self.assertRaises(requests.exceptions.ConnectionError): cli.write_points(self.dummy_points) + @mock.patch('requests.Session.request') + def test_random_request_retry(self, mock_request): + """Tests that a random number of connection errors will be handled""" + + class CustomMock(object): + def __init__(self, retries): + self.i = 0 + self.retries = retries + + def connection_error(self, *args, **kwargs): + self.i += 1 + + if self.i < self.retries: + raise requests.exceptions.ConnectionError + else: + r = requests.Response() + r.status_code = 204 + return r + + retries = random.randint(1, 100) + mock_request.side_effect = CustomMock(retries).connection_error + + cli = InfluxDBClient(database='db', retries=retries) + cli.write_points( + self.dummy_points + ) + + @mock.patch('requests.Session.request') + def test_random_request_retry_raises(self, mock_request): + """Tests that a random number of connection errors plus one 
will be not handled""" + + class CustomMock(object): + def __init__(self, retries): + self.i = 0 + self.retries = retries + + def connection_error(self, *args, **kwargs): + self.i += 1 + + if self.i < self.retries + 1: + raise requests.exceptions.ConnectionError + else: + r = requests.Response() + r.status_code = 200 + return r + + retries = random.randint(1, 100) + mock_request.side_effect = CustomMock(retries).connection_error + + cli = InfluxDBClient(database='db', retries=retries) + + with self.assertRaises(requests.exceptions.ConnectionError): + cli.write_points(self.dummy_points) + def test_get_list_users(self): example_response = ( '{"results":[{"series":[{"columns":["user","admin"],' From 74ce6444182f4e588d77faf92a4696900458141a Mon Sep 17 00:00:00 2001 From: Matthew McGinn Date: Tue, 11 Apr 2017 23:45:19 -0400 Subject: [PATCH 381/536] fixing flake8 failures --- influxdb/influxdb08/client.py | 6 +++--- influxdb/tests/client_test.py | 3 ++- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/influxdb/influxdb08/client.py b/influxdb/influxdb08/client.py index 00cf4fe5..8955ab76 100644 --- a/influxdb/influxdb08/client.py +++ b/influxdb/influxdb08/client.py @@ -237,7 +237,7 @@ def request(self, url, method='GET', params=None, data=None, _try = 0 # Try to send the request more than once by default (see #103) while retry: - try: + try: response = session.request( method=method, url=url, @@ -248,8 +248,8 @@ def request(self, url, method='GET', params=None, data=None, timeout=self._timeout ) break - except (requests.exceptions.ConnectionError, - requests.exceptions.Timeout) as e: + except (requests.exceptions.ConnectionError, + requests.exceptions.Timeout) as e: _try += 1 if self._retries != 0: retry = _try < self._retries diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index f998c4e9..e319c0e5 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -674,7 +674,8 @@ def connection_error(self, *args, 
**kwargs): @mock.patch('requests.Session.request') def test_random_request_retry_raises(self, mock_request): - """Tests that a random number of connection errors plus one will be not handled""" + """Tests that a random number of connection errors plus one \ + will not be handled""" class CustomMock(object): def __init__(self, retries): From 262e1cf6ac40c087089f7d794e60eb8f23e9abe8 Mon Sep 17 00:00:00 2001 From: aviau Date: Wed, 12 Apr 2017 03:09:15 -0400 Subject: [PATCH 382/536] 4.1.0 tag --- influxdb/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/__init__.py b/influxdb/__init__.py index 00739c49..90c049c9 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -17,4 +17,4 @@ ] -__version__ = '4.0.0' +__version__ = '4.1.0' From 87e2dfee6c23203b08fe938d63dadfdbabdc34a2 Mon Sep 17 00:00:00 2001 From: aviau Date: Wed, 12 Apr 2017 03:21:11 -0400 Subject: [PATCH 383/536] tox.ini: bump Sphinx to 1.5.5 --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 856a4717..15060e37 100644 --- a/tox.ini +++ b/tox.ini @@ -26,7 +26,7 @@ commands = nosetests -v --with-coverage --cover-html --cover-package=influxdb [testenv:docs] deps = -r{toxinidir}/requirements.txt pandas - Sphinx==1.2.3 + Sphinx==1.5.5 sphinx_rtd_theme commands = sphinx-build -b html docs/source docs/build From 9f4c7bfba857ed9e14e5a3a437cac0ae0a0b9d2c Mon Sep 17 00:00:00 2001 From: aviau Date: Wed, 12 Apr 2017 03:25:23 -0400 Subject: [PATCH 384/536] dev-requirements.txt: bump sphinx to 1.5.5 --- dev-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev-requirements.txt b/dev-requirements.txt index 78d40a24..7d75102e 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -2,7 +2,7 @@ requests nose mock pandas -Sphinx==1.2.3 +Sphinx==1.5.5 sphinx_rtd_theme wheel twine From bce3c5aea3da0b755e863331a4a803614e10c8fc Mon Sep 17 00:00:00 2001 From: Gustavo Bezerra Date: Wed, 26 Apr 2017 
12:08:23 +0900 Subject: [PATCH 385/536] Add chunked query support for DataFrameClient --- influxdb/_dataframe_client.py | 21 ++++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index 7683aeef..a64ebb32 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -8,6 +8,7 @@ from __future__ import unicode_literals import math +from collections import defaultdict import pandas as pd @@ -131,25 +132,26 @@ def write_points(self, protocol=protocol) return True - def query(self, query, chunked=False, database=None): + def query(self, query, dropna=True, **kwargs): """ Quering data into a DataFrame. - :param chunked: [Optional, default=False] True if the data shall be - retrieved in chunks, False otherwise. + :param query: the actual query string + :param dropna: drop columns where all values are missing + :param **kwargs: additional parameters for ``InfluxDBClient.query`` """ - results = super(DataFrameClient, self).query(query, database=database) + results = super(DataFrameClient, self).query(query, **kwargs) if query.strip().upper().startswith("SELECT"): if len(results) > 0: - return self._to_dataframe(results) + return self._to_dataframe(results, dropna) else: return {} else: return results - def _to_dataframe(self, rs): - result = {} + def _to_dataframe(self, rs, dropna=True): + result = defaultdict(list) if isinstance(rs, list): return map(self._to_dataframe, rs) for key, data in rs.items(): @@ -163,6 +165,11 @@ def _to_dataframe(self, rs): df.set_index('time', inplace=True) df.index = df.index.tz_localize('UTC') df.index.name = None + result[key].append(df) + for key, data in result.items(): + df = pd.concat(data).sort_index() + if dropna: + df.dropna(how='all', axis=1, inplace=True) result[key] = df return result From fddea93e283cc3cbdbcbe37053ed70018dc6c122 Mon Sep 17 00:00:00 2001 From: Andrew Long Date: Mon, 15 May 2017 10:40:27 -0400 Subject: 
[PATCH 386/536] Fixed typo Line 44, "Queying data: " to "Querying data: " --- examples/tutorial.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/tutorial.py b/examples/tutorial.py index e790ece5..92ae7e3b 100644 --- a/examples/tutorial.py +++ b/examples/tutorial.py @@ -41,7 +41,7 @@ def main(host='localhost', port=8086): print("Write points: {0}".format(json_body)) client.write_points(json_body) - print("Queying data: " + query) + print("Querying data: " + query) result = client.query(query) print("Result: {0}".format(result)) From a2ac2e8118c9220419eaf483685eeda491335612 Mon Sep 17 00:00:00 2001 From: Sebastian Borza Date: Thu, 1 Jun 2017 23:25:57 -0500 Subject: [PATCH 387/536] updating deps for py27, including pandas --- .travis.yml | 2 +- influxdb/_dataframe_client.py | 10 +++++----- influxdb/influxdb08/dataframe_client.py | 6 +++--- requirements.txt | 6 +++--- setup.py | 2 +- 5 files changed, 13 insertions(+), 13 deletions(-) diff --git a/.travis.yml b/.travis.yml index 2259dc06..c55613cc 100644 --- a/.travis.yml +++ b/.travis.yml @@ -31,7 +31,7 @@ install: - pip install tox - pip install coveralls - mkdir influxdb_install - - wget https://dl.influxdata.com/influxdb/releases/influxdb_1.1.0_amd64.deb + - wget https://dl.influxdata.com/influxdb/releases/influxdb_1.2.4_amd64.deb - dpkg -x influxdb*.deb influxdb_install script: - export INFLUXDB_PYTHON_INFLUXD_PATH=$(pwd)/influxdb_install/usr/bin/influxd diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index 7683aeef..df187513 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -177,8 +177,8 @@ def _convert_dataframe_to_json(self, if not isinstance(dataframe, pd.DataFrame): raise TypeError('Must be DataFrame, but type was: {0}.' 
.format(type(dataframe))) - if not (isinstance(dataframe.index, pd.tseries.period.PeriodIndex) or - isinstance(dataframe.index, pd.tseries.index.DatetimeIndex)): + if not (isinstance(dataframe.index, pd.PeriodIndex) or + isinstance(dataframe.index, pd.DatetimeIndex)): raise TypeError('Must be DataFrame with DatetimeIndex or \ PeriodIndex.') @@ -234,8 +234,8 @@ def _convert_dataframe_to_lines(self, if not isinstance(dataframe, pd.DataFrame): raise TypeError('Must be DataFrame, but type was: {0}.' .format(type(dataframe))) - if not (isinstance(dataframe.index, pd.tseries.period.PeriodIndex) or - isinstance(dataframe.index, pd.tseries.index.DatetimeIndex)): + if not (isinstance(dataframe.index, pd.PeriodIndex) or + isinstance(dataframe.index, pd.DatetimeIndex)): raise TypeError('Must be DataFrame with DatetimeIndex or \ PeriodIndex.') @@ -279,7 +279,7 @@ def _convert_dataframe_to_lines(self, }.get(time_precision, 1) # Make array of timestamp ints - if isinstance(dataframe.index, pd.tseries.period.PeriodIndex): + if isinstance(dataframe.index, pd.PeriodIndex): time = ((dataframe.index.to_timestamp().values.astype(int) / precision_factor).astype(int).astype(str)) else: diff --git a/influxdb/influxdb08/dataframe_client.py b/influxdb/influxdb08/dataframe_client.py index ba302af8..59b033ab 100644 --- a/influxdb/influxdb08/dataframe_client.py +++ b/influxdb/influxdb08/dataframe_client.py @@ -128,12 +128,12 @@ def _convert_dataframe_to_json(self, dataframe, name, time_precision='s'): if not isinstance(dataframe, pd.DataFrame): raise TypeError('Must be DataFrame, but type was: {0}.' 
.format(type(dataframe))) - if not (isinstance(dataframe.index, pd.tseries.period.PeriodIndex) or - isinstance(dataframe.index, pd.tseries.index.DatetimeIndex)): + if not (isinstance(dataframe.index, pd.PeriodIndex) or + isinstance(dataframe.index, pd.DatetimeIndex)): raise TypeError('Must be DataFrame with DatetimeIndex or \ PeriodIndex.') - if isinstance(dataframe.index, pd.tseries.period.PeriodIndex): + if isinstance(dataframe.index, pd.PeriodIndex): dataframe.index = dataframe.index.to_timestamp() else: dataframe.index = pd.to_datetime(dataframe.index) diff --git a/requirements.txt b/requirements.txt index 43c09bd1..db5f6f85 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -python-dateutil>=2.0.0 +python-dateutil>=2.6.0 pytz -requests>=1.0.3 -six>=1.9.0 +requests>=2.17.0 +six>=1.10.0 diff --git a/setup.py b/setup.py index fe09ce36..14b0d943 100755 --- a/setup.py +++ b/setup.py @@ -49,7 +49,7 @@ 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.3', + 'Programming Language :: Python :: 3.4', 'Topic :: Software Development :: Libraries', 'Topic :: Software Development :: Libraries :: Python Modules', ), From 3edc76379ab0e9475534c4f65861b000648617eb Mon Sep 17 00:00:00 2001 From: Sebastian Borza Date: Sun, 4 Jun 2017 23:09:32 -0500 Subject: [PATCH 388/536] Test on py35 and py36 (#458) --- .travis.yml | 14 +++++++------- dev-requirements.txt | 4 ++-- setup.py | 2 ++ tox.ini | 6 +++--- 4 files changed, 14 insertions(+), 12 deletions(-) diff --git a/.travis.yml b/.travis.yml index c55613cc..e7765582 100644 --- a/.travis.yml +++ b/.travis.yml @@ -16,15 +16,15 @@ matrix: env: TOX_ENV=pypy - python: 3.4 env: TOX_ENV=py34 -# An issue in travis-ci prevents this case from running -# Link to issue: https://github.com/travis-ci/travis-ci/issues/6304 -# - python: pypy3.3-5.2-alpha1 -# env: TOX_ENV=pypy3 - - python: 3.4 + - python: 3.5 + env: TOX_ENV=py35 + 
- python: 3.6 + env: TOX_ENV=py36 + - python: 3.6 env: TOX_ENV=docs - - python: 3.4 + - python: 3.6 env: TOX_ENV=flake8 - - python: 3.4 + - python: 3.6 env: TOX_ENV=coverage install: diff --git a/dev-requirements.txt b/dev-requirements.txt index 7d75102e..bc7b4c87 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -1,7 +1,7 @@ -requests +requests>=2.17.0 nose mock -pandas +pandas==0.20.1 Sphinx==1.5.5 sphinx_rtd_theme wheel diff --git a/setup.py b/setup.py index 14b0d943..7e66a779 100755 --- a/setup.py +++ b/setup.py @@ -50,6 +50,8 @@ 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', + 'Programming Language :: Python :: 3.5', + 'Programming Language :: Python :: 3.6', 'Topic :: Software Development :: Libraries', 'Topic :: Software Development :: Libraries :: Python Modules', ), diff --git a/tox.ini b/tox.ini index 15060e37..d9409903 100644 --- a/tox.ini +++ b/tox.ini @@ -1,12 +1,12 @@ [tox] -envlist = py27, py34, pypy, pypy3, flake8, coverage, docs +envlist = py27, py34, py35, py36, pypy, pypy3, flake8, coverage, docs [testenv] passenv = INFLUXDB_PYTHON_INFLUXD_PATH setenv = INFLUXDB_PYTHON_SKIP_SERVER_TESTS=False deps = -r{toxinidir}/requirements.txt -r{toxinidir}/test-requirements.txt - py27,py34: pandas + py27,py34,py35,py36: pandas==0.20.1 # Only install pandas with non-pypy interpreters commands = nosetests -v --with-doctest {posargs} @@ -25,7 +25,7 @@ commands = nosetests -v --with-coverage --cover-html --cover-package=influxdb [testenv:docs] deps = -r{toxinidir}/requirements.txt - pandas + pandas==0.20.1 Sphinx==1.5.5 sphinx_rtd_theme commands = sphinx-build -b html docs/source docs/build From f5c91411f862c2fefde56aca12eb527177f0492c Mon Sep 17 00:00:00 2001 From: aviau Date: Tue, 6 Jun 2017 16:31:47 -0400 Subject: [PATCH 389/536] 4.1.1 tag --- influxdb/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/__init__.py b/influxdb/__init__.py 
index 90c049c9..f5a5f1cb 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -17,4 +17,4 @@ ] -__version__ = '4.1.0' +__version__ = '4.1.1' From 1a466cfab68d55c721555f481c21cdec5d4d1982 Mon Sep 17 00:00:00 2001 From: Danilo Bargen Date: Fri, 9 Jun 2017 11:41:12 +0200 Subject: [PATCH 390/536] Fix typo in docs: insuring -> ensuring --- influxdb/helper.py | 2 +- influxdb/influxdb08/helper.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/influxdb/helper.py b/influxdb/helper.py index 0bae6f92..23d479ed 100644 --- a/influxdb/helper.py +++ b/influxdb/helper.py @@ -18,7 +18,7 @@ class SeriesHelper(object): """ Subclassing this helper eases writing data points in bulk. - All data points are immutable, insuring they do not get overwritten. + All data points are immutable, ensuring they do not get overwritten. Each subclass can write to its own database. The time series names can also be based on one or more defined fields. The field "time" can be specified when creating a point, and may be any of diff --git a/influxdb/influxdb08/helper.py b/influxdb/influxdb08/helper.py index e5827fa1..53e9ca4a 100644 --- a/influxdb/influxdb08/helper.py +++ b/influxdb/influxdb08/helper.py @@ -17,7 +17,7 @@ class SeriesHelper(object): """ Subclassing this helper eases writing data points in bulk. - All data points are immutable, insuring they do not get overwritten. + All data points are immutable, ensuring they do not get overwritten. Each subclass can write to its own database. The time series names can also be based on one or more defined fields. 
From b1e29fba81dc429406d0dbc462999538f0fb4363 Mon Sep 17 00:00:00 2001 From: Samuel Colvin Date: Wed, 7 Jun 2017 11:44:39 +0100 Subject: [PATCH 391/536] update python version in readme --- README.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.rst b/README.rst index a88ce0f2..1ae75e35 100644 --- a/README.rst +++ b/README.rst @@ -43,9 +43,9 @@ On Debian/Ubuntu, you can install it with this command:: Dependencies ============ -The InfluxDB-Python distribution is supported and tested on Python 2.7, 3.3, 3.4, PyPy and PyPy3. +The InfluxDB-Python distribution is supported and tested on Python 2.7, 3.3, 3.4, 3.5, 3.6, PyPy and PyPy3. -**Note:** Python 3.2 is currently untested. See ``.travis.yml``. +**Note:** Python 3.2 is currently untested. See ``.travis.yml``. Main dependency is: From 5f645d3d5c5b7f242215a74e9f671c699d8b8a9d Mon Sep 17 00:00:00 2001 From: Sebastian Borza Date: Thu, 22 Jun 2017 08:07:38 -0500 Subject: [PATCH 392/536] initial commit pep257 compliance Signed-off-by: Sebastian Borza --- docs/source/conf.py | 2 + examples/tutorial.py | 8 +- examples/tutorial_pandas.py | 10 +- examples/tutorial_serieshelper.py | 21 +- examples/tutorial_server_data.py | 33 +-- examples/tutorial_sine_wave.py | 21 +- influxdb/__init__.py | 1 + influxdb/_dataframe_client.py | 141 ++++++------ influxdb/chunked_json.py | 7 +- influxdb/client.py | 97 ++++---- influxdb/dataframe_client.py | 9 +- influxdb/exceptions.py | 7 + influxdb/helper.py | 29 ++- influxdb/influxdb08/__init__.py | 1 + influxdb/influxdb08/chunked_json.py | 7 +- influxdb/influxdb08/client.py | 214 +++++++----------- influxdb/influxdb08/dataframe_client.py | 55 +++-- influxdb/influxdb08/helper.py | 28 ++- influxdb/line_protocol.py | 62 +++-- influxdb/resultset.py | 46 ++-- influxdb/tests/__init__.py | 1 + influxdb/tests/chunked_json_test.py | 12 +- influxdb/tests/client_test.py | 132 +++++++---- influxdb/tests/dataframe_client_test.py | 39 +++- influxdb/tests/helper_test.py | 59 
+++-- influxdb/tests/influxdb08/__init__.py | 1 + influxdb/tests/influxdb08/client_test.py | 108 +++++++-- .../tests/influxdb08/dataframe_client_test.py | 34 ++- influxdb/tests/influxdb08/helper_test.py | 50 ++-- influxdb/tests/misc.py | 6 +- influxdb/tests/resultset_test.py | 13 ++ influxdb/tests/server_tests/__init__.py | 1 + influxdb/tests/server_tests/base.py | 17 +- .../server_tests/client_test_with_server.py | 70 ++++-- .../tests/server_tests/influxdb_instance.py | 20 +- influxdb/tests/test_line_protocol.py | 8 + setup.py | 1 + 37 files changed, 823 insertions(+), 548 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index f55684d5..231c776c 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -1,5 +1,7 @@ # -*- coding: utf-8 -*- # +"""InfluxDB documentation build configuration file.""" + # InfluxDB documentation build configuration file, created by # sphinx-quickstart on Thu Oct 16 00:33:06 2014. # diff --git a/examples/tutorial.py b/examples/tutorial.py index 92ae7e3b..4083bfc5 100644 --- a/examples/tutorial.py +++ b/examples/tutorial.py @@ -1,9 +1,13 @@ +# -*- coding: utf-8 -*- +"""Tutorial on using the InfluxDB client.""" + import argparse from influxdb import InfluxDBClient def main(host='localhost', port=8086): + """Instantiate a connection to the InfluxDB.""" user = 'root' password = 'root' dbname = 'example' @@ -54,9 +58,11 @@ def main(host='localhost', port=8086): def parse_args(): + """Parse the args.""" parser = argparse.ArgumentParser( description='example code to play with InfluxDB') - parser.add_argument('--host', type=str, required=False, default='localhost', + parser.add_argument('--host', type=str, required=False, + default='localhost', help='hostname of InfluxDB http API') parser.add_argument('--port', type=int, required=False, default=8086, help='port of InfluxDB http API') diff --git a/examples/tutorial_pandas.py b/examples/tutorial_pandas.py index 855f5740..9cec910d 100644 --- a/examples/tutorial_pandas.py +++ 
b/examples/tutorial_pandas.py @@ -1,3 +1,6 @@ +# -*- coding: utf-8 -*- +"""Tutorial for using pandas and the InfluxDB client.""" + import argparse import pandas as pd @@ -5,10 +8,11 @@ def main(host='localhost', port=8086): + """Instantiate the connection to the InfluxDB client.""" user = 'root' password = 'root' dbname = 'demo' - # Temporarily used to avoid line protocol time conversion issues #412, #426, #431. + # Temporarily avoid line protocol time conversion issues #412, #426, #431. protocol = 'json' client = DataFrameClient(host, port, user, password, dbname) @@ -25,7 +29,8 @@ def main(host='localhost', port=8086): client.write_points(df, 'demo', protocol=protocol) print("Write DataFrame with Tags") - client.write_points(df, 'demo', {'k1': 'v1', 'k2': 'v2'}, protocol=protocol) + client.write_points(df, 'demo', + {'k1': 'v1', 'k2': 'v2'}, protocol=protocol) print("Read DataFrame") client.query("select * from demo") @@ -35,6 +40,7 @@ def main(host='localhost', port=8086): def parse_args(): + """Parse the args from main.""" parser = argparse.ArgumentParser( description='example code to play with InfluxDB') parser.add_argument('--host', type=str, required=False, diff --git a/examples/tutorial_serieshelper.py b/examples/tutorial_serieshelper.py index d7bd27c9..13929df2 100644 --- a/examples/tutorial_serieshelper.py +++ b/examples/tutorial_serieshelper.py @@ -1,6 +1,5 @@ -""" -Tutorial/Example how to use the class helper `SeriesHelper` -""" +# -*- coding: utf-8 -*- +"""Tutorial how to use the class helper `SeriesHelper`.""" from influxdb import InfluxDBClient from influxdb import SeriesHelper @@ -20,18 +19,28 @@ class MySeriesHelper(SeriesHelper): - # Meta class stores time series helper configuration. + """Instantiate SeriesHelper to write points to the backend.""" + class Meta: + """Meta class stores time series helper configuration.""" + # The client should be an instance of InfluxDBClient. client = myclient - # The series name must be a string. 
Add dependent fields/tags in curly brackets. + + # The series name must be a string. Add dependent fields/tags + # in curly brackets. series_name = 'events.stats.{server_name}' + # Defines all the fields in this time series. fields = ['some_stat', 'other_stat'] + # Defines all the tags for the series. tags = ['server_name'] - # Defines the number of data points to store prior to writing on the wire. + + # Defines the number of data points to store prior to writing + # on the wire. bulk_size = 5 + # autocommit must be set to True when using bulk_size autocommit = True diff --git a/examples/tutorial_server_data.py b/examples/tutorial_server_data.py index f9ed3f5a..cb903fad 100644 --- a/examples/tutorial_server_data.py +++ b/examples/tutorial_server_data.py @@ -1,13 +1,15 @@ +# -*- coding: utf-8 -*- +"""Tutorial on using the server functions.""" from __future__ import print_function import argparse -from influxdb import InfluxDBClient -from influxdb.client import InfluxDBClientError import datetime import random import time +from influxdb import InfluxDBClient +from influxdb.client import InfluxDBClientError USER = 'root' PASSWORD = 'root' @@ -15,7 +17,7 @@ def main(host='localhost', port=8086, nb_day=15): - + """Instantiate a connection to the backend.""" nb_day = 15 # number of day to generate time series timeinterval_min = 5 # create an event every x minutes total_minutes = 1440 * nb_day @@ -30,15 +32,15 @@ def main(host='localhost', port=8086, nb_day=15): hostName = "server-%d" % random.randint(1, 5) # pointValues = [int(past_date.strftime('%s')), value, hostName] pointValues = { - "time": int(past_date.strftime('%s')), - "measurement": metric, - 'fields': { - 'value': value, - }, - 'tags': { - "hostName": hostName, - }, - } + "time": int(past_date.strftime('%s')), + "measurement": metric, + "fields": { + "value": value, + }, + "tags": { + "hostName": hostName, + }, + } series.append(pointValues) print(series) @@ -62,7 +64,8 @@ def main(host='localhost', 
port=8086, nb_day=15): time.sleep(2) - query = "SELECT MEAN(value) FROM {} WHERE time > now() - 10d GROUP BY time(500m)".format(metric) + query = "SELECT MEAN(value) FROM {} WHERE \ + time > now() - 10d GROUP BY time(500m)".format(metric) result = client.query(query, database=DBNAME) print(result) print("Result: {0}".format(result)) @@ -72,9 +75,11 @@ def main(host='localhost', port=8086, nb_day=15): def parse_args(): + """Parse the args.""" parser = argparse.ArgumentParser( description='example code to play with InfluxDB') - parser.add_argument('--host', type=str, required=False, default='localhost', + parser.add_argument('--host', type=str, required=False, + default='localhost', help='hostname influxdb http API') parser.add_argument('--port', type=int, required=False, default=8086, help='port influxdb http API') diff --git a/examples/tutorial_sine_wave.py b/examples/tutorial_sine_wave.py index 414dd10d..99b3d388 100644 --- a/examples/tutorial_sine_wave.py +++ b/examples/tutorial_sine_wave.py @@ -1,10 +1,13 @@ +# -*- coding: utf-8 -*- +"""Tutorial using all elements to define a sine wave.""" + import argparse -from influxdb import InfluxDBClient import math import datetime import time +from influxdb import InfluxDBClient USER = 'root' PASSWORD = 'root' @@ -12,9 +15,7 @@ def main(host='localhost', port=8086): - """ - main function to generate the sin wave - """ + """Define function to generate the sin wave.""" now = datetime.datetime.today() points = [] @@ -36,7 +37,7 @@ def main(host='localhost', port=8086): client.create_database(DBNAME) client.switch_database(DBNAME) - #Write points + # Write points client.write_points(points) time.sleep(3) @@ -47,9 +48,9 @@ def main(host='localhost', port=8086): print("Result: {0}".format(result)) """ - You might want to comment the delete and plot the result on InfluxDB Interface - Connect on InfluxDB Interface at http://127.0.0.1:8083/ - Select the database tutorial -> Explore Data + You might want to comment the delete and 
plot the result on InfluxDB + Interface. Connect on InfluxDB Interface at http://127.0.0.1:8083/ + Select the database tutorial -> Explore Data Then run the following query: @@ -61,9 +62,11 @@ def main(host='localhost', port=8086): def parse_args(): + """Parse the args.""" parser = argparse.ArgumentParser( description='example code to play with InfluxDB') - parser.add_argument('--host', type=str, required=False, default='localhost', + parser.add_argument('--host', type=str, required=False, + default='localhost', help='hostname influxdb http API') parser.add_argument('--port', type=int, required=False, default=8086, help='port influxdb http API') diff --git a/influxdb/__init__.py b/influxdb/__init__.py index f5a5f1cb..6442e26b 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +"""Initialize the influxdb package.""" from __future__ import absolute_import from __future__ import division diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index df187513..ac110a4a 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- -""" -DataFrame client for InfluxDB -""" +"""DataFrame client for InfluxDB.""" + from __future__ import absolute_import from __future__ import division from __future__ import print_function @@ -32,7 +31,8 @@ def _escape_pandas_series(s): class DataFrameClient(InfluxDBClient): - """ + """DataFrameClient instantiates InfluxDBClient to connect to the backend. + The ``DataFrameClient`` object holds information necessary to connect to InfluxDB. Requests can be made to InfluxDB directly through the client. The client reads and writes from pandas DataFrames. @@ -52,8 +52,7 @@ def write_points(self, batch_size=None, protocol='line', numeric_precision=None): - """ - Write to multiple time series names. + """Write to multiple time series names. 
:param dataframe: data points in a DataFrame :param measurement: name of measurement @@ -70,17 +69,20 @@ def write_points(self, precision. 'full' preserves full precision for int and float datatypes. Defaults to None, which preserves 14-15 significant figures for float and all significant figures for int datatypes. - """ if tag_columns is None: tag_columns = [] + if field_columns is None: field_columns = [] + if batch_size: number_batches = int(math.ceil(len(dataframe) / float(batch_size))) + for batch in range(number_batches): start_index = batch * batch_size end_index = (batch + 1) * batch_size + if protocol == 'line': points = self._convert_dataframe_to_lines( dataframe.ix[start_index:end_index].copy(), @@ -98,46 +100,48 @@ def write_points(self, time_precision=time_precision, tag_columns=tag_columns, field_columns=field_columns) + super(DataFrameClient, self).write_points( points, time_precision, database, retention_policy, protocol=protocol) + return True + + if protocol == 'line': + points = self._convert_dataframe_to_lines( + dataframe, + measurement=measurement, + global_tags=tags, + tag_columns=tag_columns, + field_columns=field_columns, + time_precision=time_precision, + numeric_precision=numeric_precision) else: - if protocol == 'line': - points = self._convert_dataframe_to_lines( - dataframe, - measurement=measurement, - global_tags=tags, - tag_columns=tag_columns, - field_columns=field_columns, - time_precision=time_precision, - numeric_precision=numeric_precision) - else: - points = self._convert_dataframe_to_json( - dataframe, - measurement=measurement, - tags=tags, - time_precision=time_precision, - tag_columns=tag_columns, - field_columns=field_columns) - super(DataFrameClient, self).write_points( - points, - time_precision, - database, - retention_policy, - protocol=protocol) - return True + points = self._convert_dataframe_to_json( + dataframe, + measurement=measurement, + tags=tags, + time_precision=time_precision, + tag_columns=tag_columns, + 
field_columns=field_columns) + + super(DataFrameClient, self).write_points( + points, + time_precision, + database, + retention_policy, + protocol=protocol) + + return True def query(self, query, chunked=False, database=None): - """ - Quering data into a DataFrame. + """Quering data into a DataFrame. :param chunked: [Optional, default=False] True if the data shall be retrieved in chunks, False otherwise. - """ results = super(DataFrameClient, self).query(query, database=database) if query.strip().upper().startswith("SELECT"): @@ -152,6 +156,7 @@ def _to_dataframe(self, rs): result = {} if isinstance(rs, list): return map(self._to_dataframe, rs) + for key, data in rs.items(): name, tags = key if tags is None: @@ -164,10 +169,11 @@ def _to_dataframe(self, rs): df.index = df.index.tz_localize('UTC') df.index.name = None result[key] = df + return result - def _convert_dataframe_to_json(self, - dataframe, + @staticmethod + def _convert_dataframe_to_json(dataframe, measurement, tags=None, tag_columns=None, @@ -244,8 +250,10 @@ def _convert_dataframe_to_lines(self, if field_columns is None: field_columns = [] + if tag_columns is None: tag_columns = [] + if global_tags is None: global_tags = {} @@ -312,8 +320,9 @@ def _convert_dataframe_to_lines(self, # Make an array of formatted field keys and values field_df = dataframe[field_columns] - field_df = self._stringify_dataframe( - field_df, numeric_precision, datatype='field') + field_df = self._stringify_dataframe(field_df, + numeric_precision, + datatype='field') field_df = (field_df.columns.values + '=').tolist() + field_df field_df[field_df.columns[1:]] = ',' + field_df[field_df.columns[1:]] fields = field_df.sum(axis=1) @@ -323,56 +332,52 @@ def _convert_dataframe_to_lines(self, points = (measurement + tags + ' ' + fields + ' ' + time).tolist() return points - def _stringify_dataframe(self, - dataframe, - numeric_precision, - datatype='field'): - + @staticmethod + def _stringify_dataframe(dframe, numeric_precision, 
datatype='field'): # Find int and string columns for field-type data - int_columns = dataframe.select_dtypes(include=['integer']).columns - string_columns = dataframe.select_dtypes(include=['object']).columns + int_columns = dframe.select_dtypes(include=['integer']).columns + string_columns = dframe.select_dtypes(include=['object']).columns - # Convert dataframe to string + # Convert dframe to string if numeric_precision is None: # If no precision specified, convert directly to string (fast) - dataframe = dataframe.astype(str) + dframe = dframe.astype(str) elif numeric_precision == 'full': # If full precision, use repr to get full float precision - float_columns = (dataframe.select_dtypes(include=['floating']) - .columns) - nonfloat_columns = dataframe.columns[~dataframe.columns.isin( + float_columns = (dframe.select_dtypes( + include=['floating']).columns) + nonfloat_columns = dframe.columns[~dframe.columns.isin( float_columns)] - dataframe[float_columns] = dataframe[float_columns].applymap(repr) - dataframe[nonfloat_columns] = (dataframe[nonfloat_columns] - .astype(str)) + dframe[float_columns] = dframe[float_columns].applymap(repr) + dframe[nonfloat_columns] = (dframe[nonfloat_columns].astype(str)) elif isinstance(numeric_precision, int): # If precision is specified, round to appropriate precision - float_columns = (dataframe.select_dtypes(include=['floating']) - .columns) - nonfloat_columns = dataframe.columns[~dataframe.columns.isin( + float_columns = (dframe.select_dtypes( + include=['floating']).columns) + nonfloat_columns = dframe.columns[~dframe.columns.isin( float_columns)] - dataframe[float_columns] = (dataframe[float_columns] - .round(numeric_precision)) + dframe[float_columns] = (dframe[float_columns].round( + numeric_precision)) + # If desired precision is > 10 decimal places, need to use repr if numeric_precision > 10: - dataframe[float_columns] = (dataframe[float_columns] - .applymap(repr)) - dataframe[nonfloat_columns] = 
(dataframe[nonfloat_columns] - .astype(str)) + dframe[float_columns] = (dframe[float_columns].applymap(repr)) + dframe[nonfloat_columns] = (dframe[nonfloat_columns] + .astype(str)) else: - dataframe = dataframe.astype(str) + dframe = dframe.astype(str) else: raise ValueError('Invalid numeric precision.') if datatype == 'field': # If dealing with fields, format ints and strings correctly - dataframe[int_columns] += 'i' - dataframe[string_columns] = '"' + dataframe[string_columns] + '"' + dframe[int_columns] += 'i' + dframe[string_columns] = '"' + dframe[string_columns] + '"' elif datatype == 'tag': - dataframe = dataframe.apply(_escape_pandas_series) + dframe = dframe.apply(_escape_pandas_series) - dataframe.columns = dataframe.columns.astype(str) - return dataframe + dframe.columns = dframe.columns.astype(str) + return dframe def _datetime_to_epoch(self, datetime, time_precision='s'): seconds = (datetime - self.EPOCH).total_seconds() diff --git a/influxdb/chunked_json.py b/influxdb/chunked_json.py index ae748e51..4e40f01a 100644 --- a/influxdb/chunked_json.py +++ b/influxdb/chunked_json.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +"""Module to generate chunked JSON replies.""" # # Author: Adrian Sampson @@ -12,11 +13,11 @@ import json -_decoder = json.JSONDecoder() - def loads(s): - """A generator reading a sequence of JSON values from a string.""" + """Generate a sequence of JSON values from a string.""" + _decoder = json.JSONDecoder() + while s: s = s.strip() obj, pos = _decoder.raw_decode(s) diff --git a/influxdb/client.py b/influxdb/client.py index 5f415abc..d2f7c545 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -1,17 +1,17 @@ # -*- coding: utf-8 -*- -""" -Python client for InfluxDB -""" +"""Python client for InfluxDB.""" + from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals +from sys import version_info + import json import socket import requests 
import requests.exceptions -from sys import version_info from influxdb.line_protocol import make_lines, quote_ident, quote_literal from influxdb.resultset import ResultSet @@ -30,7 +30,9 @@ class InfluxDBClient(object): - """The :class:`~.InfluxDBClient` object holds information necessary to + """InfluxDBClient primary client object to connect InfluxDB. + + The :class:`~.InfluxDBClient` object holds information necessary to connect to InfluxDB. Requests can be made to InfluxDB directly through the client. @@ -117,28 +119,21 @@ def __init__(self, @property def _baseurl(self): - return self._get_baseurl() - - def _get_baseurl(self): return self.__baseurl @property def _host(self): - return self._get_host() - - def _get_host(self): return self.__host @property def _port(self): - return self._get_port() - - def _get_port(self): return self.__port @classmethod - def from_DSN(cls, dsn, **kwargs): - """Return an instance of :class:`~.InfluxDBClient` from the provided + def from_dsn(cls, dsn, **kwargs): + r"""Generate an instance of InfluxDBClient from given data source name. + + Return an instance of :class:`~.InfluxDBClient` from the provided data source name. Supported schemes are "influxdb", "https+influxdb" and "udp+influxdb". Parameters for the :class:`~.InfluxDBClient` constructor may also be passed to this method. 
@@ -153,12 +148,12 @@ def from_DSN(cls, dsn, **kwargs): :: - >> cli = InfluxDBClient.from_DSN('influxdb://username:password@\ -localhost:8086/databasename', timeout=5) + >> cli = InfluxDBClient.from_dsn('influxdb://username:password@\ + localhost:8086/databasename', timeout=5) >> type(cli) - >> cli = InfluxDBClient.from_DSN('udp+influxdb://username:pass@\ -localhost:8086/databasename', timeout=5, udp_port=159) + >> cli = InfluxDBClient.from_dsn('udp+influxdb://username:pass@\ + localhost:8086/databasename', timeout=5, udp_port=159) >> print('{0._baseurl} - {0.use_udp} {0.udp_port}'.format(cli)) http://localhost:8086 - True 159 @@ -167,8 +162,7 @@ def from_DSN(cls, dsn, **kwargs): be used for the TCP connection; specify the UDP port with the additional `udp_port` parameter (cf. examples). """ - - init_args = parse_dsn(dsn) + init_args = _parse_dsn(dsn) host, port = init_args.pop('hosts')[0] init_args['host'] = host init_args['port'] = port @@ -247,7 +241,7 @@ def request(self, url, method='GET', params=None, data=None, timeout=self._timeout ) break - except requests.exceptions.ConnectionError as e: + except requests.exceptions.ConnectionError: _try += 1 if self._retries != 0: retry = _try < self._retries @@ -279,7 +273,6 @@ def write(self, data, params=None, expected_response_code=204, :returns: True, if the write operation is successful :rtype: bool """ - headers = self._headers headers['Content-type'] = 'application/octet-stream' @@ -303,7 +296,8 @@ def write(self, data, params=None, expected_response_code=204, ) return True - def _read_chunked_response(self, response, raise_errors=True): + @staticmethod + def _read_chunked_response(response, raise_errors=True): result_set = {} for line in response.iter_lines(): if isinstance(line, bytes): @@ -311,8 +305,9 @@ def _read_chunked_response(self, response, raise_errors=True): data = json.loads(line) for result in data.get('results', []): for _key in result: - if type(result[_key]) == list: - result_set.setdefault(_key, 
[]).extend(result[_key]) + if isinstance(result[_key], list): + result_set.setdefault( + _key, []).extend(result[_key]) return ResultSet(result_set, raise_errors=raise_errors) def query(self, @@ -329,7 +324,8 @@ def query(self, :param query: the actual query string :type query: str - :param params: additional parameters for the request, defaults to {} + :param params: additional parameters for the request, + defaults to {} :type params: dict :param epoch: response timestamps to be in epoch format either 'h', @@ -395,8 +391,8 @@ def query(self, # TODO(aviau): Always return a list. (This would be a breaking change) if len(results) == 1: return results[0] - else: - return results + + return results def write_points(self, points, @@ -439,7 +435,6 @@ def write_points(self, .. note:: if no retention policy is specified, the default retention policy for the database is used """ - if batch_size and batch_size > 0: for batch in self._batches(points, batch_size): self._write_points(points=batch, @@ -448,14 +443,15 @@ def write_points(self, retention_policy=retention_policy, tags=tags, protocol=protocol) return True - else: - return self._write_points(points=points, - time_precision=time_precision, - database=database, - retention_policy=retention_policy, - tags=tags, protocol=protocol) - def _batches(self, iterable, size): + return self._write_points(points=points, + time_precision=time_precision, + database=database, + retention_policy=retention_policy, + tags=tags, protocol=protocol) + + @staticmethod + def _batches(iterable, size): for i in xrange(0, len(iterable), size): yield iterable[i:i + size] @@ -549,8 +545,8 @@ def create_retention_policy(self, name, duration, replication, :param duration: the duration of the new retention policy. Durations such as 1h, 90m, 12h, 7d, and 4w, are all supported and mean 1 hour, 90 minutes, 12 hours, 7 day, and 4 weeks, - respectively. For infinite retention – meaning the data will - never be deleted – use 'INF' for duration. 
+ respectively. For infinite retention - meaning the data will + never be deleted - use 'INF' for duration. The minimum retention period is 1 hour. :type duration: str :param replication: the replication of the retention policy @@ -584,8 +580,8 @@ def alter_retention_policy(self, name, database=None, :param duration: the new duration of the existing retention policy. Durations such as 1h, 90m, 12h, 7d, and 4w, are all supported and mean 1 hour, 90 minutes, 12 hours, 7 day, and 4 weeks, - respectively. For infinite retention – meaning the data will - never be deleted – use 'INF' for duration. + respectively. For infinite retention, meaning the data will + never be deleted, use 'INF' for duration. The minimum retention period is 1 hour. :type duration: str :param replication: the new replication of the existing @@ -642,8 +638,7 @@ def get_list_retention_policies(self, database=None): u'duration': u'0', u'name': u'default', u'replicaN': 1}] - """ - + """ if not (database or self._database): raise InfluxDBClientError( "get_list_retention_policies() requires a database as a " @@ -674,7 +669,7 @@ def get_list_users(self): return list(self.query("SHOW USERS").get_points()) def create_user(self, username, password, admin=False): - """Create a new user in InfluxDB + """Create a new user in InfluxDB. :param username: the new username to create :type username: str @@ -712,8 +707,9 @@ def set_user_password(self, username, password): self.query(text) def delete_series(self, database=None, measurement=None, tags=None): - """Delete series from a database. Series can be filtered by - measurement and tags. + """Delete series from a database. + + Series can be filtered by measurement and tags. 
:param database: the database from which the series should be deleted, defaults to client's current database @@ -828,7 +824,12 @@ def send_packet(self, packet, protocol='json'): self.udp_socket.sendto(data, (self._host, self.udp_port)) -def parse_dsn(dsn): +def _parse_dsn(dsn): + """Parse data source name. + + This is a helper function to split the data source name provided in + the from_dsn classmethod + """ conn_params = urlparse(dsn) init_args = {} scheme_info = conn_params.scheme.split('+') diff --git a/influxdb/dataframe_client.py b/influxdb/dataframe_client.py index 1841633b..97258644 100644 --- a/influxdb/dataframe_client.py +++ b/influxdb/dataframe_client.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- -""" -DataFrame client for InfluxDB -""" +"""DataFrame client for InfluxDB.""" + from __future__ import absolute_import from __future__ import division from __future__ import print_function @@ -16,9 +15,13 @@ from .client import InfluxDBClient class DataFrameClient(InfluxDBClient): + """DataFrameClient default class instantiation.""" + err = err def __init__(self, *a, **kw): + """Initialize the default DataFrameClient.""" + super(DataFrameClient, self).__init__() raise ImportError("DataFrameClient requires Pandas " "which couldn't be imported: %s" % self.err) else: diff --git a/influxdb/exceptions.py b/influxdb/exceptions.py index 6860f420..bd71d301 100644 --- a/influxdb/exceptions.py +++ b/influxdb/exceptions.py @@ -1,3 +1,6 @@ +# -*- coding: utf-8 -*- +"""Exception handler for InfluxDBClient.""" + from __future__ import absolute_import from __future__ import division from __future__ import print_function @@ -6,7 +9,9 @@ class InfluxDBClientError(Exception): """Raised when an error occurs in the request.""" + def __init__(self, content, code=None): + """Initialize the InfluxDBClientError handler.""" if isinstance(content, type(b'')): content = content.decode('UTF-8', 'replace') @@ -24,5 +29,7 @@ def __init__(self, content, code=None): class 
InfluxDBServerError(Exception): """Raised when a server error occurs.""" + def __init__(self, content): + """Initialize the InfluxDBServerError handler.""" super(InfluxDBServerError, self).__init__(content) diff --git a/influxdb/helper.py b/influxdb/helper.py index 23d479ed..c56a636a 100644 --- a/influxdb/helper.py +++ b/influxdb/helper.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- -""" -Helper class for InfluxDB -""" +"""Helper class for InfluxDB.""" + from __future__ import absolute_import from __future__ import division from __future__ import print_function @@ -15,9 +14,8 @@ class SeriesHelper(object): + """Subclass this helper eases writing data points in bulk. - """ - Subclassing this helper eases writing data points in bulk. All data points are immutable, ensuring they do not get overwritten. Each subclass can write to its own database. The time series names can also be based on one or more defined fields. @@ -45,11 +43,11 @@ class Meta: # If True and no bulk_size, then will set bulk_size to 1. """ + __initialized__ = False def __new__(cls, *args, **kwargs): - """ - Initializes class attributes for subsequent constructor calls. + """Initialize class attributes for subsequent constructor calls. :note: *args and **kwargs are not explicitly used in this function, but needed for Python 2 compatibility. @@ -101,11 +99,11 @@ def __new__(cls, *args, **kwargs): cls._fields.remove('time') cls._type = namedtuple(cls.__name__, cls._fields + cls._tags + ['time']) + return super(SeriesHelper, cls).__new__(cls) def __init__(self, **kw): - """ - Constructor call creates a new data point. All fields must be present. + """Call to constructor creates a new data point. All fields must be present. :note: Data points written when `bulk_size` is reached per Helper. :warning: Data points are *immutable* (`namedtuples`). @@ -130,8 +128,7 @@ def __init__(self, **kw): @classmethod def commit(cls, client=None): - """ - Commit everything from datapoints via the client. 
+ """Commit everything from datapoints via the client. :param client: InfluxDBClient instance for writing points to InfluxDB. :attention: any provided client will supersede the class client. @@ -145,7 +142,8 @@ def commit(cls, client=None): @classmethod def _json_body_(cls): - """ + """Return the JSON body of given datapoints. + :return: JSON body of these datapoints. """ json = [] @@ -169,10 +167,9 @@ def _json_body_(cls): @classmethod def _reset_(cls): - """ - Reset data storage. - """ + """Reset data storage.""" cls._datapoints = defaultdict(list) - def _current_timestamp(self): + @staticmethod + def _current_timestamp(): return datetime.utcnow() diff --git a/influxdb/influxdb08/__init__.py b/influxdb/influxdb08/__init__.py index c718cdce..f4e6c082 100644 --- a/influxdb/influxdb08/__init__.py +++ b/influxdb/influxdb08/__init__.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +"""Define the influxdb08 package.""" from __future__ import absolute_import from __future__ import division diff --git a/influxdb/influxdb08/chunked_json.py b/influxdb/influxdb08/chunked_json.py index ae748e51..d6847de1 100644 --- a/influxdb/influxdb08/chunked_json.py +++ b/influxdb/influxdb08/chunked_json.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +"""Module to generate chunked JSON replies for influxdb08.""" # # Author: Adrian Sampson @@ -12,11 +13,11 @@ import json -_decoder = json.JSONDecoder() - def loads(s): - """A generator reading a sequence of JSON values from a string.""" + """Generate a sequence of JSON values from a string.""" + _decoder = json.JSONDecoder() + while s: s = s.strip() obj, pos = _decoder.raw_decode(s) diff --git a/influxdb/influxdb08/client.py b/influxdb/influxdb08/client.py index 8955ab76..59a01f54 100644 --- a/influxdb/influxdb08/client.py +++ b/influxdb/influxdb08/client.py @@ -1,13 +1,13 @@ # -*- coding: utf-8 -*- -""" -Python client for InfluxDB -""" +"""Python client for InfluxDB v0.8.""" + +import warnings +from sys import version_info + import json import socket 
import requests import requests.exceptions -import warnings -from sys import version_info from influxdb import chunked_json @@ -25,8 +25,10 @@ class InfluxDBClientError(Exception): - """Raised when an error occurs in the request""" + """Raised when an error occurs in the request.""" + def __init__(self, content, code=-1): + """Initialize an InfluxDBClientError handler.""" super(InfluxDBClientError, self).__init__( "{0}: {1}".format(code, content)) self.content = content @@ -34,8 +36,8 @@ def __init__(self, content, code=-1): class InfluxDBClient(object): + """Define the standard InfluxDBClient for influxdb v0.8. - """ The ``InfluxDBClient`` object holds information necessary to connect to InfluxDB. Requests can be made to InfluxDB directly through the client. @@ -79,9 +81,7 @@ def __init__(self, retries=3, use_udp=False, udp_port=4444): - """ - Construct a new InfluxDBClient object. - """ + """Construct a new InfluxDBClient object.""" self._host = host self._port = port self._username = username @@ -112,19 +112,20 @@ def __init__(self, 'Accept': 'text/plain'} @staticmethod - def from_DSN(dsn, **kwargs): - """ + def from_dsn(dsn, **kwargs): + r"""Return an instaance of InfluxDBClient from given data source name. + Returns an instance of InfluxDBClient from the provided data source name. Supported schemes are "influxdb", "https+influxdb", "udp+influxdb". Parameters for the InfluxDBClient constructor may be also be passed to this function. Examples: - >> cli = InfluxDBClient.from_DSN('influxdb://username:password@\ + >> cli = InfluxDBClient.from_dsn('influxdb://username:password@\ ... localhost:8086/databasename', timeout=5) >> type(cli) - >> cli = InfluxDBClient.from_DSN('udp+influxdb://username:pass@\ + >> cli = InfluxDBClient.from_dsn('udp+influxdb://username:pass@\ ... 
localhost:8086/databasename', timeout=5, udp_port=159) >> print('{0._baseurl} - {0.use_udp} {0.udp_port}'.format(cli)) http://localhost:8086 - True 159 @@ -138,11 +139,12 @@ def from_DSN(dsn, **kwargs): used for the TCP connection; specify the udp port with the additional udp_port parameter (cf. examples). :raise ValueError: if the provided DSN has any unexpected value. - """ + """ init_args = {} conn_params = urlparse(dsn) scheme_info = conn_params.scheme.split('+') + if len(scheme_info) == 1: scheme = scheme_info[0] modifier = None @@ -151,6 +153,7 @@ def from_DSN(dsn, **kwargs): if scheme != 'influxdb': raise ValueError('Unknown scheme "{0}".'.format(scheme)) + if modifier: if modifier == 'udp': init_args['use_udp'] = True @@ -177,10 +180,7 @@ def from_DSN(dsn, **kwargs): # Change member variables def switch_database(self, database): - """ - switch_database() - - Change client database. + """Change client database. :param database: the new database name to switch to :type database: string @@ -188,9 +188,9 @@ def switch_database(self, database): self._database = database def switch_db(self, database): - """ - DEPRECATED. Change client database. + """Change client database. + DEPRECATED. """ warnings.warn( "switch_db is deprecated, and will be removed " @@ -200,10 +200,7 @@ def switch_db(self, database): return self.switch_database(database) def switch_user(self, username, password): - """ - switch_user() - - Change client username. + """Change client username. 
:param username: the new username to switch to :type username: string @@ -215,9 +212,7 @@ def switch_user(self, username, password): def request(self, url, method='GET', params=None, data=None, expected_response_code=200): - """ - Make a http request to API - """ + """Make a http request to API.""" url = "{0}/{1}".format(self._baseurl, url) if params is None: @@ -249,7 +244,7 @@ def request(self, url, method='GET', params=None, data=None, ) break except (requests.exceptions.ConnectionError, - requests.exceptions.Timeout) as e: + requests.exceptions.Timeout): _try += 1 if self._retries != 0: retry = _try < self._retries @@ -262,7 +257,7 @@ def request(self, url, method='GET', params=None, data=None, raise InfluxDBClientError(response.content, response.status_code) def write(self, data): - """ Provided as convenience for influxdb v0.9.0, this may change. """ + """Provide as convenience for influxdb v0.9.0, this may change.""" self.request( url="write", method='POST', @@ -279,8 +274,9 @@ def write(self, data): # with a JSON body of points. def write_points(self, data, time_precision='s', *args, **kwargs): - """ - Write to multiple time series names. An example data blob is: + """Write to multiple time series names. + + An example data blob is: data = [ { @@ -303,11 +299,10 @@ def write_points(self, data, time_precision='s', *args, **kwargs): instead of all at one time. Useful for when doing data dumps from one database to another or when doing a massive write operation :type batch_size: int - """ + """ def list_chunks(l, n): - """ Yield successive n-sized chunks from l. 
- """ + """Yield successive n-sized chunks from l.""" for i in xrange(0, len(l), n): yield l[i:i + n] @@ -328,14 +323,14 @@ def list_chunks(l, n): data=item, time_precision=time_precision) return True - else: - return self._write_points(data=data, - time_precision=time_precision) + + return self._write_points(data=data, + time_precision=time_precision) def write_points_with_precision(self, data, time_precision='s'): - """ - DEPRECATED. Write to multiple time series names + """Write to multiple time series names. + DEPRECATED. """ warnings.warn( "write_points_with_precision is deprecated, and will be removed " @@ -376,9 +371,7 @@ def _write_points(self, data, time_precision): # One Time Deletes def delete_points(self, name): - """ - Delete an entire series - """ + """Delete an entire series.""" url = "db/{0}/series/{1}".format(self._database, name) self.request( @@ -392,12 +385,12 @@ def delete_points(self, name): # Regularly Scheduled Deletes def create_scheduled_delete(self, json_body): - """ - TODO: Create scheduled delete + """Create schedule delete from database. 2013-11-08: This endpoint has not been implemented yet in ver0.0.8, but it is documented in http://influxdb.org/docs/api/http.html. See also: src/api/http/api.go:l57 + """ raise NotImplementedError() @@ -408,28 +401,27 @@ def create_scheduled_delete(self, json_body): # curl -X DELETE http://localhost:8086/db/site_dev/scheduled_deletes/:id def get_list_scheduled_delete(self): - """ - TODO: Get list of scheduled deletes + """Get list of scheduled deletes. 2013-11-08: This endpoint has not been implemented yet in ver0.0.8, but it is documented in http://influxdb.org/docs/api/http.html. See also: src/api/http/api.go:l57 + """ raise NotImplementedError() def remove_scheduled_delete(self, delete_id): - """ - TODO: Remove scheduled delete + """Remove scheduled delete. 2013-11-08: This endpoint has not been implemented yet in ver0.0.8, but it is documented in http://influxdb.org/docs/api/http.html. 
See also: src/api/http/api.go:l57 + """ raise NotImplementedError() def query(self, query, time_precision='s', chunked=False): - """ - Quering data + """Query data from the influxdb v0.8 database. :param time_precision: [Optional, default 's'] Either 's', 'm', 'ms' or 'u'. @@ -469,15 +461,14 @@ def _query(self, query, time_precision='s', chunked=False): ) if chunked: - decoded = {} try: decoded = chunked_json.loads(response.content.decode()) except UnicodeDecodeError: decoded = chunked_json.loads(response.content.decode('utf-8')) - finally: - return list(decoded) - else: - return response.json() + + return list(decoded) + + return response.json() # Creating and Dropping Databases # @@ -488,10 +479,7 @@ def _query(self, query, time_precision='s', chunked=False): # curl -X DELETE http://localhost:8086/db/site_development def create_database(self, database): - """ - create_database() - - Create a database on the InfluxDB server. + """Create a database on the InfluxDB server. :param database: the name of the database to create :type database: string @@ -511,10 +499,7 @@ def create_database(self, database): return True def delete_database(self, database): - """ - delete_database() - - Drop a database on the InfluxDB server. + """Drop a database on the InfluxDB server. :param database: the name of the database to delete :type database: string @@ -534,9 +519,7 @@ def delete_database(self, database): # curl -X GET http://localhost:8086/db def get_list_database(self): - """ - Get the list of databases - """ + """Get the list of databases.""" url = "db" response = self.request( @@ -548,9 +531,9 @@ def get_list_database(self): return response.json() def get_database_list(self): - """ - DEPRECATED. Get the list of databases + """Get the list of databases. + DEPRECATED. 
""" warnings.warn( "get_database_list is deprecated, and will be removed " @@ -560,10 +543,7 @@ def get_database_list(self): return self.get_list_database() def delete_series(self, series): - """ - delete_series() - - Drop a series on the InfluxDB server. + """Drop a series on the InfluxDB server. :param series: the name of the series to delete :type series: string @@ -583,29 +563,14 @@ def delete_series(self, series): return True def get_list_series(self): - """ - Get a list of all time series in a database - """ - + """Get a list of all time series in a database.""" response = self._query('list series') - - series_list = [] - for series in response[0]['points']: - series_list.append(series[1]) - - return series_list + return [series[1] for series in response[0]['points']] def get_list_continuous_queries(self): - """ - Get a list of continuous queries - """ - + """Get a list of continuous queries.""" response = self._query('list continuous queries') - queries_list = [] - for query in response[0]['points']: - queries_list.append(query[2]) - - return queries_list + return [query[2] for query in response[0]['points']] # Security # get list of cluster admins @@ -639,9 +604,7 @@ def get_list_continuous_queries(self): # http://localhost:8086/db/site_dev/admins/paul?u=root&p=root def get_list_cluster_admins(self): - """ - Get list of cluster admins - """ + """Get list of cluster admins.""" response = self.request( url="cluster_admins", method='GET', @@ -651,9 +614,7 @@ def get_list_cluster_admins(self): return response.json() def add_cluster_admin(self, new_username, new_password): - """ - Add cluster admin - """ + """Add cluster admin.""" data = { 'name': new_username, 'password': new_password @@ -669,9 +630,7 @@ def add_cluster_admin(self, new_username, new_password): return True def update_cluster_admin_password(self, username, new_password): - """ - Update cluster admin password - """ + """Update cluster admin password.""" url = "cluster_admins/{0}".format(username) 
data = { @@ -688,9 +647,7 @@ def update_cluster_admin_password(self, username, new_password): return True def delete_cluster_admin(self, username): - """ - Delete cluster admin - """ + """Delete cluster admin.""" url = "cluster_admins/{0}".format(username) self.request( @@ -702,18 +659,15 @@ def delete_cluster_admin(self, username): return True def set_database_admin(self, username): - """ - Set user as database admin - """ + """Set user as database admin.""" return self.alter_database_admin(username, True) def unset_database_admin(self, username): - """ - Unset user as database admin - """ + """Unset user as database admin.""" return self.alter_database_admin(username, False) def alter_database_admin(self, username, is_admin): + """Alter the database admin.""" url = "db/{0}/users/{1}".format(self._database, username) data = {'admin': is_admin} @@ -728,42 +682,42 @@ def alter_database_admin(self, username, is_admin): return True def get_list_database_admins(self): - """ - TODO: Get list of database admins + """Get list of database admins. 2013-11-08: This endpoint has not been implemented yet in ver0.0.8, but it is documented in http://influxdb.org/docs/api/http.html. See also: src/api/http/api.go:l57 + """ raise NotImplementedError() def add_database_admin(self, new_username, new_password): - """ - TODO: Add cluster admin + """Add cluster admin. 2013-11-08: This endpoint has not been implemented yet in ver0.0.8, but it is documented in http://influxdb.org/docs/api/http.html. See also: src/api/http/api.go:l57 + """ raise NotImplementedError() def update_database_admin_password(self, username, new_password): - """ - TODO: Update database admin password + """Update database admin password. 2013-11-08: This endpoint has not been implemented yet in ver0.0.8, but it is documented in http://influxdb.org/docs/api/http.html. 
See also: src/api/http/api.go:l57 + """ raise NotImplementedError() def delete_database_admin(self, username): - """ - TODO: Delete database admin + """Delete database admin. 2013-11-08: This endpoint has not been implemented yet in ver0.0.8, but it is documented in http://influxdb.org/docs/api/http.html. See also: src/api/http/api.go:l57 + """ raise NotImplementedError() @@ -786,9 +740,7 @@ def delete_database_admin(self, username): # curl -X DELETE http://localhost:8086/db/site_dev/users/paul?u=root&p=root def get_database_users(self): - """ - Get list of database users - """ + """Get list of database users.""" url = "db/{0}/users".format(self._database) response = self.request( @@ -800,8 +752,7 @@ def get_database_users(self): return response.json() def add_database_user(self, new_username, new_password, permissions=None): - """ - Add database user + """Add database user. :param permissions: A ``(readFrom, writeTo)`` tuple """ @@ -830,14 +781,12 @@ def add_database_user(self, new_username, new_password, permissions=None): return True def update_database_user_password(self, username, new_password): - """ - Update password - """ + """Update password.""" return self.alter_database_user(username, new_password) def alter_database_user(self, username, password=None, permissions=None): - """ - Alters a database user and/or their permissions. + """Alter a database user and/or their permissions. + :param permissions: A ``(readFrom, writeTo)`` tuple :raise TypeError: if permissions cannot be read. :raise ValueError: if neither password nor permissions provided. 
@@ -873,9 +822,7 @@ def alter_database_user(self, username, password=None, permissions=None): return True def delete_database_user(self, username): - """ - Delete database user - """ + """Delete database user.""" url = "db/{0}/users/{1}".format(self._database, username) self.request( @@ -889,16 +836,17 @@ def delete_database_user(self, username): # update the user by POSTing to db/site_dev/users/paul def update_permission(self, username, json_body): - """ - TODO: Update read/write permission + """Update read/write permission. 2013-11-08: This endpoint has not been implemented yet in ver0.0.8, but it is documented in http://influxdb.org/docs/api/http.html. See also: src/api/http/api.go:l57 + """ raise NotImplementedError() def send_packet(self, packet): + """Send a UDP packet along the wire.""" data = json.dumps(packet) byte = data.encode('utf-8') self.udp_socket.sendto(byte, (self._host, self.udp_port)) diff --git a/influxdb/influxdb08/dataframe_client.py b/influxdb/influxdb08/dataframe_client.py index 59b033ab..71e7e881 100644 --- a/influxdb/influxdb08/dataframe_client.py +++ b/influxdb/influxdb08/dataframe_client.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- -""" -DataFrame client for InfluxDB -""" +"""DataFrame client for InfluxDB v0.8.""" + from __future__ import absolute_import from __future__ import division from __future__ import print_function @@ -14,13 +13,15 @@ class DataFrameClient(InfluxDBClient): - """ + """Primary defintion of the DataFrameClient for v0.8. + The ``DataFrameClient`` object holds information necessary to connect to InfluxDB. Requests can be made to InfluxDB directly through the client. The client reads and writes from pandas DataFrames. 
""" def __init__(self, ignore_nan=True, *args, **kwargs): + """Initialize an instance of the DataFrameClient.""" super(DataFrameClient, self).__init__(*args, **kwargs) try: @@ -29,12 +30,12 @@ def __init__(self, ignore_nan=True, *args, **kwargs): except ImportError as ex: raise ImportError('DataFrameClient requires Pandas, ' '"{ex}" problem importing'.format(ex=str(ex))) + self.EPOCH = pd.Timestamp('1970-01-01 00:00:00.000+00:00') self.ignore_nan = ignore_nan def write_points(self, data, *args, **kwargs): - """ - Write to multiple time series names. + """Write to multiple time series names. :param data: A dictionary mapping series names to pandas DataFrames :param time_precision: [Optional, default 's'] Either 's', 'm', 'ms' @@ -44,7 +45,6 @@ def write_points(self, data, *args, **kwargs): one database to another or when doing a massive write operation :type batch_size: int """ - batch_size = kwargs.get('batch_size') time_precision = kwargs.get('time_precision', 's') if batch_size: @@ -55,22 +55,25 @@ def write_points(self, data, *args, **kwargs): for batch in range(number_batches): start_index = batch * batch_size end_index = (batch + 1) * batch_size - data = [self._convert_dataframe_to_json( - name=key, - dataframe=data_frame.ix[start_index:end_index].copy(), - time_precision=time_precision)] - InfluxDBClient.write_points(self, data, *args, **kwargs) + outdata = [ + self._convert_dataframe_to_json( + name=key, + dataframe=data_frame + .ix[start_index:end_index].copy(), + time_precision=time_precision)] + InfluxDBClient.write_points(self, outdata, *args, **kwargs) return True - else: - data = [self._convert_dataframe_to_json( - name=key, dataframe=dataframe, time_precision=time_precision) - for key, dataframe in data.items()] - return InfluxDBClient.write_points(self, data, *args, **kwargs) + + outdata = [ + self._convert_dataframe_to_json(name=key, dataframe=dataframe, + time_precision=time_precision) + for key, dataframe in data.items()] + return 
InfluxDBClient.write_points(self, outdata, *args, **kwargs) def write_points_with_precision(self, data, time_precision='s'): - """ - DEPRECATED. Write to multiple time series names + """Write to multiple time series names. + DEPRECATED """ warnings.warn( "write_points_with_precision is deprecated, and will be removed " @@ -80,8 +83,7 @@ def write_points_with_precision(self, data, time_precision='s'): return self.write_points(data, time_precision='s') def query(self, query, time_precision='s', chunked=False): - """ - Quering data into DataFrames. + """Query data into DataFrames. Returns a DataFrame for a single time series and a map for multiple time series with the time series as value and its name as key. @@ -90,7 +92,6 @@ def query(self, query, time_precision='s', chunked=False): or 'u'. :param chunked: [Optional, default=False] True if the data shall be retrieved in chunks, False otherwise. - """ result = InfluxDBClient.query(self, query=query, time_precision=time_precision, @@ -106,18 +107,21 @@ def query(self, query, time_precision='s', chunked=False): time_precision) return ret - def _to_dataframe(self, json_result, time_precision): + @staticmethod + def _to_dataframe(json_result, time_precision): dataframe = pd.DataFrame(data=json_result['points'], columns=json_result['columns']) if 'sequence_number' in dataframe.keys(): dataframe.sort_values(['time', 'sequence_number'], inplace=True) else: dataframe.sort_values(['time'], inplace=True) + pandas_time_unit = time_precision if time_precision == 'm': pandas_time_unit = 'ms' elif time_precision == 'u': pandas_time_unit = 'us' + dataframe.index = pd.to_datetime(list(dataframe['time']), unit=pandas_time_unit, utc=True) @@ -154,13 +158,14 @@ def _convert_array(self, array): except ImportError as ex: raise ImportError('DataFrameClient requires Numpy, ' '"{ex}" problem importing'.format(ex=str(ex))) + if self.ignore_nan: number_types = (int, float, np.number) condition = (all(isinstance(el, number_types) for el in 
array) and np.isnan(array)) return list(np.where(condition, None, array)) - else: - return list(array) + + return list(array) def _datetime_to_epoch(self, datetime, time_precision='s'): seconds = (datetime - self.EPOCH).total_seconds() diff --git a/influxdb/influxdb08/helper.py b/influxdb/influxdb08/helper.py index 53e9ca4a..f3dec33c 100644 --- a/influxdb/influxdb08/helper.py +++ b/influxdb/influxdb08/helper.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- -""" -Helper class for InfluxDB -""" +"""Helper class for InfluxDB for v0.8.""" + from __future__ import absolute_import from __future__ import division from __future__ import print_function @@ -14,8 +13,8 @@ class SeriesHelper(object): + """Define the SeriesHelper object for InfluxDB v0.8. - """ Subclassing this helper eases writing data points in bulk. All data points are immutable, ensuring they do not get overwritten. Each subclass can write to its own database. @@ -41,11 +40,11 @@ class Meta: # If True and no bulk_size, then will set bulk_size to 1. """ + __initialized__ = False def __new__(cls, *args, **kwargs): - """ - Initializes class attributes for subsequent constructor calls. + """Initialize class attributes for subsequent constructor calls. :note: *args and **kwargs are not explicitly used in this function, but needed for Python 2 compatibility. @@ -97,8 +96,9 @@ def __new__(cls, *args, **kwargs): return super(SeriesHelper, cls).__new__(cls) def __init__(self, **kw): - """ - Constructor call creates a new data point. All fields must be present. + """Create a new data point. + + All fields must be present. :note: Data points written when `bulk_size` is reached per Helper. :warning: Data points are *immutable* (`namedtuples`). @@ -120,8 +120,7 @@ def __init__(self, **kw): @classmethod def commit(cls, client=None): - """ - Commit everything from datapoints via the client. + """Commit everything from datapoints via the client. :param client: InfluxDBClient instance for writing points to InfluxDB. 
:attention: any provided client will supersede the class client. @@ -135,8 +134,9 @@ def commit(cls, client=None): @classmethod def _json_body_(cls): - """ - :return: JSON body of these datapoints. + """Return JSON body of the datapoints. + + :return: JSON body of the datapoints. """ json = [] for series_name, data in six.iteritems(cls._datapoints): @@ -149,7 +149,5 @@ def _json_body_(cls): @classmethod def _reset_(cls): - """ - Reset data storage. - """ + """Reset data storage.""" cls._datapoints = defaultdict(list) diff --git a/influxdb/line_protocol.py b/influxdb/line_protocol.py index 59d93bff..4ec56dc5 100644 --- a/influxdb/line_protocol.py +++ b/influxdb/line_protocol.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +"""Define the line_protocol handler.""" from __future__ import absolute_import from __future__ import division @@ -18,11 +19,14 @@ def _convert_timestamp(timestamp, precision=None): if isinstance(timestamp, Integral): return timestamp # assume precision is correct if timestamp is int + if isinstance(_get_unicode(timestamp), text_type): timestamp = parse(timestamp) + if isinstance(timestamp, datetime): if not timestamp.tzinfo: timestamp = UTC.localize(timestamp) + ns = (timestamp - EPOCH).total_seconds() * 1e9 if precision is None or precision == 'n': return ns @@ -36,6 +40,7 @@ def _convert_timestamp(timestamp, precision=None): return ns / 1e9 / 60 elif precision == 'h': return ns / 1e9 / 3600 + raise ValueError(timestamp) @@ -53,25 +58,18 @@ def _escape_tag(tag): def quote_ident(value): - return "\"{0}\"".format( - value.replace( - "\\", "\\\\" - ).replace( - "\"", "\\\"" - ).replace( - "\n", "\\n" - ) - ) + """Indent the quotes.""" + return "\"{}\"".format(value + .replace("\\", "\\\\") + .replace("\"", "\\\"") + .replace("\n", "\\n")) def quote_literal(value): - return "'{0}'".format( - value.replace( - "\\", "\\\\" - ).replace( - "'", "\\'" - ) - ) + """Quote provided literal.""" + return "'{}'".format(value + .replace("\\", "\\\\") + .replace("'", 
"\\'")) def _is_float(value): @@ -79,25 +77,25 @@ def _is_float(value): float(value) except ValueError: return False + return True def _escape_value(value): value = _get_unicode(value) + if isinstance(value, text_type) and value != '': return quote_ident(value) elif isinstance(value, integer_types) and not isinstance(value, bool): return str(value) + 'i' elif _is_float(value): return repr(value) - else: - return str(value) + + return str(value) def _get_unicode(data, force=False): - """ - Try to return a text aka unicode object from the given data. - """ + """Try to return a text aka unicode object from the given data.""" if isinstance(data, binary_type): return data.decode('utf-8') elif data is None: @@ -112,7 +110,8 @@ def _get_unicode(data, force=False): def make_lines(data, precision=None): - """ + """Extract points from given dict. + Extracts the points from the given dict and returns a Unicode string matching the line protocol introduced in InfluxDB 0.9.0. """ @@ -123,8 +122,7 @@ def make_lines(data, precision=None): # add measurement name measurement = _escape_tag(_get_unicode( - point.get('measurement', data.get('measurement')) - )) + point.get('measurement', data.get('measurement')))) key_values = [measurement] # add tags @@ -141,27 +139,27 @@ def make_lines(data, precision=None): if key != '' and value != '': key_values.append(key + "=" + value) - key_values = ','.join(key_values) - elements.append(key_values) + + elements.append(','.join(key_values)) # add fields field_values = [] for field_key, field_value in sorted(iteritems(point['fields'])): key = _escape_tag(field_key) value = _escape_value(field_value) + if key != '' and value != '': field_values.append(key + "=" + value) - field_values = ','.join(field_values) - elements.append(field_values) + + elements.append(','.join(field_values)) # add timestamp if 'time' in point: timestamp = _get_unicode(str(int( - _convert_timestamp(point['time'], precision) - ))) + _convert_timestamp(point['time'], 
precision)))) elements.append(timestamp) line = ' '.join(elements) lines.append(line) - lines = '\n'.join(lines) - return lines + '\n' + + return '\n'.join(lines) + '\n' diff --git a/influxdb/resultset.py b/influxdb/resultset.py index c44ac908..79d72ca8 100644 --- a/influxdb/resultset.py +++ b/influxdb/resultset.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +"""Module to prepare the resultset.""" from __future__ import absolute_import from __future__ import division @@ -13,18 +14,19 @@ class ResultSet(object): - """A wrapper around a single InfluxDB query result""" + """A wrapper around a single InfluxDB query result.""" def __init__(self, series, raise_errors=True): + """Initialize the ResultSet.""" self._raw = series - self._error = self.raw.get('error', None) + self._error = self._raw.get('error', None) if self.error is not None and raise_errors is True: raise InfluxDBClientError(self.error) @property def raw(self): - """Raw JSON from InfluxDB""" + """Raw JSON from InfluxDB.""" return self._raw @raw.setter @@ -33,11 +35,12 @@ def raw(self, value): @property def error(self): - """Error returned by InfluxDB""" + """Error returned by InfluxDB.""" return self._error def __getitem__(self, key): - """ + """Retrieve the series name or specific set based on key. + :param key: Either a serie name, or a tags_dict, or a 2-tuple(serie_name, tags_dict). If the serie name is None (or not given) then any serie @@ -50,7 +53,6 @@ def __getitem__(self, key): The order in which the points are yielded is actually undefined but it might change.. """ - warnings.warn( ("ResultSet's ``__getitem__`` method will be deprecated. 
Use" "``get_points`` instead."), @@ -58,10 +60,12 @@ def __getitem__(self, key): ) if isinstance(key, tuple): - if 2 != len(key): + if len(key) != 2: raise TypeError('only 2-tuples allowed') + name = key[0] tags = key[1] + if not isinstance(tags, dict) and tags is not None: raise TypeError('tags should be a dict') elif isinstance(key, dict): @@ -74,8 +78,7 @@ def __getitem__(self, key): return self.get_points(name, tags) def get_points(self, measurement=None, tags=None): - """ - Returns a generator for all the points that match the given filters. + """Return a generator for all the points that match the given filters. :param measurement: The measurement name :type measurement: str @@ -85,7 +88,6 @@ def get_points(self, measurement=None, tags=None): :return: Points generator """ - # Raise error if measurement is not str or bytes if not isinstance(measurement, (bytes, type(b''.decode()), type(None))): @@ -110,6 +112,7 @@ def get_points(self, measurement=None, tags=None): yield item def __repr__(self): + """Representation of ResultSet object.""" items = [] for item in self.items(): @@ -118,13 +121,13 @@ def __repr__(self): return "ResultSet({%s})" % ", ".join(items) def __iter__(self): - """ Iterating a ResultSet will yield one dict instance per serie result. 
- """ + """Yield one dict instance per serie result.""" for key in self.keys(): yield list(self.__getitem__(key)) - def _tag_matches(self, tags, filter): - """Checks if all key/values in filter match in tags""" + @staticmethod + def _tag_matches(tags, filter): + """Check if all key/values in filter match in tags.""" for tag_name, tag_value in filter.items(): # using _sentinel as I'm not sure that "None" # could be used, because it could be a valid @@ -133,17 +136,20 @@ def _tag_matches(self, tags, filter): serie_tag_value = tags.get(tag_name, _sentinel) if serie_tag_value != tag_value: return False + return True def _get_series(self): - """Returns all series""" + """Return all series.""" return self.raw.get('series', []) def __len__(self): + """Return the len of the keys in the ResultSet.""" return len(self.keys()) def keys(self): - """ + """Return the list of keys in the ResultSet. + :return: List of keys. Keys are tuples (serie_name, tags) """ keys = [] @@ -156,7 +162,8 @@ def keys(self): return keys def items(self): - """ + """Return the set of items from the ResultSet. + :return: List of tuples, (key, generator) """ items = [] @@ -170,7 +177,7 @@ def items(self): return items def _get_points_for_serie(self, serie): - """ Return generator of dict from columns and values of a serie + """Return generator of dict from columns and values of a serie. :param serie: One serie :return: Generator of dicts @@ -183,7 +190,7 @@ def _get_points_for_serie(self, serie): @staticmethod def point_from_cols_vals(cols, vals): - """ Creates a dict from columns and values lists + """Create a dict from columns and values lists. 
:param cols: List of columns :param vals: List of values @@ -192,4 +199,5 @@ def point_from_cols_vals(cols, vals): point = {} for col_index, col_name in enumerate(cols): point[col_name] = vals[col_index] + return point diff --git a/influxdb/tests/__init__.py b/influxdb/tests/__init__.py index 9dadbb05..adf2f20c 100644 --- a/influxdb/tests/__init__.py +++ b/influxdb/tests/__init__.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +"""Configure the tests package for InfluxDBClient.""" from __future__ import absolute_import from __future__ import division diff --git a/influxdb/tests/chunked_json_test.py b/influxdb/tests/chunked_json_test.py index 48e3a736..f633bcb1 100644 --- a/influxdb/tests/chunked_json_test.py +++ b/influxdb/tests/chunked_json_test.py @@ -1,25 +1,26 @@ # -*- coding: utf-8 -*- +"""Chunked JSON test.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals -from influxdb import chunked_json - import unittest +from influxdb import chunked_json + class TestChunkJson(unittest.TestCase): + """Set up the TestChunkJson object.""" @classmethod def setUpClass(cls): + """Initialize the TestChunkJson object.""" super(TestChunkJson, cls).setUpClass() def test_load(self): - """ - Tests reading a sequence of JSON values from a string - """ + """Test reading a sequence of JSON values from a string.""" example_response = \ '{"results": [{"series": [{"measurement": "sdfsdfsdf", ' \ '"columns": ["time", "value"], "values": ' \ @@ -30,7 +31,6 @@ def test_load(self): res = list(chunked_json.loads(example_response)) # import ipdb; ipdb.set_trace() - # self.assertTrue(res) self.assertListEqual( [ { diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index e319c0e5..8def6629 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -1,8 +1,7 @@ # -*- coding: utf-8 -*- -""" -unit tests for the InfluxDBClient. 
+"""Unit tests for the InfluxDBClient. -NB/WARNING : +NB/WARNING: This module implements tests for the InfluxDBClient class but does so + without any server instance running @@ -14,22 +13,24 @@ See client_test_with_server.py for tests against a running server instance. """ + from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals +import random +import socket +import unittest +import warnings + import json +import mock import requests import requests.exceptions -import socket import requests_mock -import random + from nose.tools import raises -from mock import patch -import warnings -import mock -import unittest from influxdb import InfluxDBClient from influxdb.resultset import ResultSet @@ -43,10 +44,10 @@ def _build_response_object(status_code=200, content=""): def _mocked_session(cli, method="GET", status_code=200, content=""): - method = method.upper() def request(*args, **kwargs): + """Request content from the mocked session.""" c = content # Check method @@ -70,18 +71,14 @@ def request(*args, **kwargs): return _build_response_object(status_code=status_code, content=c) - mocked = patch.object( - cli._session, - 'request', - side_effect=request - ) - - return mocked + return mock.patch.object(cli._session, 'request', side_effect=request) class TestInfluxDBClient(unittest.TestCase): + """Set up the TestInfluxDBClient object.""" def setUp(self): + """Initialize an instance of TestInfluxDBClient object.""" # By default, raise exceptions on warnings warnings.simplefilter('error', FutureWarning) @@ -103,6 +100,7 @@ def setUp(self): self.dsn_string = 'influxdb://uSr:pWd@my.host.fr:1886/db' def test_scheme(self): + """Set up the test schema for TestInfluxDBClient object.""" cli = InfluxDBClient('host', 8086, 'username', 'password', 'database') self.assertEqual('http://host:8086', cli._baseurl) @@ -112,38 +110,42 @@ def test_scheme(self): self.assertEqual('https://host:8086', 
cli._baseurl) def test_dsn(self): - cli = InfluxDBClient.from_DSN('influxdb://192.168.0.1:1886') + """Set up the test datasource name for TestInfluxDBClient object.""" + cli = InfluxDBClient.from_dsn('influxdb://192.168.0.1:1886') self.assertEqual('http://192.168.0.1:1886', cli._baseurl) - cli = InfluxDBClient.from_DSN(self.dsn_string) + cli = InfluxDBClient.from_dsn(self.dsn_string) self.assertEqual('http://my.host.fr:1886', cli._baseurl) self.assertEqual('uSr', cli._username) self.assertEqual('pWd', cli._password) self.assertEqual('db', cli._database) self.assertFalse(cli.use_udp) - cli = InfluxDBClient.from_DSN('udp+' + self.dsn_string) + cli = InfluxDBClient.from_dsn('udp+' + self.dsn_string) self.assertTrue(cli.use_udp) - cli = InfluxDBClient.from_DSN('https+' + self.dsn_string) + cli = InfluxDBClient.from_dsn('https+' + self.dsn_string) self.assertEqual('https://my.host.fr:1886', cli._baseurl) - cli = InfluxDBClient.from_DSN('https+' + self.dsn_string, + cli = InfluxDBClient.from_dsn('https+' + self.dsn_string, **{'ssl': False}) self.assertEqual('http://my.host.fr:1886', cli._baseurl) def test_switch_database(self): + """Test switch database in TestInfluxDBClient object.""" cli = InfluxDBClient('host', 8086, 'username', 'password', 'database') cli.switch_database('another_database') self.assertEqual('another_database', cli._database) def test_switch_user(self): + """Test switch user in TestInfluxDBClient object.""" cli = InfluxDBClient('host', 8086, 'username', 'password', 'database') cli.switch_user('another_username', 'another_password') self.assertEqual('another_username', cli._username) self.assertEqual('another_password', cli._password) def test_write(self): + """Test write in TestInfluxDBClient object.""" with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, @@ -168,6 +170,7 @@ def test_write(self): ) def test_write_points(self): + """Test write points for TestInfluxDBClient object.""" with requests_mock.Mocker() as m: m.register_uri( 
requests_mock.POST, @@ -186,6 +189,7 @@ def test_write_points(self): ) def test_write_points_toplevel_attributes(self): + """Test write points attrs for TestInfluxDBClient object.""" with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, @@ -207,6 +211,7 @@ def test_write_points_toplevel_attributes(self): ) def test_write_points_batch(self): + """Test write points batch for TestInfluxDBClient object.""" dummy_points = [ {"measurement": "cpu_usage", "tags": {"unit": "percent"}, "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.34}}, @@ -235,6 +240,7 @@ def test_write_points_batch(self): m.last_request.body.decode('utf-8')) def test_write_points_udp(self): + """Test write points UDP for TestInfluxDBClient object.""" s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) port = random.randint(4000, 8000) s.bind(('0.0.0.0', port)) @@ -254,6 +260,7 @@ def test_write_points_udp(self): ) def test_write_bad_precision_udp(self): + """Test write bad precision in UDP for TestInfluxDBClient object.""" cli = InfluxDBClient( 'localhost', 8086, 'root', 'root', 'test', use_udp=True, udp_port=4444 @@ -270,11 +277,13 @@ def test_write_bad_precision_udp(self): @raises(Exception) def test_write_points_fails(self): + """Test write points fail for TestInfluxDBClient object.""" cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') with _mocked_session(cli, 'post', 500): cli.write_points([]) def test_write_points_with_precision(self): + """Test write points with precision for TestInfluxDBClient object.""" with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, @@ -327,6 +336,7 @@ def test_write_points_with_precision(self): ) def test_write_points_bad_precision(self): + """Test write points w/bad precision TestInfluxDBClient object.""" cli = InfluxDBClient() with self.assertRaisesRegexp( Exception, @@ -340,11 +350,13 @@ def test_write_points_bad_precision(self): @raises(Exception) def test_write_points_with_precision_fails(self): + """Test write 
points w/precision fail for TestInfluxDBClient object.""" cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') with _mocked_session(cli, 'post', 500): cli.write_points_with_precision([]) def test_query(self): + """Test query method for TestInfluxDBClient object.""" example_response = ( '{"results": [{"series": [{"measurement": "sdfsdfsdf", ' '"columns": ["time", "value"], "values": ' @@ -368,6 +380,7 @@ def test_query(self): @unittest.skip('Not implemented for 0.9') def test_query_chunked(self): + """Test chunked query for TestInfluxDBClient object.""" cli = InfluxDBClient(database='db') example_object = { 'points': [ @@ -401,10 +414,12 @@ def test_query_chunked(self): @raises(Exception) def test_query_fail(self): + """Test query failed for TestInfluxDBClient object.""" with _mocked_session(self.cli, 'get', 401): self.cli.query('select column_one from foo;') def test_create_database(self): + """Test create database for TestInfluxDBClient object.""" with requests_mock.Mocker() as m: m.register_uri( requests_mock.GET, @@ -418,6 +433,7 @@ def test_create_database(self): ) def test_create_numeric_named_database(self): + """Test create db w/numeric name for TestInfluxDBClient object.""" with requests_mock.Mocker() as m: m.register_uri( requests_mock.GET, @@ -432,10 +448,12 @@ def test_create_numeric_named_database(self): @raises(Exception) def test_create_database_fails(self): + """Test create database fail for TestInfluxDBClient object.""" with _mocked_session(self.cli, 'post', 401): self.cli.create_database('new_db') def test_drop_database(self): + """Test drop database for TestInfluxDBClient object.""" with requests_mock.Mocker() as m: m.register_uri( requests_mock.GET, @@ -449,6 +467,7 @@ def test_drop_database(self): ) def test_drop_numeric_named_database(self): + """Test drop numeric db for TestInfluxDBClient object.""" with requests_mock.Mocker() as m: m.register_uri( requests_mock.GET, @@ -462,6 +481,7 @@ def test_drop_numeric_named_database(self): ) 
def test_get_list_database(self): + """Test get list of databases for TestInfluxDBClient object.""" data = {'results': [ {'series': [ {'name': 'databases', @@ -479,11 +499,13 @@ def test_get_list_database(self): @raises(Exception) def test_get_list_database_fails(self): + """Test get list of dbs fail for TestInfluxDBClient object.""" cli = InfluxDBClient('host', 8086, 'username', 'password') with _mocked_session(cli, 'get', 401): cli.get_list_database() def test_create_retention_policy_default(self): + """Test create default ret policy for TestInfluxDBClient object.""" example_response = '{"results":[{}]}' with requests_mock.Mocker() as m: @@ -503,6 +525,7 @@ def test_create_retention_policy_default(self): ) def test_create_retention_policy(self): + """Test create retention policy for TestInfluxDBClient object.""" example_response = '{"results":[{}]}' with requests_mock.Mocker() as m: @@ -522,6 +545,7 @@ def test_create_retention_policy(self): ) def test_alter_retention_policy(self): + """Test alter retention policy for TestInfluxDBClient object.""" example_response = '{"results":[{}]}' with requests_mock.Mocker() as m: @@ -555,11 +579,13 @@ def test_alter_retention_policy(self): @raises(Exception) def test_alter_retention_policy_invalid(self): + """Test invalid alter ret policy for TestInfluxDBClient object.""" cli = InfluxDBClient('host', 8086, 'username', 'password') with _mocked_session(cli, 'get', 400): self.cli.alter_retention_policy('somename', 'db') def test_drop_retention_policy(self): + """Test drop retention policy for TestInfluxDBClient object.""" example_response = '{"results":[{}]}' with requests_mock.Mocker() as m: @@ -576,11 +602,13 @@ def test_drop_retention_policy(self): @raises(Exception) def test_drop_retention_policy_fails(self): + """Test failed drop ret policy for TestInfluxDBClient object.""" cli = InfluxDBClient('host', 8086, 'username', 'password') with _mocked_session(cli, 'delete', 401): cli.drop_retention_policy('default', 'db') def 
test_get_list_retention_policies(self): + """Test get retention policies for TestInfluxDBClient object.""" example_response = \ '{"results": [{"series": [{"values": [["fsfdsdf", "24h0m0s", 2]],'\ ' "columns": ["name", "duration", "replicaN"]}]}]}' @@ -599,20 +627,23 @@ def test_get_list_retention_policies(self): @mock.patch('requests.Session.request') def test_request_retry(self, mock_request): - """Tests that two connection errors will be handled""" - + """Test that two connection errors will be handled.""" class CustomMock(object): - i = 0 + """Create custom mock object for test.""" + + def __init__(self): + self.i = 0 def connection_error(self, *args, **kwargs): + """Handle a connection error for the CustomMock object.""" self.i += 1 if self.i < 3: raise requests.exceptions.ConnectionError - else: - r = requests.Response() - r.status_code = 204 - return r + + r = requests.Response() + r.status_code = 204 + return r mock_request.side_effect = CustomMock().connection_error @@ -623,12 +654,15 @@ def connection_error(self, *args, **kwargs): @mock.patch('requests.Session.request') def test_request_retry_raises(self, mock_request): - """Tests that three connection errors will not be handled""" - + """Test that three connection errors will not be handled.""" class CustomMock(object): - i = 0 + """Create custom mock object for test.""" + + def __init__(self): + self.i = 0 def connection_error(self, *args, **kwargs): + """Handle a connection error for the CustomMock object.""" self.i += 1 if self.i < 4: @@ -647,14 +681,16 @@ def connection_error(self, *args, **kwargs): @mock.patch('requests.Session.request') def test_random_request_retry(self, mock_request): - """Tests that a random number of connection errors will be handled""" - + """Test that a random number of connection errors will be handled.""" class CustomMock(object): + """Create custom mock object for test.""" + def __init__(self, retries): self.i = 0 self.retries = retries def connection_error(self, *args, 
**kwargs): + """Handle a connection error for the CustomMock object.""" self.i += 1 if self.i < self.retries: @@ -668,21 +704,20 @@ def connection_error(self, *args, **kwargs): mock_request.side_effect = CustomMock(retries).connection_error cli = InfluxDBClient(database='db', retries=retries) - cli.write_points( - self.dummy_points - ) + cli.write_points(self.dummy_points) @mock.patch('requests.Session.request') def test_random_request_retry_raises(self, mock_request): - """Tests that a random number of connection errors plus one \ - will not be handled""" - + """Test a random number of conn errors plus one will not be handled.""" class CustomMock(object): + """Create custom mock object for test.""" + def __init__(self, retries): self.i = 0 self.retries = retries def connection_error(self, *args, **kwargs): + """Handle a connection error for the CustomMock object.""" self.i += 1 if self.i < self.retries + 1: @@ -701,6 +736,7 @@ def connection_error(self, *args, **kwargs): cli.write_points(self.dummy_points) def test_get_list_users(self): + """Test get users for TestInfluxDBClient object.""" example_response = ( '{"results":[{"series":[{"columns":["user","admin"],' '"values":[["test",false]]}]}]}' @@ -719,6 +755,7 @@ def test_get_list_users(self): ) def test_get_list_users_empty(self): + """Test get empty userlist for TestInfluxDBClient object.""" example_response = ( '{"results":[{"series":[{"columns":["user","admin"]}]}]}' ) @@ -732,6 +769,7 @@ def test_get_list_users_empty(self): self.assertListEqual(self.cli.get_list_users(), []) def test_grant_admin_privileges(self): + """Test grant admin privs for TestInfluxDBClient object.""" example_response = '{"results":[{}]}' with requests_mock.Mocker() as m: @@ -749,11 +787,13 @@ def test_grant_admin_privileges(self): @raises(Exception) def test_grant_admin_privileges_invalid(self): + """Test grant invalid admin privs for TestInfluxDBClient object.""" cli = InfluxDBClient('host', 8086, 'username', 'password') with 
_mocked_session(cli, 'get', 400): self.cli.grant_admin_privileges('') def test_revoke_admin_privileges(self): + """Test revoke admin privs for TestInfluxDBClient object.""" example_response = '{"results":[{}]}' with requests_mock.Mocker() as m: @@ -771,11 +811,13 @@ def test_revoke_admin_privileges(self): @raises(Exception) def test_revoke_admin_privileges_invalid(self): + """Test revoke invalid admin privs for TestInfluxDBClient object.""" cli = InfluxDBClient('host', 8086, 'username', 'password') with _mocked_session(cli, 'get', 400): self.cli.revoke_admin_privileges('') def test_grant_privilege(self): + """Test grant privs for TestInfluxDBClient object.""" example_response = '{"results":[{}]}' with requests_mock.Mocker() as m: @@ -793,11 +835,13 @@ def test_grant_privilege(self): @raises(Exception) def test_grant_privilege_invalid(self): + """Test grant invalid privs for TestInfluxDBClient object.""" cli = InfluxDBClient('host', 8086, 'username', 'password') with _mocked_session(cli, 'get', 400): self.cli.grant_privilege('', 'testdb', 'test') def test_revoke_privilege(self): + """Test revoke privs for TestInfluxDBClient object.""" example_response = '{"results":[{}]}' with requests_mock.Mocker() as m: @@ -815,11 +859,13 @@ def test_revoke_privilege(self): @raises(Exception) def test_revoke_privilege_invalid(self): + """Test revoke invalid privs for TestInfluxDBClient object.""" cli = InfluxDBClient('host', 8086, 'username', 'password') with _mocked_session(cli, 'get', 400): self.cli.revoke_privilege('', 'testdb', 'test') def test_get_list_privileges(self): + """Tst get list of privs for TestInfluxDBClient object.""" data = {'results': [ {'series': [ {'columns': ['database', 'privilege'], @@ -840,15 +886,18 @@ def test_get_list_privileges(self): @raises(Exception) def test_get_list_privileges_fails(self): + """Test failed get list of privs for TestInfluxDBClient object.""" cli = InfluxDBClient('host', 8086, 'username', 'password') with _mocked_session(cli, 'get', 
401): cli.get_list_privileges('test') def test_invalid_port_fails(self): + """Test invalid port fail for TestInfluxDBClient object.""" with self.assertRaises(ValueError): InfluxDBClient('host', '80/redir', 'username', 'password') def test_chunked_response(self): + """Test chunked reponse for TestInfluxDBClient object.""" example_response = \ u'{"results":[{"statement_id":0,"series":' \ '[{"name":"cpu","columns":["fieldKey","fieldType"],"values":' \ @@ -887,8 +936,10 @@ def test_chunked_response(self): class FakeClient(InfluxDBClient): + """Set up a fake client instance of InfluxDBClient.""" def __init__(self, *args, **kwargs): + """Initialize an instance of the FakeClient object.""" super(FakeClient, self).__init__(*args, **kwargs) def query(self, @@ -896,6 +947,7 @@ def query(self, params=None, expected_response_code=200, database=None): + """Query data from the FakeClient object.""" if query == 'Fail': raise Exception("Fail") elif query == 'Fail once' and self._host == 'host1': diff --git a/influxdb/tests/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py index 4740468e..02aaac5f 100644 --- a/influxdb/tests/dataframe_client_test.py +++ b/influxdb/tests/dataframe_client_test.py @@ -1,21 +1,22 @@ # -*- coding: utf-8 -*- -""" -unit tests for misc module -""" +"""Unit tests for misc module.""" + from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals -from .client_test import _mocked_session +from datetime import timedelta -import unittest import json +import unittest +import warnings import requests_mock -from nose.tools import raises -from datetime import timedelta + from influxdb.tests import skipIfPYpy, using_pypy -import warnings +from nose.tools import raises + +from .client_test import _mocked_session if not using_pypy: import pandas as pd @@ -25,12 +26,15 @@ @skipIfPYpy class TestDataFrameClient(unittest.TestCase): + """Set up a test DataFrameClient 
object.""" def setUp(self): + """Instantiate a TestDataFrameClient object.""" # By default, raise exceptions on warnings warnings.simplefilter('error', FutureWarning) def test_write_points_from_dataframe(self): + """Test write points from df in TestDataFrameClient object.""" now = pd.Timestamp('1970-01-01 00:00+00:00') dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], index=[now, now + timedelta(hours=1)], @@ -56,6 +60,7 @@ def test_write_points_from_dataframe(self): self.assertEqual(m.last_request.body, expected) def test_write_points_from_dataframe_in_batches(self): + """Test write points in batch from df in TestDataFrameClient object.""" now = pd.Timestamp('1970-01-01 00:00+00:00') dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], index=[now, now + timedelta(hours=1)], @@ -70,6 +75,7 @@ def test_write_points_from_dataframe_in_batches(self): self.assertTrue(cli.write_points(dataframe, "foo", batch_size=1)) def test_write_points_from_dataframe_with_tag_columns(self): + """Test write points from df w/tag in TestDataFrameClient object.""" now = pd.Timestamp('1970-01-01 00:00+00:00') dataframe = pd.DataFrame(data=[['blue', 1, "1", 1, 1.0], ['red', 0, "2", 2, 2.0]], @@ -101,6 +107,7 @@ def test_write_points_from_dataframe_with_tag_columns(self): self.assertEqual(m.last_request.body, expected) def test_write_points_from_dataframe_with_tag_cols_and_global_tags(self): + """Test write points from df w/tag + cols in TestDataFrameClient.""" now = pd.Timestamp('1970-01-01 00:00+00:00') dataframe = pd.DataFrame(data=[['blue', 1, "1", 1, 1.0], ['red', 0, "2", 2, 2.0]], @@ -129,6 +136,7 @@ def test_write_points_from_dataframe_with_tag_cols_and_global_tags(self): self.assertEqual(m.last_request.body, expected) def test_write_points_from_dataframe_with_tag_cols_and_defaults(self): + """Test default write points from df w/tag in TestDataFrameClient.""" now = pd.Timestamp('1970-01-01 00:00+00:00') dataframe = pd.DataFrame(data=[['blue', 1, "1", 1, 1.0, 
'hot'], ['red', 0, "2", 2, 2.0, 'cold']], @@ -199,6 +207,7 @@ def test_write_points_from_dataframe_with_tag_cols_and_defaults(self): self.assertEqual(m.last_request.body, expected_no_tags_no_fields) def test_write_points_from_dataframe_with_tag_escaped(self): + """Test write points from df w/escaped tag in TestDataFrameClient.""" now = pd.Timestamp('1970-01-01 00:00+00:00') dataframe = pd.DataFrame( data=[ @@ -241,6 +250,7 @@ def test_write_points_from_dataframe_with_tag_escaped(self): self.assertEqual(m.last_request.body, expected_escaped_tags) def test_write_points_from_dataframe_with_numeric_column_names(self): + """Test write points from df with numeric cols.""" now = pd.Timestamp('1970-01-01 00:00+00:00') # df with numeric column names dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], @@ -262,6 +272,7 @@ def test_write_points_from_dataframe_with_numeric_column_names(self): self.assertEqual(m.last_request.body, expected) def test_write_points_from_dataframe_with_numeric_precision(self): + """Test write points from df with numeric precision.""" now = pd.Timestamp('1970-01-01 00:00+00:00') # df with numeric column names dataframe = pd.DataFrame(data=[["1", 1, 1.1111111111111], @@ -306,6 +317,7 @@ def test_write_points_from_dataframe_with_numeric_precision(self): self.assertEqual(m.last_request.body, expected_full_precision) def test_write_points_from_dataframe_with_period_index(self): + """Test write points from df with period index.""" dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], index=[pd.Period('1970-01-01'), pd.Period('1970-01-02')], @@ -329,6 +341,7 @@ def test_write_points_from_dataframe_with_period_index(self): self.assertEqual(m.last_request.body, expected) def test_write_points_from_dataframe_with_time_precision(self): + """Test write points from df with time precision.""" now = pd.Timestamp('1970-01-01 00:00+00:00') dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], index=[now, now + timedelta(hours=1)], @@ -394,6 
+407,7 @@ def test_write_points_from_dataframe_with_time_precision(self): @raises(TypeError) def test_write_points_from_dataframe_fails_without_time_index(self): + """Test failed write points from df without time index.""" dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], columns=["column_one", "column_two", "column_three"]) @@ -408,6 +422,7 @@ def test_write_points_from_dataframe_fails_without_time_index(self): @raises(TypeError) def test_write_points_from_dataframe_fails_with_series(self): + """Test failed write points from df with series.""" now = pd.Timestamp('1970-01-01 00:00+00:00') dataframe = pd.Series(data=[1.0, 2.0], index=[now, now + timedelta(hours=1)]) @@ -421,6 +436,7 @@ def test_write_points_from_dataframe_fails_with_series(self): cli.write_points(dataframe, "foo") def test_query_into_dataframe(self): + """Test query into df for TestDataFrameClient object.""" data = { "results": [{ "series": [ @@ -462,6 +478,7 @@ def test_query_into_dataframe(self): assert_frame_equal(expected[k], result[k]) def test_multiquery_into_dataframe(self): + """Test multiquyer into df for TestDataFrameClient object.""" data = { "results": [ { @@ -512,12 +529,14 @@ def test_multiquery_into_dataframe(self): assert_frame_equal(e[k], r[k]) def test_query_with_empty_result(self): + """Test query with empty results in TestDataFrameClient object.""" cli = DataFrameClient('host', 8086, 'username', 'password', 'db') with _mocked_session(cli, 'GET', 200, {"results": [{}]}): result = cli.query('select column_one from foo;') self.assertEqual(result, {}) def test_get_list_database(self): + """Test get list of databases in TestDataFrameClient object.""" data = {'results': [ {'series': [ {'measurement': 'databases', @@ -535,6 +554,7 @@ def test_get_list_database(self): ) def test_datetime_to_epoch(self): + """Test convert datetime to epoch in TestDataFrameClient object.""" timestamp = pd.Timestamp('2013-01-01 00:00:00.000+00:00') cli = DataFrameClient('host', 8086, 'username', 
'password', 'db') @@ -568,6 +588,7 @@ def test_datetime_to_epoch(self): ) def test_dsn_constructor(self): - client = DataFrameClient.from_DSN('influxdb://localhost:8086') + """Test data source name deconstructor in TestDataFrameClient.""" + client = DataFrameClient.from_dsn('influxdb://localhost:8086') self.assertIsInstance(client, DataFrameClient) self.assertEqual('http://localhost:8086', client._baseurl) diff --git a/influxdb/tests/helper_test.py b/influxdb/tests/helper_test.py index 44392f80..39e5ee75 100644 --- a/influxdb/tests/helper_test.py +++ b/influxdb/tests/helper_test.py @@ -1,23 +1,27 @@ # -*- coding: utf-8 -*- +"""Set of series helper functions for test.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals +from datetime import datetime, timedelta + import unittest import warnings import mock -from datetime import datetime, timedelta from influxdb import SeriesHelper, InfluxDBClient from requests.exceptions import ConnectionError class TestSeriesHelper(unittest.TestCase): + """Define the SeriesHelper test object.""" @classmethod def setUpClass(cls): + """Set up the TestSeriesHelper object.""" super(TestSeriesHelper, cls).setUpClass() TestSeriesHelper.client = InfluxDBClient( @@ -29,8 +33,11 @@ def setUpClass(cls): ) class MySeriesHelper(SeriesHelper): + """Define a SeriesHelper object.""" class Meta: + """Define metadata for the SeriesHelper object.""" + client = TestSeriesHelper.client series_name = 'events.stats.{server_name}' fields = ['some_stat'] @@ -41,6 +48,7 @@ class Meta: TestSeriesHelper.MySeriesHelper = MySeriesHelper def tearDown(self): + """Deconstruct the TestSeriesHelper object.""" super(TestSeriesHelper, self).tearDown() TestSeriesHelper.MySeriesHelper._reset_() self.assertEqual( @@ -49,12 +57,13 @@ def tearDown(self): 'Resetting helper did not empty datapoints.') def test_auto_commit(self): - """ - Tests that write_points is called after 
the right number of events - """ + """Test write_points called after valid number of events.""" class AutoCommitTest(SeriesHelper): + """Define a SeriesHelper instance to test autocommit.""" class Meta: + """Define metadata for AutoCommitTest.""" + series_name = 'events.stats.{server_name}' fields = ['some_stat'] tags = ['server_name', 'other_tag'] @@ -74,9 +83,7 @@ class Meta: @mock.patch('influxdb.helper.SeriesHelper._current_timestamp') def testSingleSeriesName(self, current_timestamp): - """ - Tests JSON conversion when there is only one series name. - """ + """Test JSON conversion when there is only one series name.""" current_timestamp.return_value = current_date = datetime.today() TestSeriesHelper.MySeriesHelper( server_name='us.east-1', other_tag='ello', some_stat=159) @@ -141,9 +148,7 @@ def testSingleSeriesName(self, current_timestamp): @mock.patch('influxdb.helper.SeriesHelper._current_timestamp') def testSeveralSeriesNames(self, current_timestamp): - """ - Tests JSON conversion when there are multiple series names. - """ + """Test JSON conversion when there are multiple series names.""" current_timestamp.return_value = current_date = datetime.today() TestSeriesHelper.MySeriesHelper( server_name='us.east-1', some_stat=159, other_tag='ello') @@ -209,9 +214,7 @@ def testSeveralSeriesNames(self, current_timestamp): @mock.patch('influxdb.helper.SeriesHelper._current_timestamp') def testSeriesWithoutTimeField(self, current_timestamp): - """ - Tests that time is optional on a series without a time field. - """ + """Test that time is optional on a series without a time field.""" current_date = datetime.today() yesterday = current_date - timedelta(days=1) current_timestamp.return_value = yesterday @@ -230,9 +233,7 @@ def testSeriesWithoutTimeField(self, current_timestamp): @mock.patch('influxdb.helper.SeriesHelper._current_timestamp') def testSeriesWithTimeField(self, current_timestamp): - """ - Test that time is optional on a series with a time field. 
- """ + """Test that time is optional on a series with a time field.""" current_date = datetime.today() yesterday = current_date - timedelta(days=1) current_timestamp.return_value = yesterday @@ -261,27 +262,36 @@ class Meta: self.assertEqual(point2['time'], yesterday) def testInvalidHelpers(self): - """ - Tests errors in invalid helpers. - """ + """Test errors in invalid helpers.""" class MissingMeta(SeriesHelper): + """Define instance of SeriesHelper for missing meta.""" + pass class MissingClient(SeriesHelper): + """Define SeriesHelper for missing client data.""" class Meta: + """Define metadat for MissingClient.""" + series_name = 'events.stats.{server_name}' fields = ['time', 'server_name'] autocommit = True class MissingSeriesName(SeriesHelper): + """Define instance of SeriesHelper for missing series.""" class Meta: + """Define metadata for MissingSeriesName.""" + fields = ['time', 'server_name'] class MissingFields(SeriesHelper): + """Define instance of SeriesHelper for missing fields.""" class Meta: + """Define metadata for MissingFields.""" + series_name = 'events.stats.{server_name}' for cls in [MissingMeta, MissingClient, MissingFields, @@ -292,9 +302,7 @@ class Meta: @unittest.skip("Fails on py32") def testWarnBulkSizeZero(self): - """ - Tests warning for an invalid bulk size. - """ + """Test warning for an invalid bulk size.""" class WarnBulkSizeZero(SeriesHelper): class Meta: @@ -320,12 +328,13 @@ class Meta: 'Warning message did not contain "forced to 1".') def testWarnBulkSizeNoEffect(self): - """ - Tests warning for a set bulk size but autocommit False. 
- """ + """Test warning for a set bulk size but autocommit False.""" class WarnBulkSizeNoEffect(SeriesHelper): + """Define SeriesHelper for warning on bulk size.""" class Meta: + """Define metadat for WarnBulkSizeNoEffect.""" + series_name = 'events.stats.{server_name}' fields = ['time', 'server_name'] bulk_size = 5 diff --git a/influxdb/tests/influxdb08/__init__.py b/influxdb/tests/influxdb08/__init__.py index 40a96afc..0e79ed1c 100644 --- a/influxdb/tests/influxdb08/__init__.py +++ b/influxdb/tests/influxdb08/__init__.py @@ -1 +1,2 @@ # -*- coding: utf-8 -*- +"""Define the influxdb08 test package.""" diff --git a/influxdb/tests/influxdb08/client_test.py b/influxdb/tests/influxdb08/client_test.py index bcf1702d..4a789fb5 100644 --- a/influxdb/tests/influxdb08/client_test.py +++ b/influxdb/tests/influxdb08/client_test.py @@ -1,19 +1,20 @@ # -*- coding: utf-8 -*- -""" -unit tests -""" +"""Client unit tests.""" + import json -import requests -import requests.exceptions import socket import sys import unittest -import requests_mock import random -from nose.tools import raises -from mock import patch import warnings + import mock +import requests +import requests.exceptions +import requests_mock + +from nose.tools import raises +from mock import patch from influxdb.influxdb08 import InfluxDBClient from influxdb.influxdb08.client import session @@ -22,9 +23,11 @@ import codecs def u(x): + """Test codec.""" return codecs.unicode_escape_decode(x)[0] else: def u(x): + """Test codec.""" return x @@ -36,10 +39,10 @@ def _build_response_object(status_code=200, content=""): def _mocked_session(method="GET", status_code=200, content=""): - method = method.upper() def request(*args, **kwargs): + """Define a request for the _mocked_session.""" c = content # Check method @@ -73,8 +76,10 @@ def request(*args, **kwargs): class TestInfluxDBClient(unittest.TestCase): + """Define a TestInfluxDBClient object.""" def setUp(self): + """Set up a TestInfluxDBClient object.""" # By default, 
raise exceptions on warnings warnings.simplefilter('error', FutureWarning) @@ -92,6 +97,7 @@ def setUp(self): self.dsn_string = 'influxdb://uSr:pWd@host:1886/db' def test_scheme(self): + """Test database scheme for TestInfluxDBClient object.""" cli = InfluxDBClient('host', 8086, 'username', 'password', 'database') self.assertEqual(cli._baseurl, 'http://host:8086') @@ -101,41 +107,46 @@ def test_scheme(self): self.assertEqual(cli._baseurl, 'https://host:8086') def test_dsn(self): - cli = InfluxDBClient.from_DSN(self.dsn_string) + """Test datasource name for TestInfluxDBClient object.""" + cli = InfluxDBClient.from_dsn(self.dsn_string) self.assertEqual('http://host:1886', cli._baseurl) self.assertEqual('uSr', cli._username) self.assertEqual('pWd', cli._password) self.assertEqual('db', cli._database) self.assertFalse(cli.use_udp) - cli = InfluxDBClient.from_DSN('udp+' + self.dsn_string) + cli = InfluxDBClient.from_dsn('udp+' + self.dsn_string) self.assertTrue(cli.use_udp) - cli = InfluxDBClient.from_DSN('https+' + self.dsn_string) + cli = InfluxDBClient.from_dsn('https+' + self.dsn_string) self.assertEqual('https://host:1886', cli._baseurl) - cli = InfluxDBClient.from_DSN('https+' + self.dsn_string, + cli = InfluxDBClient.from_dsn('https+' + self.dsn_string, **{'ssl': False}) self.assertEqual('http://host:1886', cli._baseurl) def test_switch_database(self): + """Test switch database for TestInfluxDBClient object.""" cli = InfluxDBClient('host', 8086, 'username', 'password', 'database') cli.switch_database('another_database') self.assertEqual(cli._database, 'another_database') @raises(FutureWarning) def test_switch_db_deprecated(self): + """Test deprecated switch database for TestInfluxDBClient object.""" cli = InfluxDBClient('host', 8086, 'username', 'password', 'database') cli.switch_db('another_database') self.assertEqual(cli._database, 'another_database') def test_switch_user(self): + """Test switch user for TestInfluxDBClient object.""" cli = 
InfluxDBClient('host', 8086, 'username', 'password', 'database') cli.switch_user('another_username', 'another_password') self.assertEqual(cli._username, 'another_username') self.assertEqual(cli._password, 'another_password') def test_write(self): + """Test write to database for TestInfluxDBClient object.""" with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, @@ -164,6 +175,7 @@ def test_write(self): ) def test_write_points(self): + """Test write points for TestInfluxDBClient object.""" with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, @@ -181,6 +193,7 @@ def test_write_points(self): ) def test_write_points_string(self): + """Test write string points for TestInfluxDBClient object.""" with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, @@ -198,6 +211,7 @@ def test_write_points_string(self): ) def test_write_points_batch(self): + """Test write batch points for TestInfluxDBClient object.""" with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, "http://localhost:8086/db/db/series") @@ -207,6 +221,7 @@ def test_write_points_batch(self): self.assertEqual(1, m.call_count) def test_write_points_batch_invalid_size(self): + """Test write batch points invalid size for TestInfluxDBClient.""" with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, "http://localhost:8086/db/db/series") @@ -216,6 +231,7 @@ def test_write_points_batch_invalid_size(self): self.assertEqual(1, m.call_count) def test_write_points_batch_multiple_series(self): + """Test write points batch multiple series.""" dummy_points = [ {"points": [["1", 1, 1.0], ["2", 2, 2.0], ["3", 3, 3.0], ["4", 4, 4.0], ["5", 5, 5.0]], @@ -240,6 +256,7 @@ def test_write_points_batch_multiple_series(self): self.assertEqual(expected_last_body, m.request_history[4].json()) def test_write_points_udp(self): + """Test write points UDP for TestInfluxDBClient object.""" s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) port = random.randint(4000, 
8000) s.bind(('0.0.0.0', port)) @@ -256,6 +273,7 @@ def test_write_points_udp(self): json.loads(received_data.decode(), strict=True)) def test_write_bad_precision_udp(self): + """Test write UDP w/bad precision.""" cli = InfluxDBClient( 'localhost', 8086, 'root', 'root', 'test', use_udp=True, udp_port=4444 @@ -272,16 +290,19 @@ def test_write_bad_precision_udp(self): @raises(Exception) def test_write_points_fails(self): + """Test failed write points for TestInfluxDBClient object.""" with _mocked_session('post', 500): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') cli.write_points([]) def test_write_points_with_precision(self): + """Test write points with precision.""" with _mocked_session('post', 200, self.dummy_points): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') self.assertTrue(cli.write_points(self.dummy_points)) def test_write_points_bad_precision(self): + """Test write points with bad precision.""" cli = InfluxDBClient() with self.assertRaisesRegexp( Exception, @@ -294,11 +315,13 @@ def test_write_points_bad_precision(self): @raises(Exception) def test_write_points_with_precision_fails(self): + """Test write points where precision fails.""" with _mocked_session('post', 500): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') cli.write_points_with_precision([]) def test_delete_points(self): + """Test delete points for TestInfluxDBClient object.""" with _mocked_session('delete', 204) as mocked: cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') self.assertTrue(cli.delete_points("foo")) @@ -312,26 +335,31 @@ def test_delete_points(self): @raises(Exception) def test_delete_points_with_wrong_name(self): + """Test delete points with wrong name.""" with _mocked_session('delete', 400): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') cli.delete_points("nonexist") @raises(NotImplementedError) def test_create_scheduled_delete(self): + """Test create scheduled deletes.""" cli = 
InfluxDBClient('host', 8086, 'username', 'password', 'db') cli.create_scheduled_delete([]) @raises(NotImplementedError) def test_get_list_scheduled_delete(self): + """Test get schedule list of deletes TestInfluxDBClient.""" cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') cli.get_list_scheduled_delete() @raises(NotImplementedError) def test_remove_scheduled_delete(self): + """Test remove scheduled delete TestInfluxDBClient.""" cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') cli.remove_scheduled_delete(1) def test_query(self): + """Test query for TestInfluxDBClient object.""" data = [ { "name": "foo", @@ -348,6 +376,7 @@ def test_query(self): self.assertEqual(len(result[0]['points']), 4) def test_query_chunked(self): + """Test chunked query for TestInfluxDBClient object.""" cli = InfluxDBClient(database='db') example_object = { 'points': [ @@ -380,6 +409,7 @@ def test_query_chunked(self): ) def test_query_chunked_unicode(self): + """Test unicode chunked query for TestInfluxDBClient object.""" cli = InfluxDBClient(database='db') example_object = { 'points': [ @@ -410,11 +440,13 @@ def test_query_chunked_unicode(self): @raises(Exception) def test_query_fail(self): + """Test failed query for TestInfluxDBClient.""" with _mocked_session('get', 401): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') cli.query('select column_one from foo;') def test_query_bad_precision(self): + """Test query with bad precision for TestInfluxDBClient.""" cli = InfluxDBClient() with self.assertRaisesRegexp( Exception, @@ -423,28 +455,33 @@ def test_query_bad_precision(self): cli.query('select column_one from foo', time_precision='g') def test_create_database(self): + """Test create database for TestInfluxDBClient.""" with _mocked_session('post', 201, {"name": "new_db"}): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') self.assertTrue(cli.create_database('new_db')) @raises(Exception) def test_create_database_fails(self): + 
"""Test failed create database for TestInfluxDBClient.""" with _mocked_session('post', 401): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') cli.create_database('new_db') def test_delete_database(self): + """Test delete database for TestInfluxDBClient.""" with _mocked_session('delete', 204): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') self.assertTrue(cli.delete_database('old_db')) @raises(Exception) def test_delete_database_fails(self): + """Test failed delete database for TestInfluxDBClient.""" with _mocked_session('delete', 401): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') cli.delete_database('old_db') def test_get_list_database(self): + """Test get list of databases for TestInfluxDBClient.""" data = [ {"name": "a_db"} ] @@ -455,12 +492,14 @@ def test_get_list_database(self): @raises(Exception) def test_get_list_database_fails(self): + """Test failed get list of databases for TestInfluxDBClient.""" with _mocked_session('get', 401): cli = InfluxDBClient('host', 8086, 'username', 'password') cli.get_list_database() @raises(FutureWarning) def test_get_database_list_deprecated(self): + """Test deprecated get database list for TestInfluxDBClient.""" data = [ {"name": "a_db"} ] @@ -470,17 +509,20 @@ def test_get_database_list_deprecated(self): self.assertEqual(cli.get_database_list()[0]['name'], 'a_db') def test_delete_series(self): + """Test delete series for TestInfluxDBClient.""" with _mocked_session('delete', 204): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') cli.delete_series('old_series') @raises(Exception) def test_delete_series_fails(self): + """Test failed delete series for TestInfluxDBClient.""" with _mocked_session('delete', 401): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') cli.delete_series('old_series') def test_get_series_list(self): + """Test get list of series for TestInfluxDBClient.""" cli = InfluxDBClient(database='db') with requests_mock.Mocker() as m: 
@@ -500,6 +542,7 @@ def test_get_series_list(self): ) def test_get_continuous_queries(self): + """Test get continuous queries for TestInfluxDBClient.""" cli = InfluxDBClient(database='db') with requests_mock.Mocker() as m: @@ -532,9 +575,11 @@ def test_get_continuous_queries(self): ) def test_get_list_cluster_admins(self): + """Test get list of cluster admins, not implemented.""" pass def test_add_cluster_admin(self): + """Test add cluster admin for TestInfluxDBClient.""" with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, @@ -556,6 +601,7 @@ def test_add_cluster_admin(self): ) def test_update_cluster_admin_password(self): + """Test update cluster admin pass for TestInfluxDBClient.""" with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, @@ -574,6 +620,7 @@ def test_update_cluster_admin_password(self): ) def test_delete_cluster_admin(self): + """Test delete cluster admin for TestInfluxDBClient.""" with requests_mock.Mocker() as m: m.register_uri( requests_mock.DELETE, @@ -587,12 +634,15 @@ def test_delete_cluster_admin(self): self.assertIsNone(m.last_request.body) def test_set_database_admin(self): + """Test set database admin for TestInfluxDBClient.""" pass def test_unset_database_admin(self): + """Test unset database admin for TestInfluxDBClient.""" pass def test_alter_database_admin(self): + """Test alter database admin for TestInfluxDBClient.""" with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, @@ -614,25 +664,30 @@ def test_alter_database_admin(self): @raises(NotImplementedError) def test_get_list_database_admins(self): + """Test get list of database admins for TestInfluxDBClient.""" cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') cli.get_list_database_admins() @raises(NotImplementedError) def test_add_database_admin(self): + """Test add database admins for TestInfluxDBClient.""" cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') cli.add_database_admin('admin', 
'admin_secret_password') @raises(NotImplementedError) def test_update_database_admin_password(self): + """Test update database admin pass for TestInfluxDBClient.""" cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') cli.update_database_admin_password('admin', 'admin_secret_password') @raises(NotImplementedError) def test_delete_database_admin(self): + """Test delete database admin for TestInfluxDBClient.""" cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') cli.delete_database_admin('admin') def test_get_database_users(self): + """Test get database users for TestInfluxDBClient.""" cli = InfluxDBClient('localhost', 8086, 'username', 'password', 'db') example_response = \ @@ -650,6 +705,7 @@ def test_get_database_users(self): self.assertEqual(json.loads(example_response), users) def test_add_database_user(self): + """Test add database user for TestInfluxDBClient.""" with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, @@ -673,6 +729,7 @@ def test_add_database_user(self): ) def test_add_database_user_bad_permissions(self): + """Test add database user with bad perms for TestInfluxDBClient.""" cli = InfluxDBClient() with self.assertRaisesRegexp( @@ -686,6 +743,7 @@ def test_add_database_user_bad_permissions(self): ) def test_alter_database_user_password(self): + """Test alter database user pass for TestInfluxDBClient.""" with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, @@ -706,6 +764,7 @@ def test_alter_database_user_password(self): ) def test_alter_database_user_permissions(self): + """Test alter database user perms for TestInfluxDBClient.""" with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, @@ -727,6 +786,7 @@ def test_alter_database_user_permissions(self): ) def test_alter_database_user_password_and_permissions(self): + """Test alter database user pass and perms for TestInfluxDBClient.""" with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, @@ -750,6 +810,7 @@ def 
test_alter_database_user_password_and_permissions(self): ) def test_update_database_user_password_current_user(self): + """Test update database user pass for TestInfluxDBClient.""" cli = InfluxDBClient( username='root', password='hello', @@ -769,6 +830,7 @@ def test_update_database_user_password_current_user(self): self.assertEqual(cli._password, 'bye') def test_delete_database_user(self): + """Test delete database user for TestInfluxDBClient.""" with requests_mock.Mocker() as m: m.register_uri( requests_mock.DELETE, @@ -782,17 +844,21 @@ def test_delete_database_user(self): @raises(NotImplementedError) def test_update_permission(self): + """Test update permission for TestInfluxDBClient.""" cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') cli.update_permission('admin', []) @mock.patch('requests.Session.request') def test_request_retry(self, mock_request): - """Tests that two connection errors will be handled""" - + """Test that two connection errors will be handled.""" class CustomMock(object): - i = 0 + """Define CustomMock object.""" + + def __init__(self): + self.i = 0 def connection_error(self, *args, **kwargs): + """Test connection error in CustomMock.""" self.i += 1 if self.i < 3: @@ -811,12 +877,16 @@ def connection_error(self, *args, **kwargs): @mock.patch('requests.Session.request') def test_request_retry_raises(self, mock_request): - """Tests that three connection errors will not be handled""" - + """Test that three connection errors will not be handled.""" class CustomMock(object): - i = 0 + """Define CustomMock object.""" + + def __init__(self): + """Initialize the object.""" + self.i = 0 def connection_error(self, *args, **kwargs): + """Test the connection error for CustomMock.""" self.i += 1 if self.i < 4: diff --git a/influxdb/tests/influxdb08/dataframe_client_test.py b/influxdb/tests/influxdb08/dataframe_client_test.py index 63a10c93..6e6fa2cc 100644 --- a/influxdb/tests/influxdb08/dataframe_client_test.py +++ 
b/influxdb/tests/influxdb08/dataframe_client_test.py @@ -1,17 +1,20 @@ # -*- coding: utf-8 -*- -""" -unit tests for misc module -""" -from .client_test import _mocked_session +"""Unit tests for misc module.""" -import unittest +from datetime import timedelta + +import copy import json +import unittest +import warnings + import requests_mock + from nose.tools import raises -from datetime import timedelta + from influxdb.tests import skipIfPYpy, using_pypy -import copy -import warnings + +from .client_test import _mocked_session if not using_pypy: import pandas as pd @@ -21,12 +24,15 @@ @skipIfPYpy class TestDataFrameClient(unittest.TestCase): + """Define the DataFramClient test object.""" def setUp(self): + """Set up an instance of TestDataFrameClient object.""" # By default, raise exceptions on warnings warnings.simplefilter('error', FutureWarning) def test_write_points_from_dataframe(self): + """Test write points from dataframe.""" now = pd.Timestamp('1970-01-01 00:00+00:00') dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], index=[now, now + timedelta(hours=1)], @@ -53,6 +59,7 @@ def test_write_points_from_dataframe(self): self.assertListEqual(json.loads(m.last_request.body), points) def test_write_points_from_dataframe_with_float_nan(self): + """Test write points from dataframe with NaN float.""" now = pd.Timestamp('1970-01-01 00:00+00:00') dataframe = pd.DataFrame(data=[[1, float("NaN"), 1.0], [2, 2, 2.0]], index=[now, now + timedelta(hours=1)], @@ -79,6 +86,7 @@ def test_write_points_from_dataframe_with_float_nan(self): self.assertListEqual(json.loads(m.last_request.body), points) def test_write_points_from_dataframe_in_batches(self): + """Test write points from dataframe in batches.""" now = pd.Timestamp('1970-01-01 00:00+00:00') dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], index=[now, now + timedelta(hours=1)], @@ -92,6 +100,7 @@ def test_write_points_from_dataframe_in_batches(self): self.assertTrue(cli.write_points({"foo": 
dataframe}, batch_size=1)) def test_write_points_from_dataframe_with_numeric_column_names(self): + """Test write points from dataframe with numeric columns.""" now = pd.Timestamp('1970-01-01 00:00+00:00') # df with numeric column names dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], @@ -117,6 +126,7 @@ def test_write_points_from_dataframe_with_numeric_column_names(self): self.assertListEqual(json.loads(m.last_request.body), points) def test_write_points_from_dataframe_with_period_index(self): + """Test write points from dataframe with period index.""" dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], index=[pd.Period('1970-01-01'), pd.Period('1970-01-02')], @@ -143,6 +153,7 @@ def test_write_points_from_dataframe_with_period_index(self): self.assertListEqual(json.loads(m.last_request.body), points) def test_write_points_from_dataframe_with_time_precision(self): + """Test write points from dataframe with time precision.""" now = pd.Timestamp('1970-01-01 00:00+00:00') dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], index=[now, now + timedelta(hours=1)], @@ -182,6 +193,7 @@ def test_write_points_from_dataframe_with_time_precision(self): @raises(TypeError) def test_write_points_from_dataframe_fails_without_time_index(self): + """Test write points from dataframe that fails without time index.""" dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], columns=["column_one", "column_two", "column_three"]) @@ -195,6 +207,7 @@ def test_write_points_from_dataframe_fails_without_time_index(self): @raises(TypeError) def test_write_points_from_dataframe_fails_with_series(self): + """Test failed write points from dataframe with series.""" now = pd.Timestamp('1970-01-01 00:00+00:00') dataframe = pd.Series(data=[1.0, 2.0], index=[now, now + timedelta(hours=1)]) @@ -207,6 +220,7 @@ def test_write_points_from_dataframe_fails_with_series(self): cli.write_points({"foo": dataframe}) def test_query_into_dataframe(self): + """Test query into 
a dataframe.""" data = [ { "name": "foo", @@ -229,6 +243,7 @@ def test_query_into_dataframe(self): assert_frame_equal(dataframe, result) def test_query_multiple_time_series(self): + """Test query for multiple time series.""" data = [ { "name": "series1", @@ -269,12 +284,14 @@ def test_query_multiple_time_series(self): assert_frame_equal(dataframes[key], result[key]) def test_query_with_empty_result(self): + """Test query with empty results.""" with _mocked_session('get', 200, []): cli = DataFrameClient('host', 8086, 'username', 'password', 'db') result = cli.query('select column_one from foo;') self.assertEqual(result, []) def test_list_series(self): + """Test list of series for dataframe object.""" response = [ { 'columns': ['time', 'name'], @@ -288,6 +305,7 @@ def test_list_series(self): self.assertEqual(series_list, ['seriesA', 'seriesB']) def test_datetime_to_epoch(self): + """Test convert datetime to epoch.""" timestamp = pd.Timestamp('2013-01-01 00:00:00.000+00:00') cli = DataFrameClient('host', 8086, 'username', 'password', 'db') diff --git a/influxdb/tests/influxdb08/helper_test.py b/influxdb/tests/influxdb08/helper_test.py index c9ce311f..2e305f3f 100644 --- a/influxdb/tests/influxdb08/helper_test.py +++ b/influxdb/tests/influxdb08/helper_test.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +"""Define set of helper functions for the dataframe.""" import unittest import warnings @@ -9,9 +10,11 @@ class TestSeriesHelper(unittest.TestCase): + """Define the SeriesHelper for test.""" @classmethod def setUpClass(cls): + """Set up an instance of the TestSerisHelper object.""" super(TestSeriesHelper, cls).setUpClass() TestSeriesHelper.client = InfluxDBClient( @@ -23,8 +26,11 @@ def setUpClass(cls): ) class MySeriesHelper(SeriesHelper): + """Define a subset SeriesHelper instance.""" class Meta: + """Define metadata for the TestSeriesHelper object.""" + client = TestSeriesHelper.client series_name = 'events.stats.{server_name}' fields = ['time', 'server_name'] @@ -34,12 
+40,13 @@ class Meta: TestSeriesHelper.MySeriesHelper = MySeriesHelper def test_auto_commit(self): - """ - Tests that write_points is called after the right number of events - """ + """Test that write_points called after the right number of events.""" class AutoCommitTest(SeriesHelper): + """Define an instance of SeriesHelper for AutoCommit test.""" class Meta: + """Define metadata AutoCommitTest object.""" + series_name = 'events.stats.{server_name}' fields = ['time', 'server_name'] bulk_size = 5 @@ -57,9 +64,7 @@ class Meta: self.assertTrue(fake_write_points.called) def testSingleSeriesName(self): - """ - Tests JSON conversion when there is only one series name. - """ + """Test JSON conversion when there is only one series name.""" TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=159) TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=158) TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=157) @@ -83,9 +88,7 @@ def testSingleSeriesName(self): 'Resetting helper did not empty datapoints.') def testSeveralSeriesNames(self): - """ - Tests JSON conversion when there is only one series name. - """ + """Test JSON conversion when there is only one series name.""" TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=159) TestSeriesHelper.MySeriesHelper(server_name='fr.paris-10', time=158) TestSeriesHelper.MySeriesHelper(server_name='lu.lux', time=157) @@ -116,27 +119,36 @@ def testSeveralSeriesNames(self): 'Resetting helper did not empty datapoints.') def testInvalidHelpers(self): - """ - Tests errors in invalid helpers. 
- """ + """Test errors in invalid helpers.""" class MissingMeta(SeriesHelper): + """Define SeriesHelper object for MissingMeta test.""" + pass class MissingClient(SeriesHelper): + """Define SeriesHelper object for MissingClient test.""" class Meta: + """Define metadata for MissingClient object.""" + series_name = 'events.stats.{server_name}' fields = ['time', 'server_name'] autocommit = True class MissingSeriesName(SeriesHelper): + """Define SeriesHelper object for MissingSeries test.""" class Meta: + """Define metadata for MissingSeriesName object.""" + fields = ['time', 'server_name'] class MissingFields(SeriesHelper): + """Define SeriesHelper for MissingFields test.""" class Meta: + """Define metadata for MissingFields object.""" + series_name = 'events.stats.{server_name}' for cls in [MissingMeta, MissingClient, MissingFields, @@ -146,12 +158,13 @@ class Meta: 'server_name': 'us.east-1'}) def testWarnBulkSizeZero(self): - """ - Tests warning for an invalid bulk size. - """ + """Test warning for an invalid bulk size.""" class WarnBulkSizeZero(SeriesHelper): + """Define SeriesHelper for WarnBulkSizeZero test.""" class Meta: + """Define metadata for WarnBulkSizeZero object.""" + client = TestSeriesHelper.client series_name = 'events.stats.{server_name}' fields = ['time', 'server_name'] @@ -180,12 +193,13 @@ class Meta: 'Warning message did not contain "forced to 1".') def testWarnBulkSizeNoEffect(self): - """ - Tests warning for a set bulk size but autocommit False. 
- """ + """Test warning for a set bulk size but autocommit False.""" class WarnBulkSizeNoEffect(SeriesHelper): + """Define SeriesHelper for WarnBulkSizeNoEffect object.""" class Meta: + """Define metadata for WarnBulkSizeNoEffect object.""" + series_name = 'events.stats.{server_name}' fields = ['time', 'server_name'] bulk_size = 5 diff --git a/influxdb/tests/misc.py b/influxdb/tests/misc.py index 7dffc219..324d13c4 100644 --- a/influxdb/tests/misc.py +++ b/influxdb/tests/misc.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +"""Define the misc handler for InfluxDBClient test.""" from __future__ import absolute_import from __future__ import division @@ -9,7 +10,9 @@ def get_free_ports(num_ports, ip='127.0.0.1'): - """Get `num_ports` free/available ports on the interface linked to the `ip´ + """Determine free ports on provided interface. + + Get `num_ports` free/available ports on the interface linked to the `ip` :param int num_ports: The number of free ports to get :param str ip: The ip on which the ports have to be taken :return: a set of ports number @@ -36,6 +39,7 @@ def get_free_ports(num_ports, ip='127.0.0.1'): def is_port_open(port, ip='127.0.0.1'): + """Check if given TCP port is open for connection.""" sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) try: result = sock.connect_ex((ip, port)) diff --git a/influxdb/tests/resultset_test.py b/influxdb/tests/resultset_test.py index 4717b88c..dd088b79 100644 --- a/influxdb/tests/resultset_test.py +++ b/influxdb/tests/resultset_test.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +"""Define the resultset test package.""" from __future__ import absolute_import from __future__ import division @@ -12,8 +13,10 @@ class TestResultSet(unittest.TestCase): + """Define the ResultSet test object.""" def setUp(self): + """Set up an instance of TestResultSet.""" self.query_response = { "results": [ {"series": [{"measurement": "cpu_load_short", @@ -39,9 +42,11 @@ def setUp(self): ]}]} ] } + self.rs = 
ResultSet(self.query_response['results'][0]) def test_filter_by_name(self): + """Test filtering by name in TestResultSet object.""" expected = [ {'value': 0.64, 'time': '2015-01-29T21:51:28.968422294Z'}, {'value': 0.65, 'time': '2015-01-29T21:51:28.968422294Z'} @@ -53,6 +58,7 @@ def test_filter_by_name(self): measurement='cpu_load_short'))) def test_filter_by_tags(self): + """Test filter by tags in TestResultSet object.""" expected = [ {'time': '2015-01-29T21:51:28.968422294Z', 'value': 0.64}, {'time': '2015-01-29T21:51:28.968422294Z', 'value': 0.66} @@ -69,6 +75,7 @@ def test_filter_by_tags(self): ) def test_filter_by_name_and_tags(self): + """Test filter by name and tags in TestResultSet object.""" self.assertEqual( list(self.rs[('cpu_load_short', {"host": "server01"})]), [{'time': '2015-01-29T21:51:28.968422294Z', 'value': 0.64}] @@ -83,6 +90,7 @@ def test_filter_by_name_and_tags(self): ) def test_keys(self): + """Test keys in TestResultSet object.""" self.assertEqual( self.rs.keys(), [ @@ -93,12 +101,14 @@ def test_keys(self): ) def test_len(self): + """Test length in TestResultSet object.""" self.assertEqual( len(self.rs), 3 ) def test_items(self): + """Test items in TestResultSet object.""" items = list(self.rs.items()) items_lists = [(item[0], list(item[1])) for item in items] @@ -124,6 +134,7 @@ def test_items(self): ) def test_point_from_cols_vals(self): + """Test points from columns in TestResultSet object.""" cols = ['col1', 'col2'] vals = [1, '2'] @@ -134,6 +145,7 @@ def test_point_from_cols_vals(self): ) def test_system_query(self): + """Test system query capabilities in TestResultSet object.""" rs = ResultSet( {'series': [ {'values': [['another', '48h0m0s', 3, False], @@ -161,6 +173,7 @@ def test_system_query(self): ) def test_resultset_error(self): + """Test returning error in TestResultSet object.""" with self.assertRaises(InfluxDBClientError): ResultSet({ "series": [], diff --git a/influxdb/tests/server_tests/__init__.py 
b/influxdb/tests/server_tests/__init__.py index e69de29b..ce149ab4 100644 --- a/influxdb/tests/server_tests/__init__.py +++ b/influxdb/tests/server_tests/__init__.py @@ -0,0 +1 @@ +"""Define the server tests package.""" diff --git a/influxdb/tests/server_tests/base.py b/influxdb/tests/server_tests/base.py index f217fce1..f4bd3ff9 100644 --- a/influxdb/tests/server_tests/base.py +++ b/influxdb/tests/server_tests/base.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +"""Define the base module for server test.""" from __future__ import absolute_import from __future__ import division @@ -41,7 +42,9 @@ def _teardown_influxdb_server(inst): class SingleTestCaseWithServerMixin(object): - """ A mixin for unittest.TestCase to start an influxdb server instance + """Define the single testcase with server mixin. + + A mixin for unittest.TestCase to start an influxdb server instance in a temporary directory **for each test function/case** """ @@ -53,23 +56,29 @@ class SingleTestCaseWithServerMixin(object): class ManyTestCasesWithServerMixin(object): - """ Same than SingleTestCaseWithServerMixin - but creates a single instance for the whole class. - Also pre-creates a fresh database: 'db'. + """Define the many testcase with server mixin. + + Same as the SingleTestCaseWithServerMixin but this module creates + a single instance for the whole class. Also pre-creates a fresh + database: 'db'. """ # 'influxdb_template_conf' attribute must be set on the class itself ! 
@classmethod def setUpClass(cls): + """Set up an instance of the ManyTestCasesWithServerMixin.""" _setup_influxdb_server(cls) def setUp(self): + """Set up an instance of the ManyTestCasesWithServerMixin.""" self.cli.create_database('db') @classmethod def tearDownClass(cls): + """Deconstruct an instance of ManyTestCasesWithServerMixin.""" _teardown_influxdb_server(cls) def tearDown(self): + """Deconstruct an instance of ManyTestCasesWithServerMixin.""" self.cli.drop_database('db') diff --git a/influxdb/tests/server_tests/client_test_with_server.py b/influxdb/tests/server_tests/client_test_with_server.py index d81054c9..a7dedddd 100644 --- a/influxdb/tests/server_tests/client_test_with_server.py +++ b/influxdb/tests/server_tests/client_test_with_server.py @@ -1,13 +1,13 @@ # -*- coding: utf-8 -*- -""" -unit tests for checking the good/expected interaction between : +"""Unit tests for checking the InfluxDB server. + +The good/expected interaction between: + the python client.. (obviously) + and a *_real_* server instance running. This basically duplicates what's in client_test.py - but without mocking around every call. - +but without mocking around every call. 
""" from __future__ import absolute_import from __future__ import division @@ -20,9 +20,6 @@ import unittest import warnings -# By default, raise exceptions on warnings -warnings.simplefilter('error', FutureWarning) - from influxdb import InfluxDBClient from influxdb.exceptions import InfluxDBClientError @@ -30,6 +27,9 @@ from influxdb.tests.server_tests.base import ManyTestCasesWithServerMixin from influxdb.tests.server_tests.base import SingleTestCaseWithServerMixin +# By default, raise exceptions on warnings +warnings.simplefilter('error', FutureWarning) + if not using_pypy: import pandas as pd from pandas.util.testing import assert_frame_equal @@ -39,11 +39,15 @@ def point(serie_name, timestamp=None, tags=None, **fields): + """Define what a point looks like.""" res = {'measurement': serie_name} + if timestamp: res['time'] = timestamp + if tags: res['tags'] = tags + res['fields'] = fields return res @@ -117,15 +121,17 @@ def point(serie_name, timestamp=None, tags=None, **fields): @skipServerTests -class SimpleTests(SingleTestCaseWithServerMixin, - unittest.TestCase): +class SimpleTests(SingleTestCaseWithServerMixin, unittest.TestCase): + """Define the class of simple tests.""" influxdb_template_conf = os.path.join(THIS_DIR, 'influxdb.conf.template') def test_fresh_server_no_db(self): + """Test a fresh server without database.""" self.assertEqual([], self.cli.get_list_database()) def test_create_database(self): + """Test create a database.""" self.assertIsNone(self.cli.create_database('new_db_1')) self.assertIsNone(self.cli.create_database('new_db_2')) self.assertEqual( @@ -134,44 +140,52 @@ def test_create_database(self): ) def test_drop_database(self): + """Test drop a database.""" self.test_create_database() self.assertIsNone(self.cli.drop_database('new_db_1')) self.assertEqual([{'name': 'new_db_2'}], self.cli.get_list_database()) def test_query_fail(self): + """Test that a query failed.""" with self.assertRaises(InfluxDBClientError) as ctx: 
self.cli.query('select column_one from foo') self.assertIn('database not found: db', ctx.exception.content) def test_query_fail_ignore_errors(self): + """Test query failed but ignore errors.""" result = self.cli.query('select column_one from foo', raise_errors=False) self.assertEqual(result.error, 'database not found: db') def test_create_user(self): + """Test create user.""" self.cli.create_user('test_user', 'secret_password') rsp = list(self.cli.query("SHOW USERS")['results']) self.assertIn({'user': 'test_user', 'admin': False}, rsp) def test_create_user_admin(self): + """Test create admin user.""" self.cli.create_user('test_user', 'secret_password', True) rsp = list(self.cli.query("SHOW USERS")['results']) self.assertIn({'user': 'test_user', 'admin': True}, rsp) def test_create_user_blank_password(self): + """Test create user with a blank pass.""" self.cli.create_user('test_user', '') rsp = list(self.cli.query("SHOW USERS")['results']) self.assertIn({'user': 'test_user', 'admin': False}, rsp) def test_get_list_users_empty(self): + """Test get list of users, but empty.""" rsp = self.cli.get_list_users() self.assertEqual([], rsp) def test_get_list_users(self): + """Test get list of users.""" self.cli.query("CREATE USER test WITH PASSWORD 'test'") rsp = self.cli.get_list_users() @@ -181,6 +195,7 @@ def test_get_list_users(self): ) def test_create_user_blank_username(self): + """Test create blank username.""" with self.assertRaises(InfluxDBClientError) as ctx: self.cli.create_user('', 'secret_password') self.assertIn('username required', @@ -189,12 +204,14 @@ def test_create_user_blank_username(self): self.assertEqual(rsp, []) def test_drop_user(self): + """Test drop a user.""" self.cli.query("CREATE USER test WITH PASSWORD 'test'") self.cli.drop_user('test') users = list(self.cli.query("SHOW USERS")['results']) self.assertEqual(users, []) def test_drop_user_nonexisting(self): + """Test dropping a nonexistant user.""" with self.assertRaises(InfluxDBClientError) as 
ctx: self.cli.drop_user('test') self.assertIn('user not found', @@ -202,6 +219,7 @@ def test_drop_user_nonexisting(self): @unittest.skip("Broken as of 0.9.0") def test_revoke_admin_privileges(self): + """Test revoking admin privs, deprecated as of v0.9.0.""" self.cli.create_user('test', 'test', admin=True) self.assertEqual([{'user': 'test', 'admin': True}], self.cli.get_list_users()) @@ -210,12 +228,14 @@ def test_revoke_admin_privileges(self): self.cli.get_list_users()) def test_grant_privilege(self): + """Test grant privs to user.""" self.cli.create_user('test', 'test') self.cli.create_database('testdb') self.cli.grant_privilege('all', 'testdb', 'test') # TODO: when supported by InfluxDB, check if privileges are granted def test_grant_privilege_invalid(self): + """Test grant invalid privs to user.""" self.cli.create_user('test', 'test') self.cli.create_database('testdb') with self.assertRaises(InfluxDBClientError) as ctx: @@ -225,12 +245,14 @@ def test_grant_privilege_invalid(self): ctx.exception.content) def test_revoke_privilege(self): + """Test revoke privs from user.""" self.cli.create_user('test', 'test') self.cli.create_database('testdb') self.cli.revoke_privilege('all', 'testdb', 'test') # TODO: when supported by InfluxDB, check if privileges are revoked def test_revoke_privilege_invalid(self): + """Test revoke invalid privs from user.""" self.cli.create_user('test', 'test') self.cli.create_database('testdb') with self.assertRaises(InfluxDBClientError) as ctx: @@ -240,23 +262,26 @@ def test_revoke_privilege_invalid(self): ctx.exception.content) def test_invalid_port_fails(self): + """Test invalid port access fails.""" with self.assertRaises(ValueError): InfluxDBClient('host', '80/redir', 'username', 'password') @skipServerTests -class CommonTests(ManyTestCasesWithServerMixin, - unittest.TestCase): +class CommonTests(ManyTestCasesWithServerMixin, unittest.TestCase): + """Define a class to handle common tests for the server.""" influxdb_template_conf = 
os.path.join(THIS_DIR, 'influxdb.conf.template') def test_write(self): + """Test write to the server.""" self.assertIs(True, self.cli.write( {'points': dummy_point}, params={'db': 'db'}, )) def test_write_check_read(self): + """Test write and check read of data to server.""" self.test_write() time.sleep(1) rsp = self.cli.query('SELECT * FROM cpu_load_short', database='db') @@ -265,10 +290,12 @@ def test_write_check_read(self): list(rsp.get_points())) def test_write_points(self): + """Test writing points to the server.""" self.assertIs(True, self.cli.write_points(dummy_point)) @skipIfPYpy def test_write_points_DF(self): + """Test writing points with dataframe.""" self.assertIs( True, self.cliDF.write_points( @@ -279,6 +306,7 @@ def test_write_points_DF(self): ) def test_write_points_check_read(self): + """Test writing points and check read back.""" self.test_write_points() time.sleep(1) # same as test_write_check_read() rsp = self.cli.query('SELECT * FROM cpu_load_short') @@ -307,6 +335,7 @@ def test_write_points_check_read(self): @unittest.skip("Broken as of 0.9.0") def test_write_points_check_read_DF(self): + """Test write points and check back with dataframe.""" self.test_write_points_DF() time.sleep(1) # same as test_write_check_read() @@ -326,6 +355,7 @@ def test_write_points_check_read_DF(self): ) def test_write_multiple_points_different_series(self): + """Test write multiple points to different series.""" self.assertIs(True, self.cli.write_points(dummy_points)) time.sleep(1) rsp = self.cli.query('SELECT * FROM cpu_load_short') @@ -355,6 +385,7 @@ def test_write_multiple_points_different_series(self): @unittest.skip("Broken as of 0.9.0") def test_write_multiple_points_different_series_DF(self): + """Test write multiple points using dataframe to different series.""" for i in range(2): self.assertIs( True, self.cliDF.write_points( @@ -376,6 +407,7 @@ def test_write_multiple_points_different_series_DF(self): ) def test_write_points_batch(self): + """Test writing 
points in a batch.""" dummy_points = [ {"measurement": "cpu_usage", "tags": {"unit": "percent"}, "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.34}}, @@ -399,10 +431,12 @@ def test_write_points_batch(self): self.assertIn(12.34, cpu['series'][0]['values'][0]) def test_query(self): + """Test querying data back from server.""" self.assertIs(True, self.cli.write_points(dummy_point)) @unittest.skip('Not implemented for 0.9') def test_query_chunked(self): + """Test query for chunked response from server.""" cli = InfluxDBClient(database='db') example_object = { 'points': [ @@ -424,10 +458,12 @@ def test_query_chunked(self): # TODO ? def test_delete_series_invalid(self): + """Test delete invalid series.""" with self.assertRaises(InfluxDBClientError): self.cli.delete_series() def test_default_retention_policy(self): + """Test add default retention policy.""" rsp = self.cli.get_list_retention_policies() self.assertEqual( [ @@ -441,6 +477,7 @@ def test_default_retention_policy(self): ) def test_create_retention_policy_default(self): + """Test create a new default retention policy.""" self.cli.create_retention_policy('somename', '1d', 1, default=True) self.cli.create_retention_policy('another', '2d', 1, default=False) rsp = self.cli.get_list_retention_policies() @@ -467,6 +504,7 @@ def test_create_retention_policy_default(self): ) def test_create_retention_policy(self): + """Test creating a new retention policy, not default.""" self.cli.create_retention_policy('somename', '1d', 1) # NB: creating a retention policy without specifying # shard group duration @@ -489,6 +527,7 @@ def test_create_retention_policy(self): ) def test_alter_retention_policy(self): + """Test alter a retention policy, not default.""" self.cli.create_retention_policy('somename', '1d', 1) # Test alter duration @@ -555,6 +594,7 @@ def test_alter_retention_policy(self): ) def test_alter_retention_policy_invalid(self): + """Test invalid alter retention policy.""" 
self.cli.create_retention_policy('somename', '1d', 1) with self.assertRaises(InfluxDBClientError) as ctx: self.cli.alter_retention_policy('somename', 'db') @@ -579,6 +619,7 @@ def test_alter_retention_policy_invalid(self): ) def test_drop_retention_policy(self): + """Test drop a retention policy.""" self.cli.create_retention_policy('somename', '1d', 1) # Test drop retention @@ -596,6 +637,7 @@ def test_drop_retention_policy(self): ) def test_issue_143(self): + """Test for PR#143 from repo.""" pt = partial(point, 'a_serie_name', timestamp='2015-03-30T16:16:37Z') pts = [ pt(value=15), @@ -644,6 +686,7 @@ def test_issue_143(self): ) def test_query_multiple_series(self): + """Test query for multiple series.""" pt = partial(point, 'serie1', timestamp='2015-03-30T16:16:37Z') pts = [ pt(tags={'tag1': 'value1', 'tag2': 'v1'}, value=0), @@ -659,14 +702,15 @@ def test_query_multiple_series(self): @skipServerTests -class UdpTests(ManyTestCasesWithServerMixin, - unittest.TestCase): +class UdpTests(ManyTestCasesWithServerMixin, unittest.TestCase): + """Define a class to test UDP series.""" influxdb_udp_enabled = True influxdb_template_conf = os.path.join(THIS_DIR, 'influxdb.conf.template') def test_write_points_udp(self): + """Test write points UDP.""" cli = InfluxDBClient( 'localhost', self.influxd_inst.http_port, diff --git a/influxdb/tests/server_tests/influxdb_instance.py b/influxdb/tests/server_tests/influxdb_instance.py index 59cbcd4f..21e20fde 100644 --- a/influxdb/tests/server_tests/influxdb_instance.py +++ b/influxdb/tests/server_tests/influxdb_instance.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +"""Define the test module for an influxdb instance.""" from __future__ import absolute_import from __future__ import division @@ -6,13 +7,13 @@ from __future__ import unicode_literals import datetime +import distutils import os import tempfile -import distutils -import time import shutil import subprocess import sys +import time import unittest from influxdb.tests.misc import 
is_port_open, get_free_ports @@ -20,6 +21,7 @@ # hack in check_output if it's not defined, like for python 2.6 if "check_output" not in dir(subprocess): def f(*popenargs, **kwargs): + """Check for output.""" if 'stdout' in kwargs: raise ValueError( 'stdout argument not allowed, it will be overridden.' @@ -39,14 +41,14 @@ def f(*popenargs, **kwargs): class InfluxDbInstance(object): - """ A class to launch of fresh influxdb server instance + """Define an instance of InfluxDB. + + A class to launch of fresh influxdb server instance in a temporary place, using a config file template. """ - def __init__(self, - conf_template, - udp_enabled=False): - + def __init__(self, conf_template, udp_enabled=False): + """Initialize an instance of InfluxDbInstance.""" if os.environ.get("INFLUXDB_PYTHON_SKIP_SERVER_TESTS", None) == 'True': raise unittest.SkipTest( "Skipping server test (INFLUXDB_PYTHON_SKIP_SERVER_TESTS)" @@ -66,7 +68,6 @@ def __init__(self, raise e def _start_server(self, conf_template, udp_enabled): - # create a temporary dir to store all needed files # for the influxdb server instance : self.temp_dir_base = tempfile.mkdtemp() @@ -149,6 +150,7 @@ def _start_server(self, conf_template, udp_enabled): % data) def find_influxd_path(self): + """Find the path for InfluxDB.""" influxdb_bin_path = os.environ.get( 'INFLUXDB_PYTHON_INFLUXD_PATH', None @@ -174,6 +176,7 @@ def find_influxd_path(self): return influxdb_bin_path def get_logs_and_output(self): + """Query for logs and output.""" proc = self.proc try: with open(self.logs_file) as fh: @@ -188,6 +191,7 @@ def get_logs_and_output(self): } def close(self, remove_tree=True): + """Close an instance of InfluxDB.""" self.proc.terminate() self.proc.wait() if remove_tree: diff --git a/influxdb/tests/test_line_protocol.py b/influxdb/tests/test_line_protocol.py index 61f54557..dbee8cda 100644 --- a/influxdb/tests/test_line_protocol.py +++ b/influxdb/tests/test_line_protocol.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +"""Define 
the line protocol test module.""" from __future__ import absolute_import from __future__ import division @@ -13,8 +14,10 @@ class TestLineProtocol(unittest.TestCase): + """Define the LineProtocol test object.""" def test_make_lines(self): + """Test make new lines in TestLineProtocol object.""" data = { "tags": { "empty_tag": "", @@ -43,6 +46,7 @@ def test_make_lines(self): ) def test_timezone(self): + """Test timezone in TestLineProtocol object.""" dt = datetime(2009, 11, 10, 23, 0, 0, 123456) utc = UTC.localize(dt) berlin = timezone('Europe/Berlin').localize(dt) @@ -72,6 +76,7 @@ def test_timezone(self): ) def test_string_val_newline(self): + """Test string value with newline in TestLineProtocol object.""" data = { "points": [ { @@ -89,6 +94,7 @@ def test_string_val_newline(self): ) def test_make_lines_unicode(self): + """Test make unicode lines in TestLineProtocol object.""" data = { "tags": { "unicode_tag": "\'Привет!\'" # Hello! in Russian @@ -109,12 +115,14 @@ def test_make_lines_unicode(self): ) def test_quote_ident(self): + """Test quote indentation in TestLineProtocol object.""" self.assertEqual( line_protocol.quote_ident(r"""\foo ' bar " Örf"""), r'''"\\foo ' bar \" Örf"''' ) def test_quote_literal(self): + """Test quote literal in TestLineProtocol object.""" self.assertEqual( line_protocol.quote_literal(r"""\foo ' bar " Örf"""), r"""'\\foo \' bar " Örf'""" diff --git a/setup.py b/setup.py index 7e66a779..cd6e4e9b 100755 --- a/setup.py +++ b/setup.py @@ -1,5 +1,6 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- +"""Define the setup options.""" try: import distribute_setup From 795907d7b3f0ddb6f8d07a8b54ce2e1777e87010 Mon Sep 17 00:00:00 2001 From: Sebastian Borza Date: Wed, 28 Jun 2017 00:32:59 -0500 Subject: [PATCH 393/536] adding pep257 linter --- .travis.yml | 2 ++ tox.ini | 6 +++++- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index e7765582..8c0093b8 100644 --- a/.travis.yml +++ b/.travis.yml @@ -12,6 +12,8 @@ 
matrix: include: - python: 2.7 env: TOX_ENV=py27 + - python: 2.7 + env: TOX_ENV=pep257 - python: pypy-5.3.1 env: TOX_ENV=pypy - python: 3.4 diff --git a/tox.ini b/tox.ini index d9409903..bfa25165 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py27, py34, py35, py36, pypy, pypy3, flake8, coverage, docs +envlist = py27, py34, py35, py36, pypy, pypy3, flake8, pep257, coverage, docs [testenv] passenv = INFLUXDB_PYTHON_INFLUXD_PATH @@ -16,6 +16,10 @@ deps = pep8-naming commands = flake8 influxdb +[testenv:pep257] +deps = pydocstyle +commands = pydocstyle --count -ve examples influxdb + [testenv:coverage] deps = -r{toxinidir}/requirements.txt -r{toxinidir}/test-requirements.txt From 87683f41c0de86fb526164dfa5e9acb01736bf5e Mon Sep 17 00:00:00 2001 From: Anton Cherkasov Date: Fri, 7 Jul 2017 04:04:26 +0300 Subject: [PATCH 394/536] Add method close to InfluxDBClient class. (#465) * Add method close to InfluxDBClient class. This method just closes Session if it's exist. * Add method close to InfluxDBClient class. This method just closes Session if it's exist. --- influxdb/client.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/influxdb/client.py b/influxdb/client.py index d2f7c545..14bf3f72 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -823,6 +823,11 @@ def send_packet(self, packet, protocol='json'): data = ('\n'.join(packet) + '\n').encode('utf-8') self.udp_socket.sendto(data, (self._host, self.udp_port)) + def close(self): + """Close http session.""" + if isinstance(self._session, requests.Session): + self._session.close() + def _parse_dsn(dsn): """Parse data source name. 
From 1a7c56c536dcecb0ab0502804e11d4cae9592c91 Mon Sep 17 00:00:00 2001 From: Gustavo Bezerra Date: Sun, 23 Jul 2017 12:47:43 +0900 Subject: [PATCH 395/536] Replace kwargs with explicitly named arguments --- influxdb/_dataframe_client.py | 36 +++++++++++++++++++++++++++++++---- 1 file changed, 32 insertions(+), 4 deletions(-) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index e5399cae..7c98f7bc 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -132,16 +132,44 @@ def write_points(self, protocol=protocol) return True - def query(self, query, dropna=True, **kwargs): + def query(self, + query, + params=None, + epoch=None, + expected_response_code=200, + database=None, + raise_errors=True, + chunked=False, + chunk_size=0, + dropna=True): """ Quering data into a DataFrame. :param query: the actual query string + :param params: additional parameters for the request, defaults to {} + :param epoch: response timestamps to be in epoch format either 'h', + 'm', 's', 'ms', 'u', or 'ns',defaults to `None` which is + RFC3339 UTC format with nanosecond precision + :param expected_response_code: the expected status code of response, + defaults to 200 + :param database: database to query, defaults to None + :param raise_errors: Whether or not to raise exceptions when InfluxDB + returns errors, defaults to True + :param chunked: Enable to use chunked responses from InfluxDB. + With ``chunked`` enabled, one ResultSet is returned per chunk + containing all results within that chunk + :param chunk_size: Size of each chunk to tell InfluxDB to use. 
:param dropna: drop columns where all values are missing - :param **kwargs: additional parameters for ``InfluxDBClient.query`` - + :returns: the queried data + :rtype: :class:`~.ResultSet` """ - results = super(DataFrameClient, self).query(query, **kwargs) + query_args = dict(params=params, + epoch=epoch, + expected_response_code=expected_response_code, + raise_errors=raise_errors, + chunked=chunked, + chunk_size=chunk_size) + results = super(DataFrameClient, self).query(query, **query_args) if query.strip().upper().startswith("SELECT"): if len(results) > 0: return self._to_dataframe(results, dropna) From 02564efc8a0d86dbbdf71965e024ddeb1c6fbb2b Mon Sep 17 00:00:00 2001 From: Maxence Dolle Date: Thu, 3 Aug 2017 16:06:35 +0200 Subject: [PATCH 396/536] Improve performance on dataframe to line conversion --- influxdb/_dataframe_client.py | 13 +++---------- 1 file changed, 3 insertions(+), 10 deletions(-) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index ac110a4a..b753bb33 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -302,17 +302,10 @@ def _convert_dataframe_to_lines(self, tag_df = self._stringify_dataframe( tag_df, numeric_precision, datatype='tag') - # prepend tag keys - tag_df = tag_df.apply( - lambda s: s.apply( - lambda v, l: l + '=' + v if v else None, l=s.name)) - - # join tags, but leave out None values + # join preprendded tags, leaving None values out tags = tag_df.apply( - lambda r: ','.join(r.dropna()), axis=1) - - # prepend comma - tags = tags.apply(lambda v: ',' + v if v else '') + lambda s: [',' + s.name + '=' + v if v else '' for v in s]) + tags = tags.sum(axis=1) del tag_df else: From 758663a9107ed2ddb2c67b0c61d77ecbb56ae411 Mon Sep 17 00:00:00 2001 From: BenHewins Date: Tue, 15 Aug 2017 06:29:08 +0100 Subject: [PATCH 397/536] _is_float should return false rather than an error when the object being tested is not a valid param for float() (#475) --- influxdb/line_protocol.py | 2 +- 1 
file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/line_protocol.py b/influxdb/line_protocol.py index 4ec56dc5..c399a1d5 100644 --- a/influxdb/line_protocol.py +++ b/influxdb/line_protocol.py @@ -75,7 +75,7 @@ def quote_literal(value): def _is_float(value): try: float(value) - except ValueError: + except (TypeError, ValueError): return False return True From 3f54847176533156da2ac26a88183c5a16e9d24a Mon Sep 17 00:00:00 2001 From: Sebastian Borza Date: Fri, 7 Jul 2017 08:00:16 -0500 Subject: [PATCH 398/536] move use_udp and udp_port private --- influxdb/client.py | 18 +++++++++++++----- influxdb/influxdb08/client.py | 10 +++++----- influxdb/tests/client_test.py | 4 ++-- influxdb/tests/influxdb08/client_test.py | 4 ++-- 4 files changed, 22 insertions(+), 14 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 14bf3f72..f8cea2ac 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -91,8 +91,8 @@ def __init__(self, self._verify_ssl = verify_ssl - self.use_udp = use_udp - self.udp_port = udp_port + self.__use_udp = use_udp + self.__udp_port = udp_port self._session = requests.Session() if use_udp: self.udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) @@ -129,6 +129,14 @@ def _host(self): def _port(self): return self.__port + @property + def _udp_port(self): + return self.__udp_port + + @property + def _use_udp(self): + return self.__use_udp + @classmethod def from_dsn(cls, dsn, **kwargs): r"""Generate an instance of InfluxDBClient from given data source name. @@ -467,7 +475,7 @@ def _write_points(self, "Invalid time precision is given. 
" "(use 'n', 'u', 'ms', 's', 'm' or 'h')") - if self.use_udp and time_precision and time_precision != 's': + if self._use_udp and time_precision and time_precision != 's': raise ValueError( "InfluxDB only supports seconds precision for udp writes" ) @@ -492,7 +500,7 @@ def _write_points(self, if retention_policy is not None: params['rp'] = retention_policy - if self.use_udp: + if self._use_udp: self.send_packet(data, protocol=protocol) else: self.write( @@ -821,7 +829,7 @@ def send_packet(self, packet, protocol='json'): data = make_lines(packet).encode('utf-8') elif protocol == 'line': data = ('\n'.join(packet) + '\n').encode('utf-8') - self.udp_socket.sendto(data, (self._host, self.udp_port)) + self.udp_socket.sendto(data, (self._host, self._udp_port)) def close(self): """Close http session.""" diff --git a/influxdb/influxdb08/client.py b/influxdb/influxdb08/client.py index 59a01f54..9954133e 100644 --- a/influxdb/influxdb08/client.py +++ b/influxdb/influxdb08/client.py @@ -92,8 +92,8 @@ def __init__(self, self._verify_ssl = verify_ssl - self.use_udp = use_udp - self.udp_port = udp_port + self._use_udp = use_udp + self._udp_port = udp_port if use_udp: self.udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) @@ -344,7 +344,7 @@ def _write_points(self, data, time_precision): raise Exception( "Invalid time precision is given. 
(use 's', 'm', 'ms' or 'u')") - if self.use_udp and time_precision != 's': + if self._use_udp and time_precision != 's': raise Exception( "InfluxDB only supports seconds precision for udp writes" ) @@ -355,7 +355,7 @@ def _write_points(self, data, time_precision): 'time_precision': time_precision } - if self.use_udp: + if self._use_udp: self.send_packet(data) else: self.request( @@ -849,4 +849,4 @@ def send_packet(self, packet): """Send a UDP packet along the wire.""" data = json.dumps(packet) byte = data.encode('utf-8') - self.udp_socket.sendto(byte, (self._host, self.udp_port)) + self.udp_socket.sendto(byte, (self._host, self._udp_port)) diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index 8def6629..8768d170 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -119,10 +119,10 @@ def test_dsn(self): self.assertEqual('uSr', cli._username) self.assertEqual('pWd', cli._password) self.assertEqual('db', cli._database) - self.assertFalse(cli.use_udp) + self.assertFalse(cli._use_udp) cli = InfluxDBClient.from_dsn('udp+' + self.dsn_string) - self.assertTrue(cli.use_udp) + self.assertTrue(cli._use_udp) cli = InfluxDBClient.from_dsn('https+' + self.dsn_string) self.assertEqual('https://my.host.fr:1886', cli._baseurl) diff --git a/influxdb/tests/influxdb08/client_test.py b/influxdb/tests/influxdb08/client_test.py index 4a789fb5..39ab52d6 100644 --- a/influxdb/tests/influxdb08/client_test.py +++ b/influxdb/tests/influxdb08/client_test.py @@ -113,10 +113,10 @@ def test_dsn(self): self.assertEqual('uSr', cli._username) self.assertEqual('pWd', cli._password) self.assertEqual('db', cli._database) - self.assertFalse(cli.use_udp) + self.assertFalse(cli._use_udp) cli = InfluxDBClient.from_dsn('udp+' + self.dsn_string) - self.assertTrue(cli.use_udp) + self.assertTrue(cli._use_udp) cli = InfluxDBClient.from_dsn('https+' + self.dsn_string) self.assertEqual('https://host:1886', cli._baseurl) From 
265d14736bb739aed0f357d8da248d51f549244b Mon Sep 17 00:00:00 2001 From: baftek Date: Tue, 22 Aug 2017 08:18:52 +0200 Subject: [PATCH 399/536] Data arg in write() can be single string (#492) * data arg in write() can be single string data argument in write() was expected to be list of strings, now it can be single string as well * Fix linter error * Trailing whitespace removed * Transformed to isinstance instead of using type() * Change list(str) to [str] --- influxdb/client.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/influxdb/client.py b/influxdb/client.py index f8cea2ac..7930679c 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -271,6 +271,7 @@ def write(self, data, params=None, expected_response_code=204, :param data: the data to be written :type data: (if protocol is 'json') dict (if protocol is 'line') sequence of line protocol strings + or single string :param params: additional parameters for the request, defaults to None :type params: dict :param expected_response_code: the expected response code of the write @@ -292,6 +293,8 @@ def write(self, data, params=None, expected_response_code=204, if protocol == 'json': data = make_lines(data, precision).encode('utf-8') elif protocol == 'line': + if isinstance(data, str): + data = [data] data = ('\n'.join(data) + '\n').encode('utf-8') self.request( From df6e2fe2f9f5eab97f0b48092820bcb28c18b664 Mon Sep 17 00:00:00 2001 From: VicYu Date: Fri, 8 Sep 2017 09:58:17 +0800 Subject: [PATCH 400/536] Add get_list_measurements and drop_measurement (#402) * Add get_list_measurements and drop_measurement Dear maintainer, Look likes we are missing the get list measurements and drop measurement feature for client. pls accept this pr. 
* add test case * fix: dbname -> measurement * add docstring --- influxdb/client.py | 26 ++++++++++++++++++++++++++ influxdb/tests/client_test.py | 32 ++++++++++++++++++++++++++++++++ 2 files changed, 58 insertions(+) diff --git a/influxdb/client.py b/influxdb/client.py index 7930679c..2b6dc689 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -547,6 +547,32 @@ def drop_database(self, dbname): """ self.query("DROP DATABASE {0}".format(quote_ident(dbname))) + def get_list_measurements(self): + """Get the list of measurements in InfluxDB. + + :returns: all measurements in InfluxDB + :rtype: list of dictionaries + + :Example: + + :: + + >> dbs = client.get_list_measurements() + >> dbs + [{u'name': u'measurements1'}, + {u'name': u'measurements2'}, + {u'name': u'measurements3'}] + """ + return list(self.query("SHOW MEASUREMENTS").get_points()) + + def drop_measurement(self, measurement): + """Drop a measurement from InfluxDB. + + :param measurement: the name of the measurement to drop + :type measurement: str + """ + self.query("DROP MEASUREMENT {0}".format(quote_ident(measurement))) + def create_retention_policy(self, name, duration, replication, database=None, default=False): """Create a retention policy for a database. 
diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index 8768d170..5c4f26dd 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -466,6 +466,20 @@ def test_drop_database(self): 'drop database "new_db"' ) + def test_drop_measurement(self): + """Test drop measurement for TestInfluxDBClient object.""" + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.GET, + "http://localhost:8086/query", + text='{"results":[{}]}' + ) + self.cli.drop_measurement('new_measurement') + self.assertEqual( + m.last_request.qs['q'][0], + 'drop measurement "new_measurement"' + ) + def test_drop_numeric_named_database(self): """Test drop numeric db for TestInfluxDBClient object.""" with requests_mock.Mocker() as m: @@ -504,6 +518,24 @@ def test_get_list_database_fails(self): with _mocked_session(cli, 'get', 401): cli.get_list_database() + def test_get_list_measurements(self): + """Test get list of measurements for TestInfluxDBClient object.""" + data = { + "results": [{ + "series": [ + {"name": "measurements", + "columns": ["name"], + "values": [["cpu"], ["disk"] + ]}]} + ] + } + + with _mocked_session(self.cli, 'get', 200, json.dumps(data)): + self.assertListEqual( + self.cli.get_list_measurements(), + [{'name': 'cpu'}, {'name': 'disk'}] + ) + def test_create_retention_policy_default(self): """Test create default ret policy for TestInfluxDBClient object.""" example_response = '{"results":[{}]}' From c9a1b866c93e981a81ee55632466638ee584b37a Mon Sep 17 00:00:00 2001 From: Sebastian Borza Date: Thu, 7 Sep 2017 01:43:38 -0500 Subject: [PATCH 401/536] updating README --- README.rst | 71 ++++++++++++++++++++++++++++++++++++------------------ 1 file changed, 47 insertions(+), 24 deletions(-) diff --git a/README.rst b/README.rst index 1ae75e35..0b7c144a 100644 --- a/README.rst +++ b/README.rst @@ -1,8 +1,5 @@ - - -InfluxDB-Python is a client for interacting with InfluxDB_. Maintained by @aviau (https://github.com/aviau). 
- -**Help needed:** Development of this library is made by the community and help is needed. A co-maintainer would be welcome. To contribute, take a look at the issues list of simply contact @aviau. +InfluxDB-Python +=============== .. image:: https://travis-ci.org/influxdata/influxdb-python.svg?branch=master :target: https://travis-ci.org/influxdata/influxdb-python @@ -15,22 +12,39 @@ InfluxDB-Python is a client for interacting with InfluxDB_. Maintained by @aviau :target: https://coveralls.io/r/influxdata/influxdb-python :alt: Coverage + + +InfluxDB-Python is a client for interacting with InfluxDB_. Development of this library is maintained by + ++-----------+-------------------------------+ +| Github ID | URL | ++===========+===============================+ +| @aviau | (https://github.com/aviau) | ++-----------+-------------------------------+ +| @xginn8 | (https://github.com/xginn8) | ++-----------+-------------------------------+ +| @sebito91 | (https://github.com/sebito91) | ++-----------+-------------------------------+ + .. _readme-about: -InfluxDB is an open-source distributed time series database, find more about InfluxDB_ at http://influxdata.com/ +InfluxDB is an open-source distributed time series database, find more about InfluxDB_ at https://docs.influxdata.com/influxdb/latest .. _installation: InfluxDB pre v1.1.0 users -========================= +------------------------- + +This module is tested with InfluxDB v1.2.4, our recommended version. Though there have been v1.3 (initial TSI branch) and v1.4 releases these are not +yet supported. -InfluxDB 1.1.0 was released and it is the new recommended version. InfluxDB 0.8.x users may still use the legacy client by using ``from influxdb.influxdb08 import InfluxDBClient`` instead. +Those users still on InfluxDB v0.8.x users may still use the legacy client by importing ``from influxdb.influxdb08 import InfluxDBClient``. 
Installation -============ +------------ -Install, upgrade and uninstall InfluxDB-Python with these commands:: +Install, upgrade and uninstall influxdb-python with these commands:: $ pip install influxdb $ pip install --upgrade influxdb @@ -41,9 +55,9 @@ On Debian/Ubuntu, you can install it with this command:: $ sudo apt-get install python-influxdb Dependencies -============ +------------ -The InfluxDB-Python distribution is supported and tested on Python 2.7, 3.3, 3.4, 3.5, 3.6, PyPy and PyPy3. +The influxdb-python distribution is supported and tested on Python 2.7, 3.3, 3.4, 3.5, 3.6, PyPy and PyPy3. **Note:** Python 3.2 is currently untested. See ``.travis.yml``. @@ -61,9 +75,9 @@ Additional dependencies are: Documentation -============= +------------- -InfluxDB-Python documentation is available at http://influxdb-python.readthedocs.org +Documentation is available at https://influxdb-python.readthedocs.io/en/latest/. You will need Sphinx_ installed to generate the documentation. @@ -76,7 +90,7 @@ Generated documentation can be found in the *docs/build/html/* directory. Examples -======== +-------- Here's a basic example (for more see the examples directory):: @@ -110,7 +124,7 @@ Here's a basic example (for more see the examples directory):: Testing -======= +------- Make sure you have tox by running the following:: @@ -122,14 +136,19 @@ To test influxdb-python with multiple version of Python, you can use Tox_:: Support -======= +------- For issues with, questions about, or feedback for InfluxDB_, please look into our community page: http://influxdb.com/community/. +We are also lurking on the following: + +- #influxdb on irc.freenode.net +- #influxdb on gophers.slack.com + Development -=========== +----------- All development is done on Github_. Use Issues_ to report problems or submit contributions. @@ -137,19 +156,23 @@ problems or submit contributions. .. _Github: https://github.com/influxdb/influxdb-python/ .. 
_Issues: https://github.com/influxdb/influxdb-python/issues - -TODO -==== - -The TODO/Roadmap can be found in Github bug tracker: https://github.com/influxdata/influxdb-python/issues +Please note that we WILL get to your questions/issues/concerns as quickly as possible. We maintain many +software repositories and sometimes things may get pushed to the backburner. Please don't take offense, +we will do our best to reply as soon as possible! Source code -=========== +----------- The source code is currently available on Github: https://github.com/influxdata/influxdb-python +TODO +---- + +The TODO/Roadmap can be found in Github bug tracker: https://github.com/influxdata/influxdb-python/issues + + .. _InfluxDB: https://influxdata.com/time-series-platform/influxdb/ .. _Sphinx: http://sphinx.pocoo.org/ .. _Tox: https://tox.readthedocs.org From 87100759013dda5b3b9aeb2af7632f188dbe314a Mon Sep 17 00:00:00 2001 From: TH Chen Date: Tue, 19 Sep 2017 17:25:01 -0400 Subject: [PATCH 402/536] Improve DataFrameClient tag conversion performance In `_convert_dataframe_to_lines`, if only `global_tags` is specified but not `tag_columns`, take a faster route to process the tags. Previously, in such a case, global tags are duplicated as tag columns and processed as if they were tag columns. Such processing is wasteful and results in a slowdown that becomes noticeable when batch loading many thousands of data points with a handful of global tags. 
--- influxdb/_dataframe_client.py | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index 31ee1c32..62dc8084 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -298,12 +298,6 @@ def _convert_dataframe_to_lines(self, field_columns = list(field_columns) if list(field_columns) else [] tag_columns = list(tag_columns) if list(tag_columns) else [] - # Make global_tags as tag_columns - if global_tags: - for tag in global_tags: - dataframe[tag] = global_tags[tag] - tag_columns.append(tag) - # If field columns but no tag columns, assume rest of columns are tags if field_columns and (not tag_columns): tag_columns = list(column_series[~column_series.isin( @@ -333,6 +327,13 @@ def _convert_dataframe_to_lines(self, # If tag columns exist, make an array of formatted tag keys and values if tag_columns: + + # Make global_tags as tag_columns + if global_tags: + for tag in global_tags: + dataframe[tag] = global_tags[tag] + tag_columns.append(tag) + tag_df = dataframe[tag_columns] tag_df = tag_df.fillna('') # replace NA with empty string tag_df = tag_df.sort_index(axis=1) @@ -345,6 +346,12 @@ def _convert_dataframe_to_lines(self, tags = tags.sum(axis=1) del tag_df + elif global_tags: + tag_string = ''.join( + [",{}={}".format(k, _escape_tag(v)) if v else '' + for k, v in sorted(global_tags.items())] + ) + tags = pd.Series(tag_string, index=dataframe.index) else: tags = '' From a66c555126bd6e6590909bc0c7f3c92ec4c0e339 Mon Sep 17 00:00:00 2001 From: Matthew McGinn Date: Wed, 27 Sep 2017 07:57:23 -0400 Subject: [PATCH 403/536] adding retry logic for all requests.exceptions adding an exponential backoff with jitter on POST calls reducing the number of retries in some of the tests to comply with the backoff feature. pass through all requests.exceptions after the permitted number of retries. 
--- influxdb/client.py | 30 ++++++++++++++++++++---------- influxdb/tests/client_test.py | 10 +++++----- 2 files changed, 25 insertions(+), 15 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 2b6dc689..5e1f0c81 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -7,6 +7,8 @@ from __future__ import unicode_literals from sys import version_info +import time +import random import json import socket @@ -237,6 +239,7 @@ def request(self, url, method='GET', params=None, data=None, _try = 0 while retry: try: + _error = False response = self._session.request( method=method, url=url, @@ -249,20 +252,27 @@ def request(self, url, method='GET', params=None, data=None, timeout=self._timeout ) break - except requests.exceptions.ConnectionError: + except (requests.exceptions.ConnectionError, + requests.exceptions.HTTPError, + requests.exceptions.Timeout) as _e: + _error = _e _try += 1 if self._retries != 0: retry = _try < self._retries - - else: - raise requests.exceptions.ConnectionError - - if 500 <= response.status_code < 600: - raise InfluxDBServerError(response.content) - elif response.status_code == expected_response_code: - return response + if method == "POST": + time.sleep((2 ** _try) * random.random() / 100.0) + if _error: + raise(_error) else: - raise InfluxDBClientError(response.content, response.status_code) + # if there's not an error, there must have been a successful + # response + if 500 <= response.status_code < 600: + raise InfluxDBServerError(response.content) + elif response.status_code == expected_response_code: + return response + else: + raise InfluxDBClientError(response.content, + response.status_code) def write(self, data, params=None, expected_response_code=204, protocol='json'): diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index 5c4f26dd..3413c0bf 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -686,7 +686,7 @@ def connection_error(self, *args, **kwargs): 
@mock.patch('requests.Session.request') def test_request_retry_raises(self, mock_request): - """Test that three connection errors will not be handled.""" + """Test that three requests errors will not be handled.""" class CustomMock(object): """Create custom mock object for test.""" @@ -698,7 +698,7 @@ def connection_error(self, *args, **kwargs): self.i += 1 if self.i < 4: - raise requests.exceptions.ConnectionError + raise requests.exceptions.HTTPError else: r = requests.Response() r.status_code = 200 @@ -708,7 +708,7 @@ def connection_error(self, *args, **kwargs): cli = InfluxDBClient(database='db') - with self.assertRaises(requests.exceptions.ConnectionError): + with self.assertRaises(requests.exceptions.HTTPError): cli.write_points(self.dummy_points) @mock.patch('requests.Session.request') @@ -732,7 +732,7 @@ def connection_error(self, *args, **kwargs): r.status_code = 204 return r - retries = random.randint(1, 100) + retries = random.randint(1, 5) mock_request.side_effect = CustomMock(retries).connection_error cli = InfluxDBClient(database='db', retries=retries) @@ -759,7 +759,7 @@ def connection_error(self, *args, **kwargs): r.status_code = 200 return r - retries = random.randint(1, 100) + retries = random.randint(1, 5) mock_request.side_effect = CustomMock(retries).connection_error cli = InfluxDBClient(database='db', retries=retries) From a99c90eb350ea795ff39bf59e9d006296dd4862e Mon Sep 17 00:00:00 2001 From: Milan Cermak Date: Tue, 10 Oct 2017 12:05:34 +0200 Subject: [PATCH 404/536] Fixes typo in Content-Type header --- influxdb/client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 2b6dc689..a8adb915 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -113,7 +113,7 @@ def __init__(self, self._port) self._headers = { - 'Content-type': 'application/json', + 'Content-Type': 'application/json', 'Accept': 'text/plain' } @@ -283,7 +283,7 @@ def write(self, data, params=None, 
expected_response_code=204, :rtype: bool """ headers = self._headers - headers['Content-type'] = 'application/octet-stream' + headers['Content-Type'] = 'application/octet-stream' if params: precision = params.get('precision') From 9590a4d689bf5eb76d57ea06fda2f3bbe759fe21 Mon Sep 17 00:00:00 2001 From: Matthew McGinn Date: Thu, 26 Oct 2017 08:10:19 -0400 Subject: [PATCH 405/536] fixing broken pypy test --- .travis.yml | 1 + test-requirements.txt | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 8c0093b8..03e6bc36 100644 --- a/.travis.yml +++ b/.travis.yml @@ -31,6 +31,7 @@ matrix: install: - pip install tox + - pip install setuptools==20.6.6 - pip install coveralls - mkdir influxdb_install - wget https://dl.influxdata.com/influxdb/releases/influxdb_1.2.4_amd64.deb diff --git a/test-requirements.txt b/test-requirements.txt index cbc6add3..9b31f5f1 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,4 +1,4 @@ nose nose-cov mock -requests-mock \ No newline at end of file +requests-mock From 4d9564aac34bc1d41f61bee72bd827935bd7e040 Mon Sep 17 00:00:00 2001 From: Matthew McGinn Date: Sat, 28 Oct 2017 14:47:17 -0400 Subject: [PATCH 406/536] replace dataframe.ix with dataframe.iloc. 
fixes #528 --- influxdb/_dataframe_client.py | 4 ++-- influxdb/influxdb08/dataframe_client.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index 31ee1c32..af1decd0 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -86,7 +86,7 @@ def write_points(self, if protocol == 'line': points = self._convert_dataframe_to_lines( - dataframe.ix[start_index:end_index].copy(), + dataframe.iloc[start_index:end_index].copy(), measurement=measurement, global_tags=tags, time_precision=time_precision, @@ -95,7 +95,7 @@ def write_points(self, numeric_precision=numeric_precision) else: points = self._convert_dataframe_to_json( - dataframe.ix[start_index:end_index].copy(), + dataframe.iloc[start_index:end_index].copy(), measurement=measurement, tags=tags, time_precision=time_precision, diff --git a/influxdb/influxdb08/dataframe_client.py b/influxdb/influxdb08/dataframe_client.py index 71e7e881..2867125d 100644 --- a/influxdb/influxdb08/dataframe_client.py +++ b/influxdb/influxdb08/dataframe_client.py @@ -59,7 +59,7 @@ def write_points(self, data, *args, **kwargs): self._convert_dataframe_to_json( name=key, dataframe=data_frame - .ix[start_index:end_index].copy(), + .iloc[start_index:end_index].copy(), time_precision=time_precision)] InfluxDBClient.write_points(self, outdata, *args, **kwargs) return True From 56ab7203d2a2b125ab5eac50802b0ae88f2b7bca Mon Sep 17 00:00:00 2001 From: Andrew Spott Date: Tue, 14 Nov 2017 06:24:39 -0700 Subject: [PATCH 407/536] Added support for not including all fields when using the series helper (#518) * Added support for not including all fields when using the series helper * fixed flake8 check * Added tests * fixed pep error * more fixes for pep * fixed flake8 errors --- examples/tutorial_serieshelper.py | 3 ++- influxdb/helper.py | 23 ++++++++++++++++------- influxdb/tests/helper_test.py | 16 ++++++++++++++++ 3 files changed, 34 
insertions(+), 8 deletions(-) diff --git a/examples/tutorial_serieshelper.py b/examples/tutorial_serieshelper.py index 13929df2..72b80bb5 100644 --- a/examples/tutorial_serieshelper.py +++ b/examples/tutorial_serieshelper.py @@ -51,7 +51,8 @@ class Meta: MySeriesHelper(server_name='us.east-1', some_stat=159, other_stat=10) MySeriesHelper(server_name='us.east-1', some_stat=158, other_stat=20) MySeriesHelper(server_name='us.east-1', some_stat=157, other_stat=30) -MySeriesHelper(server_name='us.east-1', some_stat=156, other_stat=40) +MySeriesHelper(server_name='us.east-1', some_stat=156, other_stat=30) +MySeriesHelper(server_name='us.east-1', some_stat=156) MySeriesHelper(server_name='us.east-1', some_stat=155, other_stat=50) # To manually submit data points which are not yet written, call commit: diff --git a/influxdb/helper.py b/influxdb/helper.py index c56a636a..e622526d 100644 --- a/influxdb/helper.py +++ b/influxdb/helper.py @@ -98,24 +98,31 @@ def __new__(cls, *args, **kwargs): if 'time' in cls._fields: cls._fields.remove('time') cls._type = namedtuple(cls.__name__, - cls._fields + cls._tags + ['time']) + ['time'] + cls._tags + cls._fields) + cls._type.__new__.__defaults__ = (None,) * len(cls._fields) return super(SeriesHelper, cls).__new__(cls) def __init__(self, **kw): - """Call to constructor creates a new data point. All fields must be present. + """Call to constructor creates a new data point. :note: Data points written when `bulk_size` is reached per Helper. :warning: Data points are *immutable* (`namedtuples`). 
""" cls = self.__class__ timestamp = kw.pop('time', self._current_timestamp()) + tags = set(cls._tags) + fields = set(cls._fields) + keys = set(kw.keys()) - if sorted(cls._fields + cls._tags) != sorted(kw.keys()): + # all tags should be passed, and keys - tags should be a subset of keys + if not(tags <= keys): raise NameError( - 'Expected {0}, got {1}.'.format( - sorted(cls._fields + cls._tags), - kw.keys())) + 'Expected arguments to contain all tags {0}, instead got {1}.' + .format(cls._tags, kw.keys())) + if not(keys - tags <= fields): + raise NameError('Got arguments not in tags or fields: {0}' + .format(keys - tags - fields)) cls._datapoints[cls._series_name.format(**kw)].append( cls._type(time=timestamp, **kw) @@ -157,7 +164,9 @@ def _json_body_(cls): } for field in cls._fields: - json_point['fields'][field] = getattr(point, field) + value = getattr(point, field) + if value is not None: + json_point['fields'][field] = value for tag in cls._tags: json_point['tags'][tag] = getattr(point, tag) diff --git a/influxdb/tests/helper_test.py b/influxdb/tests/helper_test.py index 39e5ee75..6f24e85d 100644 --- a/influxdb/tests/helper_test.py +++ b/influxdb/tests/helper_test.py @@ -231,6 +231,22 @@ def testSeriesWithoutTimeField(self, current_timestamp): self.assertEqual(point1['time'], current_date) self.assertEqual(point2['time'], yesterday) + def testSeriesWithoutAllTags(self): + """Test that creating a data point without a tag throws an error.""" + class MyTimeFieldSeriesHelper(SeriesHelper): + + class Meta: + client = TestSeriesHelper.client + series_name = 'events.stats.{server_name}' + fields = ['some_stat', 'time'] + tags = ['server_name', 'other_tag'] + bulk_size = 5 + autocommit = True + + self.assertRaises(NameError, MyTimeFieldSeriesHelper, + **{"server_name": 'us.east-1', + "some_stat": 158}) + @mock.patch('influxdb.helper.SeriesHelper._current_timestamp') def testSeriesWithTimeField(self, current_timestamp): """Test that time is optional on a series with a 
time field.""" From 2b95797fc36cec74dba2044122b34f6b28e4d424 Mon Sep 17 00:00:00 2001 From: Jason Swails Date: Tue, 14 Nov 2017 08:41:20 -0500 Subject: [PATCH 408/536] Add some small improvements (#536) * Add some small improvements - six is already listed as a requirement, so use six.moves instead of defining builtins based on checking sys.version_info - Fix formatting of several exceptions. * Add a blank line to appease flake8 --- influxdb/_dataframe_client.py | 8 ++++---- influxdb/client.py | 36 +++++++++++------------------------ influxdb/influxdb08/client.py | 13 ++----------- 3 files changed, 17 insertions(+), 40 deletions(-) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index 31a78672..6a66558b 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -222,8 +222,8 @@ def _convert_dataframe_to_json(dataframe, .format(type(dataframe))) if not (isinstance(dataframe.index, pd.PeriodIndex) or isinstance(dataframe.index, pd.DatetimeIndex)): - raise TypeError('Must be DataFrame with DatetimeIndex or \ - PeriodIndex.') + raise TypeError('Must be DataFrame with DatetimeIndex or ' + 'PeriodIndex.') # Make sure tags and tag columns are correctly typed tag_columns = tag_columns if tag_columns is not None else [] @@ -279,8 +279,8 @@ def _convert_dataframe_to_lines(self, .format(type(dataframe))) if not (isinstance(dataframe.index, pd.PeriodIndex) or isinstance(dataframe.index, pd.DatetimeIndex)): - raise TypeError('Must be DataFrame with DatetimeIndex or \ - PeriodIndex.') + raise TypeError('Must be DataFrame with DatetimeIndex or ' + 'PeriodIndex.') # Create a Series of columns for easier indexing column_series = pd.Series(dataframe.columns) diff --git a/influxdb/client.py b/influxdb/client.py index 4721fa3f..608e8dbc 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -6,7 +6,6 @@ from __future__ import print_function from __future__ import unicode_literals -from sys import version_info import time import 
random @@ -14,22 +13,14 @@ import socket import requests import requests.exceptions +from six.moves import xrange +from six.moves.urllib.parse import urlparse from influxdb.line_protocol import make_lines, quote_ident, quote_literal from influxdb.resultset import ResultSet from .exceptions import InfluxDBClientError from .exceptions import InfluxDBServerError -try: - xrange -except NameError: - xrange = range - -if version_info[0] == 3: - from urllib.parse import urlparse -else: - from urlparse import urlparse - class InfluxDBClient(object): """InfluxDBClient primary client object to connect InfluxDB. @@ -239,7 +230,6 @@ def request(self, url, method='GET', params=None, data=None, _try = 0 while retry: try: - _error = False response = self._session.request( method=method, url=url, @@ -254,25 +244,21 @@ def request(self, url, method='GET', params=None, data=None, break except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError, - requests.exceptions.Timeout) as _e: - _error = _e + requests.exceptions.Timeout): _try += 1 if self._retries != 0: retry = _try < self._retries if method == "POST": time.sleep((2 ** _try) * random.random() / 100.0) - if _error: - raise(_error) + if not retry: + raise + # if there's not an error, there must have been a successful response + if 500 <= response.status_code < 600: + raise InfluxDBServerError(response.content) + elif response.status_code == expected_response_code: + return response else: - # if there's not an error, there must have been a successful - # response - if 500 <= response.status_code < 600: - raise InfluxDBServerError(response.content) - elif response.status_code == expected_response_code: - return response - else: - raise InfluxDBClientError(response.content, - response.status_code) + raise InfluxDBClientError(response.content, response.status_code) def write(self, data, params=None, expected_response_code=204, protocol='json'): diff --git a/influxdb/influxdb08/client.py 
b/influxdb/influxdb08/client.py index 9954133e..41600404 100644 --- a/influxdb/influxdb08/client.py +++ b/influxdb/influxdb08/client.py @@ -2,25 +2,16 @@ """Python client for InfluxDB v0.8.""" import warnings -from sys import version_info import json import socket import requests import requests.exceptions +from six.moves import xrange +from six.moves.urllib.parse import urlparse from influxdb import chunked_json -try: - xrange -except NameError: - xrange = range - -if version_info[0] == 3: - from urllib.parse import urlparse -else: - from urlparse import urlparse - session = requests.Session() From 16c02ecf2e9031ade91f956ea64830490b173761 Mon Sep 17 00:00:00 2001 From: Martin Englund Date: Wed, 15 Nov 2017 16:29:35 -0800 Subject: [PATCH 409/536] add ping method to the client (#409) * add ping method to the client * capitalize and pep257 compliance * one more try for pep257 compliance for the ping function * Update client.py * Update client_test.py fixing up failing CI tests * Update client_test.py fixing up failing CI tests --- influxdb/client.py | 13 +++++++++++++ influxdb/tests/client_test.py | 12 ++++++++++++ 2 files changed, 25 insertions(+) diff --git a/influxdb/client.py b/influxdb/client.py index 608e8dbc..68261ece 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -457,6 +457,19 @@ def write_points(self, retention_policy=retention_policy, tags=tags, protocol=protocol) + def ping(self): + """Check connectivity to InfluxDB. 
+ + :returns: The version of the InfluxDB the client is connected to + """ + response = self.request( + url="ping", + method='GET', + expected_response_code=204 + ) + + return response.headers['X-Influxdb-Version'] + @staticmethod def _batches(iterable, size): for i in xrange(0, len(iterable), size): diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index 3413c0bf..ff325907 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -418,6 +418,18 @@ def test_query_fail(self): with _mocked_session(self.cli, 'get', 401): self.cli.query('select column_one from foo;') + def test_ping(self): + """Test ping querying InfluxDB version.""" + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.GET, + "http://localhost:8086/ping", + status_code=204, + headers={'X-Influxdb-Version': '1.2.3'} + ) + version = self.cli.ping() + self.assertEqual(version, '1.2.3') + def test_create_database(self): """Test create database for TestInfluxDBClient object.""" with requests_mock.Mocker() as m: From 530b4c504c114259475f46540f9ded8b6214bba2 Mon Sep 17 00:00:00 2001 From: Ivan <8692788+vaniakov@users.noreply.github.com> Date: Fri, 17 Nov 2017 04:37:30 +0200 Subject: [PATCH 410/536] Add pool size parameter to client constructor (#534) * Add pool size parameter to client constructor * come back removed newlines * fix flake8: line too long * Cast InfluxDBClient pool_size parameter to int * Move pool_size parameter to the end of args to prevent tests fail --- influxdb/client.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/influxdb/client.py b/influxdb/client.py index 68261ece..02128462 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -37,6 +37,8 @@ class InfluxDBClient(object): :type username: str :param password: password of the user, defaults to 'root' :type password: str + :param pool_size: urllib3 connection pool size, defaults to 10. 
+ :type pool_size: int :param database: database name to connect to, defaults to None :type database: str :param ssl: use https instead of http to connect to InfluxDB, defaults to @@ -72,6 +74,7 @@ def __init__(self, use_udp=False, udp_port=4444, proxies=None, + pool_size=10, ): """Construct a new InfluxDBClient object.""" self.__host = host @@ -87,6 +90,11 @@ def __init__(self, self.__use_udp = use_udp self.__udp_port = udp_port self._session = requests.Session() + adapter = requests.adapters.HTTPAdapter( + pool_connections=int(pool_size), + pool_maxsize=int(pool_size) + ) + if use_udp: self.udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) @@ -95,6 +103,8 @@ def __init__(self, if ssl is True: self._scheme = "https" + self._session.mount(self._scheme, adapter) + if proxies is None: self._proxies = {} else: From 89961cf3e82d77e31c88cd6933888f591172acb6 Mon Sep 17 00:00:00 2001 From: xginn8 Date: Mon, 20 Nov 2017 23:46:21 -0500 Subject: [PATCH 411/536] Fix failing tags match in get_points() on a ResultSet (#511) * rename serie to series * pass in tags variable to perform match fix the format of the object in the ResultSet test to match what's returned from a query fix other flake8 formatting issues * meh * more serie -> series * fixing broken tags filtering * readding missing parameter that i somehow dropped --- docs/source/resultset.rst | 4 +- influxdb/influxdb08/client.py | 2 +- influxdb/resultset.py | 67 +++++------ influxdb/tests/resultset_test.py | 105 +++++++++++------- .../server_tests/client_test_with_server.py | 33 +++--- 5 files changed, 118 insertions(+), 93 deletions(-) diff --git a/docs/source/resultset.rst b/docs/source/resultset.rst index 6c36463f..b1c3206f 100644 --- a/docs/source/resultset.rst +++ b/docs/source/resultset.rst @@ -18,7 +18,7 @@ Using ``rs.get_points()`` will return a generator for all the points in the Resu Filtering by measurement ------------------------ -Using ``rs.get_points('cpu')`` will return a generator for all the 
points that are in a serie with measurement name ``cpu``, no matter the tags. +Using ``rs.get_points('cpu')`` will return a generator for all the points that are in a series with measurement name ``cpu``, no matter the tags. :: rs = cli.query("SELECT * from cpu") @@ -36,7 +36,7 @@ Using ``rs.get_points(tags={'host_name': 'influxdb.com'})`` will return a genera Filtering by measurement and tags --------------------------------- -Using measurement name and tags will return a generator for all the points that are in a serie with the specified measurement name AND whose tags match the given tags. +Using measurement name and tags will return a generator for all the points that are in a series with the specified measurement name AND whose tags match the given tags. :: rs = cli.query("SELECT * from cpu") diff --git a/influxdb/influxdb08/client.py b/influxdb/influxdb08/client.py index 41600404..965a91db 100644 --- a/influxdb/influxdb08/client.py +++ b/influxdb/influxdb08/client.py @@ -435,7 +435,7 @@ def _query(self, query, time_precision='s', chunked=False): else: chunked_param = 'false' - # Build the URL of the serie to query + # Build the URL of the series to query url = "db/{0}/series".format(self._database) params = { diff --git a/influxdb/resultset.py b/influxdb/resultset.py index 79d72ca8..ba4f3c13 100644 --- a/influxdb/resultset.py +++ b/influxdb/resultset.py @@ -41,12 +41,12 @@ def error(self): def __getitem__(self, key): """Retrieve the series name or specific set based on key. - :param key: Either a serie name, or a tags_dict, or - a 2-tuple(serie_name, tags_dict). - If the serie name is None (or not given) then any serie + :param key: Either a series name, or a tags_dict, or + a 2-tuple(series_name, tags_dict). + If the series name is None (or not given) then any serie matching the eventual given tags will be given its points one after the other. 
- To get the points of every serie in this resultset then + To get the points of every series in this resultset then you have to provide None as key. :return: A generator yielding `Point`s matching the given key. NB: @@ -93,22 +93,25 @@ def get_points(self, measurement=None, tags=None): (bytes, type(b''.decode()), type(None))): raise TypeError('measurement must be an str or None') - for serie in self._get_series(): - serie_name = serie.get('measurement', serie.get('name', 'results')) - if serie_name is None: + for series in self._get_series(): + series_name = series.get('measurement', + series.get('name', 'results')) + if series_name is None: # this is a "system" query or a query which # doesn't return a name attribute. # like 'show retention policies' .. if tags is None: - for item in self._get_points_for_serie(serie): + for item in self._get_points_for_series(series): yield item - elif measurement in (None, serie_name): + elif measurement in (None, series_name): # by default if no tags was provided then - # we will matches every returned serie - serie_tags = serie.get('tags', {}) - if tags is None or self._tag_matches(serie_tags, tags): - for item in self._get_points_for_serie(serie): + # we will matches every returned series + series_tags = series.get('tags', {}) + for item in self._get_points_for_series(series): + if tags is None or \ + self._tag_matches(item, tags) or \ + self._tag_matches(series_tags, tags): yield item def __repr__(self): @@ -121,7 +124,7 @@ def __repr__(self): return "ResultSet({%s})" % ", ".join(items) def __iter__(self): - """Yield one dict instance per serie result.""" + """Yield one dict instance per series result.""" for key in self.keys(): yield list(self.__getitem__(key)) @@ -131,10 +134,10 @@ def _tag_matches(tags, filter): for tag_name, tag_value in filter.items(): # using _sentinel as I'm not sure that "None" # could be used, because it could be a valid - # serie_tags value : when a serie has no such tag + # series_tags value : 
when a series has no such tag # then I think it's set to /null/None/.. TBC.. - serie_tag_value = tags.get(tag_name, _sentinel) - if serie_tag_value != tag_value: + series_tag_value = tags.get(tag_name, _sentinel) + if series_tag_value != tag_value: return False return True @@ -150,14 +153,14 @@ def __len__(self): def keys(self): """Return the list of keys in the ResultSet. - :return: List of keys. Keys are tuples (serie_name, tags) + :return: List of keys. Keys are tuples (series_name, tags) """ keys = [] - for serie in self._get_series(): + for series in self._get_series(): keys.append( - (serie.get('measurement', - serie.get('name', 'results')), - serie.get('tags', None)) + (series.get('measurement', + series.get('name', 'results')), + series.get('tags', None)) ) return keys @@ -167,24 +170,24 @@ def items(self): :return: List of tuples, (key, generator) """ items = [] - for serie in self._get_series(): - serie_key = (serie.get('measurement', - serie.get('name', 'results')), - serie.get('tags', None)) + for series in self._get_series(): + series_key = (series.get('measurement', + series.get('name', 'results')), + series.get('tags', None)) items.append( - (serie_key, self._get_points_for_serie(serie)) + (series_key, self._get_points_for_series(series)) ) return items - def _get_points_for_serie(self, serie): - """Return generator of dict from columns and values of a serie. + def _get_points_for_series(self, series): + """Return generator of dict from columns and values of a series. 
- :param serie: One serie + :param series: One series :return: Generator of dicts """ - for point in serie.get('values', []): + for point in series.get('values', []): yield self.point_from_cols_vals( - serie['columns'], + series['columns'], point ) diff --git a/influxdb/tests/resultset_test.py b/influxdb/tests/resultset_test.py index dd088b79..83faa4dd 100644 --- a/influxdb/tests/resultset_test.py +++ b/influxdb/tests/resultset_test.py @@ -19,26 +19,25 @@ def setUp(self): """Set up an instance of TestResultSet.""" self.query_response = { "results": [ - {"series": [{"measurement": "cpu_load_short", - "tags": {"host": "server01", - "region": "us-west"}, - "columns": ["time", "value"], + {"series": [{"name": "cpu_load_short", + "columns": ["time", "value", "host", "region"], "values": [ - ["2015-01-29T21:51:28.968422294Z", 0.64] + ["2015-01-29T21:51:28.968422294Z", + 0.64, + "server01", + "us-west"], + ["2015-01-29T21:51:28.968422294Z", + 0.65, + "server02", + "us-west"], ]}, - {"measurement": "cpu_load_short", - "tags": {"host": "server02", - "region": "us-west"}, - "columns": ["time", "value"], + {"name": "other_series", + "columns": ["time", "value", "host", "region"], "values": [ - ["2015-01-29T21:51:28.968422294Z", 0.65] - ]}, - {"measurement": "other_serie", - "tags": {"host": "server01", - "region": "us-west"}, - "columns": ["time", "value"], - "values": [ - ["2015-01-29T21:51:28.968422294Z", 0.66] + ["2015-01-29T21:51:28.968422294Z", + 0.66, + "server01", + "us-west"], ]}]} ] } @@ -48,8 +47,14 @@ def setUp(self): def test_filter_by_name(self): """Test filtering by name in TestResultSet object.""" expected = [ - {'value': 0.64, 'time': '2015-01-29T21:51:28.968422294Z'}, - {'value': 0.65, 'time': '2015-01-29T21:51:28.968422294Z'} + {'value': 0.64, + 'time': '2015-01-29T21:51:28.968422294Z', + 'host': 'server01', + 'region': 'us-west'}, + {'value': 0.65, + 'time': '2015-01-29T21:51:28.968422294Z', + 'host': 'server02', + 'region': 'us-west'}, ] 
self.assertEqual(expected, list(self.rs['cpu_load_short'])) @@ -60,8 +65,14 @@ def test_filter_by_name(self): def test_filter_by_tags(self): """Test filter by tags in TestResultSet object.""" expected = [ - {'time': '2015-01-29T21:51:28.968422294Z', 'value': 0.64}, - {'time': '2015-01-29T21:51:28.968422294Z', 'value': 0.66} + {'value': 0.64, + 'time': '2015-01-29T21:51:28.968422294Z', + 'host': 'server01', + 'region': 'us-west'}, + {'value': 0.66, + 'time': '2015-01-29T21:51:28.968422294Z', + 'host': 'server01', + 'region': 'us-west'}, ] self.assertEqual( @@ -78,14 +89,23 @@ def test_filter_by_name_and_tags(self): """Test filter by name and tags in TestResultSet object.""" self.assertEqual( list(self.rs[('cpu_load_short', {"host": "server01"})]), - [{'time': '2015-01-29T21:51:28.968422294Z', 'value': 0.64}] + [{'value': 0.64, + 'time': '2015-01-29T21:51:28.968422294Z', + 'host': 'server01', + 'region': 'us-west'}] ) self.assertEqual( list(self.rs[('cpu_load_short', {"region": "us-west"})]), [ - {'value': 0.64, 'time': '2015-01-29T21:51:28.968422294Z'}, - {'value': 0.65, 'time': '2015-01-29T21:51:28.968422294Z'} + {'value': 0.64, + 'time': '2015-01-29T21:51:28.968422294Z', + 'host': 'server01', + 'region': 'us-west'}, + {'value': 0.65, + 'time': '2015-01-29T21:51:28.968422294Z', + 'host': 'server02', + 'region': 'us-west'}, ] ) @@ -94,9 +114,8 @@ def test_keys(self): self.assertEqual( self.rs.keys(), [ - ('cpu_load_short', {'host': 'server01', 'region': 'us-west'}), - ('cpu_load_short', {'host': 'server02', 'region': 'us-west'}), - ('other_serie', {'host': 'server01', 'region': 'us-west'}) + ('cpu_load_short', None), + ('other_series', None), ] ) @@ -104,7 +123,7 @@ def test_len(self): """Test length in TestResultSet object.""" self.assertEqual( len(self.rs), - 3 + 2 ) def test_items(self): @@ -116,21 +135,23 @@ def test_items(self): items_lists, [ ( - ('cpu_load_short', - {'host': 'server01', 'region': 'us-west'}), - [{'value': 0.64, 'time': 
'2015-01-29T21:51:28.968422294Z'}] - ), + ('cpu_load_short', None), + [ + {'time': '2015-01-29T21:51:28.968422294Z', + 'value': 0.64, + 'host': 'server01', + 'region': 'us-west'}, + {'time': '2015-01-29T21:51:28.968422294Z', + 'value': 0.65, + 'host': 'server02', + 'region': 'us-west'}]), ( - ('cpu_load_short', - {'host': 'server02', 'region': 'us-west'}), - [{'value': 0.65, 'time': '2015-01-29T21:51:28.968422294Z'}] - ), - ( - ('other_serie', - {'host': 'server01', 'region': 'us-west'}), - [{'value': 0.66, 'time': '2015-01-29T21:51:28.968422294Z'}] - ) - ] + ('other_series', None), + [ + {'time': '2015-01-29T21:51:28.968422294Z', + 'value': 0.66, + 'host': 'server01', + 'region': 'us-west'}])] ) def test_point_from_cols_vals(self): diff --git a/influxdb/tests/server_tests/client_test_with_server.py b/influxdb/tests/server_tests/client_test_with_server.py index a7dedddd..701f72ac 100644 --- a/influxdb/tests/server_tests/client_test_with_server.py +++ b/influxdb/tests/server_tests/client_test_with_server.py @@ -38,9 +38,9 @@ THIS_DIR = os.path.abspath(os.path.dirname(__file__)) -def point(serie_name, timestamp=None, tags=None, **fields): +def point(series_name, timestamp=None, tags=None, **fields): """Define what a point looks like.""" - res = {'measurement': serie_name} + res = {'measurement': series_name} if timestamp: res['time'] = timestamp @@ -638,7 +638,7 @@ def test_drop_retention_policy(self): def test_issue_143(self): """Test for PR#143 from repo.""" - pt = partial(point, 'a_serie_name', timestamp='2015-03-30T16:16:37Z') + pt = partial(point, 'a_series_name', timestamp='2015-03-30T16:16:37Z') pts = [ pt(value=15), pt(tags={'tag_1': 'value1'}, value=5), @@ -646,19 +646,20 @@ def test_issue_143(self): ] self.cli.write_points(pts) time.sleep(1) - rsp = list(self.cli.query('SELECT * FROM a_serie_name GROUP BY tag_1')) + rsp = list(self.cli.query('SELECT * FROM a_series_name \ +GROUP BY tag_1').get_points()) self.assertEqual( [ - [{'value': 15, 'time': 
'2015-03-30T16:16:37Z'}], - [{'value': 5, 'time': '2015-03-30T16:16:37Z'}], - [{'value': 10, 'time': '2015-03-30T16:16:37Z'}] + {'time': '2015-03-30T16:16:37Z', 'value': 15}, + {'time': '2015-03-30T16:16:37Z', 'value': 5}, + {'time': '2015-03-30T16:16:37Z', 'value': 10} ], rsp ) # a slightly more complex one with 2 tags values: - pt = partial(point, 'serie2', timestamp='2015-03-30T16:16:37Z') + pt = partial(point, 'series2', timestamp='2015-03-30T16:16:37Z') pts = [ pt(tags={'tag1': 'value1', 'tag2': 'v1'}, value=0), pt(tags={'tag1': 'value1', 'tag2': 'v2'}, value=5), @@ -666,18 +667,18 @@ def test_issue_143(self): ] self.cli.write_points(pts) time.sleep(1) - rsp = self.cli.query('SELECT * FROM serie2 GROUP BY tag1,tag2') + rsp = self.cli.query('SELECT * FROM series2 GROUP BY tag1,tag2') self.assertEqual( [ - [{'value': 0, 'time': '2015-03-30T16:16:37Z'}], - [{'value': 5, 'time': '2015-03-30T16:16:37Z'}], - [{'value': 10, 'time': '2015-03-30T16:16:37Z'}] + {'value': 0, 'time': '2015-03-30T16:16:37Z'}, + {'value': 5, 'time': '2015-03-30T16:16:37Z'}, + {'value': 10, 'time': '2015-03-30T16:16:37Z'} ], - list(rsp) + list(rsp['series2']) ) - all_tag2_equal_v1 = list(rsp[None, {'tag2': 'v1'}]) + all_tag2_equal_v1 = list(rsp.get_points(tags={'tag2': 'v1'})) self.assertEqual( [{'value': 0, 'time': '2015-03-30T16:16:37Z'}, @@ -687,13 +688,13 @@ def test_issue_143(self): def test_query_multiple_series(self): """Test query for multiple series.""" - pt = partial(point, 'serie1', timestamp='2015-03-30T16:16:37Z') + pt = partial(point, 'series1', timestamp='2015-03-30T16:16:37Z') pts = [ pt(tags={'tag1': 'value1', 'tag2': 'v1'}, value=0), ] self.cli.write_points(pts) - pt = partial(point, 'serie2', timestamp='1970-03-30T16:16:37Z') + pt = partial(point, 'series2', timestamp='1970-03-30T16:16:37Z') pts = [ pt(tags={'tag1': 'value1', 'tag2': 'v1'}, value=0, data1=33, data2="bla"), From b59fe971af88d6e2a02f4f4c153c8fc4e464349c Mon Sep 17 00:00:00 2001 From: aviau Date: Mon, 20 Nov 
2017 23:51:42 -0500 Subject: [PATCH 412/536] 5.0.0 release --- influxdb/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/__init__.py b/influxdb/__init__.py index 6442e26b..33b7df4f 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -18,4 +18,4 @@ ] -__version__ = '4.1.1' +__version__ = '5.0.0' From 1fbba9719b2da619bc9ac73c5aa34dd4a60fb584 Mon Sep 17 00:00:00 2001 From: Alexandre Viau Date: Tue, 21 Nov 2017 00:03:46 -0500 Subject: [PATCH 413/536] README: add PyPI status --- README.rst | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 0b7c144a..2eaf949f 100644 --- a/README.rst +++ b/README.rst @@ -12,7 +12,9 @@ InfluxDB-Python :target: https://coveralls.io/r/influxdata/influxdb-python :alt: Coverage - +.. image:: https://img.shields.io/pypi/v/influxdb.svg + :target: https://pypi.python.org/pypi/influxdb + :alt: PyPI Status InfluxDB-Python is a client for interacting with InfluxDB_. Development of this library is maintained by From dda70e51ed7da1e483f4eb059b91ae0c1008e2b0 Mon Sep 17 00:00:00 2001 From: Alexandre Viau Date: Tue, 21 Nov 2017 00:04:40 -0500 Subject: [PATCH 414/536] README: styling --- README.rst | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/README.rst b/README.rst index 2eaf949f..c145cfc4 100644 --- a/README.rst +++ b/README.rst @@ -3,7 +3,6 @@ InfluxDB-Python .. image:: https://travis-ci.org/influxdata/influxdb-python.svg?branch=master :target: https://travis-ci.org/influxdata/influxdb-python - .. image:: https://readthedocs.org/projects/influxdb-python/badge/?version=latest&style :target: http://influxdb-python.readthedocs.org/ :alt: Documentation Status @@ -16,7 +15,9 @@ InfluxDB-Python :target: https://pypi.python.org/pypi/influxdb :alt: PyPI Status -InfluxDB-Python is a client for interacting with InfluxDB_. Development of this library is maintained by +InfluxDB-Python is a client for interacting with InfluxDB_. 
+ +Development of this library is maintained by: +-----------+-------------------------------+ | Github ID | URL | From bf232a7aef9eb498751170ec3223f2020eadfecf Mon Sep 17 00:00:00 2001 From: Patrick Hoebeke Date: Sat, 25 Nov 2017 17:31:19 +0100 Subject: [PATCH 415/536] Fix for DataFrameClient issue - seems does not process correctly DateTimeIndex dates (issue #479) (#495) * [FIX] : compatibility with new version of pandas pd.tseries.period.PeriodIndex has been moved to pd.PeriodIndex since at least pandas 0.18.1 pd.tseries.period.DatetimeIndex has been moved to pd.DatetimeIndex since at least pandas 0.18.1 * [FIX] : Fixes #479 : DateTimeIndex not correctly converted to Unix Epoch (e.g .on (some?) Windows machines) * [FIX] : new fix for #479 : DateTimeIndex not correctly converted to Unix Epoch (e.g .on (some?) Windows machines) * [ENH] : added feature : DataFrame.write_points : NaNs and None values allowed in input DataFrame (corresponding entries are removed from the list of points to push to Influx) * [FIX] : error in unittest dataframe_client test_write_points_from_dataframe_with_all_none --- influxdb/_dataframe_client.py | 34 ++++++++--- influxdb/tests/dataframe_client_test.py | 75 +++++++++++++++++++++++++ 2 files changed, 101 insertions(+), 8 deletions(-) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index 6a66558b..b8c83f59 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -10,6 +10,7 @@ from collections import defaultdict import pandas as pd +import numpy as np from .client import InfluxDBClient from .line_protocol import _escape_tag @@ -257,7 +258,7 @@ def _convert_dataframe_to_json(dataframe, {'measurement': measurement, 'tags': dict(list(tag.items()) + list(tags.items())), 'fields': rec, - 'time': int(ts.value / precision_factor)} + 'time': np.int64(ts.value / precision_factor)} for ts, tag, rec in zip(dataframe.index, dataframe[tag_columns].to_dict('record'), 
dataframe[field_columns].to_dict('record')) @@ -274,6 +275,10 @@ def _convert_dataframe_to_lines(self, time_precision=None, numeric_precision=None): + dataframe = dataframe.dropna(how='all').copy() + if len(dataframe) == 0: + return [] + if not isinstance(dataframe, pd.DataFrame): raise TypeError('Must be DataFrame, but type was: {0}.' .format(type(dataframe))) @@ -319,11 +324,11 @@ def _convert_dataframe_to_lines(self, # Make array of timestamp ints if isinstance(dataframe.index, pd.PeriodIndex): - time = ((dataframe.index.to_timestamp().values.astype(int) / - precision_factor).astype(int).astype(str)) + time = ((dataframe.index.to_timestamp().values.astype(np.int64) / + precision_factor).astype(np.int64).astype(str)) else: - time = ((pd.to_datetime(dataframe.index).values.astype(int) / - precision_factor).astype(int).astype(str)) + time = ((pd.to_datetime(dataframe.index).values.astype(np.int64) / + precision_factor).astype(np.int64).astype(str)) # If tag columns exist, make an array of formatted tag keys and values if tag_columns: @@ -357,12 +362,16 @@ def _convert_dataframe_to_lines(self, # Make an array of formatted field keys and values field_df = dataframe[field_columns] + field_df = self._stringify_dataframe(field_df, numeric_precision, datatype='field') - field_df = (field_df.columns.values + '=').tolist() + field_df - field_df[field_df.columns[1:]] = ',' + field_df[field_df.columns[1:]] - fields = field_df.sum(axis=1) + + def format_line(line): + line = line[~line.isnull()] # drop None entries + return ",".join((line.index + '=' + line.values)) + + fields = field_df.apply(format_line, axis=1) del field_df # Generate line protocol string @@ -371,6 +380,13 @@ def _convert_dataframe_to_lines(self, @staticmethod def _stringify_dataframe(dframe, numeric_precision, datatype='field'): + + # Prevent modification of input dataframe + dframe = dframe.copy() + + # Keep the positions where Null values are found + mask_null = dframe.isnull().values + # Find int and 
string columns for field-type data int_columns = dframe.select_dtypes(include=['integer']).columns string_columns = dframe.select_dtypes(include=['object']).columns @@ -414,6 +430,8 @@ def _stringify_dataframe(dframe, numeric_precision, datatype='field'): dframe = dframe.apply(_escape_pandas_series) dframe.columns = dframe.columns.astype(str) + + dframe = dframe.where(~mask_null, None) return dframe def _datetime_to_epoch(self, datetime, time_precision='s'): diff --git a/influxdb/tests/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py index 02aaac5f..269261d5 100644 --- a/influxdb/tests/dataframe_client_test.py +++ b/influxdb/tests/dataframe_client_test.py @@ -59,6 +59,81 @@ def test_write_points_from_dataframe(self): cli.write_points(dataframe, 'foo', tags=None) self.assertEqual(m.last_request.body, expected) + def test_write_points_from_dataframe_with_none(self): + """Test write points from df in TestDataFrameClient object.""" + now = pd.Timestamp('1970-01-01 00:00+00:00') + dataframe = pd.DataFrame(data=[["1", None, 1.0], ["2", 2.0, 2.0]], + index=[now, now + timedelta(hours=1)], + columns=["column_one", "column_two", + "column_three"]) + expected = ( + b"foo column_one=\"1\",column_three=1.0 0\n" + b"foo column_one=\"2\",column_two=2.0,column_three=2.0 " + b"3600000000000\n" + ) + + with requests_mock.Mocker() as m: + m.register_uri(requests_mock.POST, + "http://localhost:8086/write", + status_code=204) + + cli = DataFrameClient(database='db') + + cli.write_points(dataframe, 'foo') + self.assertEqual(m.last_request.body, expected) + + cli.write_points(dataframe, 'foo', tags=None) + self.assertEqual(m.last_request.body, expected) + + def test_write_points_from_dataframe_with_line_of_none(self): + """Test write points from df in TestDataFrameClient object.""" + now = pd.Timestamp('1970-01-01 00:00+00:00') + dataframe = pd.DataFrame(data=[[None, None, None], ["2", 2.0, 2.0]], + index=[now, now + timedelta(hours=1)], + columns=["column_one", 
"column_two", + "column_three"]) + expected = ( + b"foo column_one=\"2\",column_two=2.0,column_three=2.0 " + b"3600000000000\n" + ) + + with requests_mock.Mocker() as m: + m.register_uri(requests_mock.POST, + "http://localhost:8086/write", + status_code=204) + + cli = DataFrameClient(database='db') + + cli.write_points(dataframe, 'foo') + self.assertEqual(m.last_request.body, expected) + + cli.write_points(dataframe, 'foo', tags=None) + self.assertEqual(m.last_request.body, expected) + + def test_write_points_from_dataframe_with_all_none(self): + """Test write points from df in TestDataFrameClient object.""" + now = pd.Timestamp('1970-01-01 00:00+00:00') + dataframe = pd.DataFrame(data=[[None, None, None], [None, None, None]], + index=[now, now + timedelta(hours=1)], + columns=["column_one", "column_two", + "column_three"]) + expected = ( + b"\n" + ) + + with requests_mock.Mocker() as m: + m.register_uri(requests_mock.POST, + "http://localhost:8086/write", + status_code=204) + + cli = DataFrameClient(database='db') + + cli.write_points(dataframe, 'foo') + self.assertEqual(m.last_request.body, expected) + + cli.write_points(dataframe, 'foo', tags=None) + self.assertEqual(m.last_request.body, expected) + def test_write_points_from_dataframe_in_batches(self): """Test write points in batch from df in TestDataFrameClient object.""" now = pd.Timestamp('1970-01-01 00:00+00:00') From 12125309ca44c49d25ac4bbcfc614b749e7a9187 Mon Sep 17 00:00:00 2001 From: Matthew McGinn Date: Wed, 29 Nov 2017 08:11:00 -0500 Subject: [PATCH 416/536] adding back dropped database param fixes #539 --- influxdb/_dataframe_client.py | 1 + 1 file changed, 1 insertion(+) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index b8c83f59..d5b41d9f 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -175,6 +175,7 @@ def query(self, expected_response_code=expected_response_code, raise_errors=raise_errors, chunked=chunked, + database=database, 
chunk_size=chunk_size) results = super(DataFrameClient, self).query(query, **query_args) if query.strip().upper().startswith("SELECT"): From df70f5ce395d5650c14b9f68707220ab451b5ce8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Thiago=20Figueir=C3=B3?= Date: Mon, 11 Dec 2017 08:21:39 +1100 Subject: [PATCH 417/536] doc: clarify that send_packet takes a list (#545) --- influxdb/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/client.py b/influxdb/client.py index 02128462..01559cfc 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -869,7 +869,7 @@ def send_packet(self, packet, protocol='json'): :param packet: the packet to be sent :type packet: (if protocol is 'json') dict - (if protocol is 'line') sequence of line protocol strings + (if protocol is 'line') list of line protocol strings :param protocol: protocol of input data, either 'json' or 'line' :type protocol: str """ From 6b5db784dcb37495c1f24e3722973878eb3dd53d Mon Sep 17 00:00:00 2001 From: Ivan <8692788+vaniakov@users.noreply.github.com> Date: Mon, 11 Dec 2017 15:40:18 +0200 Subject: [PATCH 418/536] Escape tag values that ends with backslash (#537) * FAM-1163 escape tag values that ends with backslash * Add tests for _escape_tag_value func * Remove print statement --- influxdb/line_protocol.py | 9 ++++++++- influxdb/tests/test_line_protocol.py | 3 ++- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/influxdb/line_protocol.py b/influxdb/line_protocol.py index c399a1d5..e8816fc0 100644 --- a/influxdb/line_protocol.py +++ b/influxdb/line_protocol.py @@ -57,6 +57,13 @@ def _escape_tag(tag): ) +def _escape_tag_value(value): + ret = _escape_tag(value) + if ret.endswith('\\'): + ret += ' ' + return ret + + def quote_ident(value): """Indent the quotes.""" return "\"{}\"".format(value @@ -135,7 +142,7 @@ def make_lines(data, precision=None): # tags should be sorted client-side to take load off server for tag_key, tag_value in sorted(iteritems(tags)): key = 
_escape_tag(tag_key) - value = _escape_tag(tag_value) + value = _escape_tag_value(tag_value) if key != '' and value != '': key_values.append(key + "=" + value) diff --git a/influxdb/tests/test_line_protocol.py b/influxdb/tests/test_line_protocol.py index dbee8cda..a3d84793 100644 --- a/influxdb/tests/test_line_protocol.py +++ b/influxdb/tests/test_line_protocol.py @@ -22,6 +22,7 @@ def test_make_lines(self): "tags": { "empty_tag": "", "none_tag": None, + "backslash_tag": "C:\\", "integer_tag": 2, "string_tag": "hello" }, @@ -41,7 +42,7 @@ def test_make_lines(self): self.assertEqual( line_protocol.make_lines(data), - 'test,integer_tag=2,string_tag=hello ' + 'test,backslash_tag=C:\\\\ ,integer_tag=2,string_tag=hello ' 'bool_val=True,float_val=1.1,int_val=1i,string_val="hello!"\n' ) From 35732cd7dfe5a585564999c9f881bd88e7c1531d Mon Sep 17 00:00:00 2001 From: Tzong Hao Chen Date: Tue, 12 Dec 2017 08:14:07 -0500 Subject: [PATCH 419/536] DataFrameClient should escape measurement names (#542) * Fixed: DataFrameClient should escape measurement names Issue #520 * Fix pep257 error --- influxdb/_dataframe_client.py | 1 + influxdb/tests/dataframe_client_test.py | 22 ++++++++++++++++++++++ 2 files changed, 23 insertions(+) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index d5b41d9f..86b582af 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -376,6 +376,7 @@ def format_line(line): del field_df # Generate line protocol string + measurement = _escape_tag(measurement) points = (measurement + tags + ' ' + fields + ' ' + time).tolist() return points diff --git a/influxdb/tests/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py index 269261d5..5a717f5c 100644 --- a/influxdb/tests/dataframe_client_test.py +++ b/influxdb/tests/dataframe_client_test.py @@ -59,6 +59,28 @@ def test_write_points_from_dataframe(self): cli.write_points(dataframe, 'foo', tags=None) self.assertEqual(m.last_request.body, expected) + 
def test_dataframe_write_points_with_whitespace_measurement(self): + """write_points should escape white space in measurements.""" + now = pd.Timestamp('1970-01-01 00:00+00:00') + dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], + index=[now, now + timedelta(hours=1)], + columns=["column_one", "column_two", + "column_three"]) + expected = ( + b"meas\\ with\\ space " + b"column_one=\"1\",column_two=1i,column_three=1.0 0\n" + b"meas\\ with\\ space " + b"column_one=\"2\",column_two=2i,column_three=2.0 " + b"3600000000000\n" + ) + with requests_mock.Mocker() as m: + m.register_uri(requests_mock.POST, + "http://localhost:8086/write", + status_code=204) + cli = DataFrameClient(database='db') + cli.write_points(dataframe, 'meas with space') + self.assertEqual(m.last_request.body, expected) + def test_write_points_from_dataframe_with_none(self): """Test write points from df in TestDataFrameClient object.""" now = pd.Timestamp('1970-01-01 00:00+00:00') From a26b39fe3fbdc6471c8d21bdc581c677b3dbd09f Mon Sep 17 00:00:00 2001 From: xginn8 Date: Sat, 10 Feb 2018 18:32:16 -0500 Subject: [PATCH 420/536] specify the numpy dependency explicitly to prevent regression in test (#563) suite --- tox.ini | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tox.ini b/tox.ini index bfa25165..5c54d680 100644 --- a/tox.ini +++ b/tox.ini @@ -7,6 +7,7 @@ setenv = INFLUXDB_PYTHON_SKIP_SERVER_TESTS=False deps = -r{toxinidir}/requirements.txt -r{toxinidir}/test-requirements.txt py27,py34,py35,py36: pandas==0.20.1 + py27,py34,py35,py36: numpy==1.13.3 # Only install pandas with non-pypy interpreters commands = nosetests -v --with-doctest {posargs} @@ -25,11 +26,13 @@ deps = -r{toxinidir}/requirements.txt -r{toxinidir}/test-requirements.txt pandas coverage + numpy==1.13.3 commands = nosetests -v --with-coverage --cover-html --cover-package=influxdb [testenv:docs] deps = -r{toxinidir}/requirements.txt pandas==0.20.1 + numpy==1.13.3 Sphinx==1.5.5 sphinx_rtd_theme commands = sphinx-build -b 
html docs/source docs/build From b47362c04938330237b49234d62b5dacfa8b5036 Mon Sep 17 00:00:00 2001 From: Jan Stodt Date: Sun, 11 Feb 2018 19:31:00 +0100 Subject: [PATCH 421/536] Fix pandas example (#547) Current example code raises KeyError: '[0] not in index' and can be fixed by adding columns=['0'] to the DataFrame creation. Fixed issue #497 --- examples/tutorial_pandas.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/tutorial_pandas.py b/examples/tutorial_pandas.py index 9cec910d..b5fb8f79 100644 --- a/examples/tutorial_pandas.py +++ b/examples/tutorial_pandas.py @@ -20,7 +20,7 @@ def main(host='localhost', port=8086): print("Create pandas DataFrame") df = pd.DataFrame(data=list(range(30)), index=pd.date_range(start='2014-11-16', - periods=30, freq='H')) + periods=30, freq='H'), columns=['0']) print("Create database: " + dbname) client.create_database(dbname) From ed276e2187c5961ef92018c61febaa538acda644 Mon Sep 17 00:00:00 2001 From: Maura Hausman Date: Tue, 13 Feb 2018 17:48:38 -0500 Subject: [PATCH 422/536] Remove UDP Precision Restrictions (#557) - address issue #554 - UDP writes can now convert timestamps to the desired precision - add time_precision argument to `InfluxDBClient.send_packet`, defaults to None - add tests for udp precision - remove old udp precision failure tests --- influxdb/client.py | 15 ++++--- influxdb/tests/client_test.py | 75 +++++++++++++++++++++++++++-------- 2 files changed, 66 insertions(+), 24 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 01559cfc..e38d4b78 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -497,11 +497,6 @@ def _write_points(self, "Invalid time precision is given. 
" "(use 'n', 'u', 'ms', 's', 'm' or 'h')") - if self._use_udp and time_precision and time_precision != 's': - raise ValueError( - "InfluxDB only supports seconds precision for udp writes" - ) - if protocol == 'json': data = { 'points': points @@ -523,7 +518,9 @@ def _write_points(self, params['rp'] = retention_policy if self._use_udp: - self.send_packet(data, protocol=protocol) + self.send_packet( + data, protocol=protocol, time_precision=time_precision + ) else: self.write( data=data, @@ -864,7 +861,7 @@ def get_list_privileges(self, username): text = "SHOW GRANTS FOR {0}".format(quote_ident(username)) return list(self.query(text).get_points()) - def send_packet(self, packet, protocol='json'): + def send_packet(self, packet, protocol='json', time_precision=None): """Send an UDP packet. :param packet: the packet to be sent @@ -872,9 +869,11 @@ def send_packet(self, packet, protocol='json'): (if protocol is 'line') list of line protocol strings :param protocol: protocol of input data, either 'json' or 'line' :type protocol: str + :param time_precision: Either 's', 'm', 'ms' or 'u', defaults to None + :type time_precision: str """ if protocol == 'json': - data = make_lines(packet).encode('utf-8') + data = make_lines(packet, time_precision).encode('utf-8') elif protocol == 'line': data = ('\n'.join(packet) + '\n').encode('utf-8') self.udp_socket.sendto(data, (self._host, self._udp_port)) diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index ff325907..ebf5d424 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -259,22 +259,6 @@ def test_write_points_udp(self): received_data.decode() ) - def test_write_bad_precision_udp(self): - """Test write bad precision in UDP for TestInfluxDBClient object.""" - cli = InfluxDBClient( - 'localhost', 8086, 'root', 'root', - 'test', use_udp=True, udp_port=4444 - ) - - with self.assertRaisesRegexp( - Exception, - "InfluxDB only supports seconds precision for udp writes" - ): - 
cli.write_points( - self.dummy_points, - time_precision='ms' - ) - @raises(Exception) def test_write_points_fails(self): """Test write points fail for TestInfluxDBClient object.""" @@ -335,6 +319,65 @@ def test_write_points_with_precision(self): m.last_request.body, ) + def test_write_points_with_precision_udp(self): + """Test write points with precision for TestInfluxDBClient object.""" + s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + port = random.randint(4000, 8000) + s.bind(('0.0.0.0', port)) + + cli = InfluxDBClient( + 'localhost', 8086, 'root', 'root', + 'test', use_udp=True, udp_port=port + ) + + cli.write_points(self.dummy_points, time_precision='n') + received_data, addr = s.recvfrom(1024) + self.assertEqual( + b'cpu_load_short,host=server01,region=us-west ' + b'value=0.64 1257894000123456000\n', + received_data, + ) + + cli.write_points(self.dummy_points, time_precision='u') + received_data, addr = s.recvfrom(1024) + self.assertEqual( + b'cpu_load_short,host=server01,region=us-west ' + b'value=0.64 1257894000123456\n', + received_data, + ) + + cli.write_points(self.dummy_points, time_precision='ms') + received_data, addr = s.recvfrom(1024) + self.assertEqual( + b'cpu_load_short,host=server01,region=us-west ' + b'value=0.64 1257894000123\n', + received_data, + ) + + cli.write_points(self.dummy_points, time_precision='s') + received_data, addr = s.recvfrom(1024) + self.assertEqual( + b"cpu_load_short,host=server01,region=us-west " + b"value=0.64 1257894000\n", + received_data, + ) + + cli.write_points(self.dummy_points, time_precision='m') + received_data, addr = s.recvfrom(1024) + self.assertEqual( + b'cpu_load_short,host=server01,region=us-west ' + b'value=0.64 20964900\n', + received_data, + ) + + cli.write_points(self.dummy_points, time_precision='h') + received_data, addr = s.recvfrom(1024) + self.assertEqual( + b'cpu_load_short,host=server01,region=us-west ' + b'value=0.64 349415\n', + received_data, + ) + def 
test_write_points_bad_precision(self): """Test write points w/bad precision TestInfluxDBClient object.""" cli = InfluxDBClient() From 13cdddb178656f3069aa806cdad2c7964b7fb167 Mon Sep 17 00:00:00 2001 From: Ivan <8692788+vaniakov@users.noreply.github.com> Date: Fri, 30 Mar 2018 20:07:43 +0300 Subject: [PATCH 423/536] Fix wrong session mount (#571) --- influxdb/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/client.py b/influxdb/client.py index e38d4b78..f5c0b55b 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -103,7 +103,7 @@ def __init__(self, if ssl is True: self._scheme = "https" - self._session.mount(self._scheme, adapter) + self._session.mount(self._scheme + '://', adapter) if proxies is None: self._proxies = {} From fd4579c95856e0e79014204f308f02ed8a2a55a0 Mon Sep 17 00:00:00 2001 From: dennis Date: Thu, 3 May 2018 05:05:20 +0300 Subject: [PATCH 424/536] Remove comment as issues have been resolved (#581) --- examples/tutorial_pandas.py | 1 - 1 file changed, 1 deletion(-) diff --git a/examples/tutorial_pandas.py b/examples/tutorial_pandas.py index b5fb8f79..67a5457d 100644 --- a/examples/tutorial_pandas.py +++ b/examples/tutorial_pandas.py @@ -12,7 +12,6 @@ def main(host='localhost', port=8086): user = 'root' password = 'root' dbname = 'demo' - # Temporarily avoid line protocol time conversion issues #412, #426, #431. 
protocol = 'json' client = DataFrameClient(host, port, user, password, dbname) From ed4561b8d31a8f8490660a192948527c17f55c5a Mon Sep 17 00:00:00 2001 From: dennis Date: Tue, 8 May 2018 03:48:49 +0300 Subject: [PATCH 425/536] Parse column names in a dataframe to avoid breaking the line protocol (#584) * Remove comment as issues have been resolved * Parse column names in a dataframe to handle spaces in tag of field keys * Patch for ERROR: Test failed write points from df with series * Patch for flake8 issues(E231, E501) * Test case for spaces in column names * flake8 issues * flake8 issues: trailing wspace * Testing if test case would catch a regression * Test catches a regressed build * Re-run the build --- influxdb/_dataframe_client.py | 2 ++ influxdb/tests/dataframe_client_test.py | 20 ++++++++++++++++++++ 2 files changed, 22 insertions(+) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index 86b582af..2444a77f 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -288,6 +288,8 @@ def _convert_dataframe_to_lines(self, raise TypeError('Must be DataFrame with DatetimeIndex or ' 'PeriodIndex.') + dataframe = dataframe.rename( + columns={item: _escape_tag(item) for item in dataframe.columns}) # Create a Series of columns for easier indexing column_series = pd.Series(dataframe.columns) diff --git a/influxdb/tests/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py index 5a717f5c..78f5437f 100644 --- a/influxdb/tests/dataframe_client_test.py +++ b/influxdb/tests/dataframe_client_test.py @@ -81,6 +81,26 @@ def test_dataframe_write_points_with_whitespace_measurement(self): cli.write_points(dataframe, 'meas with space') self.assertEqual(m.last_request.body, expected) + def test_dataframe_write_points_with_whitespace_in_column_names(self): + """write_points should escape white space in column names.""" + now = pd.Timestamp('1970-01-01 00:00+00:00') + dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 
2.0]], + index=[now, now + timedelta(hours=1)], + columns=["column one", "column two", + "column three"]) + expected = ( + b"foo column\\ one=\"1\",column\\ two=1i,column\\ three=1.0 0\n" + b"foo column\\ one=\"2\",column\\ two=2i,column\\ three=2.0 " + b"3600000000000\n" + ) + with requests_mock.Mocker() as m: + m.register_uri(requests_mock.POST, + "http://localhost:8086/write", + status_code=204) + cli = DataFrameClient(database='db') + cli.write_points(dataframe, 'foo') + self.assertEqual(m.last_request.body, expected) + def test_write_points_from_dataframe_with_none(self): """Test write points from df in TestDataFrameClient object.""" now = pd.Timestamp('1970-01-01 00:00+00:00') From f3c6acf503e1befd767c4cd70f46d9561341c05a Mon Sep 17 00:00:00 2001 From: Frederik Gladhorn Date: Tue, 8 May 2018 03:22:20 +0200 Subject: [PATCH 426/536] Allow connecting to influxdb running on a path on the server (#556) * Allow connecting to influxdb running on a path on the server Make it possible to connect to the databases on a path on servers. https://someserver.com/myinfluxdb instead of the root of the server. 
* Test and fix for None path --- influxdb/client.py | 19 +++++++++++++++++-- influxdb/tests/client_test.py | 18 ++++++++++++++++++ 2 files changed, 35 insertions(+), 2 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index f5c0b55b..62d5a025 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -59,6 +59,8 @@ class InfluxDBClient(object): :type udp_port: int :param proxies: HTTP(S) proxy to use for Requests, defaults to {} :type proxies: dict + :param path: path of InfluxDB on the server to connect, defaults to '' + :type path: str """ def __init__(self, @@ -75,6 +77,7 @@ def __init__(self, udp_port=4444, proxies=None, pool_size=10, + path='', ): """Construct a new InfluxDBClient object.""" self.__host = host @@ -98,6 +101,13 @@ def __init__(self, if use_udp: self.udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + if not path: + self.__path = '' + elif path[0] == '/': + self.__path = path + else: + self.__path = '/' + path + self._scheme = "http" if ssl is True: @@ -110,10 +120,11 @@ def __init__(self, else: self._proxies = proxies - self.__baseurl = "{0}://{1}:{2}".format( + self.__baseurl = "{0}://{1}:{2}{3}".format( self._scheme, self._host, - self._port) + self._port, + self._path) self._headers = { 'Content-Type': 'application/json', @@ -132,6 +143,10 @@ def _host(self): def _port(self): return self.__port + @property + def _path(self): + return self.__path + @property def _udp_port(self): return self.__udp_port diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index ebf5d424..efdfb770 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -109,6 +109,24 @@ def test_scheme(self): ) self.assertEqual('https://host:8086', cli._baseurl) + cli = InfluxDBClient( + 'host', 8086, 'username', 'password', 'database', ssl=True, + path="somepath" + ) + self.assertEqual('https://host:8086/somepath', cli._baseurl) + + cli = InfluxDBClient( + 'host', 8086, 'username', 'password', 
'database', ssl=True, + path=None + ) + self.assertEqual('https://host:8086', cli._baseurl) + + cli = InfluxDBClient( + 'host', 8086, 'username', 'password', 'database', ssl=True, + path="/somepath" + ) + self.assertEqual('https://host:8086/somepath', cli._baseurl) + def test_dsn(self): """Set up the test datasource name for TestInfluxDBClient object.""" cli = InfluxDBClient.from_dsn('influxdb://192.168.0.1:1886') From 472de65b99bb0228f24688ef3e84ed1d5263e426 Mon Sep 17 00:00:00 2001 From: dragoshenron Date: Mon, 11 Jun 2018 04:13:22 +0200 Subject: [PATCH 427/536] Update _dataframe_client.py (#593) Permanent error "NameError: name 'to_datetime' is not defined". Reason: to_datetime is a function defined in pandas not a method on a DataFrame. (see: https://stackoverflow.com/questions/48387878/attributeerror-dataframe-object-has-no-attribute-to-datetime) Tested with Python 3.5.3 and Pandas 0.23.0 --- influxdb/_dataframe_client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index 2444a77f..4273ef1b 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -236,7 +236,7 @@ def _convert_dataframe_to_json(dataframe, field_columns = list( set(dataframe.columns).difference(set(tag_columns))) - dataframe.index = dataframe.index.to_datetime() + dataframe.index = pd.to_datetime(dataframe.index) if dataframe.index.tzinfo is None: dataframe.index = dataframe.index.tz_localize('UTC') From b3ed5db34345e1763974048f700bd7ae9cf147e4 Mon Sep 17 00:00:00 2001 From: aviau Date: Tue, 26 Jun 2018 19:14:53 -0400 Subject: [PATCH 428/536] set version to 5.1.0 --- influxdb/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/__init__.py b/influxdb/__init__.py index 33b7df4f..374fddc7 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -18,4 +18,4 @@ ] -__version__ = '5.0.0' +__version__ = '5.1.0' From c300105d906dd44fb9ef894fda295010e227f521 
Mon Sep 17 00:00:00 2001 From: Shu Shen Date: Sat, 30 Jun 2018 09:24:06 -0700 Subject: [PATCH 429/536] Fix performance degradation with line protocol (#592) Assemble line by line in the commit bf232a7aef to remove NaN has significant performance impact. This change fixes the issue by keeping the NaN fields before stringify the dataframe, replacing the fields with empty string, and reverting back to use pd.DataFrame.sum() function to yield the lines. Fixes: #591 --- influxdb/_dataframe_client.py | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index 4273ef1b..646f298c 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -365,16 +365,18 @@ def _convert_dataframe_to_lines(self, # Make an array of formatted field keys and values field_df = dataframe[field_columns] + # Keep the positions where Null values are found + mask_null = field_df.isnull().values field_df = self._stringify_dataframe(field_df, numeric_precision, datatype='field') - def format_line(line): - line = line[~line.isnull()] # drop None entries - return ",".join((line.index + '=' + line.values)) - - fields = field_df.apply(format_line, axis=1) + field_df = (field_df.columns.values + '=').tolist() + field_df + field_df[field_df.columns[1:]] = ',' + field_df[ + field_df.columns[1:]] + field_df = field_df.where(~mask_null, '') # drop Null entries + fields = field_df.sum(axis=1) del field_df # Generate line protocol string @@ -388,9 +390,6 @@ def _stringify_dataframe(dframe, numeric_precision, datatype='field'): # Prevent modification of input dataframe dframe = dframe.copy() - # Keep the positions where Null values are found - mask_null = dframe.isnull().values - # Find int and string columns for field-type data int_columns = dframe.select_dtypes(include=['integer']).columns string_columns = dframe.select_dtypes(include=['object']).columns @@ -435,7 +434,6 @@ def 
_stringify_dataframe(dframe, numeric_precision, datatype='field'): dframe.columns = dframe.columns.astype(str) - dframe = dframe.where(~mask_null, None) return dframe def _datetime_to_epoch(self, datetime, time_precision='s'): From 9b2d7d1c75a620f161582e9a21127b5083b64213 Mon Sep 17 00:00:00 2001 From: Matthew McGinn Date: Sat, 30 Jun 2018 18:11:18 -0400 Subject: [PATCH 430/536] Add an initial stub for a CHANGELOG.md --- CHANGELOG.md | 263 +++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 263 insertions(+) create mode 100644 CHANGELOG.md diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 00000000..2f9cbf97 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,263 @@ +# Changelog +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) +and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). + +## [Unreleased] +### Added +### Changed +- Fix performance degradation when removing NaN values via line protocol (#592) +### Removed + +## [v5.1.0] - 2018-06-26 +### Added +- Connect to InfluxDB path running on server (#556 thx @gladhorn) +- Escape measurement names in DataFrameClient (#542 thx @tzonghao) +- Escape tags that end with a backslash (#537 thx @vaniakov) +- Add back mistakenly-dropped database parameter (#540) +- Add PyPI status to README.md +### Changed +- Fix bad session mount scheme (#571 thx @vaniakov) +- Fixed issue with DataFrameClient calling to_datetime function (#593 thx @dragoshenron) +- Escape columns in DataFrameClient for line protocol (#584 thx @dmuiruri) +- Convert DataFrameClient times from int to np.int64 (#495 thx patrickhoebeke) +- Updated pandas tutorial (#547 thx @techaddicted) +- Explicitly set numpy version for tox (#563) +### Removed +- Removed UDP precision restrictions on timestamp (#557 thx @mdhausman) + +## [v5.0.0] - 2017-11-20 +### Added +### Changed +### Removed + +## [v4.1.1] - 
2017-06-06 +### Added +### Changed +### Removed + +## [v4.1.0] - 2017-04-12 +### Added +### Changed +### Removed + +## [v4.0.0] - 2016-12-07 +### Added +### Changed +### Removed + +## [v3.0.0] - 2016-06-26 +### Added +### Changed +### Removed + +## [v2.12.0] - 2016-01-29 +### Added +### Changed +### Removed + +## [v2.11.0] - 2016-01-11 +### Added +### Changed +### Removed + +## [v2.10.0] - 2015-11-13 +### Added +### Changed +### Removed + +## [v2.9.3] - 2015-10-30 +### Added +### Changed +### Removed + +## [v2.9.2] - 2015-10-07 +### Added +### Changed +### Removed + +## [v2.9.1] - 2015-08-30 +### Added +### Changed +### Removed + +## [v2.9.0] - 2015-08-28 +### Added +### Changed +### Removed + +## [v2.8.0] - 2015-08-06 +### Added +### Changed +### Removed + +## [v2.7.3] - 2015-07-31 +### Added +### Changed +### Removed + +## [v2.7.2] - 2015-07-31 +### Added +### Changed +### Removed + +## [v2.7.1] - 2015-07-26 +### Added +### Changed +### Removed + +## [v2.7.0] - 2015-07-23 +### Added +### Changed +### Removed + +## [v2.6.0] - 2015-06-16 +### Added +### Changed +### Removed + +## [v2.5.1] - 2015-06-15 +### Added +### Changed +### Removed + +## [v2.5.0] - 2015-06-15 +### Added +### Changed +### Removed + +## [v2.4.0] - 2015-06-12 +### Added +### Changed +### Removed + +## [v2.3.0] - 2015-05-13 +### Added +### Changed +### Removed + +## [v2.2.0] - 2015-05-05 +### Added +### Changed +### Removed + +## [v2.1.0] - 2015-04-24 +### Added +### Changed +### Removed + +## [v2.0.2] - 2015-04-22 +### Added +### Changed +### Removed + +## [v2.0.1] - 2015-04-17 +### Added +### Changed +### Removed + +## [v2.0.0] - 2015-04-17 +### Added +### Changed +### Removed + +## [v1.0.1] - 2015-03-30 +### Added +### Changed +### Removed + +## [v1.0.0] - 2015-03-20 +### Added +### Changed +### Removed + +## [v0.4.1] - 2015-03-18 +### Added +### Changed +### Removed + +## [v0.4.0] - 2015-03-17 +### Added +### Changed +### Removed + +## [v0.3.1] - 2015-02-23 +### Added +### Changed +### 
Removed + +## [v0.3.0] - 2015-02-17 +### Added +### Changed +### Removed + +## [v0.2.0] - 2015-01-23 +### Added +### Changed +### Removed + +## [v0.1.13] - 2014-11-12 +### Added +### Changed +### Removed + +## [v0.1.12] - 2014-08-22 +### Added +### Changed +### Removed + +## [v0.1.11] - 2014-06-20 +### Added +### Changed +### Removed + +## [v0.1.10] - 2014-06-09 +### Added +### Changed +### Removed + +## [v0.1.9] - 2014-06-06 +### Added +### Changed +### Removed + +## [v0.1.8] - 2014-06-06 +### Added +### Changed +### Removed + +## [v0.1.7] - 2014-05-21 +### Added +### Changed +### Removed + +## [v0.1.6] - 2014-04-02 +### Added +### Changed +### Removed + +## [v0.1.5] - 2014-03-25 +### Added +### Changed +### Removed + +## [v0.1.4] - 2014-03-03 +### Added +### Changed +### Removed + +## [v0.1.3] - 2014-02-11 +### Added +### Changed +### Removed + +## [v0.1.2] - 2013-12-09 +### Added +### Changed +### Removed + +## [v0.1.1] - 2013-11-14 +### Added +### Changed +### Removed From 6847fe4936a9f640a23f18f9f8946f7154b91d2a Mon Sep 17 00:00:00 2001 From: Matthew McGinn Date: Sun, 1 Jul 2018 12:01:25 -0400 Subject: [PATCH 431/536] Add changelog entries for 5.0.0 release --- CHANGELOG.md | 24 ++++++++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2f9cbf97..326fde7a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -29,8 +29,28 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. 
## [v5.0.0] - 2017-11-20 ### Added -### Changed -### Removed +- Add pool size parameter to client constructor (#534 thx @vaniakov) +- Add ping method to client for checking connectivity (#409 thx @pmenglund) +- Add retry logic & exponential backoff when a connection fails (#508) +- Declare which setuptools version is required in PyPy env +- Functions for drop_measurement and get_list_measurements in InfluxDBClient (#402 thx @Vic020) +- Allow single string as data argument in write (#492 thx @baftek) +- Support chunked queries in DataFrameClient (#439 thx @gusutabopb) +- Add close method to InfluxDBClient (#465 thx @Linux-oiD) +- PEP257 linting & code compliance (#473) +### Changed +- Fix broken tags filtering on a ResultSet (#511) +- Improve retry codepath for connecting to InfluxDB (#536 thx @swails) +- Clean up imports using six instead of sys.version (#536 thx @swails) +- Replace references to dataframe.ix with dataframe.iloc (#528) +- Improve performance of tag processing when converting DataFrameClient to line protocol (#503 thx @tzonghao) +- Typo in Content-Type header (#513 thx @milancermak) +- Clean up README.md formatting +- Catch TypeError when casting to float to return False with objects (#475 thx @BenHewins) +- Improve efficiency of tag appending in DataFrameClient when converting to line protocol (#486 thx @maxdolle) +### Removed +- Drop requirement for all fields in SeriesHelper (#518 thx @spott) +- use_udp and udp_port are now private properties in InfluxDBClient ## [v4.1.1] - 2017-06-06 ### Added From 25b7b8957e08ec959b5ecfa18d28fbfdf8819075 Mon Sep 17 00:00:00 2001 From: Matthew McGinn Date: Sun, 1 Jul 2018 12:12:30 -0400 Subject: [PATCH 432/536] Add changelog to CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 326fde7a..757e20e1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. 
## [Unreleased] ### Added +- Finally add a CHANGELOG.md to communicate breaking changes (#598) ### Changed - Fix performance degradation when removing NaN values via line protocol (#592) ### Removed From 8022fb6f1db37a06c974f9e354b42152942fb35f Mon Sep 17 00:00:00 2001 From: Matthew McGinn Date: Sun, 1 Jul 2018 14:43:49 -0400 Subject: [PATCH 433/536] Mention in README.md that Python 3.3 is no longer supported --- README.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.rst b/README.rst index c145cfc4..65e000f8 100644 --- a/README.rst +++ b/README.rst @@ -60,9 +60,9 @@ On Debian/Ubuntu, you can install it with this command:: Dependencies ------------ -The influxdb-python distribution is supported and tested on Python 2.7, 3.3, 3.4, 3.5, 3.6, PyPy and PyPy3. +The influxdb-python distribution is supported and tested on Python 2.7, 3.4, 3.5, 3.6, PyPy and PyPy3. -**Note:** Python 3.2 is currently untested. See ``.travis.yml``. +**Note:** Python 3.2 and 3.3 are currently untested. See ``.travis.yml``. 
Main dependency is: From 4dc7e024f8c26a2109f6936d0f3cf7c70fffb8e1 Mon Sep 17 00:00:00 2001 From: Matthew McGinn Date: Sun, 1 Jul 2018 17:13:38 -0400 Subject: [PATCH 434/536] Update travis to test multiple versions of InfluxDB Comment out other InfluxDB versions for now (pending compatibility) --- .travis.yml | 42 ++++++++++++++++++++++-------------------- CHANGELOG.md | 1 + 2 files changed, 23 insertions(+), 20 deletions(-) diff --git a/.travis.yml b/.travis.yml index 03e6bc36..da25d8ea 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,27 +1,26 @@ language: python +python: + - "2.7" + - "3.5" + - "3.6" + - "pypy-5.3.1" + +env: + - INFLUXDB_VER=1.2.4 +# - INFLUXDB_VER=1.3.9 +# - INFLUXDB_VER=1.4.2 +# - INFLUXDB_VER=1.5.4 + addons: apt: packages: - wget matrix: - allow_failures: - - python: 3.4 - env: TOX_ENV=docs include: - - python: 2.7 - env: TOX_ENV=py27 - python: 2.7 env: TOX_ENV=pep257 - - python: pypy-5.3.1 - env: TOX_ENV=pypy - - python: 3.4 - env: TOX_ENV=py34 - - python: 3.5 - env: TOX_ENV=py35 - - python: 3.6 - env: TOX_ENV=py36 - python: 3.6 env: TOX_ENV=docs - python: 3.6 @@ -30,17 +29,20 @@ matrix: env: TOX_ENV=coverage install: - - pip install tox + - pip install tox-travis - pip install setuptools==20.6.6 - pip install coveralls - - mkdir influxdb_install - - wget https://dl.influxdata.com/influxdb/releases/influxdb_1.2.4_amd64.deb - - dpkg -x influxdb*.deb influxdb_install + - mkdir -p "influxdb_install/${INFLUXDB_VER}" + - if [ -n "${INFLUXDB_VER}" ] ; then wget "https://dl.influxdata.com/influxdb/releases/influxdb_${INFLUXDB_VER}_amd64.deb" ; fi + - if [ -n "${INFLUXDB_VER}" ] ; then dpkg -x influxdb*.deb "influxdb_install/${INFLUXDB_VER}" ; fi + script: - - export INFLUXDB_PYTHON_INFLUXD_PATH=$(pwd)/influxdb_install/usr/bin/influxd - - tox -e $TOX_ENV + - export "INFLUXDB_PYTHON_INFLUXD_PATH=$(pwd)/influxdb_install/${INFLUXDB_VER}/usr/bin/influxd" + - if [ -n "${TOX_ENV}" ]; then tox -e "${TOX_ENV}"; else tox; fi + after_success: - - if [ 
"$TOX_ENV" == "coverage" ] ; then coveralls; fi + - if [ "${TOX_ENV}" == "coverage" ] ; then coveralls; fi + notifications: email: false diff --git a/CHANGELOG.md b/CHANGELOG.md index 757e20e1..fe7c5b9b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ## [Unreleased] ### Added - Finally add a CHANGELOG.md to communicate breaking changes (#598) +- Test multiple versions of InfluxDB in travis ### Changed - Fix performance degradation when removing NaN values via line protocol (#592) ### Removed From ae2a3c71c11d9b58a6538195b4282cee1d76bdb9 Mon Sep 17 00:00:00 2001 From: Matthew McGinn Date: Sun, 1 Jul 2018 16:14:40 -0400 Subject: [PATCH 435/536] Enable testing for InfluxDB v1.3.9, v1.4.2, and v1.5.4 (tsi) Swap admin_port config for global_port config --- .travis.yml | 6 +++--- CHANGELOG.md | 2 ++ README.rst | 3 +-- influxdb/tests/server_tests/influxdb.conf.template | 7 +++---- influxdb/tests/server_tests/influxdb_instance.py | 6 +++--- 5 files changed, 12 insertions(+), 12 deletions(-) diff --git a/.travis.yml b/.travis.yml index da25d8ea..a5fc1831 100644 --- a/.travis.yml +++ b/.travis.yml @@ -8,9 +8,9 @@ python: env: - INFLUXDB_VER=1.2.4 -# - INFLUXDB_VER=1.3.9 -# - INFLUXDB_VER=1.4.2 -# - INFLUXDB_VER=1.5.4 + - INFLUXDB_VER=1.3.9 + - INFLUXDB_VER=1.4.2 + - INFLUXDB_VER=1.5.4 addons: apt: diff --git a/CHANGELOG.md b/CHANGELOG.md index fe7c5b9b..0791c2ea 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,8 +9,10 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. 
- Finally add a CHANGELOG.md to communicate breaking changes (#598) - Test multiple versions of InfluxDB in travis ### Changed +- Update test suite to support InfluxDB v1.3.9, v1.4.2, and v1.5.4 - Fix performance degradation when removing NaN values via line protocol (#592) ### Removed +- Dropped support for Python3.4 ## [v5.1.0] - 2018-06-26 ### Added diff --git a/README.rst b/README.rst index 65e000f8..af7c50d0 100644 --- a/README.rst +++ b/README.rst @@ -39,8 +39,7 @@ InfluxDB is an open-source distributed time series database, find more about Inf InfluxDB pre v1.1.0 users ------------------------- -This module is tested with InfluxDB v1.2.4, our recommended version. Though there have been v1.3 (initial TSI branch) and v1.4 releases these are not -yet supported. +This module is tested with Python {2.7,3.5,3.6} and InfluxDB v{1.2.4,1.3.9,1.4.2,1.5.4}. Those users still on InfluxDB v0.8.x users may still use the legacy client by importing ``from influxdb.influxdb08 import InfluxDBClient``. 
diff --git a/influxdb/tests/server_tests/influxdb.conf.template b/influxdb/tests/server_tests/influxdb.conf.template index 9a289635..efcff78a 100644 --- a/influxdb/tests/server_tests/influxdb.conf.template +++ b/influxdb/tests/server_tests/influxdb.conf.template @@ -1,3 +1,5 @@ +bind-address = ":{global_port}" + [meta] dir = "{meta_dir}" hostname = "localhost" @@ -6,10 +8,7 @@ [data] dir = "{data_dir}" wal-dir = "{wal_dir}" - -[admin] - enabled = true - bind-address = ":{admin_port}" + index-version = "tsi1" [http] enabled = true diff --git a/influxdb/tests/server_tests/influxdb_instance.py b/influxdb/tests/server_tests/influxdb_instance.py index 21e20fde..1dcd7567 100644 --- a/influxdb/tests/server_tests/influxdb_instance.py +++ b/influxdb/tests/server_tests/influxdb_instance.py @@ -80,7 +80,7 @@ def _start_server(self, conf_template, udp_enabled): # find a couple free ports : free_ports = get_free_ports(4) ports = {} - for service in 'http', 'admin', 'meta', 'udp': + for service in 'http', 'global', 'meta', 'udp': ports[service + '_port'] = free_ports.pop() if not udp_enabled: ports['udp_port'] = -1 @@ -113,7 +113,7 @@ def _start_server(self, conf_template, udp_enabled): "%s > Started influxdb bin in %r with ports %s and %s.." % ( datetime.datetime.now(), self.temp_dir_base, - self.admin_port, + self.global_port, self.http_port ) ) @@ -126,7 +126,7 @@ def _start_server(self, conf_template, udp_enabled): try: while time.time() < timeout: if (is_port_open(self.http_port) and - is_port_open(self.admin_port)): + is_port_open(self.global_port)): # it's hard to check if a UDP port is open.. 
if udp_enabled: # so let's just sleep 0.5 sec in this case From 4fea43ee6c84b683a06bb63e2c37cab899e04f83 Mon Sep 17 00:00:00 2001 From: Matthew McGinn Date: Tue, 3 Jul 2018 20:29:02 -0400 Subject: [PATCH 436/536] Add back PyPy3, remove references to Python 3.4 --- .travis.yml | 1 + README.rst | 6 +++--- tox.ini | 2 +- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/.travis.yml b/.travis.yml index a5fc1831..7f3d4a5d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -5,6 +5,7 @@ python: - "3.5" - "3.6" - "pypy-5.3.1" + - "pypy3" env: - INFLUXDB_VER=1.2.4 diff --git a/README.rst b/README.rst index af7c50d0..d4f9611c 100644 --- a/README.rst +++ b/README.rst @@ -39,7 +39,7 @@ InfluxDB is an open-source distributed time series database, find more about Inf InfluxDB pre v1.1.0 users ------------------------- -This module is tested with Python {2.7,3.5,3.6} and InfluxDB v{1.2.4,1.3.9,1.4.2,1.5.4}. +This module is tested with InfluxDB versions: v1.2.4, v1.3.9, v1.4.2, and v1.5.4. Those users still on InfluxDB v0.8.x users may still use the legacy client by importing ``from influxdb.influxdb08 import InfluxDBClient``. @@ -59,9 +59,9 @@ On Debian/Ubuntu, you can install it with this command:: Dependencies ------------ -The influxdb-python distribution is supported and tested on Python 2.7, 3.4, 3.5, 3.6, PyPy and PyPy3. +The influxdb-python distribution is supported and tested on Python 2.7, 3.5, 3.6, PyPy and PyPy3. -**Note:** Python 3.2 and 3.3 are currently untested. See ``.travis.yml``. +**Note:** Python <3.5 are currently untested. See ``.travis.yml``. 
Main dependency is: diff --git a/tox.ini b/tox.ini index 5c54d680..d0d87fec 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py27, py34, py35, py36, pypy, pypy3, flake8, pep257, coverage, docs +envlist = py27, py35, py36, pypy, pypy3, flake8, pep257, coverage, docs [testenv] passenv = INFLUXDB_PYTHON_INFLUXD_PATH From b7d75afa438a73ad0d8cee415326a6004b277b10 Mon Sep 17 00:00:00 2001 From: xginn8 Date: Fri, 6 Jul 2018 09:53:15 -0400 Subject: [PATCH 437/536] Use GET & POST appropriately according to InfluxDB documentation (#608) * Use GET & POST appropriately according to InfluxDB documentation From 'https://docs.influxdata.com/influxdb/v1.5/tools/api/#verb-usage', certain queries should be POST (where previously they have been GET) * Appease flake8 --- CHANGELOG.md | 1 + examples/tutorial_sine_wave.py | 2 +- influxdb/client.py | 44 ++++++++++------- influxdb/tests/client_test.py | 49 ++++++++++++++----- .../server_tests/client_test_with_server.py | 20 +++++++- 5 files changed, 84 insertions(+), 32 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0791c2ea..c6f8761b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. 
- Finally add a CHANGELOG.md to communicate breaking changes (#598) - Test multiple versions of InfluxDB in travis ### Changed +- Update POST/GET requests to follow verb guidelines from InfluxDB documentation - Update test suite to support InfluxDB v1.3.9, v1.4.2, and v1.5.4 - Fix performance degradation when removing NaN values via line protocol (#592) ### Removed diff --git a/examples/tutorial_sine_wave.py b/examples/tutorial_sine_wave.py index 99b3d388..5dfebf3c 100644 --- a/examples/tutorial_sine_wave.py +++ b/examples/tutorial_sine_wave.py @@ -43,7 +43,7 @@ def main(host='localhost', port=8086): time.sleep(3) query = 'SELECT * FROM foobar' - print("Queying data: " + query) + print("Querying data: " + query) result = client.query(query, database=DBNAME) print("Result: {0}".format(result)) diff --git a/influxdb/client.py b/influxdb/client.py index 62d5a025..e3299fe8 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -350,7 +350,8 @@ def query(self, database=None, raise_errors=True, chunked=False, - chunk_size=0): + chunk_size=0, + method="GET"): """Send a query to InfluxDB. :param query: the actual query string @@ -384,6 +385,9 @@ def query(self, :param chunk_size: Size of each chunk to tell InfluxDB to use. 
:type chunk_size: int + :param method: the HTTP method for the request, defaults to GET + :type method: str + :returns: the queried data :rtype: :class:`~.ResultSet` """ @@ -401,9 +405,12 @@ def query(self, if chunk_size > 0: params['chunk_size'] = chunk_size + if query.lower().startswith("select ") and " into " in query.lower(): + method = "POST" + response = self.request( url="query", - method='GET', + method=method, params=params, data=None, expected_response_code=expected_response_code @@ -568,7 +575,8 @@ def create_database(self, dbname): :param dbname: the name of the database to create :type dbname: str """ - self.query("CREATE DATABASE {0}".format(quote_ident(dbname))) + self.query("CREATE DATABASE {0}".format(quote_ident(dbname)), + method="POST") def drop_database(self, dbname): """Drop a database from InfluxDB. @@ -576,7 +584,8 @@ def drop_database(self, dbname): :param dbname: the name of the database to drop :type dbname: str """ - self.query("DROP DATABASE {0}".format(quote_ident(dbname))) + self.query("DROP DATABASE {0}".format(quote_ident(dbname)), + method="POST") def get_list_measurements(self): """Get the list of measurements in InfluxDB. @@ -602,7 +611,8 @@ def drop_measurement(self, measurement): :param measurement: the name of the measurement to drop :type measurement: str """ - self.query("DROP MEASUREMENT {0}".format(quote_ident(measurement))) + self.query("DROP MEASUREMENT {0}".format(quote_ident(measurement)), + method="POST") def create_retention_policy(self, name, duration, replication, database=None, default=False): @@ -634,11 +644,11 @@ def create_retention_policy(self, name, duration, replication, if default is True: query_string += " DEFAULT" - self.query(query_string) + self.query(query_string, method="POST") def alter_retention_policy(self, name, database=None, duration=None, replication=None, default=None): - """Mofidy an existing retention policy for a database. + """Modify an existing retention policy for a database. 
:param name: the name of the retention policy to modify :type name: str @@ -671,7 +681,7 @@ def alter_retention_policy(self, name, database=None, if default is True: query_string += " DEFAULT" - self.query(query_string) + self.query(query_string, method="POST") def drop_retention_policy(self, name, database=None): """Drop an existing retention policy for a database. @@ -685,7 +695,7 @@ def drop_retention_policy(self, name, database=None): query_string = ( "DROP RETENTION POLICY {0} ON {1}" ).format(quote_ident(name), quote_ident(database or self._database)) - self.query(query_string) + self.query(query_string, method="POST") def get_list_retention_policies(self, database=None): """Get the list of retention policies for a database. @@ -751,7 +761,7 @@ def create_user(self, username, password, admin=False): quote_ident(username), quote_literal(password)) if admin: text += ' WITH ALL PRIVILEGES' - self.query(text) + self.query(text, method="POST") def drop_user(self, username): """Drop a user from InfluxDB. @@ -759,8 +769,8 @@ def drop_user(self, username): :param username: the username to drop :type username: str """ - text = "DROP USER {0}".format(quote_ident(username)) - self.query(text) + text = "DROP USER {0}".format(quote_ident(username), method="POST") + self.query(text, method="POST") def set_user_password(self, username, password): """Change the password of an existing user. @@ -796,7 +806,7 @@ def delete_series(self, database=None, measurement=None, tags=None): tag_eq_list = ["{0}={1}".format(quote_ident(k), quote_literal(v)) for k, v in tags.items()] query_str += ' WHERE ' + ' AND '.join(tag_eq_list) - self.query(query_str, database=database) + self.query(query_str, database=database, method="POST") def grant_admin_privileges(self, username): """Grant cluster administration privileges to a user. @@ -808,7 +818,7 @@ def grant_admin_privileges(self, username): and manage users. 
""" text = "GRANT ALL PRIVILEGES TO {0}".format(quote_ident(username)) - self.query(text) + self.query(text, method="POST") def revoke_admin_privileges(self, username): """Revoke cluster administration privileges from a user. @@ -820,7 +830,7 @@ def revoke_admin_privileges(self, username): and manage users. """ text = "REVOKE ALL PRIVILEGES FROM {0}".format(quote_ident(username)) - self.query(text) + self.query(text, method="POST") def grant_privilege(self, privilege, database, username): """Grant a privilege on a database to a user. @@ -836,7 +846,7 @@ def grant_privilege(self, privilege, database, username): text = "GRANT {0} ON {1} TO {2}".format(privilege, quote_ident(database), quote_ident(username)) - self.query(text) + self.query(text, method="POST") def revoke_privilege(self, privilege, database, username): """Revoke a privilege on a database from a user. @@ -852,7 +862,7 @@ def revoke_privilege(self, privilege, database, username): text = "REVOKE {0} ON {1} FROM {2}".format(privilege, quote_ident(database), quote_ident(username)) - self.query(text) + self.query(text, method="POST") def get_list_privileges(self, username): """Get the list of all privileges granted to given user. 
diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index efdfb770..859e8bc9 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -439,6 +439,29 @@ def test_query(self): [{'value': 0.64, 'time': '2009-11-10T23:00:00Z'}] ) + def test_select_into_post(self): + """Test SELECT.*INTO is POSTed.""" + example_response = ( + '{"results": [{"series": [{"measurement": "sdfsdfsdf", ' + '"columns": ["time", "value"], "values": ' + '[["2009-11-10T23:00:00Z", 0.64]]}]}, {"series": ' + '[{"measurement": "cpu_load_short", "columns": ["time", "value"], ' + '"values": [["2009-11-10T23:00:00Z", 0.64]]}]}]}' + ) + + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.POST, + "http://localhost:8086/query", + text=example_response + ) + rs = self.cli.query('select * INTO newmeas from foo') + + self.assertListEqual( + list(rs[0].get_points()), + [{'value': 0.64, 'time': '2009-11-10T23:00:00Z'}] + ) + @unittest.skip('Not implemented for 0.9') def test_query_chunked(self): """Test chunked query for TestInfluxDBClient object.""" @@ -495,7 +518,7 @@ def test_create_database(self): """Test create database for TestInfluxDBClient object.""" with requests_mock.Mocker() as m: m.register_uri( - requests_mock.GET, + requests_mock.POST, "http://localhost:8086/query", text='{"results":[{}]}' ) @@ -509,7 +532,7 @@ def test_create_numeric_named_database(self): """Test create db w/numeric name for TestInfluxDBClient object.""" with requests_mock.Mocker() as m: m.register_uri( - requests_mock.GET, + requests_mock.POST, "http://localhost:8086/query", text='{"results":[{}]}' ) @@ -529,7 +552,7 @@ def test_drop_database(self): """Test drop database for TestInfluxDBClient object.""" with requests_mock.Mocker() as m: m.register_uri( - requests_mock.GET, + requests_mock.POST, "http://localhost:8086/query", text='{"results":[{}]}' ) @@ -543,7 +566,7 @@ def test_drop_measurement(self): """Test drop measurement for TestInfluxDBClient object.""" 
with requests_mock.Mocker() as m: m.register_uri( - requests_mock.GET, + requests_mock.POST, "http://localhost:8086/query", text='{"results":[{}]}' ) @@ -557,7 +580,7 @@ def test_drop_numeric_named_database(self): """Test drop numeric db for TestInfluxDBClient object.""" with requests_mock.Mocker() as m: m.register_uri( - requests_mock.GET, + requests_mock.POST, "http://localhost:8086/query", text='{"results":[{}]}' ) @@ -615,7 +638,7 @@ def test_create_retention_policy_default(self): with requests_mock.Mocker() as m: m.register_uri( - requests_mock.GET, + requests_mock.POST, "http://localhost:8086/query", text=example_response ) @@ -635,7 +658,7 @@ def test_create_retention_policy(self): with requests_mock.Mocker() as m: m.register_uri( - requests_mock.GET, + requests_mock.POST, "http://localhost:8086/query", text=example_response ) @@ -655,7 +678,7 @@ def test_alter_retention_policy(self): with requests_mock.Mocker() as m: m.register_uri( - requests_mock.GET, + requests_mock.POST, "http://localhost:8086/query", text=example_response ) @@ -695,7 +718,7 @@ def test_drop_retention_policy(self): with requests_mock.Mocker() as m: m.register_uri( - requests_mock.GET, + requests_mock.POST, "http://localhost:8086/query", text=example_response ) @@ -879,7 +902,7 @@ def test_grant_admin_privileges(self): with requests_mock.Mocker() as m: m.register_uri( - requests_mock.GET, + requests_mock.POST, "http://localhost:8086/query", text=example_response ) @@ -903,7 +926,7 @@ def test_revoke_admin_privileges(self): with requests_mock.Mocker() as m: m.register_uri( - requests_mock.GET, + requests_mock.POST, "http://localhost:8086/query", text=example_response ) @@ -927,7 +950,7 @@ def test_grant_privilege(self): with requests_mock.Mocker() as m: m.register_uri( - requests_mock.GET, + requests_mock.POST, "http://localhost:8086/query", text=example_response ) @@ -951,7 +974,7 @@ def test_revoke_privilege(self): with requests_mock.Mocker() as m: m.register_uri( - requests_mock.GET, + 
requests_mock.POST, "http://localhost:8086/query", text=example_response ) diff --git a/influxdb/tests/server_tests/client_test_with_server.py b/influxdb/tests/server_tests/client_test_with_server.py index 701f72ac..d2370e63 100644 --- a/influxdb/tests/server_tests/client_test_with_server.py +++ b/influxdb/tests/server_tests/client_test_with_server.py @@ -211,7 +211,7 @@ def test_drop_user(self): self.assertEqual(users, []) def test_drop_user_nonexisting(self): - """Test dropping a nonexistant user.""" + """Test dropping a nonexistent user.""" with self.assertRaises(InfluxDBClientError) as ctx: self.cli.drop_user('test') self.assertIn('user not found', @@ -383,6 +383,24 @@ def test_write_multiple_points_different_series(self): ]] ) + def test_select_into_as_post(self): + """Test SELECT INTO is POSTed.""" + self.assertIs(True, self.cli.write_points(dummy_points)) + time.sleep(1) + rsp = self.cli.query('SELECT * INTO "newmeas" FROM "memory"') + rsp = self.cli.query('SELECT * FROM "newmeas"') + lrsp = list(rsp) + + self.assertEqual( + lrsp, + [[ + {'value': 33, + 'time': '2009-11-10T23:01:35Z', + "host": "server01", + "region": "us-west"} + ]] + ) + @unittest.skip("Broken as of 0.9.0") def test_write_multiple_points_different_series_DF(self): """Test write multiple points using dataframe to different series.""" From b7e43c32c9de0577e6174dd2eeaf2ee5bb92afa5 Mon Sep 17 00:00:00 2001 From: xginn8 Date: Fri, 6 Jul 2018 10:20:53 -0400 Subject: [PATCH 438/536] =?UTF-8?q?Add=20shard=5Fduration=20parameter=20wh?= =?UTF-8?q?en=20creating=20or=20altering=20retention=20poli=E2=80=A6=20(#6?= =?UTF-8?q?06)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Add shard_duration parameter when creating or altering retention policies Fixes #560 * Remove debug print statement --- CHANGELOG.md | 1 + influxdb/client.py | 32 ++++++-- influxdb/tests/client_test.py | 12 ++- .../server_tests/client_test_with_server.py | 74 ++++++++++++++++++- 4 files 
changed, 108 insertions(+), 11 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c6f8761b..9306a7ed 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,6 +8,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ### Added - Finally add a CHANGELOG.md to communicate breaking changes (#598) - Test multiple versions of InfluxDB in travis +- Add SHARD DURATION parameter to retention policy create/alter ### Changed - Update POST/GET requests to follow verb guidelines from InfluxDB documentation - Update test suite to support InfluxDB v1.3.9, v1.4.2, and v1.5.4 diff --git a/influxdb/client.py b/influxdb/client.py index e3299fe8..8f8b14ae 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -615,7 +615,8 @@ def drop_measurement(self, measurement): method="POST") def create_retention_policy(self, name, duration, replication, - database=None, default=False): + database=None, + default=False, shard_duration="0s"): """Create a retention policy for a database. :param name: the name of the new retention policy @@ -634,12 +635,21 @@ def create_retention_policy(self, name, duration, replication, :type database: str :param default: whether or not to set the policy as default :type default: bool + :param shard_duration: the shard duration of the retention policy. + Durations such as 1h, 90m, 12h, 7d, and 4w, are all supported and + mean 1 hour, 90 minutes, 12 hours, 7 day, and 4 weeks, + respectively. Infinite retention is not supported. As a workaround, + specify a "1000w" duration to achieve an extremely long shard group + duration. Defaults to "0s", which is interpreted by the database + to mean the default value given the duration. + The minimum shard group duration is 1 hour. 
+ :type shard_duration: str """ query_string = \ "CREATE RETENTION POLICY {0} ON {1} " \ - "DURATION {2} REPLICATION {3}".format( + "DURATION {2} REPLICATION {3} SHARD DURATION {4}".format( quote_ident(name), quote_ident(database or self._database), - duration, replication) + duration, replication, shard_duration) if default is True: query_string += " DEFAULT" @@ -647,7 +657,8 @@ def create_retention_policy(self, name, duration, replication, self.query(query_string, method="POST") def alter_retention_policy(self, name, database=None, - duration=None, replication=None, default=None): + duration=None, replication=None, + default=None, shard_duration=None): """Modify an existing retention policy for a database. :param name: the name of the retention policy to modify @@ -667,15 +678,26 @@ def alter_retention_policy(self, name, database=None, :type replication: int :param default: whether or not to set the modified policy as default :type default: bool + :param shard_duration: the shard duration of the retention policy. + Durations such as 1h, 90m, 12h, 7d, and 4w, are all supported and + mean 1 hour, 90 minutes, 12 hours, 7 day, and 4 weeks, + respectively. Infinite retention is not supported. As a workaround, + specify a "1000w" duration to achieve an extremely long shard group + duration. + The minimum shard group duration is 1 hour. + :type shard_duration: str .. note:: at least one of duration, replication, or default flag should be set. Otherwise the operation will fail. 
""" query_string = ( "ALTER RETENTION POLICY {0} ON {1}" - ).format(quote_ident(name), quote_ident(database or self._database)) + ).format(quote_ident(name), + quote_ident(database or self._database), shard_duration) if duration: query_string += " DURATION {0}".format(duration) + if shard_duration: + query_string += " SHARD DURATION {0}".format(shard_duration) if replication: query_string += " REPLICATION {0}".format(replication) if default is True: diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index 859e8bc9..e27eef17 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -649,7 +649,7 @@ def test_create_retention_policy_default(self): self.assertEqual( m.last_request.qs['q'][0], 'create retention policy "somename" on ' - '"db" duration 1d replication 4 default' + '"db" duration 1d replication 4 shard duration 0s default' ) def test_create_retention_policy(self): @@ -669,7 +669,7 @@ def test_create_retention_policy(self): self.assertEqual( m.last_request.qs['q'][0], 'create retention policy "somename" on ' - '"db" duration 1d replication 4' + '"db" duration 1d replication 4 shard duration 0s' ) def test_alter_retention_policy(self): @@ -697,6 +697,14 @@ def test_alter_retention_policy(self): 'alter retention policy "somename" on "db" replication 4' ) + # Test alter shard duration + self.cli.alter_retention_policy('somename', 'db', + shard_duration='1h') + self.assertEqual( + m.last_request.qs['q'][0], + 'alter retention policy "somename" on "db" shard duration 1h' + ) + # Test alter default self.cli.alter_retention_policy('somename', 'db', default=True) diff --git a/influxdb/tests/server_tests/client_test_with_server.py b/influxdb/tests/server_tests/client_test_with_server.py index d2370e63..2f8a2097 100644 --- a/influxdb/tests/server_tests/client_test_with_server.py +++ b/influxdb/tests/server_tests/client_test_with_server.py @@ -544,13 +544,57 @@ def test_create_retention_policy(self): rsp ) + 
self.cli.drop_retention_policy('somename', 'db') + # recreate the RP + self.cli.create_retention_policy('somename', '1w', 1, + shard_duration='1h') + + rsp = self.cli.get_list_retention_policies() + self.assertEqual( + [ + {'duration': '0s', + 'default': True, + 'replicaN': 1, + 'shardGroupDuration': u'168h0m0s', + 'name': 'autogen'}, + {'duration': '168h0m0s', + 'default': False, + 'replicaN': 1, + 'shardGroupDuration': u'1h0m0s', + 'name': 'somename'} + ], + rsp + ) + + self.cli.drop_retention_policy('somename', 'db') + # recreate the RP + self.cli.create_retention_policy('somename', '1w', 1) + + rsp = self.cli.get_list_retention_policies() + self.assertEqual( + [ + {'duration': '0s', + 'default': True, + 'replicaN': 1, + 'shardGroupDuration': u'168h0m0s', + 'name': 'autogen'}, + {'duration': '168h0m0s', + 'default': False, + 'replicaN': 1, + 'shardGroupDuration': u'24h0m0s', + 'name': 'somename'} + ], + rsp + ) + def test_alter_retention_policy(self): """Test alter a retention policy, not default.""" self.cli.create_retention_policy('somename', '1d', 1) # Test alter duration self.cli.alter_retention_policy('somename', 'db', - duration='4d') + duration='4d', + shard_duration='2h') # NB: altering retention policy doesn't change shard group duration rsp = self.cli.get_list_retention_policies() self.assertEqual( @@ -563,7 +607,7 @@ def test_alter_retention_policy(self): {'duration': '96h0m0s', 'default': False, 'replicaN': 1, - 'shardGroupDuration': u'1h0m0s', + 'shardGroupDuration': u'2h0m0s', 'name': 'somename'} ], rsp @@ -572,6 +616,7 @@ def test_alter_retention_policy(self): # Test alter replication self.cli.alter_retention_policy('somename', 'db', replication=4) + # NB: altering retention policy doesn't change shard group duration rsp = self.cli.get_list_retention_policies() self.assertEqual( @@ -584,7 +629,7 @@ def test_alter_retention_policy(self): {'duration': '96h0m0s', 'default': False, 'replicaN': 4, - 'shardGroupDuration': u'1h0m0s', + 
'shardGroupDuration': u'2h0m0s', 'name': 'somename'} ], rsp @@ -605,7 +650,28 @@ def test_alter_retention_policy(self): {'duration': '96h0m0s', 'default': True, 'replicaN': 4, - 'shardGroupDuration': u'1h0m0s', + 'shardGroupDuration': u'2h0m0s', + 'name': 'somename'} + ], + rsp + ) + + # Test alter shard_duration + self.cli.alter_retention_policy('somename', 'db', + shard_duration='4h') + + rsp = self.cli.get_list_retention_policies() + self.assertEqual( + [ + {'duration': '0s', + 'default': False, + 'replicaN': 1, + 'shardGroupDuration': u'168h0m0s', + 'name': 'autogen'}, + {'duration': '96h0m0s', + 'default': True, + 'replicaN': 4, + 'shardGroupDuration': u'4h0m0s', 'name': 'somename'} ], rsp From 1c96ce244869b8d4fdee3b70bd441d707f42cd2c Mon Sep 17 00:00:00 2001 From: aviau Date: Tue, 10 Jul 2018 18:07:52 -0400 Subject: [PATCH 439/536] v5.2.0 --- CHANGELOG.md | 7 +++++++ influxdb/__init__.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9306a7ed..22a5b19d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,13 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. 
## [Unreleased] ### Added + +### Changed + +### Removed + +## [v5.2.0] - 2018-07-10 +### Added - Finally add a CHANGELOG.md to communicate breaking changes (#598) - Test multiple versions of InfluxDB in travis - Add SHARD DURATION parameter to retention policy create/alter diff --git a/influxdb/__init__.py b/influxdb/__init__.py index 374fddc7..03f74581 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -18,4 +18,4 @@ ] -__version__ = '5.1.0' +__version__ = '5.2.0' From caa9e7d044f0ac344200d45c385799207c311701 Mon Sep 17 00:00:00 2001 From: xginn8 Date: Sun, 15 Jul 2018 19:31:44 -0400 Subject: [PATCH 440/536] Pass through method kwarg to DataFrameClient query method (#617) Also, add db maintenance tests from InfluxDBClient fixes #616 --- CHANGELOG.md | 1 + influxdb/_dataframe_client.py | 4 +- influxdb/tests/dataframe_client_test.py | 242 ++++++++++++++++++++++++ 3 files changed, 246 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 22a5b19d..c156b678 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,6 +8,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ### Added ### Changed +- Pass through the "method" kwarg to DataFrameClient queries ### Removed diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index 646f298c..06da7ac4 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -148,9 +148,10 @@ def query(self, raise_errors=True, chunked=False, chunk_size=0, + method="GET", dropna=True): """ - Quering data into a DataFrame. + Query data into a DataFrame. 
:param query: the actual query string :param params: additional parameters for the request, defaults to {} @@ -176,6 +177,7 @@ def query(self, raise_errors=raise_errors, chunked=chunked, database=database, + method=method, chunk_size=chunk_size) results = super(DataFrameClient, self).query(query, **query_args) if query.strip().upper().startswith("SELECT"): diff --git a/influxdb/tests/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py index 78f5437f..72447c89 100644 --- a/influxdb/tests/dataframe_client_test.py +++ b/influxdb/tests/dataframe_client_test.py @@ -552,6 +552,248 @@ def test_write_points_from_dataframe_fails_with_series(self): cli = DataFrameClient(database='db') cli.write_points(dataframe, "foo") + def test_create_database(self): + """Test create database for TestInfluxDBClient object.""" + cli = DataFrameClient(database='db') + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.POST, + "http://localhost:8086/query", + text='{"results":[{}]}' + ) + cli.create_database('new_db') + self.assertEqual( + m.last_request.qs['q'][0], + 'create database "new_db"' + ) + + def test_create_numeric_named_database(self): + """Test create db w/numeric name for TestInfluxDBClient object.""" + cli = DataFrameClient(database='db') + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.POST, + "http://localhost:8086/query", + text='{"results":[{}]}' + ) + cli.create_database('123') + self.assertEqual( + m.last_request.qs['q'][0], + 'create database "123"' + ) + + @raises(Exception) + def test_create_database_fails(self): + """Test create database fail for TestInfluxDBClient object.""" + cli = DataFrameClient(database='db') + with _mocked_session(cli, 'post', 401): + cli.create_database('new_db') + + def test_drop_database(self): + """Test drop database for TestInfluxDBClient object.""" + cli = DataFrameClient(database='db') + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.POST, + 
"http://localhost:8086/query", + text='{"results":[{}]}' + ) + cli.drop_database('new_db') + self.assertEqual( + m.last_request.qs['q'][0], + 'drop database "new_db"' + ) + + def test_drop_measurement(self): + """Test drop measurement for TestInfluxDBClient object.""" + cli = DataFrameClient(database='db') + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.POST, + "http://localhost:8086/query", + text='{"results":[{}]}' + ) + cli.drop_measurement('new_measurement') + self.assertEqual( + m.last_request.qs['q'][0], + 'drop measurement "new_measurement"' + ) + + def test_drop_numeric_named_database(self): + """Test drop numeric db for TestInfluxDBClient object.""" + cli = DataFrameClient(database='db') + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.POST, + "http://localhost:8086/query", + text='{"results":[{}]}' + ) + cli.drop_database('123') + self.assertEqual( + m.last_request.qs['q'][0], + 'drop database "123"' + ) + + @raises(Exception) + def test_get_list_database_fails(self): + """Test get list of dbs fail for TestInfluxDBClient object.""" + cli = DataFrameClient('host', 8086, 'username', 'password') + with _mocked_session(cli, 'get', 401): + cli.get_list_database() + + def test_get_list_measurements(self): + """Test get list of measurements for TestInfluxDBClient object.""" + cli = DataFrameClient(database='db') + data = { + "results": [{ + "series": [ + {"name": "measurements", + "columns": ["name"], + "values": [["cpu"], ["disk"] + ]}]} + ] + } + + with _mocked_session(cli, 'get', 200, json.dumps(data)): + self.assertListEqual( + cli.get_list_measurements(), + [{'name': 'cpu'}, {'name': 'disk'}] + ) + + def test_create_retention_policy_default(self): + """Test create default ret policy for TestInfluxDBClient object.""" + cli = DataFrameClient(database='db') + example_response = '{"results":[{}]}' + + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.POST, + "http://localhost:8086/query", + 
text=example_response + ) + cli.create_retention_policy( + 'somename', '1d', 4, default=True, database='db' + ) + + self.assertEqual( + m.last_request.qs['q'][0], + 'create retention policy "somename" on ' + '"db" duration 1d replication 4 shard duration 0s default' + ) + + def test_create_retention_policy(self): + """Test create retention policy for TestInfluxDBClient object.""" + cli = DataFrameClient(database='db') + example_response = '{"results":[{}]}' + + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.POST, + "http://localhost:8086/query", + text=example_response + ) + cli.create_retention_policy( + 'somename', '1d', 4, database='db' + ) + + self.assertEqual( + m.last_request.qs['q'][0], + 'create retention policy "somename" on ' + '"db" duration 1d replication 4 shard duration 0s' + ) + + def test_alter_retention_policy(self): + """Test alter retention policy for TestInfluxDBClient object.""" + cli = DataFrameClient(database='db') + example_response = '{"results":[{}]}' + + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.POST, + "http://localhost:8086/query", + text=example_response + ) + # Test alter duration + cli.alter_retention_policy('somename', 'db', + duration='4d') + self.assertEqual( + m.last_request.qs['q'][0], + 'alter retention policy "somename" on "db" duration 4d' + ) + # Test alter replication + cli.alter_retention_policy('somename', 'db', + replication=4) + self.assertEqual( + m.last_request.qs['q'][0], + 'alter retention policy "somename" on "db" replication 4' + ) + + # Test alter shard duration + cli.alter_retention_policy('somename', 'db', + shard_duration='1h') + self.assertEqual( + m.last_request.qs['q'][0], + 'alter retention policy "somename" on "db" shard duration 1h' + ) + + # Test alter default + cli.alter_retention_policy('somename', 'db', + default=True) + self.assertEqual( + m.last_request.qs['q'][0], + 'alter retention policy "somename" on "db" default' + ) + + @raises(Exception) + def 
test_alter_retention_policy_invalid(self): + """Test invalid alter ret policy for TestInfluxDBClient object.""" + cli = DataFrameClient('host', 8086, 'username', 'password') + with _mocked_session(cli, 'get', 400): + cli.alter_retention_policy('somename', 'db') + + def test_drop_retention_policy(self): + """Test drop retention policy for TestInfluxDBClient object.""" + cli = DataFrameClient(database='db') + example_response = '{"results":[{}]}' + + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.POST, + "http://localhost:8086/query", + text=example_response + ) + cli.drop_retention_policy('somename', 'db') + self.assertEqual( + m.last_request.qs['q'][0], + 'drop retention policy "somename" on "db"' + ) + + @raises(Exception) + def test_drop_retention_policy_fails(self): + """Test failed drop ret policy for TestInfluxDBClient object.""" + cli = DataFrameClient('host', 8086, 'username', 'password') + with _mocked_session(cli, 'delete', 401): + cli.drop_retention_policy('default', 'db') + + def test_get_list_retention_policies(self): + """Test get retention policies for TestInfluxDBClient object.""" + cli = DataFrameClient(database='db') + example_response = \ + '{"results": [{"series": [{"values": [["fsfdsdf", "24h0m0s", 2]],'\ + ' "columns": ["name", "duration", "replicaN"]}]}]}' + + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.GET, + "http://localhost:8086/query", + text=example_response + ) + self.assertListEqual( + cli.get_list_retention_policies("db"), + [{'duration': '24h0m0s', + 'name': 'fsfdsdf', 'replicaN': 2}] + ) + def test_query_into_dataframe(self): """Test query into df for TestDataFrameClient object.""" data = { From 7b7a719f4a61304f13f3b13ff20b5f118d6674bb Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 7 Dec 2018 15:56:57 -0500 Subject: [PATCH 441/536] 5.2.1 release --- influxdb/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/__init__.py b/influxdb/__init__.py index 
03f74581..a1eb3789 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -18,4 +18,4 @@ ] -__version__ = '5.2.0' +__version__ = '5.2.1' From d37089508794c4bd5c63775357a4967b832e1f31 Mon Sep 17 00:00:00 2001 From: aviau Date: Fri, 7 Dec 2018 15:59:02 -0500 Subject: [PATCH 442/536] release 5.2.1 in changelog --- CHANGELOG.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index c156b678..14a9abf4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,14 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). ## [Unreleased] + +### Added + +### Changed + +### Removed + +## [v5.2.1] - 2018-12-07 ### Added ### Changed From 7d924d5faa5afc0f698297709e95b5d8209a7f52 Mon Sep 17 00:00:00 2001 From: xginn8 Date: Tue, 12 Feb 2019 12:48:49 -0300 Subject: [PATCH 443/536] Unpin setuptools to fix travis (#674) * Unpin setuptools to fix travis Signed-off-by: Matthew McGinn * Add some ignores for new flake8 tests Signed-off-by: Matthew McGinn --- .travis.yml | 2 +- tox.ini | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 7f3d4a5d..22626f40 100644 --- a/.travis.yml +++ b/.travis.yml @@ -31,7 +31,7 @@ matrix: install: - pip install tox-travis - - pip install setuptools==20.6.6 + - pip install setuptools - pip install coveralls - mkdir -p "influxdb_install/${INFLUXDB_VER}" - if [ -n "${INFLUXDB_VER}" ] ; then wget "https://dl.influxdata.com/influxdb/releases/influxdb_${INFLUXDB_VER}_amd64.deb" ; fi diff --git a/tox.ini b/tox.ini index d0d87fec..2f9c212c 100644 --- a/tox.ini +++ b/tox.ini @@ -15,7 +15,7 @@ commands = nosetests -v --with-doctest {posargs} deps = flake8 pep8-naming -commands = flake8 influxdb +commands = flake8 --ignore=W503,W504,W605,N802,F821 influxdb [testenv:pep257] deps = pydocstyle From 1cce011d6c4f5623141b3bcff65197c2db18ddc8 Mon Sep 17 00:00:00 2001 
From: Colas Le Guernic Date: Thu, 14 Mar 2019 11:25:58 +0000 Subject: [PATCH 444/536] unpin pypy (#682) cryptography-2.5 is not compatible with PyPy < 5.4 --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 22626f40..a1cf7b55 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,7 +4,7 @@ python: - "2.7" - "3.5" - "3.6" - - "pypy-5.3.1" + - "pypy" - "pypy3" env: From 02de23fb2c27abc8473d108af2751997634d906a Mon Sep 17 00:00:00 2001 From: xginn8 Date: Thu, 14 Mar 2019 09:11:52 -0400 Subject: [PATCH 445/536] Rename all mixedCase globals to snake case to appease N816 (#689) Signed-off-by: Matthew McGinn --- influxdb/tests/__init__.py | 6 ++-- influxdb/tests/dataframe_client_test.py | 4 +-- .../tests/influxdb08/dataframe_client_test.py | 4 +-- .../server_tests/client_test_with_server.py | 34 +++++++++---------- 4 files changed, 24 insertions(+), 24 deletions(-) diff --git a/influxdb/tests/__init__.py b/influxdb/tests/__init__.py index adf2f20c..f7c5dfb9 100644 --- a/influxdb/tests/__init__.py +++ b/influxdb/tests/__init__.py @@ -12,10 +12,10 @@ import unittest using_pypy = hasattr(sys, "pypy_version_info") -skipIfPYpy = unittest.skipIf(using_pypy, "Skipping this test on pypy.") +skip_if_pypy = unittest.skipIf(using_pypy, "Skipping this test on pypy.") _skip_server_tests = os.environ.get( 'INFLUXDB_PYTHON_SKIP_SERVER_TESTS', None) == 'True' -skipServerTests = unittest.skipIf(_skip_server_tests, - "Skipping server tests...") +skip_server_tests = unittest.skipIf(_skip_server_tests, + "Skipping server tests...") diff --git a/influxdb/tests/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py index 72447c89..9fd6427b 100644 --- a/influxdb/tests/dataframe_client_test.py +++ b/influxdb/tests/dataframe_client_test.py @@ -13,7 +13,7 @@ import warnings import requests_mock -from influxdb.tests import skipIfPYpy, using_pypy +from influxdb.tests import skip_if_pypy, using_pypy from nose.tools import raises from 
.client_test import _mocked_session @@ -24,7 +24,7 @@ from influxdb import DataFrameClient -@skipIfPYpy +@skip_if_pypy class TestDataFrameClient(unittest.TestCase): """Set up a test DataFrameClient object.""" diff --git a/influxdb/tests/influxdb08/dataframe_client_test.py b/influxdb/tests/influxdb08/dataframe_client_test.py index 6e6fa2cc..0a766af0 100644 --- a/influxdb/tests/influxdb08/dataframe_client_test.py +++ b/influxdb/tests/influxdb08/dataframe_client_test.py @@ -12,7 +12,7 @@ from nose.tools import raises -from influxdb.tests import skipIfPYpy, using_pypy +from influxdb.tests import skip_if_pypy, using_pypy from .client_test import _mocked_session @@ -22,7 +22,7 @@ from influxdb.influxdb08 import DataFrameClient -@skipIfPYpy +@skip_if_pypy class TestDataFrameClient(unittest.TestCase): """Define the DataFramClient test object.""" diff --git a/influxdb/tests/server_tests/client_test_with_server.py b/influxdb/tests/server_tests/client_test_with_server.py index 2f8a2097..4dbc1b75 100644 --- a/influxdb/tests/server_tests/client_test_with_server.py +++ b/influxdb/tests/server_tests/client_test_with_server.py @@ -23,7 +23,7 @@ from influxdb import InfluxDBClient from influxdb.exceptions import InfluxDBClientError -from influxdb.tests import skipIfPYpy, using_pypy, skipServerTests +from influxdb.tests import skip_if_pypy, using_pypy, skip_server_tests from influxdb.tests.server_tests.base import ManyTestCasesWithServerMixin from influxdb.tests.server_tests.base import SingleTestCaseWithServerMixin @@ -82,7 +82,7 @@ def point(series_name, timestamp=None, tags=None, **fields): ] if not using_pypy: - dummy_pointDF = { + dummy_point_df = { "measurement": "cpu_load_short", "tags": {"host": "server01", "region": "us-west"}, @@ -90,7 +90,7 @@ def point(series_name, timestamp=None, tags=None, **fields): [[0.64]], columns=['value'], index=pd.to_datetime(["2009-11-10T23:00:00Z"])) } - dummy_pointsDF = [{ + dummy_points_df = [{ "measurement": "cpu_load_short", "tags": 
{"host": "server01", "region": "us-west"}, "dataframe": pd.DataFrame( @@ -120,7 +120,7 @@ def point(series_name, timestamp=None, tags=None, **fields): ] -@skipServerTests +@skip_server_tests class SimpleTests(SingleTestCaseWithServerMixin, unittest.TestCase): """Define the class of simple tests.""" @@ -267,7 +267,7 @@ def test_invalid_port_fails(self): InfluxDBClient('host', '80/redir', 'username', 'password') -@skipServerTests +@skip_server_tests class CommonTests(ManyTestCasesWithServerMixin, unittest.TestCase): """Define a class to handle common tests for the server.""" @@ -293,15 +293,15 @@ def test_write_points(self): """Test writing points to the server.""" self.assertIs(True, self.cli.write_points(dummy_point)) - @skipIfPYpy + @skip_if_pypy def test_write_points_DF(self): """Test writing points with dataframe.""" self.assertIs( True, self.cliDF.write_points( - dummy_pointDF['dataframe'], - dummy_pointDF['measurement'], - dummy_pointDF['tags'] + dummy_point_df['dataframe'], + dummy_point_df['measurement'], + dummy_point_df['tags'] ) ) @@ -342,7 +342,7 @@ def test_write_points_check_read_DF(self): rsp = self.cliDF.query('SELECT * FROM cpu_load_short') assert_frame_equal( rsp['cpu_load_short'], - dummy_pointDF['dataframe'] + dummy_point_df['dataframe'] ) # Query with Tags @@ -351,7 +351,7 @@ def test_write_points_check_read_DF(self): assert_frame_equal( rsp[('cpu_load_short', (('host', 'server01'), ('region', 'us-west')))], - dummy_pointDF['dataframe'] + dummy_point_df['dataframe'] ) def test_write_multiple_points_different_series(self): @@ -407,21 +407,21 @@ def test_write_multiple_points_different_series_DF(self): for i in range(2): self.assertIs( True, self.cliDF.write_points( - dummy_pointsDF[i]['dataframe'], - dummy_pointsDF[i]['measurement'], - dummy_pointsDF[i]['tags'])) + dummy_points_df[i]['dataframe'], + dummy_points_df[i]['measurement'], + dummy_points_df[i]['tags'])) time.sleep(1) rsp = self.cliDF.query('SELECT * FROM cpu_load_short') 
assert_frame_equal( rsp['cpu_load_short'], - dummy_pointsDF[0]['dataframe'] + dummy_points_df[0]['dataframe'] ) rsp = self.cliDF.query('SELECT * FROM memory') assert_frame_equal( rsp['memory'], - dummy_pointsDF[1]['dataframe'] + dummy_points_df[1]['dataframe'] ) def test_write_points_batch(self): @@ -786,7 +786,7 @@ def test_query_multiple_series(self): self.cli.write_points(pts) -@skipServerTests +@skip_server_tests class UdpTests(ManyTestCasesWithServerMixin, unittest.TestCase): """Define a class to test UDP series.""" From 78dafd7d9d58cbf0f0ab183b0268770251f1b9b6 Mon Sep 17 00:00:00 2001 From: Matthew McGinn Date: Thu, 14 Mar 2019 10:33:50 -0300 Subject: [PATCH 446/536] Fixup small test docstring typo Signed-off-by: Matthew McGinn --- influxdb/tests/dataframe_client_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/tests/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py index 9fd6427b..aa055032 100644 --- a/influxdb/tests/dataframe_client_test.py +++ b/influxdb/tests/dataframe_client_test.py @@ -837,7 +837,7 @@ def test_query_into_dataframe(self): assert_frame_equal(expected[k], result[k]) def test_multiquery_into_dataframe(self): - """Test multiquyer into df for TestDataFrameClient object.""" + """Test multiquery into df for TestDataFrameClient object.""" data = { "results": [ { From f03f4957c70ac5429576281cbe8aaaec06e803fd Mon Sep 17 00:00:00 2001 From: Colas Le Guernic Date: Thu, 14 Mar 2019 14:04:32 +0000 Subject: [PATCH 447/536] Fix tz localize (#684) * fix already tz-aware error * fix tests tz_localize * update CHANGELOG.md --- CHANGELOG.md | 1 + influxdb/_dataframe_client.py | 3 ++- influxdb/tests/dataframe_client_test.py | 15 ++++++++++----- 3 files changed, 13 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 14a9abf4..590bd4f3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. 
### Added ### Changed +- Fix 'TypeError: Already tz-aware' introduced with recent versions of Panda (#671, #676, thx @f4bsch @clslgrnc) ### Removed diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index 06da7ac4..3b7a39db 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -202,7 +202,8 @@ def _to_dataframe(self, rs, dropna=True): df = pd.DataFrame(data) df.time = pd.to_datetime(df.time) df.set_index('time', inplace=True) - df.index = df.index.tz_localize('UTC') + if df.index.tzinfo is None: + df.index = df.index.tz_localize('UTC') df.index.name = None result[key].append(df) for key, data in result.items(): diff --git a/influxdb/tests/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py index aa055032..ad910a6d 100644 --- a/influxdb/tests/dataframe_client_test.py +++ b/influxdb/tests/dataframe_client_test.py @@ -818,13 +818,15 @@ def test_query_into_dataframe(self): pd1 = pd.DataFrame( [[23422]], columns=['value'], index=pd.to_datetime(["2009-11-10T23:00:00Z"])) - pd1.index = pd1.index.tz_localize('UTC') + if pd1.index.tzinfo is None: + pd1.index = pd1.index.tz_localize('UTC') pd2 = pd.DataFrame( [[23422], [23422], [23422]], columns=['value'], index=pd.to_datetime(["2009-11-10T23:00:00Z", "2009-11-10T23:00:00Z", "2009-11-10T23:00:00Z"])) - pd2.index = pd2.index.tz_localize('UTC') + if pd2.index.tzinfo is None: + pd2.index = pd2.index.tz_localize('UTC') expected = { ('network', (('direction', ''),)): pd1, ('network', (('direction', 'in'),)): pd2 @@ -871,11 +873,14 @@ def test_multiquery_into_dataframe(self): index=pd.to_datetime([ "2015-01-29 21:55:43.702900257+0000", "2015-01-29 21:55:43.702900257+0000", - "2015-06-11 20:46:02+0000"])).tz_localize('UTC') + "2015-06-11 20:46:02+0000"])) + if pd1.index.tzinfo is None: + pd1.index = pd1.index.tz_localize('UTC') pd2 = pd.DataFrame( [[3]], columns=['count'], - index=pd.to_datetime(["1970-01-01 00:00:00+00:00"]))\ - .tz_localize('UTC') + 
index=pd.to_datetime(["1970-01-01 00:00:00+00:00"])) + if pd2.index.tzinfo is None: + pd2.index = pd2.index.tz_localize('UTC') expected = [{'cpu_load_short': pd1}, {'cpu_load_short': pd2}] cli = DataFrameClient('host', 8086, 'username', 'password', 'db') From 5eda20403ca9ace10d2b5e0f76b28a703c5d5de8 Mon Sep 17 00:00:00 2001 From: Matthew McGinn Date: Thu, 14 Mar 2019 11:19:55 -0300 Subject: [PATCH 448/536] Bump version to 5.2.2 Signed-off-by: Matthew McGinn --- CHANGELOG.md | 7 ++++++- influxdb/__init__.py | 2 +- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 590bd4f3..035476ab 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,10 +9,15 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ### Added ### Changed -- Fix 'TypeError: Already tz-aware' introduced with recent versions of Panda (#671, #676, thx @f4bsch @clslgrnc) ### Removed +## [v5.2.2] - 2019-03-14 +### Added + +### Changed +- Fix 'TypeError: Already tz-aware' introduced with recent versions of Panda (#671, #676, thx @f4bsch @clslgrnc) + ## [v5.2.1] - 2018-12-07 ### Added diff --git a/influxdb/__init__.py b/influxdb/__init__.py index a1eb3789..288880b1 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -18,4 +18,4 @@ ] -__version__ = '5.2.1' +__version__ = '5.2.2' From bf95e0f2969fc9a07042b7419a18fc2a2c698085 Mon Sep 17 00:00:00 2001 From: Colas Le Guernic Date: Fri, 15 Mar 2019 17:13:43 +0000 Subject: [PATCH 449/536] [WIP] add py37 and recent influxdb (#692) * add py37 and recent influxdb * remove useless py34 dep * use py36 for pydocstyle (py27 soon deprecated) * ugly fix to numpy inconsistencies * py37 is not in ubuntu 14.04 * move import numpy and add noqa * get 3.7 into travis matrix * get 3.7 into travis matrix --- .travis.yml | 36 +++++++++++++++++++++---- influxdb/tests/dataframe_client_test.py | 18 ++++++++++--- tox.ini | 30 ++++++++++++++------- 3 files changed, 65 insertions(+), 19 deletions(-) diff 
--git a/.travis.yml b/.travis.yml index a1cf7b55..8c660b67 100644 --- a/.travis.yml +++ b/.travis.yml @@ -8,10 +8,12 @@ python: - "pypy3" env: - - INFLUXDB_VER=1.2.4 - - INFLUXDB_VER=1.3.9 - - INFLUXDB_VER=1.4.2 - - INFLUXDB_VER=1.5.4 + - INFLUXDB_VER=1.2.4 # 2017-05-08 + - INFLUXDB_VER=1.3.9 # 2018-01-19 + - INFLUXDB_VER=1.4.3 # 2018-01-30 + - INFLUXDB_VER=1.5.4 # 2018-06-22 + - INFLUXDB_VER=1.6.4 # 2018-10-24 + - INFLUXDB_VER=1.7.4 # 2019-02-14 addons: apt: @@ -20,7 +22,31 @@ addons: matrix: include: - - python: 2.7 + - python: 3.7 + dist: xenial + sudo: true + env: INFLUXDB_VER=1.2.4 + - python: 3.7 + dist: xenial + sudo: true + env: INFLUXDB_VER=1.3.9 + - python: 3.7 + dist: xenial + sudo: true + env: INFLUXDB_VER=1.4.3 + - python: 3.7 + dist: xenial + sudo: true + env: INFLUXDB_VER=1.5.4 + - python: 3.7 + dist: xenial + sudo: true + env: INFLUXDB_VER=1.6.4 + - python: 3.7 + dist: xenial + sudo: true + env: INFLUXDB_VER=1.7.4 + - python: 3.6 env: TOX_ENV=pep257 - python: 3.6 env: TOX_ENV=docs diff --git a/influxdb/tests/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py index ad910a6d..1de3a501 100644 --- a/influxdb/tests/dataframe_client_test.py +++ b/influxdb/tests/dataframe_client_test.py @@ -22,6 +22,7 @@ import pandas as pd from pandas.util.testing import assert_frame_equal from influxdb import DataFrameClient + import numpy @skip_if_pypy @@ -396,10 +397,16 @@ def test_write_points_from_dataframe_with_numeric_precision(self): ["2", 2, 2.2222222222222]], index=[now, now + timedelta(hours=1)]) - expected_default_precision = ( - b'foo,hello=there 0=\"1\",1=1i,2=1.11111111111 0\n' - b'foo,hello=there 0=\"2\",1=2i,2=2.22222222222 3600000000000\n' - ) + if tuple(map(int, numpy.version.version.split('.'))) <= (1, 13, 3): + expected_default_precision = ( + b'foo,hello=there 0=\"1\",1=1i,2=1.11111111111 0\n' + b'foo,hello=there 0=\"2\",1=2i,2=2.22222222222 3600000000000\n' + ) + else: + expected_default_precision = ( + b'foo,hello=there 
0=\"1\",1=1i,2=1.1111111111111 0\n' + b'foo,hello=there 0=\"2\",1=2i,2=2.2222222222222 3600000000000\n' # noqa E501 line too long + ) expected_specified_precision = ( b'foo,hello=there 0=\"1\",1=1i,2=1.1111 0\n' @@ -419,6 +426,9 @@ def test_write_points_from_dataframe_with_numeric_precision(self): cli = DataFrameClient(database='db') cli.write_points(dataframe, "foo", {"hello": "there"}) + print(expected_default_precision) + print(m.last_request.body) + self.assertEqual(m.last_request.body, expected_default_precision) cli = DataFrameClient(database='db') diff --git a/tox.ini b/tox.ini index 2f9c212c..4a1921e2 100644 --- a/tox.ini +++ b/tox.ini @@ -1,21 +1,28 @@ [tox] -envlist = py27, py35, py36, pypy, pypy3, flake8, pep257, coverage, docs +envlist = py27, py35, py36, py37, pypy, pypy3, flake8, pep257, coverage, docs [testenv] passenv = INFLUXDB_PYTHON_INFLUXD_PATH setenv = INFLUXDB_PYTHON_SKIP_SERVER_TESTS=False deps = -r{toxinidir}/requirements.txt -r{toxinidir}/test-requirements.txt - py27,py34,py35,py36: pandas==0.20.1 - py27,py34,py35,py36: numpy==1.13.3 + py27: pandas==0.21.1 + py27: numpy==1.13.3 + py35: pandas==0.22.0 + py35: numpy==1.14.6 + py36: pandas==0.23.4 + py36: numpy==1.15.4 + py37: pandas==0.24.2 + py37: numpy==1.16.2 # Only install pandas with non-pypy interpreters +# Testing all combinations would be too expensive commands = nosetests -v --with-doctest {posargs} [testenv:flake8] deps = flake8 pep8-naming -commands = flake8 --ignore=W503,W504,W605,N802,F821 influxdb +commands = flake8 influxdb [testenv:pep257] deps = pydocstyle @@ -26,19 +33,22 @@ deps = -r{toxinidir}/requirements.txt -r{toxinidir}/test-requirements.txt pandas coverage - numpy==1.13.3 + numpy commands = nosetests -v --with-coverage --cover-html --cover-package=influxdb [testenv:docs] deps = -r{toxinidir}/requirements.txt - pandas==0.20.1 - numpy==1.13.3 - Sphinx==1.5.5 + pandas==0.24.2 + numpy==1.16.2 + Sphinx==1.8.5 sphinx_rtd_theme commands = sphinx-build -b html docs/source 
docs/build [flake8] -ignore = N802,F821,E402 -# E402: module level import not at top of file +ignore = W503,W504,W605,N802,F821,E402 +# W503: Line break occurred before a binary operator +# W504: Line break occurred after a binary operator +# W605: invalid escape sequence # N802: nosetests's setUp function # F821: False positive in intluxdb/dataframe_client.py +# E402: module level import not at top of file From 05a101d5e4b4554899a2a8830d50ddcf07fba747 Mon Sep 17 00:00:00 2001 From: Colas Le Guernic Date: Sat, 16 Mar 2019 17:47:41 +0000 Subject: [PATCH 450/536] Python and influxdb supported versions (#693) * numpy might use non-numerical version * update * update CHANGELOG.md --- CHANGELOG.md | 1 + README.rst | 4 ++-- influxdb/tests/dataframe_client_test.py | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 035476ab..d18d5bc4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ### Added ### Changed +- Update test suite to add support for Python 3.7 and InfluxDB v1.6.4 and 1.7.4 (#692 thx @clslgrnc) ### Removed diff --git a/README.rst b/README.rst index d4f9611c..026171b2 100644 --- a/README.rst +++ b/README.rst @@ -39,7 +39,7 @@ InfluxDB is an open-source distributed time series database, find more about Inf InfluxDB pre v1.1.0 users ------------------------- -This module is tested with InfluxDB versions: v1.2.4, v1.3.9, v1.4.2, and v1.5.4. +This module is tested with InfluxDB versions: v1.2.4, v1.3.9, v1.4.3, v1.5.4, v1.6.4, and 1.7.4. Those users still on InfluxDB v0.8.x users may still use the legacy client by importing ``from influxdb.influxdb08 import InfluxDBClient``. @@ -59,7 +59,7 @@ On Debian/Ubuntu, you can install it with this command:: Dependencies ------------ -The influxdb-python distribution is supported and tested on Python 2.7, 3.5, 3.6, PyPy and PyPy3. 
+The influxdb-python distribution is supported and tested on Python 2.7, 3.5, 3.6, 3.7, PyPy and PyPy3. **Note:** Python <3.5 are currently untested. See ``.travis.yml``. diff --git a/influxdb/tests/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py index 1de3a501..f861cf2e 100644 --- a/influxdb/tests/dataframe_client_test.py +++ b/influxdb/tests/dataframe_client_test.py @@ -397,7 +397,7 @@ def test_write_points_from_dataframe_with_numeric_precision(self): ["2", 2, 2.2222222222222]], index=[now, now + timedelta(hours=1)]) - if tuple(map(int, numpy.version.version.split('.'))) <= (1, 13, 3): + if numpy.lib.NumpyVersion(numpy.__version__) <= '1.13.3': expected_default_precision = ( b'foo,hello=there 0=\"1\",1=1i,2=1.11111111111 0\n' b'foo,hello=there 0=\"2\",1=2i,2=2.22222222222 3600000000000\n' From 02865ae6ca184102559d8d6579e9a86f0498e363 Mon Sep 17 00:00:00 2001 From: Matthew McGinn Date: Tue, 19 Mar 2019 23:27:47 -0500 Subject: [PATCH 451/536] Add CODEOWNERS file for automatic reviewers on GitHub Signed-off-by: Matthew McGinn --- CODEOWNERS | 1 + 1 file changed, 1 insertion(+) create mode 100644 CODEOWNERS diff --git a/CODEOWNERS b/CODEOWNERS new file mode 100644 index 00000000..0acbd7c8 --- /dev/null +++ b/CODEOWNERS @@ -0,0 +1 @@ +* @aviau @xginn8 @sebito91 From afcfd25b21523d84a7d1088eff2abb4d08de7647 Mon Sep 17 00:00:00 2001 From: Colas Le Guernic Date: Wed, 20 Mar 2019 04:30:26 +0000 Subject: [PATCH 452/536] Parameter binding for client's `query()` method (#678) * add bind_params to query * tutorial for bind_params --- CHANGELOG.md | 1 + examples/tutorial.py | 9 ++++++++- influxdb/_dataframe_client.py | 12 ++++++++++++ influxdb/client.py | 18 ++++++++++++++++++ influxdb/tests/dataframe_client_test.py | 7 ++++--- .../server_tests/client_test_with_server.py | 4 +++- 6 files changed, 46 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d18d5bc4..a5bc07fb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,7 @@ and 
this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ## [Unreleased] ### Added +- query() now accepts a bind_params argument for parameter binding (#678 thx @clslgrnc) ### Changed - Update test suite to add support for Python 3.7 and InfluxDB v1.6.4 and 1.7.4 (#692 thx @clslgrnc) diff --git a/examples/tutorial.py b/examples/tutorial.py index 4083bfc5..12cd49c1 100644 --- a/examples/tutorial.py +++ b/examples/tutorial.py @@ -13,7 +13,9 @@ def main(host='localhost', port=8086): dbname = 'example' dbuser = 'smly' dbuser_password = 'my_secret_password' - query = 'select value from cpu_load_short;' + query = 'select Float_value from cpu_load_short;' + query_where = 'select Int_value from cpu_load_short where host=$host;' + bind_params = {'host': 'server01'} json_body = [ { "measurement": "cpu_load_short", @@ -50,6 +52,11 @@ def main(host='localhost', port=8086): print("Result: {0}".format(result)) + print("Querying data: " + query_where) + result = client.query(query_where, bind_params=bind_params) + + print("Result: {0}".format(result)) + print("Switch user: " + user) client.switch_user(user, password) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index 3b7a39db..1ce6e947 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -142,6 +142,7 @@ def write_points(self, def query(self, query, params=None, + bind_params=None, epoch=None, expected_response_code=200, database=None, @@ -153,8 +154,18 @@ def query(self, """ Query data into a DataFrame. + .. danger:: + In order to avoid injection vulnerabilities (similar to `SQL + injection `_ + vulnerabilities), do not directly include untrusted data into the + ``query`` parameter, use ``bind_params`` instead. 
+ :param query: the actual query string :param params: additional parameters for the request, defaults to {} + :param bind_params: bind parameters for the query: + any variable in the query written as ``'$var_name'`` will be + replaced with ``bind_params['var_name']``. Only works in the + ``WHERE`` clause and takes precedence over ``params['params']`` :param epoch: response timestamps to be in epoch format either 'h', 'm', 's', 'ms', 'u', or 'ns',defaults to `None` which is RFC3339 UTC format with nanosecond precision @@ -172,6 +183,7 @@ def query(self, :rtype: :class:`~.ResultSet` """ query_args = dict(params=params, + bind_params=bind_params, epoch=epoch, expected_response_code=expected_response_code, raise_errors=raise_errors, diff --git a/influxdb/client.py b/influxdb/client.py index 8f8b14ae..e94ae25d 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -345,6 +345,7 @@ def _read_chunked_response(response, raise_errors=True): def query(self, query, params=None, + bind_params=None, epoch=None, expected_response_code=200, database=None, @@ -354,6 +355,12 @@ def query(self, method="GET"): """Send a query to InfluxDB. + .. danger:: + In order to avoid injection vulnerabilities (similar to `SQL + injection `_ + vulnerabilities), do not directly include untrusted data into the + ``query`` parameter, use ``bind_params`` instead. + :param query: the actual query string :type query: str @@ -361,6 +368,12 @@ def query(self, defaults to {} :type params: dict + :param bind_params: bind parameters for the query: + any variable in the query written as ``'$var_name'`` will be + replaced with ``bind_params['var_name']``. 
Only works in the + ``WHERE`` clause and takes precedence over ``params['params']`` + :type bind_params: dict + :param epoch: response timestamps to be in epoch format either 'h', 'm', 's', 'ms', 'u', or 'ns',defaults to `None` which is RFC3339 UTC format with nanosecond precision @@ -394,6 +407,11 @@ def query(self, if params is None: params = {} + if bind_params is not None: + params_dict = json.loads(params.get('params', '{}')) + params_dict.update(bind_params) + params['params'] = json.dumps(params_dict) + params['q'] = query params['db'] = database or self._database diff --git a/influxdb/tests/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py index f861cf2e..cb380ac5 100644 --- a/influxdb/tests/dataframe_client_test.py +++ b/influxdb/tests/dataframe_client_test.py @@ -894,10 +894,11 @@ def test_multiquery_into_dataframe(self): expected = [{'cpu_load_short': pd1}, {'cpu_load_short': pd2}] cli = DataFrameClient('host', 8086, 'username', 'password', 'db') - iql = "SELECT value FROM cpu_load_short WHERE region='us-west';"\ - "SELECT count(value) FROM cpu_load_short WHERE region='us-west'" + iql = "SELECT value FROM cpu_load_short WHERE region=$region;"\ + "SELECT count(value) FROM cpu_load_short WHERE region=$region" + bind_params = {'region': 'us-west'} with _mocked_session(cli, 'GET', 200, data): - result = cli.query(iql) + result = cli.query(iql, bind_params=bind_params) for r, e in zip(result, expected): for k in e: assert_frame_equal(e[k], r[k]) diff --git a/influxdb/tests/server_tests/client_test_with_server.py b/influxdb/tests/server_tests/client_test_with_server.py index 4dbc1b75..121d2c82 100644 --- a/influxdb/tests/server_tests/client_test_with_server.py +++ b/influxdb/tests/server_tests/client_test_with_server.py @@ -440,7 +440,9 @@ def test_write_points_batch(self): batch_size=2) time.sleep(5) net_in = self.cli.query("SELECT value FROM network " - "WHERE direction='in'").raw + "WHERE direction=$dir", + bind_params={'dir': 'in'} + ).raw 
net_out = self.cli.query("SELECT value FROM network " "WHERE direction='out'").raw cpu = self.cli.query("SELECT value FROM cpu_usage").raw From 73503b5396ed3e9dbc115af31edb0112622b160a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=81ukasz=20Dudek?= <45991310+lukaszdudek-silvair@users.noreply.github.com> Date: Mon, 1 Apr 2019 19:05:02 +0200 Subject: [PATCH 453/536] Add CQs management methods to the client (#681) * Add CQs management methods to the client --- CHANGELOG.md | 2 + influxdb/client.py | 92 +++++++++++++++ influxdb/tests/client_test.py | 108 ++++++++++++++++++ .../server_tests/client_test_with_server.py | 30 +++++ 4 files changed, 232 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index a5bc07fb..9834a5ef 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,8 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ## [Unreleased] ### Added +- Add `get_list_continuous_queries`, `drop_continuous_query`, and `create_continuous_query` management methods for + continuous queries (#681 thx @lukaszdudek-silvair) - query() now accepts a bind_params argument for parameter binding (#678 thx @clslgrnc) ### Changed diff --git a/influxdb/client.py b/influxdb/client.py index e94ae25d..d365643d 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -926,6 +926,98 @@ def get_list_privileges(self, username): text = "SHOW GRANTS FOR {0}".format(quote_ident(username)) return list(self.query(text).get_points()) + def get_list_continuous_queries(self): + """Get the list of continuous queries in InfluxDB. 
+ + :return: all CQs in InfluxDB + :rtype: list of dictionaries + + :Example: + + :: + + >> cqs = client.get_list_cqs() + >> cqs + [ + { + u'db1': [] + }, + { + u'db2': [ + { + u'name': u'vampire', + u'query': u'CREATE CONTINUOUS QUERY vampire ON ' + 'mydb BEGIN SELECT count(dracula) INTO ' + 'mydb.autogen.all_of_them FROM ' + 'mydb.autogen.one GROUP BY time(5m) END' + } + ] + } + ] + """ + query_string = "SHOW CONTINUOUS QUERIES" + return [{sk[0]: list(p)} for sk, p in self.query(query_string).items()] + + def create_continuous_query(self, name, select, database=None, + resample_opts=None): + r"""Create a continuous query for a database. + + :param name: the name of continuous query to create + :type name: str + :param select: select statement for the continuous query + :type select: str + :param database: the database for which the continuous query is + created. Defaults to current client's database + :type database: str + :param resample_opts: resample options + :type resample_opts: str + + :Example: + + :: + + >> select_clause = 'SELECT mean("value") INTO "cpu_mean" ' \ + ... 'FROM "cpu" GROUP BY time(1m)' + >> client.create_continuous_query( + ... 'cpu_mean', select_clause, 'db_name', 'EVERY 10s FOR 2m' + ... ) + >> client.get_list_continuous_queries() + [ + { + 'db_name': [ + { + 'name': 'cpu_mean', + 'query': 'CREATE CONTINUOUS QUERY "cpu_mean" ' + 'ON "db_name" ' + 'RESAMPLE EVERY 10s FOR 2m ' + 'BEGIN SELECT mean("value") ' + 'INTO "cpu_mean" FROM "cpu" ' + 'GROUP BY time(1m) END' + } + ] + } + ] + """ + query_string = ( + "CREATE CONTINUOUS QUERY {0} ON {1}{2} BEGIN {3} END" + ).format(quote_ident(name), quote_ident(database or self._database), + ' RESAMPLE ' + resample_opts if resample_opts else '', select) + self.query(query_string) + + def drop_continuous_query(self, name, database=None): + """Drop an existing continuous query for a database. 
+ + :param name: the name of continuous query to drop + :type name: str + :param database: the database for which the continuous query is + dropped. Defaults to current client's database + :type database: str + """ + query_string = ( + "DROP CONTINUOUS QUERY {0} ON {1}" + ).format(quote_ident(name), quote_ident(database or self._database)) + self.query(query_string) + def send_packet(self, packet, protocol='json', time_precision=None): """Send an UDP packet. diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index e27eef17..e1a30b81 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -1027,6 +1027,114 @@ def test_get_list_privileges_fails(self): with _mocked_session(cli, 'get', 401): cli.get_list_privileges('test') + def test_get_list_continuous_queries(self): + """Test getting a list of continuous queries.""" + data = { + "results": [ + { + "statement_id": 0, + "series": [ + { + "name": "testdb01", + "columns": ["name", "query"], + "values": [["testname01", "testquery01"], + ["testname02", "testquery02"]] + }, + { + "name": "testdb02", + "columns": ["name", "query"], + "values": [["testname03", "testquery03"]] + }, + { + "name": "testdb03", + "columns": ["name", "query"] + } + ] + } + ] + } + + with _mocked_session(self.cli, 'get', 200, json.dumps(data)): + self.assertListEqual( + self.cli.get_list_continuous_queries(), + [ + { + 'testdb01': [ + {'name': 'testname01', 'query': 'testquery01'}, + {'name': 'testname02', 'query': 'testquery02'} + ] + }, + { + 'testdb02': [ + {'name': 'testname03', 'query': 'testquery03'} + ] + }, + { + 'testdb03': [] + } + ] + ) + + @raises(Exception) + def test_get_list_continuous_queries_fails(self): + """Test failing to get a list of continuous queries.""" + with _mocked_session(self.cli, 'get', 400): + self.cli.get_list_continuous_queries() + + def test_create_continuous_query(self): + """Test continuous query creation.""" + data = {"results": [{}]} + with 
requests_mock.Mocker() as m: + m.register_uri( + requests_mock.GET, + "http://localhost:8086/query", + text=json.dumps(data) + ) + query = 'SELECT count("value") INTO "6_months"."events" FROM ' \ + '"events" GROUP BY time(10m)' + self.cli.create_continuous_query('cq_name', query, 'db_name') + self.assertEqual( + m.last_request.qs['q'][0], + 'create continuous query "cq_name" on "db_name" begin select ' + 'count("value") into "6_months"."events" from "events" group ' + 'by time(10m) end' + ) + self.cli.create_continuous_query('cq_name', query, 'db_name', + 'EVERY 10s FOR 2m') + self.assertEqual( + m.last_request.qs['q'][0], + 'create continuous query "cq_name" on "db_name" resample ' + 'every 10s for 2m begin select count("value") into ' + '"6_months"."events" from "events" group by time(10m) end' + ) + + @raises(Exception) + def test_create_continuous_query_fails(self): + """Test failing to create a continuous query.""" + with _mocked_session(self.cli, 'get', 400): + self.cli.create_continuous_query('cq_name', 'select', 'db_name') + + def test_drop_continuous_query(self): + """Test dropping a continuous query.""" + data = {"results": [{}]} + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.GET, + "http://localhost:8086/query", + text=json.dumps(data) + ) + self.cli.drop_continuous_query('cq_name', 'db_name') + self.assertEqual( + m.last_request.qs['q'][0], + 'drop continuous query "cq_name" on "db_name"' + ) + + @raises(Exception) + def test_drop_continuous_query_fails(self): + """Test failing to drop a continuous query.""" + with _mocked_session(self.cli, 'get', 400): + self.cli.drop_continuous_query('cq_name', 'db_name') + def test_invalid_port_fails(self): """Test invalid port fail for TestInfluxDBClient object.""" with self.assertRaises(ValueError): diff --git a/influxdb/tests/server_tests/client_test_with_server.py b/influxdb/tests/server_tests/client_test_with_server.py index 121d2c82..fda3f720 100644 --- 
a/influxdb/tests/server_tests/client_test_with_server.py +++ b/influxdb/tests/server_tests/client_test_with_server.py @@ -722,6 +722,36 @@ def test_drop_retention_policy(self): rsp ) + def test_create_continuous_query(self): + """Test continuous query creation.""" + self.cli.create_retention_policy('some_rp', '1d', 1) + query = 'select count("value") into "some_rp"."events" from ' \ + '"events" group by time(10m)' + self.cli.create_continuous_query('test_cq', query, 'db') + cqs = self.cli.get_list_continuous_queries() + expected_cqs = [ + { + 'db': [ + { + 'name': 'test_cq', + 'query': 'CREATE CONTINUOUS QUERY test_cq ON db ' + 'BEGIN SELECT count(value) INTO ' + 'db.some_rp.events FROM db.autogen.events ' + 'GROUP BY time(10m) END' + } + ] + } + ] + self.assertEqual(cqs, expected_cqs) + + def test_drop_continuous_query(self): + """Test continuous query drop.""" + self.test_create_continuous_query() + self.cli.drop_continuous_query('test_cq', 'db') + cqs = self.cli.get_list_continuous_queries() + expected_cqs = [{'db': []}] + self.assertEqual(cqs, expected_cqs) + def test_issue_143(self): """Test for PR#143 from repo.""" pt = partial(point, 'a_series_name', timestamp='2015-03-30T16:16:37Z') From e24b42c36269725946164dc98f2802d96df9f708 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=A7=8B=E8=91=89?= Date: Sun, 7 Apr 2019 09:32:09 -0500 Subject: [PATCH 454/536] Fix a warning under Python 3.7 (#697) * Fix a warning under Python 3.7 Signed-off-by: Matthew McGinn --- CHANGELOG.md | 1 + setup.py | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9834a5ef..33302f16 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. 
### Changed - Update test suite to add support for Python 3.7 and InfluxDB v1.6.4 and 1.7.4 (#692 thx @clslgrnc) +- Update classifiers tuple to list in setup.py (#697 thx @Hanaasagi) ### Removed diff --git a/setup.py b/setup.py index cd6e4e9b..d44875f6 100755 --- a/setup.py +++ b/setup.py @@ -42,7 +42,7 @@ tests_require=test_requires, install_requires=requires, extras_require={'test': test_requires}, - classifiers=( + classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', @@ -55,5 +55,5 @@ 'Programming Language :: Python :: 3.6', 'Topic :: Software Development :: Libraries', 'Topic :: Software Development :: Libraries :: Python Modules', - ), + ], ) From 47aeb9bf48f7556b570b40279ca169980c141d1f Mon Sep 17 00:00:00 2001 From: xginn8 Date: Sun, 7 Apr 2019 20:09:36 -0500 Subject: [PATCH 455/536] Update delete_series docstring to differentiate between drop_database (#699) Closes #666 Signed-off-by: Matthew McGinn --- CHANGELOG.md | 1 + influxdb/client.py | 4 +++- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 33302f16..a7630a9d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,6 +14,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ### Changed - Update test suite to add support for Python 3.7 and InfluxDB v1.6.4 and 1.7.4 (#692 thx @clslgrnc) - Update classifiers tuple to list in setup.py (#697 thx @Hanaasagi) +- Update documentation for empty `delete_series` confusion ### Removed diff --git a/influxdb/client.py b/influxdb/client.py index d365643d..dc77bfc1 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -827,7 +827,9 @@ def set_user_password(self, username, password): def delete_series(self, database=None, measurement=None, tags=None): """Delete series from a database. - Series can be filtered by measurement and tags. + Series must be filtered by either measurement and tags. 
+ This method cannot be used to delete all series, use + `drop_database` instead. :param database: the database from which the series should be deleted, defaults to client's current database From dc54e5eb716d0356a10138ee2f9b7b2ca83a3796 Mon Sep 17 00:00:00 2001 From: Ron Rothman Date: Mon, 8 Apr 2019 13:13:13 -0400 Subject: [PATCH 456/536] add consistency parameter to write_points (#664) * add consistency parameter to write_points [https://github.com/influxdata/influxdb-python/issues/643] --- CHANGELOG.md | 1 + influxdb/client.py | 21 +++++++++++++++++---- influxdb/tests/client_test.py | 26 ++++++++++++++++++++++++++ 3 files changed, 44 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a7630a9d..7f1503b5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,6 +12,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. - query() now accepts a bind_params argument for parameter binding (#678 thx @clslgrnc) ### Changed +- Add consistency param to InfluxDBClient.write_points (#643 thx @RonRothman) - Update test suite to add support for Python 3.7 and InfluxDB v1.6.4 and 1.7.4 (#692 thx @clslgrnc) - Update classifiers tuple to list in setup.py (#697 thx @Hanaasagi) - Update documentation for empty `delete_series` confusion diff --git a/influxdb/client.py b/influxdb/client.py index dc77bfc1..8ac557d3 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -458,7 +458,8 @@ def write_points(self, retention_policy=None, tags=None, batch_size=None, - protocol='json' + protocol='json', + consistency=None ): """Write to multiple time series names. @@ -486,6 +487,9 @@ def write_points(self, :type batch_size: int :param protocol: Protocol for writing data. Either 'line' or 'json'. :type protocol: str + :param consistency: Consistency for the points. + One of {'any','one','quorum','all'}. 
+ :type consistency: str :returns: True, if the operation is successful :rtype: bool @@ -498,14 +502,16 @@ def write_points(self, time_precision=time_precision, database=database, retention_policy=retention_policy, - tags=tags, protocol=protocol) + tags=tags, protocol=protocol, + consistency=consistency) return True return self._write_points(points=points, time_precision=time_precision, database=database, retention_policy=retention_policy, - tags=tags, protocol=protocol) + tags=tags, protocol=protocol, + consistency=consistency) def ping(self): """Check connectivity to InfluxDB. @@ -531,12 +537,16 @@ def _write_points(self, database, retention_policy, tags, - protocol='json'): + protocol='json', + consistency=None): if time_precision not in ['n', 'u', 'ms', 's', 'm', 'h', None]: raise ValueError( "Invalid time precision is given. " "(use 'n', 'u', 'ms', 's', 'm' or 'h')") + if consistency not in ['any', 'one', 'quorum', 'all', None]: + raise ValueError('Invalid consistency: {}'.format(consistency)) + if protocol == 'json': data = { 'points': points @@ -551,6 +561,9 @@ def _write_points(self, 'db': database or self._database } + if consistency is not None: + params['consistency'] = consistency + if time_precision is not None: params['precision'] = time_precision diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index e1a30b81..e4cc7e11 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -337,6 +337,23 @@ def test_write_points_with_precision(self): m.last_request.body, ) + def test_write_points_with_consistency(self): + """Test write points with consistency for TestInfluxDBClient object.""" + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.POST, + 'http://localhost:8086/write', + status_code=204 + ) + + cli = InfluxDBClient(database='db') + + cli.write_points(self.dummy_points, consistency='any') + self.assertEqual( + m.last_request.qs, + {'db': ['db'], 'consistency': ['any']} + ) + def 
test_write_points_with_precision_udp(self): """Test write points with precision for TestInfluxDBClient object.""" s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) @@ -409,6 +426,15 @@ def test_write_points_bad_precision(self): time_precision='g' ) + def test_write_points_bad_consistency(self): + """Test write points w/bad consistency value.""" + cli = InfluxDBClient() + with self.assertRaises(ValueError): + cli.write_points( + self.dummy_points, + consistency='boo' + ) + @raises(Exception) def test_write_points_with_precision_fails(self): """Test write points w/precision fail for TestInfluxDBClient object.""" From d5d12499f3755199d5eedd8b363450f1cf4073bd Mon Sep 17 00:00:00 2001 From: Shan Desai Date: Wed, 10 Apr 2019 15:30:45 +0200 Subject: [PATCH 457/536] Add Example for sending information to DB via UDP (#648) Due to lack of documentation for UDP, this example provides basic usage of sending information points via UDP. The code structure followed is similar, if not same as other examples in the `examples` directory. Signed-off-by: Shantanoo --- docs/source/examples.rst | 6 ++++ examples/tutorial_udp.py | 66 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 72 insertions(+) create mode 100644 examples/tutorial_udp.py diff --git a/docs/source/examples.rst b/docs/source/examples.rst index 2c85fbda..fdda62a9 100644 --- a/docs/source/examples.rst +++ b/docs/source/examples.rst @@ -25,3 +25,9 @@ Tutorials - SeriesHelper .. literalinclude:: ../../examples/tutorial_serieshelper.py :language: python + +Tutorials - UDP +=============== + +.. 
literalinclude:: ../../examples/tutorial_udp.py + :language: python diff --git a/examples/tutorial_udp.py b/examples/tutorial_udp.py new file mode 100644 index 00000000..517ae858 --- /dev/null +++ b/examples/tutorial_udp.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- +"""Example for sending batch information to InfluxDB via UDP.""" + +""" +INFO: In order to use UDP, one should enable the UDP service from the +`influxdb.conf` under section + [[udp]] + enabled = true + bind-address = ":8089" # port number for sending data via UDP + database = "udp1" # name of database to be stored + [[udp]] + enabled = true + bind-address = ":8090" + database = "udp2" +""" + + +import argparse + +from influxdb import InfluxDBClient + + +def main(uport): + """Instantiate connection to the InfluxDB.""" + # NOTE: structure of the UDP packet is different than that of information + # sent via HTTP + json_body = { + "tags": { + "host": "server01", + "region": "us-west" + }, + "time": "2009-11-10T23:00:00Z", + "points": [{ + "measurement": "cpu_load_short", + "fields": { + "value": 0.64 + } + }, + { + "measurement": "cpu_load_short", + "fields": { + "value": 0.67 + } + }] + } + + # make `use_udp` True and add `udp_port` number from `influxdb.conf` file + # no need to mention the database name since it is already configured + client = InfluxDBClient(use_udp=True, udp_port=uport) + + # Instead of `write_points` use `send_packet` + client.send_packet(json_body) + + +def parse_args(): + """Parse the args.""" + parser = argparse.ArgumentParser( + description='example code to play with InfluxDB along with UDP Port') + parser.add_argument('--uport', type=int, required=True, + help=' UDP port of InfluxDB') + return parser.parse_args() + + +if __name__ == '__main__': + args = parse_args() + main(uport=args.uport) From 08e02996a17c531a87e02cf6c5e693c4038000ed Mon Sep 17 00:00:00 2001 From: Debanjan Date: Fri, 12 Jul 2019 00:33:24 +0200 Subject: [PATCH 458/536] Line protocol leading comma (#694) * [fix] 
typo in comment + [fix] handles leading comma for the case that the first value column is Null valued * [refactor] consolidated similar logic to a new function * [fix] covering scenario where is a string * Revert "[fix] covering scenario where is a string" This reverts commit 49af5abadda346929f728ff61625e38062adc434. * Revert "[refactor] consolidated similar logic to a new function" This reverts commit 366e7714668cc1fd28b5a5f689e93661f1b9da5f. * [tests][feature] added tests to check if first none value results in invalid line protocol * [fix] deleted debug lines * [fix] overspecified date_range args * [fix] overspecified date_range args * [fix] removed endpoint in date-range * [fix] reordered columns in test target * [fix] [test] freeze order of columns * [refactor] [test] used loc instead of dict-like invocation of columns * [fix] [test] [lint] cleared up complaints from flake8 and pep257 --- influxdb/_dataframe_client.py | 4 +- influxdb/tests/dataframe_client_test.py | 65 +++++++++++++++++++++++++ 2 files changed, 68 insertions(+), 1 deletion(-) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index 1ce6e947..d16e29ca 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -363,7 +363,7 @@ def _convert_dataframe_to_lines(self, tag_df = self._stringify_dataframe( tag_df, numeric_precision, datatype='tag') - # join preprendded tags, leaving None values out + # join prepended tags, leaving None values out tags = tag_df.apply( lambda s: [',' + s.name + '=' + v if v else '' for v in s]) tags = tags.sum(axis=1) @@ -392,6 +392,8 @@ def _convert_dataframe_to_lines(self, field_df.columns[1:]] field_df = field_df.where(~mask_null, '') # drop Null entries fields = field_df.sum(axis=1) + # take out leading , where first column has a Null value + fields = fields.str.lstrip(",") del field_df # Generate line protocol string diff --git a/influxdb/tests/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py index 
cb380ac5..90312ed8 100644 --- a/influxdb/tests/dataframe_client_test.py +++ b/influxdb/tests/dataframe_client_test.py @@ -389,6 +389,71 @@ def test_write_points_from_dataframe_with_numeric_column_names(self): self.assertEqual(m.last_request.body, expected) + def test_write_points_from_dataframe_with_leading_none_column(self): + """write_points detect erroneous leading comma for null first field.""" + dataframe = pd.DataFrame( + dict( + first=[1, None, None, 8, 9], + second=[2, None, None, None, 10], + third=[3, 4.1, None, None, 11], + first_tag=["one", None, None, "eight", None], + second_tag=["two", None, None, None, None], + third_tag=["three", "four", None, None, None], + comment=[ + "All columns filled", + "First two of three empty", + "All empty", + "Last two of three empty", + "Empty tags with values", + ] + ), + index=pd.date_range( + start=pd.to_datetime('2018-01-01'), + freq='1D', + periods=5, + ) + ) + expected = ( + b'foo,first_tag=one,second_tag=two,third_tag=three' + b' comment="All columns filled",first=1.0,second=2.0,third=3.0' + b' 1514764800000000000\n' + b'foo,third_tag=four' + b' comment="First two of three empty",third=4.1' + b' 1514851200000000000\n' + b'foo comment="All empty" 1514937600000000000\n' + b'foo,first_tag=eight' + b' comment="Last two of three empty",first=8.0' + b' 1515024000000000000\n' + b'foo' + b' comment="Empty tags with values",first=9.0,second=10.0' + b',third=11.0' + b' 1515110400000000000\n' + ) + + with requests_mock.Mocker() as m: + m.register_uri(requests_mock.POST, + "http://localhost:8086/write", + status_code=204) + + cli = DataFrameClient(database='db') + + colnames = [ + "first_tag", + "second_tag", + "third_tag", + "comment", + "first", + "second", + "third" + ] + cli.write_points(dataframe.loc[:, colnames], 'foo', + tag_columns=[ + "first_tag", + "second_tag", + "third_tag"]) + + self.assertEqual(m.last_request.body, expected) + def test_write_points_from_dataframe_with_numeric_precision(self): """Test write 
points from df with numeric precision.""" now = pd.Timestamp('1970-01-01 00:00+00:00') From 47d24c7489ecef56b742b57ef8616f5eae0800af Mon Sep 17 00:00:00 2001 From: Lloyd Wallis Date: Thu, 11 Jul 2019 23:34:52 +0100 Subject: [PATCH 459/536] Mutual TLS authentication (#702) * Add support for providing a client certificate for mutual TLS authentication. * Be more explicit in documentation on valid values for the parameter --- influxdb/client.py | 16 ++++++++++++++++ influxdb/tests/client_test.py | 8 ++++++++ 2 files changed, 24 insertions(+) diff --git a/influxdb/client.py b/influxdb/client.py index 8ac557d3..ad4c6b66 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -61,6 +61,13 @@ class InfluxDBClient(object): :type proxies: dict :param path: path of InfluxDB on the server to connect, defaults to '' :type path: str + :param cert: Path to client certificate information to use for mutual TLS + authentication. You can specify a local cert to use + as a single file containing the private key and the certificate, or as + a tuple of both files’ paths, defaults to None + :type cert: str + + :raises ValueError: if cert is provided but ssl is disabled (set to False) """ def __init__(self, @@ -78,6 +85,7 @@ def __init__(self, proxies=None, pool_size=10, path='', + cert=None, ): """Construct a new InfluxDBClient object.""" self.__host = host @@ -120,6 +128,14 @@ def __init__(self, else: self._proxies = proxies + if cert: + if not ssl: + raise ValueError( + "Client certificate provided but ssl is disabled." 
+ )
+ else: + self._session.cert = cert + self.__baseurl = "{0}://{1}:{2}{3}".format( self._scheme, self._host, diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index e4cc7e11..b741cf7a 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -149,6 +149,14 @@ def test_dsn(self): **{'ssl': False}) self.assertEqual('http://my.host.fr:1886', cli._baseurl) + def test_cert(self): + """Test mutual TLS authentication for TestInfluxDBClient object.""" + cli = InfluxDBClient(ssl=True, cert='/etc/pki/tls/private/dummy.crt') + self.assertEqual(cli._session.cert, '/etc/pki/tls/private/dummy.crt') + + with self.assertRaises(ValueError): + cli = InfluxDBClient(cert='/etc/pki/tls/private/dummy.crt') + def test_switch_database(self): """Test switch database in TestInfluxDBClient object.""" cli = InfluxDBClient('host', 8086, 'username', 'password', 'database') From dc83fc6576b6463dcc77a0c101475a2a71ed655a Mon Sep 17 00:00:00 2001 From: Song Yihan Date: Fri, 12 Jul 2019 06:36:05 +0800 Subject: [PATCH 460/536] Fix newline in tag value cause partial commit (#716) When a tag value contains newline(\n), the request sent to db would be split into two parts and the first part would fail to write to db but the second would succeed. 
The reason is that before sending we do serialization (make_lines); the _escape_tag method in line_protocol.py won't handle it well, so we need to be more specific about newline instead of only handling the escape character (\) --- influxdb/line_protocol.py | 2 ++ influxdb/tests/test_line_protocol.py | 21 +++++++++++++++++++++ 2 files changed, 23 insertions(+) diff --git a/influxdb/line_protocol.py b/influxdb/line_protocol.py index e8816fc0..249511d3 100644 --- a/influxdb/line_protocol.py +++ b/influxdb/line_protocol.py @@ -54,6 +54,8 @@ def _escape_tag(tag): ",", "\\," ).replace( "=", "\\=" + ).replace( + "\n", "\\n" ) diff --git a/influxdb/tests/test_line_protocol.py b/influxdb/tests/test_line_protocol.py index a3d84793..bccd7727 100644 --- a/influxdb/tests/test_line_protocol.py +++ b/influxdb/tests/test_line_protocol.py @@ -115,6 +115,27 @@ def test_make_lines_unicode(self): 'test,unicode_tag=\'Привет!\' unicode_val="Привет!"\n' ) + def test_tag_value_newline(self): + """Test make lines with tag value contains newline.""" + data = { + "tags": { + "t1": "line1\nline2" + }, + "points": [ + { + "measurement": "test", + "fields": { + "val": "hello" + } + } + ] + } + + self.assertEqual( + line_protocol.make_lines(data), + 'test,t1=line1\\nline2 val="hello"\n' + ) + def test_quote_ident(self): """Test quote indentation in TestLineProtocol object.""" self.assertEqual( From 9903974e936766edb8a4bf5434aa174f790ef0ec Mon Sep 17 00:00:00 2001 From: Nathan Epstein Date: Thu, 8 Aug 2019 00:08:59 -0400 Subject: [PATCH 461/536] this tutorial did not write to the database with the protocol set to json. 
It worked when changed to line (#737) --- examples/tutorial_pandas.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/tutorial_pandas.py b/examples/tutorial_pandas.py index 67a5457d..13e72f8c 100644 --- a/examples/tutorial_pandas.py +++ b/examples/tutorial_pandas.py @@ -12,7 +12,7 @@ def main(host='localhost', port=8086): user = 'root' password = 'root' dbname = 'demo' - protocol = 'json' + protocol = 'line' client = DataFrameClient(host, port, user, password, dbname) From b9504e05db83b8cd159da6e02deab2faac8fbc59 Mon Sep 17 00:00:00 2001 From: Sebastian Borza Date: Wed, 7 Aug 2019 23:15:47 -0500 Subject: [PATCH 462/536] chore(CHANGELOG): update with latest merged PR #797 --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7f1503b5..59eb55c7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,6 +16,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. - Update test suite to add support for Python 3.7 and InfluxDB v1.6.4 and 1.7.4 (#692 thx @clslgrnc) - Update classifiers tuple to list in setup.py (#697 thx @Hanaasagi) - Update documentation for empty `delete_series` confusion +- Update tests/tutorials_pandas.py to reference `line` protocol, bug in `json` (#797 thx @Aeium) ### Removed From 5de0162c17d3a3137ec97e277420485b392f0634 Mon Sep 17 00:00:00 2001 From: Sebastian Borza Date: Mon, 19 Aug 2019 08:16:12 -0500 Subject: [PATCH 463/536] chore(CHANGELOG): update for v5.2.3 release --- CHANGELOG.md | 22 ++++++++++++++++++---- 1 file changed, 18 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 59eb55c7..025a1d71 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,16 +7,30 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. 
## [Unreleased] ### Added + +### Changed + +### Removed + +## [v5.2.3] - 2019-08-19 + +### Added +- Add consistency param to InfluxDBClient.write_points (#643 thx @RonRothman) +- Add UDP example (#648 thx @shantanoo-desai) +- Add consistency paramter to `write_points` (#664 tx @RonRothman) +- The query() function now accepts a bind_params argument for parameter binding (#678 thx @clslgrnc) - Add `get_list_continuous_queries`, `drop_continuous_query`, and `create_continuous_query` management methods for continuous queries (#681 thx @lukaszdudek-silvair) -- query() now accepts a bind_params argument for parameter binding (#678 thx @clslgrnc) +- Mutual TLS authentication (#702 thx @LloydW93) ### Changed -- Add consistency param to InfluxDBClient.write_points (#643 thx @RonRothman) - Update test suite to add support for Python 3.7 and InfluxDB v1.6.4 and 1.7.4 (#692 thx @clslgrnc) +- Update supported versions of influxdb + python (#693 thx @clslgrnc) +- Fix for the line protocol issue with leading comma (#694 thx @d3banjan) - Update classifiers tuple to list in setup.py (#697 thx @Hanaasagi) -- Update documentation for empty `delete_series` confusion -- Update tests/tutorials_pandas.py to reference `line` protocol, bug in `json` (#797 thx @Aeium) +- Update documentation for empty `delete_series` confusion (#699 thx @xginn8) +- Fix newline character issue in tag value (#716 thx @syhan) +- Update tests/tutorials_pandas.py to reference `line` protocol, bug in `json` (#737 thx @Aeium) ### Removed From f3b3df50423bfba5cf11652cec049ec285ed7227 Mon Sep 17 00:00:00 2001 From: Sebastian Borza Date: Mon, 26 Aug 2019 12:25:51 -0500 Subject: [PATCH 464/536] chore(influxdb): update release version number --- influxdb/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/__init__.py b/influxdb/__init__.py index 288880b1..b31170bb 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -18,4 +18,4 @@ ] -__version__ = '5.2.2' +__version__ = 
'5.2.3' From 6baf7eee61e968c70d4c7f111e7179bc92a80e8d Mon Sep 17 00:00:00 2001 From: Sebastian Borza Date: Wed, 4 Sep 2019 08:24:19 -0500 Subject: [PATCH 465/536] chore(client_test): adding in an old test from legacy PR#315 (#752) * chore(client_test): adding in an old test from legacy PR#315 * chore(client_test): update from GET to POST * chore(client_test): update to clear out flake8 issues --- influxdb/tests/client_test.py | 43 +++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index b741cf7a..571b7ebc 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -706,6 +706,49 @@ def test_create_retention_policy(self): '"db" duration 1d replication 4 shard duration 0s' ) + def test_create_retention_policy_shard_duration(self): + """Test create retention policy with a custom shard duration.""" + example_response = '{"results":[{}]}' + + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.POST, + "http://localhost:8086/query", + text=example_response + ) + self.cli.create_retention_policy( + 'somename2', '1d', 4, database='db', + shard_duration='1h' + ) + + self.assertEqual( + m.last_request.qs['q'][0], + 'create retention policy "somename2" on ' + '"db" duration 1d replication 4 shard duration 1h' + ) + + def test_create_retention_policy_shard_duration_default(self): + """Test create retention policy with a default shard duration.""" + example_response = '{"results":[{}]}' + + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.POST, + "http://localhost:8086/query", + text=example_response + ) + self.cli.create_retention_policy( + 'somename3', '1d', 4, database='db', + shard_duration='1h', default=True + ) + + self.assertEqual( + m.last_request.qs['q'][0], + 'create retention policy "somename3" on ' + '"db" duration 1d replication 4 shard duration 1h ' + 'default' + ) + def test_alter_retention_policy(self): """Test 
alter retention policy for TestInfluxDBClient object.""" example_response = '{"results":[{}]}' From bfc582bdd7e3d1009c28fb874c13778ed1522e35 Mon Sep 17 00:00:00 2001 From: sblondon Date: Mon, 9 Sep 2019 21:59:50 +0200 Subject: [PATCH 466/536] Fix minor spelling error (#744) `the connections is` -> `the connection is` Or should it be `the connections are` ? --- docs/source/api-documentation.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/api-documentation.rst b/docs/source/api-documentation.rst index d00600e6..35fdb291 100644 --- a/docs/source/api-documentation.rst +++ b/docs/source/api-documentation.rst @@ -30,7 +30,7 @@ These clients are initiated in the same way as the client = DataFrameClient(host='127.0.0.1', port=8086, username='root', password='root', database='dbname') -.. note:: Only when using UDP (use_udp=True) the connections is established. +.. note:: Only when using UDP (use_udp=True) the connection is established. .. _InfluxDBClient-api: From 7b5c3d54985cc3503457c1a731a8f9f605ac6190 Mon Sep 17 00:00:00 2001 From: xginn8 Date: Mon, 9 Sep 2019 23:27:53 +0200 Subject: [PATCH 467/536] meta: add .github config for ISSUE_TEMPLATE/CODEOWNERS/PR_TEMPLATE (#754) Signed-off-by: Matthew McGinn --- .github/CODEOWNERS | 1 + .github/ISSUE_TEMPLATE.md | 4 ++++ .github/PULL_REQUEST_TEMPLATE.md | 5 +++++ 3 files changed, 10 insertions(+) create mode 100644 .github/CODEOWNERS create mode 100644 .github/ISSUE_TEMPLATE.md create mode 100644 .github/PULL_REQUEST_TEMPLATE.md diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 00000000..0248ade1 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1 @@ +* @aviau @sebito91 @xginn8 diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md new file mode 100644 index 00000000..7a7927c1 --- /dev/null +++ b/.github/ISSUE_TEMPLATE.md @@ -0,0 +1,4 @@ +- **InfluxDB version:** e.g. 1.7.7 (output of the `influx version` command) +- **InfluxDB-python version:** e.g. 
5.2.2 (output of the `python -c "from __future__ import print_function; import influxdb; print(influxdb.__version__)"` command) +- **Python version:** e.g. 3.7.4 (output of the `python --version` command) +- **Operating system version:** e.g. Windows 10, Ubuntu 18.04, macOS 10.14.5 diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 00000000..84729d17 --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,5 @@ +--- +##### Contributor checklist + +- [ ] Builds are passing +- [ ] New tests have been added (for feature additions) From beb236038200fb99c119260b476e92c33487dab9 Mon Sep 17 00:00:00 2001 From: Matthew McGinn Date: Tue, 10 Sep 2019 00:54:47 +0200 Subject: [PATCH 468/536] meta: remove (old) CODEOWNERS (replaced in 7b5c3d5) Signed-off-by: Matthew McGinn --- CODEOWNERS | 1 - 1 file changed, 1 deletion(-) delete mode 100644 CODEOWNERS diff --git a/CODEOWNERS b/CODEOWNERS deleted file mode 100644 index 0acbd7c8..00000000 --- a/CODEOWNERS +++ /dev/null @@ -1 +0,0 @@ -* @aviau @xginn8 @sebito91 From 71426a68aec6661ec975db9e7fd0c5971e2b6817 Mon Sep 17 00:00:00 2001 From: xginn8 Date: Tue, 10 Sep 2019 01:41:08 +0200 Subject: [PATCH 469/536] meta: clean up travis CI config (#755) bump non-code tests to Python3.7 Signed-off-by: Matthew McGinn --- .travis.yml | 31 ++++--------------------------- README.rst | 8 ++++---- 2 files changed, 8 insertions(+), 31 deletions(-) diff --git a/.travis.yml b/.travis.yml index 8c660b67..580e0c8f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,6 +4,7 @@ python: - "2.7" - "3.5" - "3.6" + - "3.7" - "pypy" - "pypy3" @@ -23,36 +24,12 @@ addons: matrix: include: - python: 3.7 - dist: xenial - sudo: true - env: INFLUXDB_VER=1.2.4 - - python: 3.7 - dist: xenial - sudo: true - env: INFLUXDB_VER=1.3.9 - - python: 3.7 - dist: xenial - sudo: true - env: INFLUXDB_VER=1.4.3 - - python: 3.7 - dist: xenial - sudo: true - env: INFLUXDB_VER=1.5.4 - - python: 3.7 - dist: xenial - sudo: 
true - env: INFLUXDB_VER=1.6.4 - - python: 3.7 - dist: xenial - sudo: true - env: INFLUXDB_VER=1.7.4 - - python: 3.6 env: TOX_ENV=pep257 - - python: 3.6 + - python: 3.7 env: TOX_ENV=docs - - python: 3.6 + - python: 3.7 env: TOX_ENV=flake8 - - python: 3.6 + - python: 3.7 env: TOX_ENV=coverage install: diff --git a/README.rst b/README.rst index 026171b2..a40ed148 100644 --- a/README.rst +++ b/README.rst @@ -19,13 +19,13 @@ InfluxDB-Python is a client for interacting with InfluxDB_. Development of this library is maintained by: -+-----------+-------------------------------+ ++-----------+-------------------------------+ | Github ID | URL | -+===========+===============================+ ++===========+===============================+ | @aviau | (https://github.com/aviau) | -+-----------+-------------------------------+ ++-----------+-------------------------------+ | @xginn8 | (https://github.com/xginn8) | -+-----------+-------------------------------+ ++-----------+-------------------------------+ | @sebito91 | (https://github.com/sebito91) | +-----------+-------------------------------+ From f749764d066b6a298981ad78b754b168a6612858 Mon Sep 17 00:00:00 2001 From: xginn8 Date: Mon, 16 Sep 2019 20:20:11 +0200 Subject: [PATCH 470/536] tests: add basic type checking with mypy (#756) Closes #736 --- .gitignore | 1 + .travis.yml | 2 ++ influxdb/dataframe_client.py | 2 +- influxdb/tests/server_tests/base.py | 11 +++++++++-- mypy.ini | 8 ++++++++ tox.ini | 7 ++++++- 6 files changed, 27 insertions(+), 4 deletions(-) create mode 100644 mypy.ini diff --git a/.gitignore b/.gitignore index 7720b658..d970c44c 100644 --- a/.gitignore +++ b/.gitignore @@ -7,6 +7,7 @@ build/ mock*/ nose*/ .pybuild/ +.mypy_cache/ debian/files debian/python-influxdb.debhelper.log debian/python-influxdb.postinst.debhelper diff --git a/.travis.yml b/.travis.yml index 580e0c8f..9d45f19b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -31,6 +31,8 @@ matrix: env: TOX_ENV=flake8 - python: 3.7 env: 
TOX_ENV=coverage + - python: 3.7 + env: TOX_ENV=mypy install: - pip install tox-travis diff --git a/influxdb/dataframe_client.py b/influxdb/dataframe_client.py index 97258644..babfe0dd 100644 --- a/influxdb/dataframe_client.py +++ b/influxdb/dataframe_client.py @@ -25,4 +25,4 @@ def __init__(self, *a, **kw): raise ImportError("DataFrameClient requires Pandas " "which couldn't be imported: %s" % self.err) else: - from ._dataframe_client import DataFrameClient + from ._dataframe_client import DataFrameClient # type: ignore diff --git a/influxdb/tests/server_tests/base.py b/influxdb/tests/server_tests/base.py index f4bd3ff9..fe722870 100644 --- a/influxdb/tests/server_tests/base.py +++ b/influxdb/tests/server_tests/base.py @@ -51,8 +51,15 @@ class SingleTestCaseWithServerMixin(object): # 'influxdb_template_conf' attribute must be set # on the TestCase class or instance. - setUp = _setup_influxdb_server - tearDown = _teardown_influxdb_server + @classmethod + def setUp(cls): + """Set up an instance of the SingleTestCaseWithServerMixin.""" + _setup_influxdb_server(cls) + + @classmethod + def tearDown(cls): + """Tear down an instance of the SingleTestCaseWithServerMixin.""" + _teardown_influxdb_server(cls) class ManyTestCasesWithServerMixin(object): diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 00000000..308aa62d --- /dev/null +++ b/mypy.ini @@ -0,0 +1,8 @@ +[mypy] +ignore_missing_imports = True +warn_unused_ignores = True +warn_unused_configs = True +warn_redundant_casts = True +warn_no_return = True +no_implicit_optional = True +strict_equality = True diff --git a/tox.ini b/tox.ini index 4a1921e2..ff30ebac 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py27, py35, py36, py37, pypy, pypy3, flake8, pep257, coverage, docs +envlist = py27, py35, py36, py37, pypy, pypy3, flake8, pep257, coverage, docs, mypy [testenv] passenv = INFLUXDB_PYTHON_INFLUXD_PATH @@ -44,6 +44,11 @@ deps = -r{toxinidir}/requirements.txt sphinx_rtd_theme commands = 
sphinx-build -b html docs/source docs/build +[testenv:mypy] +deps = -r{toxinidir}/test-requirements.txt + mypy==0.720 +commands = mypy --config-file mypy.ini -p influxdb + [flake8] ignore = W503,W504,W605,N802,F821,E402 # W503: Line break occurred before a binary operator From 3d61f1f465c0fbfbe404c77a4728c5d7268aaf1e Mon Sep 17 00:00:00 2001 From: Matthew McGinn Date: Mon, 16 Sep 2019 22:20:50 +0200 Subject: [PATCH 471/536] meta: add missing changelog entries for last few commits Signed-off-by: Matthew McGinn --- CHANGELOG.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 025a1d71..8aa8de84 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,8 +7,11 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ## [Unreleased] ### Added +- Add mypy testing framework (#756) ### Changed +- Clean up stale CI config (#755) +- Add legacy client test (#752 & #318 thx @oldmantaiter & @sebito91) ### Removed From 3a5e6219a2b62af076caa4f37fab9258165ec266 Mon Sep 17 00:00:00 2001 From: Ophir LOJKINE Date: Thu, 5 Dec 2019 17:42:49 +0100 Subject: [PATCH 472/536] Add support for messagepack (#734) * Add support for messagepack * Remove unnecessary blank line Fixes https://github.com/influxdata/influxdb-python/pull/734/files/57daf8ccd5027c796a2fd3934b8e88d3982d300e#r302769403 * Small code reorganization * Small code reorganization Fixes https://github.com/influxdata/influxdb-python/pull/734#discussion_r302770011 --- influxdb/client.py | 50 ++++++++++++++++++++++++++++------- influxdb/tests/client_test.py | 23 ++++++++++++++++ requirements.txt | 1 + 3 files changed, 64 insertions(+), 10 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index ad4c6b66..5e39f490 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -6,11 +6,14 @@ from __future__ import print_function from __future__ import unicode_literals -import time -import random - +import datetime import json +import random import socket +import 
struct +import time + +import msgpack import requests import requests.exceptions from six.moves import xrange @@ -144,7 +147,7 @@ def __init__(self, self._headers = { 'Content-Type': 'application/json', - 'Accept': 'text/plain' + 'Accept': 'application/x-msgpack' } @property @@ -293,13 +296,30 @@ def request(self, url, method='GET', params=None, data=None, time.sleep((2 ** _try) * random.random() / 100.0) if not retry: raise + + type_header = response.headers and response.headers.get("Content-Type") + if type_header == "application/x-msgpack" and response.content: + response._msgpack = msgpack.unpackb( + packed=response.content, + ext_hook=_msgpack_parse_hook, + raw=False) + else: + response._msgpack = None + + def reformat_error(response): + if response._msgpack: + return json.dumps(response._msgpack, separators=(',', ':')) + else: + return response.content + # if there's not an error, there must have been a successful response if 500 <= response.status_code < 600: - raise InfluxDBServerError(response.content) + raise InfluxDBServerError(reformat_error(response)) elif response.status_code == expected_response_code: return response else: - raise InfluxDBClientError(response.content, response.status_code) + err_msg = reformat_error(response) + raise InfluxDBClientError(err_msg, response.status_code) def write(self, data, params=None, expected_response_code=204, protocol='json'): @@ -450,10 +470,11 @@ def query(self, expected_response_code=expected_response_code ) - if chunked: - return self._read_chunked_response(response) - - data = response.json() + data = response._msgpack + if not data: + if chunked: + return self._read_chunked_response(response) + data = response.json() results = [ ResultSet(result, raise_errors=raise_errors) @@ -1119,3 +1140,12 @@ def _parse_netloc(netloc): 'password': info.password or None, 'host': info.hostname or 'localhost', 'port': info.port or 8086} + + +def _msgpack_parse_hook(code, data): + if code == 5: + (epoch_s, epoch_ns) = 
struct.unpack(">QI", data) + timestamp = datetime.datetime.utcfromtimestamp(epoch_s) + timestamp += datetime.timedelta(microseconds=(epoch_ns / 1000)) + return timestamp.isoformat() + 'Z' + return msgpack.ExtType(code, data) diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index 571b7ebc..54116f7e 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -473,6 +473,29 @@ def test_query(self): [{'value': 0.64, 'time': '2009-11-10T23:00:00Z'}] ) + def test_query_msgpack(self): + """Test query method with a messagepack response.""" + example_response = bytes(bytearray.fromhex( + "81a7726573756c74739182ac73746174656d656e745f696400a673657269" + "65739183a46e616d65a161a7636f6c756d6e7392a474696d65a176a67661" + "6c7565739192c70c05000000005d26178a019096c8cb3ff0000000000000" + )) + + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.GET, + "http://localhost:8086/query", + request_headers={"Accept": "application/x-msgpack"}, + headers={"Content-Type": "application/x-msgpack"}, + content=example_response + ) + rs = self.cli.query('select * from a') + + self.assertListEqual( + list(rs.get_points()), + [{'v': 1.0, 'time': '2019-07-10T16:51:22.026253Z'}] + ) + def test_select_into_post(self): """Test SELECT.*INTO is POSTed.""" example_response = ( diff --git a/requirements.txt b/requirements.txt index db5f6f85..77d7306f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,3 +2,4 @@ python-dateutil>=2.6.0 pytz requests>=2.17.0 six>=1.10.0 +msgpack==0.6.1 From cb15c2ebc4d337cac606638004cb794870b52c2e Mon Sep 17 00:00:00 2001 From: Sebastian Borza Date: Thu, 5 Dec 2019 10:44:25 -0600 Subject: [PATCH 473/536] chore(CHANGELOG): adding in #754 from lovasoa, thanks! 
--- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8aa8de84..13445e97 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,6 +8,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ### Added - Add mypy testing framework (#756) +- Add support for messagepack (#734 thx @lovasoa) ### Changed - Clean up stale CI config (#755) From ea1b995ac3b12b7693254167ea8443f4a5263386 Mon Sep 17 00:00:00 2001 From: Sebastian Borza Date: Tue, 7 Apr 2020 17:30:49 -0500 Subject: [PATCH 474/536] feat(client): re-add support for 'show series' from legacy PR #357. Closes #353 (#806) * feat(client): re-add support for 'show series' from legacy PR #357. Closes #353 * chore(client): fix failing tests * chore(client): update linters to pass --- CHANGELOG.md | 7 +++ influxdb/client.py | 35 +++++++++++ influxdb/tests/client_test.py | 60 +++++++++++++++++++ .../server_tests/client_test_with_server.py | 58 ++++++++++++++++++ 4 files changed, 160 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 13445e97..bdc08569 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,9 +6,16 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. 
## [Unreleased] +### Added + +### Changed + +## [v5.2.4] - 2020-04-10 + ### Added - Add mypy testing framework (#756) - Add support for messagepack (#734 thx @lovasoa) +- Add support for 'show series' (#357 thx @gaker) ### Changed - Clean up stale CI config (#755) diff --git a/influxdb/client.py b/influxdb/client.py index 5e39f490..43427a11 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -7,6 +7,7 @@ from __future__ import unicode_literals import datetime +import itertools import json import random import socket @@ -637,6 +638,40 @@ def get_list_database(self): """ return list(self.query("SHOW DATABASES").get_points()) + def get_list_series(self, database=None, measurement=None, tags=None): + """ + Query SHOW SERIES returns the distinct series in your database. + + FROM and WHERE clauses are optional. + + :param measurement: Show all series from a measurement + :type id: string + :param tags: Show all series that match given tags + :type id: dict + :param database: the database from which the series should be + shows, defaults to client's current database + :type database: str + """ + database = database or self._database + query_str = 'SHOW SERIES' + + if measurement: + query_str += ' FROM "{0}"'.format(measurement) + + if tags: + query_str += ' WHERE ' + ' and '.join(["{0}='{1}'".format(k, v) + for k, v in tags.items()]) + + return list( + itertools.chain.from_iterable( + [ + x.values() + for x in (self.query(query_str, database=database) + .get_points()) + ] + ) + ) + def create_database(self, dbname): """Create a new database in InfluxDB. 
diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index 54116f7e..99a7f42b 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -689,6 +689,66 @@ def test_get_list_measurements(self): [{'name': 'cpu'}, {'name': 'disk'}] ) + def test_get_list_series(self): + """Test get a list of series from the database.""" + data = {'results': [ + {'series': [ + { + 'values': [ + ['cpu_load_short,host=server01,region=us-west'], + ['memory_usage,host=server02,region=us-east']], + 'columns': ['key'] + } + ]} + ]} + + with _mocked_session(self.cli, 'get', 200, json.dumps(data)): + self.assertListEqual( + self.cli.get_list_series(), + ['cpu_load_short,host=server01,region=us-west', + 'memory_usage,host=server02,region=us-east']) + + def test_get_list_series_with_measurement(self): + """Test get a list of series from the database by filter.""" + data = {'results': [ + {'series': [ + { + 'values': [ + ['cpu_load_short,host=server01,region=us-west']], + 'columns': ['key'] + } + ]} + ]} + + with _mocked_session(self.cli, 'get', 200, json.dumps(data)): + self.assertListEqual( + self.cli.get_list_series(measurement='cpu_load_short'), + ['cpu_load_short,host=server01,region=us-west']) + + def test_get_list_series_with_tags(self): + """Test get a list of series from the database by tags.""" + data = {'results': [ + {'series': [ + { + 'values': [ + ['cpu_load_short,host=server01,region=us-west']], + 'columns': ['key'] + } + ]} + ]} + + with _mocked_session(self.cli, 'get', 200, json.dumps(data)): + self.assertListEqual( + self.cli.get_list_series(tags={'region': 'us-west'}), + ['cpu_load_short,host=server01,region=us-west']) + + @raises(Exception) + def test_get_list_series_fails(self): + """Test get a list of series from the database but fail.""" + cli = InfluxDBClient('host', 8086, 'username', 'password') + with _mocked_session(cli, 'get', 401): + cli.get_list_series() + def test_create_retention_policy_default(self): """Test create 
default ret policy for TestInfluxDBClient object.""" example_response = '{"results":[{}]}' diff --git a/influxdb/tests/server_tests/client_test_with_server.py b/influxdb/tests/server_tests/client_test_with_server.py index fda3f720..94f28b66 100644 --- a/influxdb/tests/server_tests/client_test_with_server.py +++ b/influxdb/tests/server_tests/client_test_with_server.py @@ -817,6 +817,64 @@ def test_query_multiple_series(self): ] self.cli.write_points(pts) + def test_get_list_series(self): + """Test get a list of series from the database.""" + dummy_points = [ + { + "measurement": "cpu_load_short", + "tags": { + "host": "server01", + "region": "us-west" + }, + "time": "2009-11-10T23:00:00.123456Z", + "fields": { + "value": 0.64 + } + } + ] + + dummy_points_2 = [ + { + "measurement": "memory_usage", + "tags": { + "host": "server02", + "region": "us-east" + }, + "time": "2009-11-10T23:00:00.123456Z", + "fields": { + "value": 80 + } + } + ] + + self.cli.write_points(dummy_points) + self.cli.write_points(dummy_points_2) + + self.assertEquals( + self.cli.get_list_series(), + ['cpu_load_short,host=server01,region=us-west', + 'memory_usage,host=server02,region=us-east'] + ) + + self.assertEquals( + self.cli.get_list_series(measurement='memory_usage'), + ['memory_usage,host=server02,region=us-east'] + ) + + self.assertEquals( + self.cli.get_list_series(measurement='memory_usage'), + ['memory_usage,host=server02,region=us-east'] + ) + + self.assertEquals( + self.cli.get_list_series(tags={'host': 'server02'}), + ['memory_usage,host=server02,region=us-east']) + + self.assertEquals( + self.cli.get_list_series( + measurement='cpu_load_short', tags={'host': 'server02'}), + []) + @skip_server_tests class UdpTests(ManyTestCasesWithServerMixin, unittest.TestCase): From ad5e5b616130f83f6f63532d3e7ae8b005687ba7 Mon Sep 17 00:00:00 2001 From: Sebastian Borza Date: Tue, 7 Apr 2020 18:36:21 -0500 Subject: [PATCH 475/536] feat(client): allow custom requests session in InfluxDBClient (#807) 
* feat(client): enable client request to provide custom requests session * feat(client): allow custom requests session in InfluxDBClient --- CHANGELOG.md | 1 + influxdb/client.py | 10 +++++++++- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index bdc08569..59777b5f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,6 +16,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. - Add mypy testing framework (#756) - Add support for messagepack (#734 thx @lovasoa) - Add support for 'show series' (#357 thx @gaker) +- Add support for custom request session in InfluxDBClient (#360 thx @dschien) ### Changed - Clean up stale CI config (#755) diff --git a/influxdb/client.py b/influxdb/client.py index 43427a11..390d8e16 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -70,6 +70,9 @@ class InfluxDBClient(object): as a single file containing the private key and the certificate, or as a tuple of both files’ paths, defaults to None :type cert: str + :param session: allow for the new client request to use an existing + requests Session, defaults to None + :type session: requests.Session :raises ValueError: if cert is provided but ssl is disabled (set to False) """ @@ -90,6 +93,7 @@ def __init__(self, pool_size=10, path='', cert=None, + session=None, ): """Construct a new InfluxDBClient object.""" self.__host = host @@ -104,7 +108,11 @@ def __init__(self, self.__use_udp = use_udp self.__udp_port = udp_port - self._session = requests.Session() + + if not session: + session = requests.Session() + + self._session = session adapter = requests.adapters.HTTPAdapter( pool_connections=int(pool_size), pool_maxsize=int(pool_size) From 4799c588789c2425693c419da308c62158063174 Mon Sep 17 00:00:00 2001 From: Sebastian Borza Date: Wed, 8 Apr 2020 12:39:13 -0500 Subject: [PATCH 476/536] feat(line_protocol): split out make_line function from core make_lines (#810) * feat(line_protocol): split out make_line 
function from core make_lines * chore(line_protocol): fix malformed testcase --- CHANGELOG.md | 1 + influxdb/line_protocol.py | 123 ++++++++++++++++----------- influxdb/tests/test_line_protocol.py | 2 +- 3 files changed, 75 insertions(+), 51 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 59777b5f..81809bcd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -21,6 +21,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ### Changed - Clean up stale CI config (#755) - Add legacy client test (#752 & #318 thx @oldmantaiter & @sebito91) +- Update make_lines section in line_protocol.py to split out core function (#375 thx @aisbaa) ### Removed diff --git a/influxdb/line_protocol.py b/influxdb/line_protocol.py index 249511d3..ec59ef47 100644 --- a/influxdb/line_protocol.py +++ b/influxdb/line_protocol.py @@ -11,7 +11,7 @@ from pytz import UTC from dateutil.parser import parse -from six import iteritems, binary_type, text_type, integer_types, PY2 +from six import binary_type, text_type, integer_types, PY2 EPOCH = UTC.localize(datetime.utcfromtimestamp(0)) @@ -30,15 +30,20 @@ def _convert_timestamp(timestamp, precision=None): ns = (timestamp - EPOCH).total_seconds() * 1e9 if precision is None or precision == 'n': return ns - elif precision == 'u': + + if precision == 'u': return ns / 1e3 - elif precision == 'ms': + + if precision == 'ms': return ns / 1e6 - elif precision == 's': + + if precision == 's': return ns / 1e9 - elif precision == 'm': + + if precision == 'm': return ns / 1e9 / 60 - elif precision == 'h': + + if precision == 'h': return ns / 1e9 / 3600 raise ValueError(timestamp) @@ -95,9 +100,11 @@ def _escape_value(value): if isinstance(value, text_type) and value != '': return quote_ident(value) - elif isinstance(value, integer_types) and not isinstance(value, bool): + + if isinstance(value, integer_types) and not isinstance(value, bool): return str(value) + 'i' - elif _is_float(value): + + if _is_float(value): return 
repr(value) return str(value) @@ -107,15 +114,60 @@ def _get_unicode(data, force=False): """Try to return a text aka unicode object from the given data.""" if isinstance(data, binary_type): return data.decode('utf-8') - elif data is None: + + if data is None: return '' - elif force: + + if force: if PY2: return unicode(data) - else: - return str(data) - else: - return data + return str(data) + + return data + + +def make_line(measurement, tags=None, fields=None, time=None, precision=None): + """Build a single line protocol string from one point.""" + tags = tags or {} + fields = fields or {} + + line = _escape_tag(_get_unicode(measurement)) + + # tags should be sorted client-side to take load off server + tag_list = [] + for tag_key in sorted(tags.keys()): + key = _escape_tag(tag_key) + value = _escape_tag(tags[tag_key]) + + if key != '' and value != '': + tag_list.append( + "{key}={value}".format(key=key, value=value) + ) + + if tag_list: + line += ',' + ','.join(tag_list) + + field_list = [] + for field_key in sorted(fields.keys()): + key = _escape_tag(field_key) + value = _escape_value(fields[field_key]) + + if key != '' and value != '': + field_list.append("{key}={value}".format( + key=key, + value=value + )) + + if field_list: + line += ' ' + ','.join(field_list) + + if time is not None: + timestamp = _get_unicode(str(int( + _convert_timestamp(time, precision) + ))) + line += ' ' + timestamp + + return line def make_lines(data, precision=None): @@ -127,48 +179,19 @@ def make_lines(data, precision=None): lines = [] static_tags = data.get('tags') for point in data['points']: - elements = [] - - # add measurement name - measurement = _escape_tag(_get_unicode( - point.get('measurement', data.get('measurement')))) - key_values = [measurement] - - # add tags if static_tags: tags = dict(static_tags) # make a copy, since we'll modify tags.update(point.get('tags') or {}) else: tags = point.get('tags') or {} - # tags should be sorted client-side to take load off 
server - for tag_key, tag_value in sorted(iteritems(tags)): - key = _escape_tag(tag_key) - value = _escape_tag_value(tag_value) - - if key != '' and value != '': - key_values.append(key + "=" + value) - - elements.append(','.join(key_values)) - - # add fields - field_values = [] - for field_key, field_value in sorted(iteritems(point['fields'])): - key = _escape_tag(field_key) - value = _escape_value(field_value) - - if key != '' and value != '': - field_values.append(key + "=" + value) - - elements.append(','.join(field_values)) - - # add timestamp - if 'time' in point: - timestamp = _get_unicode(str(int( - _convert_timestamp(point['time'], precision)))) - elements.append(timestamp) - - line = ' '.join(elements) + line = make_line( + point.get('measurement', data.get('measurement')), + tags=tags, + fields=point.get('fields'), + precision=precision, + time=point.get('time') + ) lines.append(line) return '\n'.join(lines) + '\n' diff --git a/influxdb/tests/test_line_protocol.py b/influxdb/tests/test_line_protocol.py index bccd7727..71828f62 100644 --- a/influxdb/tests/test_line_protocol.py +++ b/influxdb/tests/test_line_protocol.py @@ -42,7 +42,7 @@ def test_make_lines(self): self.assertEqual( line_protocol.make_lines(data), - 'test,backslash_tag=C:\\\\ ,integer_tag=2,string_tag=hello ' + 'test,backslash_tag=C:\\\\,integer_tag=2,string_tag=hello ' 'bool_val=True,float_val=1.1,int_val=1i,string_val="hello!"\n' ) From 04205cf5858520fe2362432090232a871f1860ea Mon Sep 17 00:00:00 2001 From: Sebastian Borza Date: Wed, 8 Apr 2020 16:09:04 -0500 Subject: [PATCH 477/536] chore(line_protocol): fix nanosecond timestamp resolution for points (#811) --- CHANGELOG.md | 1 + influxdb/line_protocol.py | 20 ++++++++++++++------ 2 files changed, 15 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 81809bcd..0556d942 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -22,6 +22,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. 
- Clean up stale CI config (#755) - Add legacy client test (#752 & #318 thx @oldmantaiter & @sebito91) - Update make_lines section in line_protocol.py to split out core function (#375 thx @aisbaa) +- Fix nanosecond time resolution for points (#407 thx @AndreCAndersen && @clslgrnc) ### Removed diff --git a/influxdb/line_protocol.py b/influxdb/line_protocol.py index ec59ef47..3a5eb4e8 100644 --- a/influxdb/line_protocol.py +++ b/influxdb/line_protocol.py @@ -16,6 +16,14 @@ EPOCH = UTC.localize(datetime.utcfromtimestamp(0)) +def _to_nanos(timestamp): + delta = timestamp - EPOCH + nanos_in_days = delta.days * 86400 * 10 ** 9 + nanos_in_seconds = delta.seconds * 10 ** 9 + nanos_in_micros = delta.microseconds * 10 ** 3 + return nanos_in_days + nanos_in_seconds + nanos_in_micros + + def _convert_timestamp(timestamp, precision=None): if isinstance(timestamp, Integral): return timestamp # assume precision is correct if timestamp is int @@ -27,24 +35,24 @@ def _convert_timestamp(timestamp, precision=None): if not timestamp.tzinfo: timestamp = UTC.localize(timestamp) - ns = (timestamp - EPOCH).total_seconds() * 1e9 + ns = _to_nanos(timestamp) if precision is None or precision == 'n': return ns if precision == 'u': - return ns / 1e3 + return ns / 10**3 if precision == 'ms': - return ns / 1e6 + return ns / 10**6 if precision == 's': - return ns / 1e9 + return ns / 10**9 if precision == 'm': - return ns / 1e9 / 60 + return ns / 10**9 / 60 if precision == 'h': - return ns / 1e9 / 3600 + return ns / 10**9 / 3600 raise ValueError(timestamp) From a368951aca91c89d978fe29ee0ddc8a246711ea0 Mon Sep 17 00:00:00 2001 From: Sergei Smolianinov Date: Wed, 8 Apr 2020 21:21:12 +0000 Subject: [PATCH 478/536] Add CQs management methods to the client (#414) * Add CQs management methods to the client * chore(server_tests): update pep257 and flake8 commentary * chore(client_test): update comments based on pep257 and flake8 Co-authored-by: Sergei Smolianinov Co-authored-by: Sebastian Borza 
Co-authored-by: xginn8 --- influxdb/tests/client_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index 99a7f42b..fe7381ea 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -1161,7 +1161,7 @@ def test_revoke_privilege_invalid(self): self.cli.revoke_privilege('', 'testdb', 'test') def test_get_list_privileges(self): - """Tst get list of privs for TestInfluxDBClient object.""" + """Test get list of privs for TestInfluxDBClient object.""" data = {'results': [ {'series': [ {'columns': ['database', 'privilege'], From 516274f9bae9edfc9c6f35c6e56281b29b23b12f Mon Sep 17 00:00:00 2001 From: Sebastian Borza Date: Wed, 8 Apr 2020 16:22:27 -0500 Subject: [PATCH 479/536] chore(CHANGELOG): add smolse to PR merge for #681 and #414 --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0556d942..2a887f90 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -34,7 +34,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. 
- Add consistency paramter to `write_points` (#664 tx @RonRothman) - The query() function now accepts a bind_params argument for parameter binding (#678 thx @clslgrnc) - Add `get_list_continuous_queries`, `drop_continuous_query`, and `create_continuous_query` management methods for - continuous queries (#681 thx @lukaszdudek-silvair) + continuous queries (#681 thx @lukaszdudek-silvair && @smolse) - Mutual TLS authentication (#702 thx @LloydW93) ### Changed From 7c858bf2b069061005a4525c5a5252fd5f0b27ad Mon Sep 17 00:00:00 2001 From: Sebastian Borza Date: Wed, 8 Apr 2020 19:22:31 -0500 Subject: [PATCH 480/536] feat(dataframe_client): handle np.nan, np.inf values in DataFrameClient (#812) * feat(dataframe_client): handle np.nan, np.inf values in DataFrameClient * chore(dataframe): handle cases where tagset is empty * chore(dataframe): add tests for Nan lines but with tag values --- CHANGELOG.md | 1 + influxdb/_dataframe_client.py | 40 +++++--- influxdb/tests/dataframe_client_test.py | 122 +++++++++++++++++++++++- 3 files changed, 147 insertions(+), 16 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2a887f90..be15d52b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -17,6 +17,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. 
- Add support for messagepack (#734 thx @lovasoa) - Add support for 'show series' (#357 thx @gaker) - Add support for custom request session in InfluxDBClient (#360 thx @dschien) +- Add support for handling np.nan and np.inf values in DataFrameClient (#436 thx @nmerket) ### Changed - Clean up stale CI config (#755) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index d16e29ca..f411bb37 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -270,14 +270,31 @@ def _convert_dataframe_to_json(dataframe, "h": 1e9 * 3600, }.get(time_precision, 1) + if not tag_columns: + points = [ + {'measurement': measurement, + 'fields': + rec.replace([np.inf, -np.inf], np.nan).dropna().to_dict(), + 'time': np.int64(ts.value / precision_factor)} + for ts, (_, rec) in zip( + dataframe.index, + dataframe[field_columns].iterrows() + ) + ] + + return points + points = [ {'measurement': measurement, 'tags': dict(list(tag.items()) + list(tags.items())), - 'fields': rec, + 'fields': + rec.replace([np.inf, -np.inf], np.nan).dropna().to_dict(), 'time': np.int64(ts.value / precision_factor)} - for ts, tag, rec in zip(dataframe.index, - dataframe[tag_columns].to_dict('record'), - dataframe[field_columns].to_dict('record')) + for ts, tag, (_, rec) in zip( + dataframe.index, + dataframe[tag_columns].to_dict('record'), + dataframe[field_columns].iterrows() + ) ] return points @@ -379,21 +396,18 @@ def _convert_dataframe_to_lines(self, tags = '' # Make an array of formatted field keys and values - field_df = dataframe[field_columns] - # Keep the positions where Null values are found - mask_null = field_df.isnull().values + field_df = dataframe[field_columns].replace([np.inf, -np.inf], np.nan) + nans = pd.isnull(field_df) field_df = self._stringify_dataframe(field_df, numeric_precision, datatype='field') field_df = (field_df.columns.values + '=').tolist() + field_df - field_df[field_df.columns[1:]] = ',' + field_df[ - field_df.columns[1:]] - 
field_df = field_df.where(~mask_null, '') # drop Null entries - fields = field_df.sum(axis=1) - # take out leading , where first column has a Null value - fields = fields.str.lstrip(",") + field_df[field_df.columns[1:]] = ',' + field_df[field_df.columns[1:]] + field_df[nans] = '' + + fields = field_df.sum(axis=1).map(lambda x: x.lstrip(',')) del field_df # Generate line protocol string diff --git a/influxdb/tests/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py index 90312ed8..0573d5c3 100644 --- a/influxdb/tests/dataframe_client_test.py +++ b/influxdb/tests/dataframe_client_test.py @@ -13,8 +13,8 @@ import warnings import requests_mock -from influxdb.tests import skip_if_pypy, using_pypy from nose.tools import raises +from influxdb.tests import skip_if_pypy, using_pypy from .client_test import _mocked_session @@ -22,7 +22,7 @@ import pandas as pd from pandas.util.testing import assert_frame_equal from influxdb import DataFrameClient - import numpy + import numpy as np @skip_if_pypy @@ -462,7 +462,7 @@ def test_write_points_from_dataframe_with_numeric_precision(self): ["2", 2, 2.2222222222222]], index=[now, now + timedelta(hours=1)]) - if numpy.lib.NumpyVersion(numpy.__version__) <= '1.13.3': + if np.lib.NumpyVersion(np.__version__) <= '1.13.3': expected_default_precision = ( b'foo,hello=there 0=\"1\",1=1i,2=1.11111111111 0\n' b'foo,hello=there 0=\"2\",1=2i,2=2.22222222222 3600000000000\n' @@ -1032,3 +1032,119 @@ def test_dsn_constructor(self): client = DataFrameClient.from_dsn('influxdb://localhost:8086') self.assertIsInstance(client, DataFrameClient) self.assertEqual('http://localhost:8086', client._baseurl) + + def test_write_points_from_dataframe_with_nan_line(self): + """Test write points from dataframe with Nan lines.""" + now = pd.Timestamp('1970-01-01 00:00+00:00') + dataframe = pd.DataFrame(data=[["1", 1, np.inf], ["2", 2, np.nan]], + index=[now, now + timedelta(hours=1)], + columns=["column_one", "column_two", + "column_three"]) + 
expected = ( + b"foo column_one=\"1\",column_two=1i 0\n" + b"foo column_one=\"2\",column_two=2i " + b"3600000000000\n" + ) + + with requests_mock.Mocker() as m: + m.register_uri(requests_mock.POST, + "http://localhost:8086/write", + status_code=204) + + cli = DataFrameClient(database='db') + + cli.write_points(dataframe, 'foo', protocol='line') + self.assertEqual(m.last_request.body, expected) + + cli.write_points(dataframe, 'foo', tags=None, protocol='line') + self.assertEqual(m.last_request.body, expected) + + def test_write_points_from_dataframe_with_nan_json(self): + """Test write points from json with NaN lines.""" + now = pd.Timestamp('1970-01-01 00:00+00:00') + dataframe = pd.DataFrame(data=[["1", 1, np.inf], ["2", 2, np.nan]], + index=[now, now + timedelta(hours=1)], + columns=["column_one", "column_two", + "column_three"]) + expected = ( + b"foo column_one=\"1\",column_two=1i 0\n" + b"foo column_one=\"2\",column_two=2i " + b"3600000000000\n" + ) + + with requests_mock.Mocker() as m: + m.register_uri(requests_mock.POST, + "http://localhost:8086/write", + status_code=204) + + cli = DataFrameClient(database='db') + + cli.write_points(dataframe, 'foo', protocol='json') + self.assertEqual(m.last_request.body, expected) + + cli.write_points(dataframe, 'foo', tags=None, protocol='json') + self.assertEqual(m.last_request.body, expected) + + def test_write_points_from_dataframe_with_tags_and_nan_line(self): + """Test write points from dataframe with NaN lines and tags.""" + now = pd.Timestamp('1970-01-01 00:00+00:00') + dataframe = pd.DataFrame(data=[['blue', 1, "1", 1, np.inf], + ['red', 0, "2", 2, np.nan]], + index=[now, now + timedelta(hours=1)], + columns=["tag_one", "tag_two", "column_one", + "column_two", "column_three"]) + expected = ( + b"foo,tag_one=blue,tag_two=1 " + b"column_one=\"1\",column_two=1i " + b"0\n" + b"foo,tag_one=red,tag_two=0 " + b"column_one=\"2\",column_two=2i " + b"3600000000000\n" + ) + + with requests_mock.Mocker() as m: + 
m.register_uri(requests_mock.POST, + "http://localhost:8086/write", + status_code=204) + + cli = DataFrameClient(database='db') + + cli.write_points(dataframe, 'foo', protocol='line', + tag_columns=['tag_one', 'tag_two']) + self.assertEqual(m.last_request.body, expected) + + cli.write_points(dataframe, 'foo', tags=None, protocol='line', + tag_columns=['tag_one', 'tag_two']) + self.assertEqual(m.last_request.body, expected) + + def test_write_points_from_dataframe_with_tags_and_nan_json(self): + """Test write points from json with NaN lines and tags.""" + now = pd.Timestamp('1970-01-01 00:00+00:00') + dataframe = pd.DataFrame(data=[['blue', 1, "1", 1, np.inf], + ['red', 0, "2", 2, np.nan]], + index=[now, now + timedelta(hours=1)], + columns=["tag_one", "tag_two", "column_one", + "column_two", "column_three"]) + expected = ( + b"foo,tag_one=blue,tag_two=1 " + b"column_one=\"1\",column_two=1i " + b"0\n" + b"foo,tag_one=red,tag_two=0 " + b"column_one=\"2\",column_two=2i " + b"3600000000000\n" + ) + + with requests_mock.Mocker() as m: + m.register_uri(requests_mock.POST, + "http://localhost:8086/write", + status_code=204) + + cli = DataFrameClient(database='db') + + cli.write_points(dataframe, 'foo', protocol='json', + tag_columns=['tag_one', 'tag_two']) + self.assertEqual(m.last_request.body, expected) + + cli.write_points(dataframe, 'foo', tags=None, protocol='json', + tag_columns=['tag_one', 'tag_two']) + self.assertEqual(m.last_request.body, expected) From 9b4a51ebc811bce92f66123ef17f46f10c8c943f Mon Sep 17 00:00:00 2001 From: Christopher Head Date: Thu, 9 Apr 2020 15:33:11 -0700 Subject: [PATCH 481/536] Fix import of distutils.spawn (#805) --- influxdb/tests/server_tests/influxdb_instance.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/tests/server_tests/influxdb_instance.py b/influxdb/tests/server_tests/influxdb_instance.py index 1dcd7567..2dd823ff 100644 --- a/influxdb/tests/server_tests/influxdb_instance.py +++ 
b/influxdb/tests/server_tests/influxdb_instance.py @@ -7,7 +7,7 @@ from __future__ import unicode_literals import datetime -import distutils +import distutils.spawn import os import tempfile import shutil From 57c14083215ed2012dc3eb755869544c30f4f809 Mon Sep 17 00:00:00 2001 From: Sebastian Borza Date: Thu, 9 Apr 2020 17:34:33 -0500 Subject: [PATCH 482/536] chore(CHANGELOG): update to include PR from #805 --- CHANGELOG.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index be15d52b..44f7f4d1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,7 +10,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ### Changed -## [v5.2.4] - 2020-04-10 +## [v5.3.0] - 2020-04-10 ### Added - Add mypy testing framework (#756) @@ -24,6 +24,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. - Add legacy client test (#752 & #318 thx @oldmantaiter & @sebito91) - Update make_lines section in line_protocol.py to split out core function (#375 thx @aisbaa) - Fix nanosecond time resolution for points (#407 thx @AndreCAndersen && @clslgrnc) +- Fix import of distutils.spawn (#805 thx @Hawk777) ### Removed From 896f6237f9217354fd365b7157bb4dd01af633a3 Mon Sep 17 00:00:00 2001 From: Sebastian Borza Date: Thu, 9 Apr 2020 19:05:34 -0500 Subject: [PATCH 483/536] chore(line_protocol): update repr value of floats to properly handle precision (#813) * chore(line_protocol): update repr value of floats to properly handle precision. 
Closes #488 * chore(line_protocol): fix repr and handle boolean values * chore(CHANGELOG): update to include reference to PR#488 --- CHANGELOG.md | 1 + influxdb/line_protocol.py | 5 ++++- influxdb/tests/test_line_protocol.py | 23 +++++++++++++++++++++-- 3 files changed, 26 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 44f7f4d1..943fb83f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -25,6 +25,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. - Update make_lines section in line_protocol.py to split out core function (#375 thx @aisbaa) - Fix nanosecond time resolution for points (#407 thx @AndreCAndersen && @clslgrnc) - Fix import of distutils.spawn (#805 thx @Hawk777) +- Update repr of float values including properly handling of boolean (#488 thx @ghost) ### Removed diff --git a/influxdb/line_protocol.py b/influxdb/line_protocol.py index 3a5eb4e8..d6cbf46f 100644 --- a/influxdb/line_protocol.py +++ b/influxdb/line_protocol.py @@ -112,8 +112,11 @@ def _escape_value(value): if isinstance(value, integer_types) and not isinstance(value, bool): return str(value) + 'i' + if isinstance(value, bool): + return str(value) + if _is_float(value): - return repr(value) + return repr(float(value)) return str(value) diff --git a/influxdb/tests/test_line_protocol.py b/influxdb/tests/test_line_protocol.py index 71828f62..c48d5edc 100644 --- a/influxdb/tests/test_line_protocol.py +++ b/influxdb/tests/test_line_protocol.py @@ -6,10 +6,12 @@ from __future__ import print_function from __future__ import unicode_literals -from datetime import datetime import unittest -from pytz import UTC, timezone +from datetime import datetime +from decimal import Decimal + +from pytz import UTC, timezone from influxdb import line_protocol @@ -166,3 +168,20 @@ def test_float_with_long_decimal_fraction(self): line_protocol.make_lines(data), 'test float_val=1.0000000000000009\n' ) + + def 
test_float_with_long_decimal_fraction_as_type_decimal(self): + """Ensure precision is preserved when casting Decimal into strings.""" + data = { + "points": [ + { + "measurement": "test", + "fields": { + "float_val": Decimal(0.8289445733333332), + } + } + ] + } + self.assertEqual( + line_protocol.make_lines(data), + 'test float_val=0.8289445733333332\n' + ) From 42d172d1db5eab8d881325d24404aa240bf342ac Mon Sep 17 00:00:00 2001 From: Kenzyme Le Date: Thu, 9 Apr 2020 22:15:05 -0400 Subject: [PATCH 484/536] Fix tests for existing 'Adding time_precision optional option to SeriesHelper' PR (#719) * Adding time_precision into Meta of SeriesHelper time_precision option not currently supported in influx-db python * Making Time Precision optional Changed it from required to optional * Fixing Typo in _time_precision Attribute * Fixing coding conventions for Travis CI * Appunni: Test for invalid time_precision on SeriesHelper * Appunni: Intendation problem resolution in master * - Fix flake7 errors : E131 continuation line unaligned for hanging indent - Fix typo - Fix cls._client declaration ordering - Remove duplicate code cls._autocommit ... Co-authored-by: appunni-dishq <31534711+appunni-dishq@users.noreply.github.com> Co-authored-by: xginn8 Co-authored-by: appunni --- influxdb/helper.py | 20 +++++++++++++++++--- influxdb/tests/helper_test.py | 13 ++++++++++++- 2 files changed, 29 insertions(+), 4 deletions(-) diff --git a/influxdb/helper.py b/influxdb/helper.py index e622526d..fa79c079 100644 --- a/influxdb/helper.py +++ b/influxdb/helper.py @@ -41,6 +41,10 @@ class Meta: # Only applicable if autocommit is True. autocommit = True # If True and no bulk_size, then will set bulk_size to 1. 
+ time_precision = "h"|"m"|"s"|"ms"|"u"|"ns" + # Default is ns (nanoseconds) + # Setting time precision while writing point + # You should also make sure time is set in the given precision """ @@ -71,6 +75,13 @@ def __new__(cls, *args, **kwargs): cls.__name__)) cls._autocommit = getattr(_meta, 'autocommit', False) + cls._time_precision = getattr(_meta, 'time_precision', None) + + allowed_time_precisions = ['h', 'm', 's', 'ms', 'u', 'ns', None] + if cls._time_precision not in allowed_time_precisions: + raise AttributeError( + 'In {0}, time_precision is set, but invalid use any of {1}.' + .format(cls.__name__, ','.join(allowed_time_precisions))) cls._client = getattr(_meta, 'client', None) if cls._autocommit and not cls._client: @@ -116,11 +127,11 @@ def __init__(self, **kw): keys = set(kw.keys()) # all tags should be passed, and keys - tags should be a subset of keys - if not(tags <= keys): + if not (tags <= keys): raise NameError( 'Expected arguments to contain all tags {0}, instead got {1}.' 
.format(cls._tags, kw.keys())) - if not(keys - tags <= fields): + if not (keys - tags <= fields): raise NameError('Got arguments not in tags or fields: {0}' .format(keys - tags - fields)) @@ -143,7 +154,10 @@ def commit(cls, client=None): """ if not client: client = cls._client - rtn = client.write_points(cls._json_body_()) + rtn = client.write_points( + cls._json_body_(), + time_precision=cls._time_precision) + # will be None if not set and will default to ns cls._reset_() return rtn diff --git a/influxdb/tests/helper_test.py b/influxdb/tests/helper_test.py index 6f24e85d..6aa8f15a 100644 --- a/influxdb/tests/helper_test.py +++ b/influxdb/tests/helper_test.py @@ -310,8 +310,19 @@ class Meta: series_name = 'events.stats.{server_name}' + class InvalidTimePrecision(SeriesHelper): + """Define instance of SeriesHelper for invalid time precision.""" + + class Meta: + """Define metadata for InvalidTimePrecision.""" + + series_name = 'events.stats.{server_name}' + time_precision = "ks" + fields = ['time', 'server_name'] + autocommit = True + for cls in [MissingMeta, MissingClient, MissingFields, - MissingSeriesName]: + MissingSeriesName, InvalidTimePrecision]: self.assertRaises( AttributeError, cls, **{'time': 159, 'server_name': 'us.east-1'}) From 8a8b8ff3c7d517959146c9daeb09c982a9c184b1 Mon Sep 17 00:00:00 2001 From: Sebastian Borza Date: Thu, 9 Apr 2020 21:27:23 -0500 Subject: [PATCH 485/536] chore(CHANGELOG): update for PR#502 and #719 --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 943fb83f..b94e438e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -18,6 +18,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. 
- Add support for 'show series' (#357 thx @gaker) - Add support for custom request session in InfluxDBClient (#360 thx @dschien) - Add support for handling np.nan and np.inf values in DataFrameClient (#436 thx @nmerket) +- Add support for optional `time_precision` in the SeriesHelper (#502 && #719 thx @appunni-dishq && @klDen) ### Changed - Clean up stale CI config (#755) From 72c372f2412d445ea205e9b3cc696e242a90e35e Mon Sep 17 00:00:00 2001 From: Sebastian Borza Date: Fri, 10 Apr 2020 08:59:46 -0500 Subject: [PATCH 486/536] chore(dataframe_client): update to handle empty tags in dataframe client (#814) --- CHANGELOG.md | 1 + influxdb/_dataframe_client.py | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b94e438e..e7f40dc7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -27,6 +27,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. - Fix nanosecond time resolution for points (#407 thx @AndreCAndersen && @clslgrnc) - Fix import of distutils.spawn (#805 thx @Hawk777) - Update repr of float values including properly handling of boolean (#488 thx @ghost) +- Update dataframe_client to fix faulty empty tags (#770 thx @michelfripiat) ### Removed diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index f411bb37..a977754e 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -388,7 +388,8 @@ def _convert_dataframe_to_lines(self, del tag_df elif global_tags: tag_string = ''.join( - [",{}={}".format(k, _escape_tag(v)) if v else '' + [",{}={}".format(k, _escape_tag(v)) + if v not in [None, ''] else "" for k, v in sorted(global_tags.items())] ) tags = pd.Series(tag_string, index=dataframe.index) From c35e49ef6dd919c87dc20a01003922818bb30096 Mon Sep 17 00:00:00 2001 From: jgspiro <45625897+jgspiro@users.noreply.github.com> Date: Fri, 10 Apr 2020 16:07:18 +0200 Subject: [PATCH 487/536] Bugfix dropna in DataFrameClient. Add unit test. 
(#778) --- influxdb/_dataframe_client.py | 3 +- influxdb/tests/dataframe_client_test.py | 92 +++++++++++++++++++++++++ 2 files changed, 94 insertions(+), 1 deletion(-) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index a977754e..d7e67baa 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -203,7 +203,8 @@ def query(self, def _to_dataframe(self, rs, dropna=True): result = defaultdict(list) if isinstance(rs, list): - return map(self._to_dataframe, rs) + return map(self._to_dataframe, rs, + [dropna for _ in range(len(rs))]) for key, data in rs.items(): name, tags = key diff --git a/influxdb/tests/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py index 0573d5c3..4e172ea7 100644 --- a/influxdb/tests/dataframe_client_test.py +++ b/influxdb/tests/dataframe_client_test.py @@ -968,6 +968,98 @@ def test_multiquery_into_dataframe(self): for k in e: assert_frame_equal(e[k], r[k]) + def test_multiquery_into_dataframe_dropna(self): + """Test multiquery into df for TestDataFrameClient object.""" + data = { + "results": [ + { + "series": [ + { + "name": "cpu_load_short", + "columns": ["time", "value", "value2", "value3"], + "values": [ + ["2015-01-29T21:55:43.702900257Z", + 0.55, 0.254, numpy.NaN], + ["2015-01-29T21:55:43.702900257Z", + 23422, 122878, numpy.NaN], + ["2015-06-11T20:46:02Z", + 0.64, 0.5434, numpy.NaN] + ] + } + ] + }, { + "series": [ + { + "name": "cpu_load_short", + "columns": ["time", "count"], + "values": [ + ["1970-01-01T00:00:00Z", 3] + ] + } + ] + } + ] + } + + pd1 = pd.DataFrame( + [[0.55, 0.254, numpy.NaN], + [23422.0, 122878, numpy.NaN], + [0.64, 0.5434, numpy.NaN]], + columns=['value', 'value2', 'value3'], + index=pd.to_datetime([ + "2015-01-29 21:55:43.702900257+0000", + "2015-01-29 21:55:43.702900257+0000", + "2015-06-11 20:46:02+0000"])) + + if pd1.index.tzinfo is None: + pd1.index = pd1.index.tz_localize('UTC') + + pd1_dropna = pd.DataFrame( + [[0.55, 0.254], [23422.0, 
122878], [0.64, 0.5434]], + columns=['value', 'value2'], + index=pd.to_datetime([ + "2015-01-29 21:55:43.702900257+0000", + "2015-01-29 21:55:43.702900257+0000", + "2015-06-11 20:46:02+0000"])) + + if pd1_dropna.index.tzinfo is None: + pd1_dropna.index = pd1_dropna.index.tz_localize('UTC') + + pd2 = pd.DataFrame( + [[3]], columns=['count'], + index=pd.to_datetime(["1970-01-01 00:00:00+00:00"])) + + if pd2.index.tzinfo is None: + pd2.index = pd2.index.tz_localize('UTC') + + expected_dropna_true = [ + {'cpu_load_short': pd1_dropna}, + {'cpu_load_short': pd2}] + expected_dropna_false = [ + {'cpu_load_short': pd1}, + {'cpu_load_short': pd2}] + + cli = DataFrameClient('host', 8086, 'username', 'password', 'db') + iql = "SELECT value FROM cpu_load_short WHERE region=$region;" \ + "SELECT count(value) FROM cpu_load_short WHERE region=$region" + bind_params = {'region': 'us-west'} + + for dropna in [True, False]: + with _mocked_session(cli, 'GET', 200, data): + result = cli.query(iql, bind_params=bind_params, dropna=dropna) + expected = \ + expected_dropna_true if dropna else expected_dropna_false + for r, e in zip(result, expected): + for k in e: + assert_frame_equal(e[k], r[k]) + + # test default value (dropna = True) + with _mocked_session(cli, 'GET', 200, data): + result = cli.query(iql, bind_params=bind_params) + for r, e in zip(result, expected_dropna_true): + for k in e: + assert_frame_equal(e[k], r[k]) + def test_query_with_empty_result(self): """Test query with empty results in TestDataFrameClient object.""" cli = DataFrameClient('host', 8086, 'username', 'password', 'db') From d85ecee3aa132980e11005a3ba8a248ccac6965b Mon Sep 17 00:00:00 2001 From: testforvln <1694611+testforvln@users.noreply.github.com> Date: Fri, 10 Apr 2020 22:21:24 +0800 Subject: [PATCH 488/536] fix bug in _convert_dataframe_to_json function that need not transform index if data type of index is already DatatimeIndex (#623) --- influxdb/_dataframe_client.py | 3 ++- 1 file changed, 2 
insertions(+), 1 deletion(-) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index d7e67baa..600bc1ec 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -252,7 +252,8 @@ def _convert_dataframe_to_json(dataframe, field_columns = list( set(dataframe.columns).difference(set(tag_columns))) - dataframe.index = pd.to_datetime(dataframe.index) + if not isinstance(dataframe.index, pd.DatetimeIndex): + dataframe.index = pd.to_datetime(dataframe.index) if dataframe.index.tzinfo is None: dataframe.index = dataframe.index.tz_localize('UTC') From ed975b4b5ab754d09c804bd69a26c6bddc160fa1 Mon Sep 17 00:00:00 2001 From: Sebastian Borza Date: Fri, 10 Apr 2020 09:22:36 -0500 Subject: [PATCH 489/536] chore(CHANGELOG): update with more recent PR merges --- CHANGELOG.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e7f40dc7..11f47e38 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -27,7 +27,9 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. 
- Fix nanosecond time resolution for points (#407 thx @AndreCAndersen && @clslgrnc) - Fix import of distutils.spawn (#805 thx @Hawk777) - Update repr of float values including properly handling of boolean (#488 thx @ghost) -- Update dataframe_client to fix faulty empty tags (#770 thx @michelfripiat) +- Update DataFrameClient to fix faulty empty tags (#770 thx @michelfripiat) +- Update DataFrameClient to properly return `dropna` values (#778 thx @jgspiro) +- Update DataFrameClient to test for pd.DataTimeIndex before blind conversion (#623 thx @testforvin) ### Removed From 5929fef5df91f49df7981688df832958d9fa7e89 Mon Sep 17 00:00:00 2001 From: Sebastian Borza Date: Fri, 10 Apr 2020 09:41:52 -0500 Subject: [PATCH 490/536] chore(dataframe_client_test): fix incorrect package naming convention --- influxdb/tests/dataframe_client_test.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/influxdb/tests/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py index 4e172ea7..a80498f3 100644 --- a/influxdb/tests/dataframe_client_test.py +++ b/influxdb/tests/dataframe_client_test.py @@ -979,11 +979,11 @@ def test_multiquery_into_dataframe_dropna(self): "columns": ["time", "value", "value2", "value3"], "values": [ ["2015-01-29T21:55:43.702900257Z", - 0.55, 0.254, numpy.NaN], + 0.55, 0.254, np.NaN], ["2015-01-29T21:55:43.702900257Z", - 23422, 122878, numpy.NaN], + 23422, 122878, np.NaN], ["2015-06-11T20:46:02Z", - 0.64, 0.5434, numpy.NaN] + 0.64, 0.5434, np.NaN] ] } ] @@ -1002,9 +1002,9 @@ def test_multiquery_into_dataframe_dropna(self): } pd1 = pd.DataFrame( - [[0.55, 0.254, numpy.NaN], - [23422.0, 122878, numpy.NaN], - [0.64, 0.5434, numpy.NaN]], + [[0.55, 0.254, np.NaN], + [23422.0, 122878, np.NaN], + [0.64, 0.5434, np.NaN]], columns=['value', 'value2', 'value3'], index=pd.to_datetime([ "2015-01-29 21:55:43.702900257+0000", From aad8349db4638696fbbc3d253ca353b5f7a31759 Mon Sep 17 00:00:00 2001 From: Sebastian Borza Date: Fri, 10 Apr 
2020 10:50:15 -0500 Subject: [PATCH 491/536] chore(client): ensure UDP port is actually an int. Closes #651. (#815) * chore(client): ensure UDP port is actually an int. Closes #651. * chore(CHANGELOG): update to include PR#651 --- CHANGELOG.md | 1 + influxdb/client.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 11f47e38..31e36680 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -30,6 +30,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. - Update DataFrameClient to fix faulty empty tags (#770 thx @michelfripiat) - Update DataFrameClient to properly return `dropna` values (#778 thx @jgspiro) - Update DataFrameClient to test for pd.DataTimeIndex before blind conversion (#623 thx @testforvin) +- Update client to type-set UDP port to int (#651 thx @yifeikong) ### Removed diff --git a/influxdb/client.py b/influxdb/client.py index 390d8e16..0f0350c8 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -107,7 +107,7 @@ def __init__(self, self._verify_ssl = verify_ssl self.__use_udp = use_udp - self.__udp_port = udp_port + self.__udp_port = int(udp_port) if not session: session = requests.Session() From d2bf36c5be1cafb48f11e095f2a269901085dcf7 Mon Sep 17 00:00:00 2001 From: Cesar Sanz Date: Fri, 10 Apr 2020 18:28:53 +0200 Subject: [PATCH 492/536] Specify Retention Policy in SeriesHelper (#723) * Allow specify a retention policy in SeriesHelper * Complete the annotation example with the retention policy * Fix formatting * Fix formatting again * Add helper write with retention policy * Add helper write without retention policy * Remove blank line after the docstring. 
Co-authored-by: Sebastian Borza --- influxdb/helper.py | 8 +++++- influxdb/tests/helper_test.py | 51 +++++++++++++++++++++++++++++++++++ 2 files changed, 58 insertions(+), 1 deletion(-) diff --git a/influxdb/helper.py b/influxdb/helper.py index fa79c079..f49f40ad 100644 --- a/influxdb/helper.py +++ b/influxdb/helper.py @@ -41,6 +41,8 @@ class Meta: # Only applicable if autocommit is True. autocommit = True # If True and no bulk_size, then will set bulk_size to 1. + retention_policy = 'your_retention_policy' + # Specify the retention policy for the data points time_precision = "h"|"m"|s"|"ms"|"u"|"ns" # Default is ns (nanoseconds) # Setting time precision while writing point @@ -83,6 +85,8 @@ def __new__(cls, *args, **kwargs): 'In {0}, time_precision is set, but invalid use any of {}.' .format(cls.__name__, ','.join(allowed_time_precisions))) + cls._retention_policy = getattr(_meta, 'retention_policy', None) + cls._client = getattr(_meta, 'client', None) if cls._autocommit and not cls._client: raise AttributeError( @@ -154,9 +158,11 @@ def commit(cls, client=None): """ if not client: client = cls._client + rtn = client.write_points( cls._json_body_(), - time_precision=cls._time_precision) + time_precision=cls._time_precision, + retention_policy=cls._retention_policy) # will be None if not set and will default to ns cls._reset_() return rtn diff --git a/influxdb/tests/helper_test.py b/influxdb/tests/helper_test.py index 6aa8f15a..16924936 100644 --- a/influxdb/tests/helper_test.py +++ b/influxdb/tests/helper_test.py @@ -376,3 +376,54 @@ class Meta: .format(WarnBulkSizeNoEffect)) self.assertIn('has no affect', str(w[-1].message), 'Warning message did not contain "has not affect".') + + def testSeriesWithRetentionPolicy(self): + """Test that the data is saved with the specified retention policy.""" + my_policy = 'my_policy' + + class RetentionPolicySeriesHelper(SeriesHelper): + + class Meta: + client = InfluxDBClient() + series_name = 'events.stats.{server_name}' + 
fields = ['some_stat', 'time'] + tags = ['server_name', 'other_tag'] + bulk_size = 2 + autocommit = True + retention_policy = my_policy + + fake_write_points = mock.MagicMock() + RetentionPolicySeriesHelper( + server_name='us.east-1', some_stat=159, other_tag='gg') + RetentionPolicySeriesHelper._client.write_points = fake_write_points + RetentionPolicySeriesHelper( + server_name='us.east-1', some_stat=158, other_tag='aa') + + kall = fake_write_points.call_args + args, kwargs = kall + self.assertTrue('retention_policy' in kwargs) + self.assertEqual(kwargs['retention_policy'], my_policy) + + def testSeriesWithoutRetentionPolicy(self): + """Test that the data is saved without any retention policy.""" + class NoRetentionPolicySeriesHelper(SeriesHelper): + + class Meta: + client = InfluxDBClient() + series_name = 'events.stats.{server_name}' + fields = ['some_stat', 'time'] + tags = ['server_name', 'other_tag'] + bulk_size = 2 + autocommit = True + + fake_write_points = mock.MagicMock() + NoRetentionPolicySeriesHelper( + server_name='us.east-1', some_stat=159, other_tag='gg') + NoRetentionPolicySeriesHelper._client.write_points = fake_write_points + NoRetentionPolicySeriesHelper( + server_name='us.east-1', some_stat=158, other_tag='aa') + + kall = fake_write_points.call_args + args, kwargs = kall + self.assertTrue('retention_policy' in kwargs) + self.assertEqual(kwargs['retention_policy'], None) From 72c18b899db03432bca37d08c3f9efeb0875d30d Mon Sep 17 00:00:00 2001 From: Sebastian Borza Date: Fri, 10 Apr 2020 11:29:58 -0500 Subject: [PATCH 493/536] chore(CHANGELOG): update for PR#723 --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 31e36680..f2fad368 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -19,6 +19,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. 
- Add support for custom request session in InfluxDBClient (#360 thx @dschien) - Add support for handling np.nan and np.inf values in DataFrameClient (#436 thx @nmerket) - Add support for optional `time_precision` in the SeriesHelper (#502 && #719 thx @appunni-dishq && @klDen) +- Add ability to specify retention policy in SeriesHelper (#723 thx @csanz91) ### Changed - Clean up stale CI config (#755) From c858a46ae5fd74cfe9220a8356dd5825ee2a66dd Mon Sep 17 00:00:00 2001 From: Kevin Claytor Date: Fri, 10 Apr 2020 12:49:26 -0400 Subject: [PATCH 494/536] gzip compression for data (post and responses) (#732) * gzip compression working in my influx stack. Needs proper tests. * Also gzip data from server, slightly more straightforward data handling. * Adding in test cases. * Switching back to zlib with gzip headers. * flake8 compatibility * Move parameter into correct position. per review * Switching back to gzip for the headers. * Fixing python 2.7 compatability with gzip. * flake8 compatibility. 
* flake8 testing Co-authored-by: Kevin Claytor Co-authored-by: Sebastian Borza --- influxdb/client.py | 25 ++++++- influxdb/tests/client_test.py | 67 +++++++++++++++++++ influxdb/tests/server_tests/base.py | 47 +++++++++++++ .../server_tests/client_test_with_server.py | 24 +++++++ 4 files changed, 162 insertions(+), 1 deletion(-) diff --git a/influxdb/client.py b/influxdb/client.py index 0f0350c8..3262c242 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -7,7 +7,9 @@ from __future__ import unicode_literals import datetime +import gzip import itertools +import io import json import random import socket @@ -70,10 +72,11 @@ class InfluxDBClient(object): as a single file containing the private key and the certificate, or as a tuple of both files’ paths, defaults to None :type cert: str + :param gzip: use gzip content encoding to compress requests + :type gzip: bool :param session: allow for the new client request to use an existing requests Session, defaults to None :type session: requests.Session - :raises ValueError: if cert is provided but ssl is disabled (set to False) """ @@ -93,6 +96,7 @@ def __init__(self, pool_size=10, path='', cert=None, + gzip=False, session=None, ): """Construct a new InfluxDBClient object.""" @@ -159,6 +163,8 @@ def __init__(self, 'Accept': 'application/x-msgpack' } + self._gzip = gzip + @property def _baseurl(self): return self.__baseurl @@ -278,6 +284,23 @@ def request(self, url, method='GET', params=None, data=None, if isinstance(data, (dict, list)): data = json.dumps(data) + if self._gzip: + # Receive and send compressed data + headers.update({ + 'Accept-Encoding': 'gzip', + 'Content-Encoding': 'gzip', + }) + if data is not None: + # For Py 2.7 compatability use Gzipfile + compressed = io.BytesIO() + with gzip.GzipFile( + compresslevel=9, + fileobj=compressed, + mode='w' + ) as f: + f.write(data) + data = compressed.getvalue() + # Try to send the request more than once by default (see #103) retry = True _try = 0 diff --git 
a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index fe7381ea..f8f3cb00 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -24,6 +24,8 @@ import unittest import warnings +import io +import gzip import json import mock import requests @@ -214,6 +216,71 @@ def test_write_points(self): m.last_request.body.decode('utf-8'), ) + def test_write_gzip(self): + """Test write in TestInfluxDBClient object.""" + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.POST, + "http://localhost:8086/write", + status_code=204 + ) + + cli = InfluxDBClient(database='db', gzip=True) + cli.write( + {"database": "mydb", + "retentionPolicy": "mypolicy", + "points": [{"measurement": "cpu_load_short", + "tags": {"host": "server01", + "region": "us-west"}, + "time": "2009-11-10T23:00:00Z", + "fields": {"value": 0.64}}]} + ) + + compressed = io.BytesIO() + with gzip.GzipFile( + compresslevel=9, + fileobj=compressed, + mode='w' + ) as f: + f.write( + b"cpu_load_short,host=server01,region=us-west " + b"value=0.64 1257894000000000000\n" + ) + + self.assertEqual( + m.last_request.body, + compressed.getvalue(), + ) + + def test_write_points_gzip(self): + """Test write points for TestInfluxDBClient object.""" + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.POST, + "http://localhost:8086/write", + status_code=204 + ) + + cli = InfluxDBClient(database='db', gzip=True) + cli.write_points( + self.dummy_points, + ) + + compressed = io.BytesIO() + with gzip.GzipFile( + compresslevel=9, + fileobj=compressed, + mode='w' + ) as f: + f.write( + b'cpu_load_short,host=server01,region=us-west ' + b'value=0.64 1257894000123456000\n' + ) + self.assertEqual( + m.last_request.body, + compressed.getvalue(), + ) + def test_write_points_toplevel_attributes(self): """Test write points attrs for TestInfluxDBClient object.""" with requests_mock.Mocker() as m: diff --git a/influxdb/tests/server_tests/base.py 
b/influxdb/tests/server_tests/base.py index fe722870..45a9ec80 100644 --- a/influxdb/tests/server_tests/base.py +++ b/influxdb/tests/server_tests/base.py @@ -36,6 +36,15 @@ def _setup_influxdb_server(inst): database='db') +def _setup_gzip_client(inst): + inst.cli = InfluxDBClient('localhost', + inst.influxd_inst.http_port, + 'root', + '', + database='db', + gzip=True) + + def _teardown_influxdb_server(inst): remove_tree = sys.exc_info() == (None, None, None) inst.influxd_inst.close(remove_tree=remove_tree) @@ -89,3 +98,41 @@ def tearDownClass(cls): def tearDown(self): """Deconstruct an instance of ManyTestCasesWithServerMixin.""" self.cli.drop_database('db') + + +class SingleTestCaseWithServerGzipMixin(object): + """Define the single testcase with server with gzip client mixin. + + Same as the SingleTestCaseWithServerGzipMixin but the InfluxDBClient has + gzip=True + """ + + @classmethod + def setUp(cls): + """Set up an instance of the SingleTestCaseWithServerGzipMixin.""" + _setup_influxdb_server(cls) + _setup_gzip_client(cls) + + @classmethod + def tearDown(cls): + """Tear down an instance of the SingleTestCaseWithServerMixin.""" + _teardown_influxdb_server(cls) + + +class ManyTestCasesWithServerGzipMixin(object): + """Define the many testcase with server with gzip client mixin. + + Same as the ManyTestCasesWithServerMixin but the InfluxDBClient has + gzip=True. 
+ """ + + @classmethod + def setUpClass(cls): + """Set up an instance of the ManyTestCasesWithServerGzipMixin.""" + _setup_influxdb_server(cls) + _setup_gzip_client(cls) + + @classmethod + def tearDown(cls): + """Tear down an instance of the SingleTestCaseWithServerMixin.""" + _teardown_influxdb_server(cls) diff --git a/influxdb/tests/server_tests/client_test_with_server.py b/influxdb/tests/server_tests/client_test_with_server.py index 94f28b66..020014c3 100644 --- a/influxdb/tests/server_tests/client_test_with_server.py +++ b/influxdb/tests/server_tests/client_test_with_server.py @@ -26,6 +26,8 @@ from influxdb.tests import skip_if_pypy, using_pypy, skip_server_tests from influxdb.tests.server_tests.base import ManyTestCasesWithServerMixin from influxdb.tests.server_tests.base import SingleTestCaseWithServerMixin +from influxdb.tests.server_tests.base import ManyTestCasesWithServerGzipMixin +from influxdb.tests.server_tests.base import SingleTestCaseWithServerGzipMixin # By default, raise exceptions on warnings warnings.simplefilter('error', FutureWarning) @@ -913,3 +915,25 @@ def test_write_points_udp(self): ], list(rsp['cpu_load_short']) ) + + +# Run the tests again, but with gzip enabled this time +@skip_server_tests +class GzipSimpleTests(SimpleTests, SingleTestCaseWithServerGzipMixin): + """Repeat the simple tests with InfluxDBClient where gzip=True.""" + + pass + + +@skip_server_tests +class GzipCommonTests(CommonTests, ManyTestCasesWithServerGzipMixin): + """Repeat the common tests with InfluxDBClient where gzip=True.""" + + pass + + +@skip_server_tests +class GzipUdpTests(UdpTests, ManyTestCasesWithServerGzipMixin): + """Repeat the UDP tests with InfluxDBClient where gzip=True.""" + + pass From d59011945435d747d8446cd2453b1f0e0850754f Mon Sep 17 00:00:00 2001 From: Sebastian Borza Date: Fri, 10 Apr 2020 11:50:38 -0500 Subject: [PATCH 495/536] chore(CHANGELOG): update to add PR#732 --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git 
a/CHANGELOG.md b/CHANGELOG.md index f2fad368..3a706e7b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,6 +20,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. - Add support for handling np.nan and np.inf values in DataFrameClient (#436 thx @nmerket) - Add support for optional `time_precision` in the SeriesHelper (#502 && #719 thx @appunni-dishq && @klDen) - Add ability to specify retention policy in SeriesHelper (#723 thx @csanz91) +- Add gzip compression for post and response data (#732 thx @KEClaytor) ### Changed - Clean up stale CI config (#755) From de44289e0167d690895e8e70eff37edeef36240f Mon Sep 17 00:00:00 2001 From: Jamie Hewland Date: Fri, 10 Apr 2020 18:58:31 +0200 Subject: [PATCH 496/536] Make batched writing support all iterables (#746) * Make batched writing support all iterables * Also test batching generator against real server * Fix PEP257 error * Import itertools functions directly --- influxdb/client.py | 15 ++++++++-- influxdb/tests/client_test.py | 30 +++++++++++++++++++ .../server_tests/client_test_with_server.py | 27 +++++++++++++++++ 3 files changed, 69 insertions(+), 3 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 3262c242..46424bc2 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -15,11 +15,11 @@ import socket import struct import time +from itertools import chain, islice import msgpack import requests import requests.exceptions -from six.moves import xrange from six.moves.urllib.parse import urlparse from influxdb.line_protocol import make_lines, quote_ident, quote_literal @@ -597,8 +597,17 @@ def ping(self): @staticmethod def _batches(iterable, size): - for i in xrange(0, len(iterable), size): - yield iterable[i:i + size] + # Iterate over an iterable producing iterables of batches. Based on: + # http://code.activestate.com/recipes/303279-getting-items-in-batches/ + iterator = iter(iterable) + while True: + try: # Try get the first element in the iterator... 
+ head = (next(iterator),) + except StopIteration: + return # ...so that we can stop if there isn't one + # Otherwise, lazily slice the rest of the batch + rest = islice(iterator, size - 1) + yield chain(head, rest) def _write_points(self, points, diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index f8f3cb00..a8f8e864 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -332,6 +332,36 @@ def test_write_points_batch(self): self.assertEqual(expected_last_body, m.last_request.body.decode('utf-8')) + def test_write_points_batch_generator(self): + """Test write points batch from a generator for TestInfluxDBClient.""" + dummy_points = [ + {"measurement": "cpu_usage", "tags": {"unit": "percent"}, + "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.34}}, + {"measurement": "network", "tags": {"direction": "in"}, + "time": "2009-11-10T23:00:00Z", "fields": {"value": 123.00}}, + {"measurement": "network", "tags": {"direction": "out"}, + "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.00}} + ] + dummy_points_generator = (point for point in dummy_points) + expected_last_body = ( + "network,direction=out,host=server01,region=us-west " + "value=12.0 1257894000000000000\n" + ) + + with requests_mock.Mocker() as m: + m.register_uri(requests_mock.POST, + "http://localhost:8086/write", + status_code=204) + cli = InfluxDBClient(database='db') + cli.write_points(points=dummy_points_generator, + database='db', + tags={"host": "server01", + "region": "us-west"}, + batch_size=2) + self.assertEqual(m.call_count, 2) + self.assertEqual(expected_last_body, + m.last_request.body.decode('utf-8')) + def test_write_points_udp(self): """Test write points UDP for TestInfluxDBClient object.""" s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) diff --git a/influxdb/tests/server_tests/client_test_with_server.py b/influxdb/tests/server_tests/client_test_with_server.py index 020014c3..a0263243 100644 --- 
a/influxdb/tests/server_tests/client_test_with_server.py +++ b/influxdb/tests/server_tests/client_test_with_server.py @@ -452,6 +452,33 @@ def test_write_points_batch(self): self.assertIn(12, net_out['series'][0]['values'][0]) self.assertIn(12.34, cpu['series'][0]['values'][0]) + def test_write_points_batch_generator(self): + """Test writing points in a batch from a generator.""" + dummy_points = [ + {"measurement": "cpu_usage", "tags": {"unit": "percent"}, + "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.34}}, + {"measurement": "network", "tags": {"direction": "in"}, + "time": "2009-11-10T23:00:00Z", "fields": {"value": 123.00}}, + {"measurement": "network", "tags": {"direction": "out"}, + "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.00}} + ] + dummy_points_generator = (point for point in dummy_points) + self.cli.write_points(points=dummy_points_generator, + tags={"host": "server01", + "region": "us-west"}, + batch_size=2) + time.sleep(5) + net_in = self.cli.query("SELECT value FROM network " + "WHERE direction=$dir", + bind_params={'dir': 'in'} + ).raw + net_out = self.cli.query("SELECT value FROM network " + "WHERE direction='out'").raw + cpu = self.cli.query("SELECT value FROM cpu_usage").raw + self.assertIn(123, net_in['series'][0]['values'][0]) + self.assertIn(12, net_out['series'][0]['values'][0]) + self.assertIn(12.34, cpu['series'][0]['values'][0]) + def test_query(self): """Test querying data back from server.""" self.assertIs(True, self.cli.write_points(dummy_point)) From d6192a759c106da1317fc60d0630022bf7e829cf Mon Sep 17 00:00:00 2001 From: Sebastian Borza Date: Fri, 10 Apr 2020 11:59:25 -0500 Subject: [PATCH 497/536] chore(CHANGELOG): update to include PR#746 --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3a706e7b..5a49c84a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -33,6 +33,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. 
- Update DataFrameClient to properly return `dropna` values (#778 thx @jgspiro) - Update DataFrameClient to test for pd.DataTimeIndex before blind conversion (#623 thx @testforvin) - Update client to type-set UDP port to int (#651 thx @yifeikong) +- Update batched writing support for all iterables (#746 thx @JayH5) ### Removed From c903d73efcf49b4e340490072d777d8f34ac8e1c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stefan=20=22hr=22=20Berder=20=28=E7=99=BD=E5=B3=B0=29?= Date: Sat, 11 Apr 2020 01:10:40 +0800 Subject: [PATCH 498/536] Fix chunked query to return chunk resultsets (#753) When querying large data sets, it's vital to get a chunked responses to manage memory usage. Wrapping the query response in a generator and streaming the request provides the desired result. It also fixes `InfluxDBClient.query()` behavior for chunked queries that is currently not working according to [specs](https://github.com/influxdata/influxdb-python/blob/master/influxdb/client.py#L429) Closes #585. Closes #531. Closes #538. --- influxdb/client.py | 10 +++++--- influxdb/tests/client_test.py | 44 ++++++++++++++--------------------- 2 files changed, 25 insertions(+), 29 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 46424bc2..b28ed1b5 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -249,7 +249,7 @@ def switch_user(self, username, password): self._username = username self._password = password - def request(self, url, method='GET', params=None, data=None, + def request(self, url, method='GET', params=None, data=None, stream=False, expected_response_code=200, headers=None): """Make a HTTP request to the InfluxDB API. 
@@ -261,6 +261,8 @@ def request(self, url, method='GET', params=None, data=None, :type params: dict :param data: the data of the request, defaults to None :type data: str + :param stream: True if a query uses chunked responses + :type stream: bool :param expected_response_code: the expected response code of the request, defaults to 200 :type expected_response_code: int @@ -312,6 +314,7 @@ def request(self, url, method='GET', params=None, data=None, auth=(self._username, self._password), params=params, data=data, + stream=stream, headers=headers, proxies=self._proxies, verify=self._verify_ssl, @@ -398,17 +401,17 @@ def write(self, data, params=None, expected_response_code=204, @staticmethod def _read_chunked_response(response, raise_errors=True): - result_set = {} for line in response.iter_lines(): if isinstance(line, bytes): line = line.decode('utf-8') data = json.loads(line) + result_set = {} for result in data.get('results', []): for _key in result: if isinstance(result[_key], list): result_set.setdefault( _key, []).extend(result[_key]) - return ResultSet(result_set, raise_errors=raise_errors) + yield ResultSet(result_set, raise_errors=raise_errors) def query(self, query, @@ -499,6 +502,7 @@ def query(self, method=method, params=params, data=None, + stream=chunked, expected_response_code=expected_response_code ) diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index a8f8e864..fd3c06bb 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -1400,16 +1400,11 @@ def test_invalid_port_fails(self): def test_chunked_response(self): """Test chunked reponse for TestInfluxDBClient object.""" example_response = \ - u'{"results":[{"statement_id":0,"series":' \ - '[{"name":"cpu","columns":["fieldKey","fieldType"],"values":' \ - '[["value","integer"]]}],"partial":true}]}\n{"results":' \ - '[{"statement_id":0,"series":[{"name":"iops","columns":' \ - '["fieldKey","fieldType"],"values":[["value","integer"]]}],' \ - 
'"partial":true}]}\n{"results":[{"statement_id":0,"series":' \ - '[{"name":"load","columns":["fieldKey","fieldType"],"values":' \ - '[["value","integer"]]}],"partial":true}]}\n{"results":' \ - '[{"statement_id":0,"series":[{"name":"memory","columns":' \ - '["fieldKey","fieldType"],"values":[["value","integer"]]}]}]}\n' + u'{"results":[{"statement_id":0,"series":[{"columns":["key"],' \ + '"values":[["cpu"],["memory"],["iops"],["network"]],"partial":' \ + 'true}],"partial":true}]}\n{"results":[{"statement_id":0,' \ + '"series":[{"columns":["key"],"values":[["qps"],["uptime"],' \ + '["df"],["mount"]]}]}]}\n' with requests_mock.Mocker() as m: m.register_uri( @@ -1417,23 +1412,20 @@ def test_chunked_response(self): "http://localhost:8086/query", text=example_response ) - response = self.cli.query('show series limit 4 offset 0', + response = self.cli.query('show series', chunked=True, chunk_size=4) - self.assertTrue(len(response) == 4) - self.assertEqual(response.__repr__(), ResultSet( - {'series': [{'values': [['value', 'integer']], - 'name': 'cpu', - 'columns': ['fieldKey', 'fieldType']}, - {'values': [['value', 'integer']], - 'name': 'iops', - 'columns': ['fieldKey', 'fieldType']}, - {'values': [['value', 'integer']], - 'name': 'load', - 'columns': ['fieldKey', 'fieldType']}, - {'values': [['value', 'integer']], - 'name': 'memory', - 'columns': ['fieldKey', 'fieldType']}]} - ).__repr__()) + res = list(response) + self.assertTrue(len(res) == 2) + self.assertEqual(res[0].__repr__(), ResultSet( + {'series': [{ + 'columns': ['key'], + 'values': [['cpu'], ['memory'], ['iops'], ['network']] + }]}).__repr__()) + self.assertEqual(res[1].__repr__(), ResultSet( + {'series': [{ + 'columns': ['key'], + 'values': [['qps'], ['uptime'], ['df'], ['mount']] + }]}).__repr__()) class FakeClient(InfluxDBClient): From e884631148dda96f586ab9da358deb7119bb57d2 Mon Sep 17 00:00:00 2001 From: Sebastian Borza Date: Fri, 10 Apr 2020 12:12:03 -0500 Subject: [PATCH 499/536] chore(CHANGELOG): 
update to include PR#753 and #538 --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5a49c84a..00d2a4f7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -21,6 +21,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. - Add support for optional `time_precision` in the SeriesHelper (#502 && #719 thx @appunni-dishq && @klDen) - Add ability to specify retention policy in SeriesHelper (#723 thx @csanz91) - Add gzip compression for post and response data (#732 thx @KEClaytor) +- Add support for chunked responses in ResultSet (#753 and #538 thx @hrbonz && @psy0rz) ### Changed - Clean up stale CI config (#755) From 7d82f9371176699f8d6f9c4d0b0133ed5bd20275 Mon Sep 17 00:00:00 2001 From: Greg Schrock Date: Fri, 10 Apr 2020 13:29:54 -0400 Subject: [PATCH 500/536] Fix make_lines excludes fields with empty strings (#655) (#766) * Fix make_lines excludes fields with empty strings (#655) Converting to unicode required something to be done with None values. They were converted to empty strings which were subsequently ignored. This makes it impossible to write an explicitly empty string, which should be possible. This change distinguishes between None and empty strings. 
* Fix linting failure due to long comment line Co-authored-by: Greg Schrock --- influxdb/line_protocol.py | 6 ++++-- influxdb/tests/test_line_protocol.py | 18 ++++++++++++++++++ 2 files changed, 22 insertions(+), 2 deletions(-) diff --git a/influxdb/line_protocol.py b/influxdb/line_protocol.py index d6cbf46f..25dd2ad7 100644 --- a/influxdb/line_protocol.py +++ b/influxdb/line_protocol.py @@ -104,9 +104,11 @@ def _is_float(value): def _escape_value(value): - value = _get_unicode(value) + if value is None: + return '' - if isinstance(value, text_type) and value != '': + value = _get_unicode(value) + if isinstance(value, text_type): return quote_ident(value) if isinstance(value, integer_types) and not isinstance(value, bool): diff --git a/influxdb/tests/test_line_protocol.py b/influxdb/tests/test_line_protocol.py index c48d5edc..5b344990 100644 --- a/influxdb/tests/test_line_protocol.py +++ b/influxdb/tests/test_line_protocol.py @@ -117,6 +117,24 @@ def test_make_lines_unicode(self): 'test,unicode_tag=\'Привет!\' unicode_val="Привет!"\n' ) + def test_make_lines_empty_field_string(self): + """Test make lines with an empty string field.""" + data = { + "points": [ + { + "measurement": "test", + "fields": { + "string": "", + } + } + ] + } + + self.assertEqual( + line_protocol.make_lines(data), + 'test string=""\n' + ) + def test_tag_value_newline(self): """Test make lines with tag value contains newline.""" data = { From 6290994590c4164535d59879e50482b44bb4d7ff Mon Sep 17 00:00:00 2001 From: Sebastian Borza Date: Fri, 10 Apr 2020 12:30:50 -0500 Subject: [PATCH 501/536] chore(CHANGELOG): update for PR #766 --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 00d2a4f7..4ee39303 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -22,6 +22,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. 
- Add ability to specify retention policy in SeriesHelper (#723 thx @csanz91) - Add gzip compression for post and response data (#732 thx @KEClaytor) - Add support for chunked responses in ResultSet (#753 and #538 thx @hrbonz && @psy0rz) +- Add support for empty string fields (#766 thx @gregschrock) ### Changed - Clean up stale CI config (#755) From 6f73ea12d7d9ca0184ba3c487264e58146ce2bac Mon Sep 17 00:00:00 2001 From: Keunhyun Oh Date: Sat, 11 Apr 2020 02:33:14 +0900 Subject: [PATCH 502/536] fix: Calling commit and _json_body make raising an exception when any datapoints are not added. (#772) * If any SerialHelper is not generated, calling the commit function makes raising an exception because _datapoints is not allocated. It is hard to find the reason of this error. it is because reviewing influxdb-python's code is needed. I think that it is important that is producing predictable results. Results when calling first time is needed to equal to calling resetting datapoints in json_body. So, I've fixed that if not initialized when calling _json_body() function, _datapoints is reset to avoid raising error. In Unittest, the setup function is added. When calling setup function firstly, __initialized__ is False and _datapoints is not assigned. But, because of this commit, it is OK. Contacts: Keunhyun Oh * fix build fail Contacts: Keunhyun Oh * fix build fail Contacts: Keunhyun Oh * fix build fail Contacts: Keunhyun Oh * Update helper_test.py * Update helper_test.py * Update helper_test.py --- influxdb/helper.py | 2 ++ influxdb/influxdb08/helper.py | 2 ++ influxdb/tests/helper_test.py | 8 ++++++++ 3 files changed, 12 insertions(+) diff --git a/influxdb/helper.py b/influxdb/helper.py index f49f40ad..74209354 100644 --- a/influxdb/helper.py +++ b/influxdb/helper.py @@ -174,6 +174,8 @@ def _json_body_(cls): :return: JSON body of these datapoints. 
""" json = [] + if not cls.__initialized__: + cls._reset_() for series_name, data in six.iteritems(cls._datapoints): for point in data: json_point = { diff --git a/influxdb/influxdb08/helper.py b/influxdb/influxdb08/helper.py index f3dec33c..5f2d4614 100644 --- a/influxdb/influxdb08/helper.py +++ b/influxdb/influxdb08/helper.py @@ -139,6 +139,8 @@ def _json_body_(cls): :return: JSON body of the datapoints. """ json = [] + if not cls.__initialized__: + cls._reset_() for series_name, data in six.iteritems(cls._datapoints): json.append({'name': series_name, 'columns': cls._fields, diff --git a/influxdb/tests/helper_test.py b/influxdb/tests/helper_test.py index 16924936..6737f921 100644 --- a/influxdb/tests/helper_test.py +++ b/influxdb/tests/helper_test.py @@ -47,6 +47,14 @@ class Meta: TestSeriesHelper.MySeriesHelper = MySeriesHelper + def setUp(self): + """Check that MySeriesHelper has empty datapoints.""" + super(TestSeriesHelper, self).setUp() + self.assertEqual( + TestSeriesHelper.MySeriesHelper._json_body_(), + [], + 'Resetting helper in teardown did not empty datapoints.') + def tearDown(self): """Deconstruct the TestSeriesHelper object.""" super(TestSeriesHelper, self).tearDown() From 351b98a880d7a62188e00a240df4edaa2d75f499 Mon Sep 17 00:00:00 2001 From: Sebastian Borza Date: Fri, 10 Apr 2020 12:34:05 -0500 Subject: [PATCH 503/536] chore(CHANGELOG): add PR#772 --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4ee39303..322c952b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -36,6 +36,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. 
- Update DataFrameClient to test for pd.DataTimeIndex before blind conversion (#623 thx @testforvin) - Update client to type-set UDP port to int (#651 thx @yifeikong) - Update batched writing support for all iterables (#746 thx @JayH5) +- Update SeriesHelper to enable class instantiation when not initialized (#772 thx @ocworld) ### Removed From f7f30b5ed3d7bc9ded164288af8b1a7ca1a3a6b0 Mon Sep 17 00:00:00 2001 From: Shan Desai Date: Fri, 10 Apr 2020 19:59:04 +0200 Subject: [PATCH 504/536] Refactor `tutorial_udp` example for timestamps (#808) - `time` key should be within each datapoint - This PR addresses #788 regarding the structure of the data when inserting via UDP. - The original documentation contributed by me took the structure of the `tutorial.py` as base. However, upon testing, the timestamp in the example are not written (2020 is written as opposed to 2009). - Tested for `influxdb-python` v5.2.3 and InfluxDB v1.6.1 --- examples/tutorial_udp.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/examples/tutorial_udp.py b/examples/tutorial_udp.py index 517ae858..93b923d7 100644 --- a/examples/tutorial_udp.py +++ b/examples/tutorial_udp.py @@ -29,18 +29,19 @@ def main(uport): "host": "server01", "region": "us-west" }, - "time": "2009-11-10T23:00:00Z", "points": [{ "measurement": "cpu_load_short", "fields": { "value": 0.64 - } + }, + "time": "2009-11-10T23:00:00Z", }, - { - "measurement": "cpu_load_short", - "fields": { - "value": 0.67 - } + { + "measurement": "cpu_load_short", + "fields": { + "value": 0.67 + }, + "time": "2009-11-10T23:05:00Z" }] } From f8705f9f474e260b0455fc44d1b1f74c64d22162 Mon Sep 17 00:00:00 2001 From: Sebastian Borza Date: Fri, 10 Apr 2020 14:27:14 -0500 Subject: [PATCH 505/536] feat(client): add support for context managers (#816) * feat(client): add support for context managers * chore(CHANGELOG): rebase against master --- CHANGELOG.md | 2 ++ influxdb/client.py | 12 ++++++++++++ 2 files changed, 14 
insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 322c952b..411b46a4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -23,6 +23,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. - Add gzip compression for post and response data (#732 thx @KEClaytor) - Add support for chunked responses in ResultSet (#753 and #538 thx @hrbonz && @psy0rz) - Add support for empty string fields (#766 thx @gregschrock) +- Add support for context managers to InfluxDBClient (#721 thx @JustusAdam) ### Changed - Clean up stale CI config (#755) @@ -37,6 +38,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. - Update client to type-set UDP port to int (#651 thx @yifeikong) - Update batched writing support for all iterables (#746 thx @JayH5) - Update SeriesHelper to enable class instantiation when not initialized (#772 thx @ocworld) +- Update UDP test case to add proper timestamp to datapoints (#808 thx @shantanoo-desai) ### Removed diff --git a/influxdb/client.py b/influxdb/client.py index b28ed1b5..a0f571f5 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -35,6 +35,9 @@ class InfluxDBClient(object): connect to InfluxDB. Requests can be made to InfluxDB directly through the client. + The client supports the use as a `context manager + `_. 
+ :param host: hostname to connect to InfluxDB, defaults to 'localhost' :type host: str :param port: port to connect to InfluxDB, defaults to 8086 @@ -78,6 +81,7 @@ class InfluxDBClient(object): requests Session, defaults to None :type session: requests.Session :raises ValueError: if cert is provided but ssl is disabled (set to False) + """ def __init__(self, @@ -165,6 +169,14 @@ def __init__(self, self._gzip = gzip + def __enter__(self): + """Enter function as used by context manager.""" + pass + + def __exit__(self, _exc_type, _exc_value, _traceback): + """Exit function as used by context manager.""" + self.close() + @property def _baseurl(self): return self.__baseurl From cf83d1d429ecebb093f6eaab773bd41c251530ca Mon Sep 17 00:00:00 2001 From: Sebastian Borza Date: Fri, 10 Apr 2020 14:38:30 -0500 Subject: [PATCH 506/536] chore(CHANGELOG): tagging release to 5.3.0 --- LICENSE | 2 +- influxdb/__init__.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/LICENSE b/LICENSE index 38ee2491..a49a5410 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ The MIT License (MIT) -Copyright (c) 2013 InfluxDB +Copyright (c) 2020 InfluxDB Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in diff --git a/influxdb/__init__.py b/influxdb/__init__.py index b31170bb..56f2f619 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -18,4 +18,4 @@ ] -__version__ = '5.2.3' +__version__ = '5.3.0' From e7ef0454524bf0617097a2f303d784242e5ddfe8 Mon Sep 17 00:00:00 2001 From: Krzysztof Baranski Date: Sun, 12 Apr 2020 01:45:32 +0200 Subject: [PATCH 507/536] do not sleep after last retry before raising exception (#790) * do not sleep after last retry before raising exception * documentation: clarification of retry parameter retry=0 - retry forever retry=1 - try once, on error don't do any retry retry=2 - 2 tries, one original and one retry on error retry=3 - 3 
tries, one original and maximum two retries on errors * retries - move raise before sleep * retries - documentation * fix line length --- influxdb/client.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index a0f571f5..80994190 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -59,8 +59,12 @@ class InfluxDBClient(object): :param timeout: number of seconds Requests will wait for your client to establish a connection, defaults to None :type timeout: int - :param retries: number of retries your client will try before aborting, - defaults to 3. 0 indicates try until success + :param retries: number of attempts your client will make before aborting, + defaults to 3 + 0 - try until success + 1 - attempt only once (without retry) + 2 - maximum two attempts (including one retry) + 3 - maximum three attempts (default option) :type retries: int :param use_udp: use UDP to connect to InfluxDB, defaults to False :type use_udp: bool @@ -339,10 +343,10 @@ def request(self, url, method='GET', params=None, data=None, stream=False, _try += 1 if self._retries != 0: retry = _try < self._retries - if method == "POST": - time.sleep((2 ** _try) * random.random() / 100.0) if not retry: raise + if method == "POST": + time.sleep((2 ** _try) * random.random() / 100.0) type_header = response.headers and response.headers.get("Content-Type") if type_header == "application/x-msgpack" and response.content: From b4390cc8680abd0931b14b8f2ee1e83998a53eb2 Mon Sep 17 00:00:00 2001 From: Sebastian Borza Date: Sat, 11 Apr 2020 18:48:26 -0500 Subject: [PATCH 508/536] chore(CHANGELOG): add PR#790 --- CHANGELOG.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 411b46a4..c2f3edcc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,11 +4,14 @@ All notable changes to this project will be documented in this file. 
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). -## [Unreleased] +## [v5.3.1] - Unreleased ### Added ### Changed +- Amend retry to avoid sleep after last retry before raising exception (#790 thx @krzysbaranski) + +### Removed ## [v5.3.0] - 2020-04-10 From cc41e290f690c4eb67f75c98fa9f027bdb6eb16b Mon Sep 17 00:00:00 2001 From: Matthew Thode Date: Sat, 11 Apr 2020 18:49:33 -0500 Subject: [PATCH 509/536] remove msgpack pin (#818) The hard lock prevents this from being co-installed with many other packages. For instance, it's preventing it from being included in openstack (which is on 0.6.2 and working on 1.0.0 now). Signed-off-by: Matthew Thode --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 77d7306f..548b17c8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,4 +2,4 @@ python-dateutil>=2.6.0 pytz requests>=2.17.0 six>=1.10.0 -msgpack==0.6.1 +msgpack From cdc2c665808f2cf572973029f81b9a0175b43277 Mon Sep 17 00:00:00 2001 From: Sebastian Borza Date: Sat, 11 Apr 2020 18:53:49 -0500 Subject: [PATCH 510/536] chore(CHANGELOG): add PR#818 --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index c2f3edcc..d380b73e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ### Changed - Amend retry to avoid sleep after last retry before raising exception (#790 thx @krzysbaranski) +- Remove msgpack pinning for requirements (#818 thx @prometheanfire) ### Removed From 5bcfadf7a525232078b875b2fb94d831287d6c8e Mon Sep 17 00:00:00 2001 From: Sebastian Borza Date: Sun, 12 Apr 2020 22:14:38 -0500 Subject: [PATCH 511/536] chore(dataframe_client): update param definition. Closes #525. 
(#819) --- influxdb/_dataframe_client.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index 600bc1ec..ec58cebb 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -59,6 +59,8 @@ def write_points(self, :param dataframe: data points in a DataFrame :param measurement: name of measurement :param tags: dictionary of tags, with string key-values + :param tag_columns: [Optional, default None] List of data tag names + :param field_columns: [Options, default None] List of data field names :param time_precision: [Optional, default None] Either 's', 'ms', 'u' or 'n'. :param batch_size: [Optional] Value to write the points in batches From 7fb5e946062dd36a84801e4a03012a3c032a70db Mon Sep 17 00:00:00 2001 From: Adam Suban-Loewen Date: Mon, 13 Apr 2020 22:05:49 -0400 Subject: [PATCH 512/536] Added headers parameter to InfluxDBClient (#710) --- influxdb/client.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 80994190..404e14be 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -84,8 +84,10 @@ class InfluxDBClient(object): :param session: allow for the new client request to use an existing requests Session, defaults to None :type session: requests.Session + :param headers: headers to add to Requests, will add 'Content-Type' + and 'Accept' unless these are already present, defaults to {} + :type headers: dict :raises ValueError: if cert is provided but ssl is disabled (set to False) - """ def __init__(self, @@ -106,6 +108,7 @@ def __init__(self, cert=None, gzip=False, session=None, + headers=None, ): """Construct a new InfluxDBClient object.""" self.__host = host @@ -166,10 +169,11 @@ def __init__(self, self._port, self._path) - self._headers = { - 'Content-Type': 'application/json', - 'Accept': 'application/x-msgpack' - } + if headers is None: + headers = {} + 
headers.setdefault('Content-Type', 'application/json') + headers.setdefault('Accept', 'application/x-msgpack') + self._headers = headers self._gzip = gzip @@ -390,7 +394,7 @@ def write(self, data, params=None, expected_response_code=204, :returns: True, if the write operation is successful :rtype: bool """ - headers = self._headers + headers = self._headers.copy() headers['Content-Type'] = 'application/octet-stream' if params: From 95e0efb0821d44bf06aebe0b2c4700e4d3b084c8 Mon Sep 17 00:00:00 2001 From: Sebastian Borza Date: Mon, 13 Apr 2020 21:09:22 -0500 Subject: [PATCH 513/536] chore(CHANGELOG): add PR #710 --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index d380b73e..92bbe42e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ## [v5.3.1] - Unreleased ### Added +- Add support for custom headers in the InfluxDBClient (#710 thx @nathanielatom) ### Changed - Amend retry to avoid sleep after last retry before raising exception (#790 thx @krzysbaranski) From de75e7351ce3fc084c145d16df38f96d18603bf5 Mon Sep 17 00:00:00 2001 From: Pavlina Rolincova Date: Thu, 28 May 2020 10:41:50 +0200 Subject: [PATCH 514/536] Add support for custom indexes for query in the DataFrameClient (#785) --- CHANGELOG.md | 1 + influxdb/_dataframe_client.py | 22 +++++++++++------ influxdb/tests/dataframe_client_test.py | 33 +++++++++++++++++++++++++ 3 files changed, 49 insertions(+), 7 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 92bbe42e..2b374faa 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,6 +8,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. 
### Added - Add support for custom headers in the InfluxDBClient (#710 thx @nathanielatom) +- Add support for custom indexes for query in the DataFrameClient (#785) ### Changed - Amend retry to avoid sleep after last retry before raising exception (#790 thx @krzysbaranski) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index ec58cebb..58063500 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -8,6 +8,7 @@ import math from collections import defaultdict +from typing import List import pandas as pd import numpy as np @@ -152,7 +153,8 @@ def query(self, chunked=False, chunk_size=0, method="GET", - dropna=True): + dropna=True, + data_frame_index: List[str] = None): """ Query data into a DataFrame. @@ -181,6 +183,7 @@ def query(self, containing all results within that chunk :param chunk_size: Size of each chunk to tell InfluxDB to use. :param dropna: drop columns where all values are missing + :param data_frame_index: the list of columns that are used as DataFrame index :returns: the queried data :rtype: :class:`~.ResultSet` """ @@ -196,13 +199,13 @@ def query(self, results = super(DataFrameClient, self).query(query, **query_args) if query.strip().upper().startswith("SELECT"): if len(results) > 0: - return self._to_dataframe(results, dropna) + return self._to_dataframe(results, dropna, data_frame_index=data_frame_index) else: return {} else: return results - def _to_dataframe(self, rs, dropna=True): + def _to_dataframe(self, rs, dropna=True, data_frame_index: List[str] = None): result = defaultdict(list) if isinstance(rs, list): return map(self._to_dataframe, rs, @@ -216,10 +219,15 @@ def _to_dataframe(self, rs, dropna=True): key = (name, tuple(sorted(tags.items()))) df = pd.DataFrame(data) df.time = pd.to_datetime(df.time) - df.set_index('time', inplace=True) - if df.index.tzinfo is None: - df.index = df.index.tz_localize('UTC') - df.index.name = None + + if data_frame_index: + df.set_index(data_frame_index, 
inplace=True) + else: + df.set_index('time', inplace=True) + if df.index.tzinfo is None: + df.index = df.index.tz_localize('UTC') + df.index.name = 'time' + result[key].append(df) for key, data in result.items(): df = pd.concat(data).sort_index() diff --git a/influxdb/tests/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py index a80498f3..cf82b49c 100644 --- a/influxdb/tests/dataframe_client_test.py +++ b/influxdb/tests/dataframe_client_test.py @@ -1240,3 +1240,36 @@ def test_write_points_from_dataframe_with_tags_and_nan_json(self): cli.write_points(dataframe, 'foo', tags=None, protocol='json', tag_columns=['tag_one', 'tag_two']) self.assertEqual(m.last_request.body, expected) + + def test_query_custom_index(self): + data = { + "results": [ + { + "series": [ + { + "name": "cpu_load_short", + "columns": ["time", "value", "host"], + "values": [ + [1, 0.55, "local"], + [2, 23422, "local"], + [3, 0.64, "local"] + ] + } + ] + } + ] + } + + cli = DataFrameClient('host', 8086, 'username', 'password', 'db') + iql = "SELECT value FROM cpu_load_short WHERE region=$region;" \ + "SELECT count(value) FROM cpu_load_short WHERE region=$region" + bind_params = {'region': 'us-west'} + with _mocked_session(cli, 'GET', 200, data): + result = cli.query(iql, bind_params=bind_params, data_frame_index=["time", "host"]) + + _data_frame = result['cpu_load_short'] + print(_data_frame) + + self.assertListEqual(["time", "host"], list(_data_frame.index.names)) + + From d3fd851c8e99350524fad710c753a0d978a5f978 Mon Sep 17 00:00:00 2001 From: Pavlina Rolincova Date: Mon, 1 Jun 2020 08:26:39 +0200 Subject: [PATCH 515/536] Add support for custom indexes for query in the DataFrameClient (#785) --- influxdb/_dataframe_client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index 58063500..0b9f282d 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -226,7 +226,7 @@ def 
_to_dataframe(self, rs, dropna=True, data_frame_index: List[str] = None): df.set_index('time', inplace=True) if df.index.tzinfo is None: df.index = df.index.tz_localize('UTC') - df.index.name = 'time' + df.index.name = None result[key].append(df) for key, data in result.items(): From 9a110a1abda2677340bc7fbd00890d27b8d3d5ab Mon Sep 17 00:00:00 2001 From: Pavlina Rolincova Date: Mon, 1 Jun 2020 09:57:12 +0200 Subject: [PATCH 516/536] Add support for custom indexes for query in the DataFrameClient (#785) --- influxdb/_dataframe_client.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index 0b9f282d..afd75b39 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -8,7 +8,6 @@ import math from collections import defaultdict -from typing import List import pandas as pd import numpy as np @@ -154,7 +153,7 @@ def query(self, chunk_size=0, method="GET", dropna=True, - data_frame_index: List[str] = None): + data_frame_index=None): """ Query data into a DataFrame. 
@@ -205,7 +204,7 @@ def query(self, else: return results - def _to_dataframe(self, rs, dropna=True, data_frame_index: List[str] = None): + def _to_dataframe(self, rs, dropna=True, data_frame_index=None): result = defaultdict(list) if isinstance(rs, list): return map(self._to_dataframe, rs, From 055d71fb83aeb83603bd9b5423d6b05d1f01c59e Mon Sep 17 00:00:00 2001 From: Pavlina Rolincova Date: Mon, 1 Jun 2020 10:37:56 +0200 Subject: [PATCH 517/536] Add support for custom indexes for query in the DataFrameClient (#785) --- influxdb/tests/dataframe_client_test.py | 1 + 1 file changed, 1 insertion(+) diff --git a/influxdb/tests/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py index cf82b49c..2dd98398 100644 --- a/influxdb/tests/dataframe_client_test.py +++ b/influxdb/tests/dataframe_client_test.py @@ -1242,6 +1242,7 @@ def test_write_points_from_dataframe_with_tags_and_nan_json(self): self.assertEqual(m.last_request.body, expected) def test_query_custom_index(self): + """Test query with custom indexes.""" data = { "results": [ { From ddd82612f57da90476cd0d10539d7edf1ca25d49 Mon Sep 17 00:00:00 2001 From: Pavlina Rolincova Date: Mon, 1 Jun 2020 10:40:39 +0200 Subject: [PATCH 518/536] Add support for custom indexes for query in the DataFrameClient (#785) --- influxdb/tests/dataframe_client_test.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/influxdb/tests/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py index 2dd98398..f6db3c22 100644 --- a/influxdb/tests/dataframe_client_test.py +++ b/influxdb/tests/dataframe_client_test.py @@ -1266,11 +1266,11 @@ def test_query_custom_index(self): "SELECT count(value) FROM cpu_load_short WHERE region=$region" bind_params = {'region': 'us-west'} with _mocked_session(cli, 'GET', 200, data): - result = cli.query(iql, bind_params=bind_params, data_frame_index=["time", "host"]) + result = cli.query(iql, bind_params=bind_params, + data_frame_index=["time", "host"]) _data_frame = 
result['cpu_load_short'] print(_data_frame) - self.assertListEqual(["time", "host"], list(_data_frame.index.names)) - - + self.assertListEqual(["time", "host"], + list(_data_frame.index.names)) From 64aeddd82358f68b1e5b662d80bfaee6485d0de7 Mon Sep 17 00:00:00 2001 From: Pavlina Rolincova Date: Mon, 1 Jun 2020 11:53:15 +0200 Subject: [PATCH 519/536] Add support for custom indexes for query in the DataFrameClient (#785) --- influxdb/_dataframe_client.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index afd75b39..e7ae9c17 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -182,7 +182,8 @@ def query(self, containing all results within that chunk :param chunk_size: Size of each chunk to tell InfluxDB to use. :param dropna: drop columns where all values are missing - :param data_frame_index: the list of columns that are used as DataFrame index + :param data_frame_index: the list of columns that + are used as DataFrame index :returns: the queried data :rtype: :class:`~.ResultSet` """ @@ -198,7 +199,8 @@ def query(self, results = super(DataFrameClient, self).query(query, **query_args) if query.strip().upper().startswith("SELECT"): if len(results) > 0: - return self._to_dataframe(results, dropna, data_frame_index=data_frame_index) + return self._to_dataframe(results, dropna, + data_frame_index=data_frame_index) else: return {} else: From 6c45f30d6a7142c17cca0ca7736eb1b693f212e8 Mon Sep 17 00:00:00 2001 From: Pavlina Rolincova Date: Mon, 1 Jun 2020 13:53:30 +0200 Subject: [PATCH 520/536] Add support for custom indexes for query in the DataFrameClient (#785) --- influxdb/influxdb08/client.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/influxdb/influxdb08/client.py b/influxdb/influxdb08/client.py index 965a91db..40c58145 100644 --- a/influxdb/influxdb08/client.py +++ b/influxdb/influxdb08/client.py @@ -292,10 +292,10 @@ def 
write_points(self, data, time_precision='s', *args, **kwargs): :type batch_size: int """ - def list_chunks(l, n): + def list_chunks(data_list, n): """Yield successive n-sized chunks from l.""" - for i in xrange(0, len(l), n): - yield l[i:i + n] + for i in xrange(0, len(data_list), n): + yield data_list[i:i + n] batch_size = kwargs.get('batch_size') if batch_size and batch_size > 0: From 49165bedaab5fad158f742fdf16020b44c92decf Mon Sep 17 00:00:00 2001 From: Pavlina Rolincova Date: Tue, 2 Jun 2020 14:42:29 +0200 Subject: [PATCH 521/536] Add support for custom indexes for query in the DataFrameClient (#785) --- influxdb/client.py | 4 ++-- influxdb/helper.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 404e14be..df9ef966 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -860,7 +860,7 @@ def alter_retention_policy(self, name, database=None, query_string = ( "ALTER RETENTION POLICY {0} ON {1}" ).format(quote_ident(name), - quote_ident(database or self._database), shard_duration) + quote_ident(database or self._database)) if duration: query_string += " DURATION {0}".format(duration) if shard_duration: @@ -958,7 +958,7 @@ def drop_user(self, username): :param username: the username to drop :type username: str """ - text = "DROP USER {0}".format(quote_ident(username), method="POST") + text = "DROP USER {0}".format(quote_ident(username)) self.query(text, method="POST") def set_user_password(self, username, password): diff --git a/influxdb/helper.py b/influxdb/helper.py index 74209354..fbf6b65d 100644 --- a/influxdb/helper.py +++ b/influxdb/helper.py @@ -82,7 +82,7 @@ def __new__(cls, *args, **kwargs): allowed_time_precisions = ['h', 'm', 's', 'ms', 'u', 'ns', None] if cls._time_precision not in allowed_time_precisions: raise AttributeError( - 'In {0}, time_precision is set, but invalid use any of {}.' + 'In {0}, time_precision is set, but invalid use any of {1}.' 
.format(cls.__name__, ','.join(allowed_time_precisions))) cls._retention_policy = getattr(_meta, 'retention_policy', None) From cb3156c1fa9181f142f09e93afccf10355b4e5a1 Mon Sep 17 00:00:00 2001 From: Pavlina Rolincova Date: Tue, 2 Jun 2020 15:11:33 +0200 Subject: [PATCH 522/536] Add support for custom indexes for query in the DataFrameClient (#785) --- influxdb/helper.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/helper.py b/influxdb/helper.py index fbf6b65d..138cf6e8 100644 --- a/influxdb/helper.py +++ b/influxdb/helper.py @@ -82,7 +82,7 @@ def __new__(cls, *args, **kwargs): allowed_time_precisions = ['h', 'm', 's', 'ms', 'u', 'ns', None] if cls._time_precision not in allowed_time_precisions: raise AttributeError( - 'In {0}, time_precision is set, but invalid use any of {1}.' + 'In {}, time_precision is set, but invalid use any of {}.' .format(cls.__name__, ','.join(allowed_time_precisions))) cls._retention_policy = getattr(_meta, 'retention_policy', None) From de5878ae83cbae20ee0a3ec3eba4d0403868c939 Mon Sep 17 00:00:00 2001 From: Yevgen Antymyrov Date: Mon, 15 Jun 2020 16:40:44 +0200 Subject: [PATCH 523/536] Fix #828 "Context manager for InfluxDBClient not working correctly?" 
by returning a client instance from __enter__ --- influxdb/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/client.py b/influxdb/client.py index df9ef966..5eea9d1f 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -179,7 +179,7 @@ def __init__(self, def __enter__(self): """Enter function as used by context manager.""" - pass + return self def __exit__(self, _exc_type, _exc_value, _traceback): """Exit function as used by context manager.""" From c49c79adfd8700982d48c01c81e6e2dea539825d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Bedn=C3=A1=C5=99?= Date: Thu, 8 Oct 2020 14:07:58 +0200 Subject: [PATCH 524/536] feat: add support for custom authorization token --- docs/source/examples.rst | 6 +++ examples/tutorial_authorization.py | 32 ++++++++++++++ influxdb/client.py | 3 +- influxdb/tests/client_test.py | 71 ++++++++++++++++++++++++++++++ tox.ini | 2 +- 5 files changed, 112 insertions(+), 2 deletions(-) create mode 100644 examples/tutorial_authorization.py diff --git a/docs/source/examples.rst b/docs/source/examples.rst index fdda62a9..b4ada447 100644 --- a/docs/source/examples.rst +++ b/docs/source/examples.rst @@ -31,3 +31,9 @@ Tutorials - UDP .. literalinclude:: ../../examples/tutorial_udp.py :language: python + +Tutorials - Authorization by Token +=============== + +..
literalinclude:: ../../examples/tutorial_authorization.py + :language: python diff --git a/examples/tutorial_authorization.py b/examples/tutorial_authorization.py new file mode 100644 index 00000000..9d9a800f --- /dev/null +++ b/examples/tutorial_authorization.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +"""Tutorial how to authorize InfluxDB client by custom Authorization token.""" + +import argparse +from influxdb import InfluxDBClient + + +def main(token='my-token'): + """Instantiate a connection to the InfluxDB.""" + client = InfluxDBClient(username=None, password=None, + headers={"Authorization": token}) + + print("Use authorization token: " + token) + + version = client.ping() + print("Successfully connected to InfluxDB: " + version) + pass + + +def parse_args(): + """Parse the args from main.""" + parser = argparse.ArgumentParser( + description='example code to play with InfluxDB') + parser.add_argument('--token', type=str, required=False, + default='my-token', + help='Authorization token for the proxy that is ahead the InfluxDB.') + return parser.parse_args() + + +if __name__ == '__main__': + args = parse_args() + main(token=args.token) diff --git a/influxdb/client.py b/influxdb/client.py index df9ef966..51a64ac3 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -328,10 +328,11 @@ def request(self, url, method='GET', params=None, data=None, stream=False, _try = 0 while retry: try: + auth = (self._username, self._password) response = self._session.request( method=method, url=url, - auth=(self._username, self._password), + auth=auth if None not in auth else None, params=params, data=data, stream=stream, diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index fd3c06bb..e511ca9b 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -1427,6 +1427,77 @@ def test_chunked_response(self): 'values': [['qps'], ['uptime'], ['df'], ['mount']] }]}).__repr__()) + def test_auth_default(self): + """Test auth 
with default settings.""" + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.GET, + "http://localhost:8086/ping", + status_code=204, + headers={'X-Influxdb-Version': '1.2.3'} + ) + + cli = InfluxDBClient() + cli.ping() + + self.assertEqual(m.last_request.headers["Authorization"], + "Basic cm9vdDpyb290") + + def test_auth_username_password(self): + """Test auth with custom username and password.""" + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.GET, + "http://localhost:8086/ping", + status_code=204, + headers={'X-Influxdb-Version': '1.2.3'} + ) + + cli = InfluxDBClient(username='my-username', + password='my-password') + cli.ping() + + self.assertEqual(m.last_request.headers["Authorization"], + "Basic bXktdXNlcm5hbWU6bXktcGFzc3dvcmQ=") + + def test_auth_username_password_none(self): + """Test auth with not defined username or password.""" + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.GET, + "http://localhost:8086/ping", + status_code=204, + headers={'X-Influxdb-Version': '1.2.3'} + ) + + cli = InfluxDBClient(username=None, password=None) + cli.ping() + self.assertFalse('Authorization' in m.last_request.headers) + + cli = InfluxDBClient(username=None) + cli.ping() + self.assertFalse('Authorization' in m.last_request.headers) + + cli = InfluxDBClient(password=None) + cli.ping() + self.assertFalse('Authorization' in m.last_request.headers) + + def test_auth_token(self): + """Test auth with custom authorization header.""" + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.GET, + "http://localhost:8086/ping", + status_code=204, + headers={'X-Influxdb-Version': '1.2.3'} + ) + + cli = InfluxDBClient(username=None, password=None, + headers={"Authorization": "my-token"}) + cli.ping() + self.assertEqual(m.last_request.headers["Authorization"], + "my-token") + class FakeClient(InfluxDBClient): """Set up a fake client instance of InfluxDBClient.""" diff --git a/tox.ini b/tox.ini index 
ff30ebac..1e59b415 100644 --- a/tox.ini +++ b/tox.ini @@ -31,7 +31,7 @@ commands = pydocstyle --count -ve examples influxdb [testenv:coverage] deps = -r{toxinidir}/requirements.txt -r{toxinidir}/test-requirements.txt - pandas + pandas==0.24.2 coverage numpy commands = nosetests -v --with-coverage --cover-html --cover-package=influxdb From 7b0367309b118e8be97bd85af4433b25129a4618 Mon Sep 17 00:00:00 2001 From: Sebastian Borza Date: Wed, 11 Nov 2020 15:03:24 -0600 Subject: [PATCH 525/536] chore(CHANGELOG): update to include v5.3.1 updates --- CHANGELOG.md | 1 + influxdb/__init__.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2b374faa..f3e86086 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ### Changed - Amend retry to avoid sleep after last retry before raising exception (#790 thx @krzysbaranski) - Remove msgpack pinning for requirements (#818 thx @prometheanfire) +- Update support for HTTP headers in the InfluxDBClient (#851 thx @bednar) ### Removed diff --git a/influxdb/__init__.py b/influxdb/__init__.py index 56f2f619..59916c26 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -18,4 +18,4 @@ ] -__version__ = '5.3.0' +__version__ = '5.3.1' From c3903dda515d4f7efcb8c55250fd8b75c8446034 Mon Sep 17 00:00:00 2001 From: Tim Gates Date: Fri, 1 Jan 2021 06:10:01 +1100 Subject: [PATCH 526/536] docs: fix simple typo, reponse -> response (#873) There is a small typo in influxdb/tests/client_test.py. Should read `response` rather than `reponse`. 
--- influxdb/tests/client_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index e511ca9b..1f9d704a 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -1398,7 +1398,7 @@ def test_invalid_port_fails(self): InfluxDBClient('host', '80/redir', 'username', 'password') def test_chunked_response(self): - """Test chunked reponse for TestInfluxDBClient object.""" + """Test chunked response for TestInfluxDBClient object.""" example_response = \ u'{"results":[{"statement_id":0,"series":[{"columns":["key"],' \ '"values":[["cpu"],["memory"],["iops"],["network"]],"partial":' \ From cf51d026469252d06d57f65489561fc5b1e337c1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Bedn=C3=A1=C5=99?= Date: Fri, 19 Mar 2021 16:04:03 +0100 Subject: [PATCH 527/536] docs: add link to v2 client (#881) --- README.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.rst b/README.rst index a40ed148..01e054e8 100644 --- a/README.rst +++ b/README.rst @@ -17,6 +17,8 @@ InfluxDB-Python InfluxDB-Python is a client for interacting with InfluxDB_. +**Note: This library is for use with InfluxDB 1.x. 
For connecting to InfluxDB 2.x instances, please use the the** `influxdb-client-python `_ **client.** + Development of this library is maintained by: +-----------+-------------------------------+ From dec2f0587ca0592cb94072d58b5f545ebfe68ba2 Mon Sep 17 00:00:00 2001 From: Robert Hajek Date: Wed, 28 Apr 2021 15:55:50 +0200 Subject: [PATCH 528/536] feat: Add custom socket_options --- influxdb/client.py | 24 +++++++++++++++++++++--- influxdb/tests/client_test.py | 25 +++++++++++++++++++++++++ 2 files changed, 46 insertions(+), 3 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index c413181b..2d0b0ddb 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -20,6 +20,7 @@ import msgpack import requests import requests.exceptions +from requests.adapters import HTTPAdapter from six.moves.urllib.parse import urlparse from influxdb.line_protocol import make_lines, quote_ident, quote_literal @@ -87,6 +88,10 @@ class InfluxDBClient(object): :param headers: headers to add to Requests, will add 'Content-Type' and 'Accept' unless these are already present, defaults to {} :type headers: dict + :param socket_options: use custom tcp socket options, If not specified, then defaults are loaded from + ``HTTPConnection.default_socket_options`` + :type socket_options: list + :raises ValueError: if cert is provided but ssl is disabled (set to False) """ @@ -109,6 +114,7 @@ def __init__(self, gzip=False, session=None, headers=None, + socket_options=None, ): """Construct a new InfluxDBClient object.""" self.__host = host @@ -128,9 +134,10 @@ def __init__(self, session = requests.Session() self._session = session - adapter = requests.adapters.HTTPAdapter( + adapter = SocketOptionsAdapter( pool_connections=int(pool_size), - pool_maxsize=int(pool_size) + pool_maxsize=int(pool_size), + socket_options=socket_options ) if use_udp: @@ -626,7 +633,7 @@ def _batches(iterable, size): # http://code.activestate.com/recipes/303279-getting-items-in-batches/ iterator = iter(iterable) 
while True: - try: # Try get the first element in the iterator... + try: # Try get the first element in the iterator... head = (next(iterator),) except StopIteration: return # ...so that we can stop if there isn't one @@ -1249,3 +1256,14 @@ def _msgpack_parse_hook(code, data): timestamp += datetime.timedelta(microseconds=(epoch_ns / 1000)) return timestamp.isoformat() + 'Z' return msgpack.ExtType(code, data) + + +class SocketOptionsAdapter(HTTPAdapter): + def __init__(self, *args, **kwargs): + self.socket_options = kwargs.pop("socket_options", None) + super(SocketOptionsAdapter, self).__init__(*args, **kwargs) + + def init_poolmanager(self, *args, **kwargs): + if self.socket_options is not None: + kwargs["socket_options"] = self.socket_options + super(SocketOptionsAdapter, self).init_poolmanager(*args, **kwargs) diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index 1f9d704a..6b65249c 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -33,6 +33,7 @@ import requests_mock from nose.tools import raises +from urllib3.connection import HTTPConnection from influxdb import InfluxDBClient from influxdb.resultset import ResultSet @@ -1498,6 +1499,30 @@ def test_auth_token(self): self.assertEqual(m.last_request.headers["Authorization"], "my-token") + def test_custom_socket_options(self): + test_socket_options = HTTPConnection.default_socket_options + [(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1), + (socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 60), + (socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 15)] + + cli = InfluxDBClient(username=None, password=None, socket_options=test_socket_options) + + self.assertEquals(cli._session.adapters.get("http://").socket_options, test_socket_options) + self.assertEquals(cli._session.adapters.get("http://").poolmanager.connection_pool_kw.get("socket_options"), + test_socket_options) + + connection_pool = cli._session.adapters.get("http://").poolmanager.connection_from_url( + 
url="http://localhost:8086") + new_connection = connection_pool._new_conn() + self.assertEquals(new_connection.socket_options, test_socket_options) + + def test_none_socket_options(self): + cli = InfluxDBClient(username=None, password=None) + self.assertEquals(cli._session.adapters.get("http://").socket_options, None) + connection_pool = cli._session.adapters.get("http://").poolmanager.connection_from_url( + url="http://localhost:8086") + new_connection = connection_pool._new_conn() + self.assertEquals(new_connection.socket_options, HTTPConnection.default_socket_options) + class FakeClient(InfluxDBClient): """Set up a fake client instance of InfluxDBClient.""" From 6ba88c064e7fd99df7251409c1b8aad2c88335e2 Mon Sep 17 00:00:00 2001 From: Robert Hajek Date: Thu, 29 Apr 2021 13:30:47 +0200 Subject: [PATCH 529/536] feat: Add custom socket_options --- influxdb/client.py | 13 ++++++++----- influxdb/tests/client_test.py | 30 ++++++++++++++++++++---------- 2 files changed, 28 insertions(+), 15 deletions(-) diff --git a/influxdb/client.py b/influxdb/client.py index 2d0b0ddb..548c5772 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -88,7 +88,8 @@ class InfluxDBClient(object): :param headers: headers to add to Requests, will add 'Content-Type' and 'Accept' unless these are already present, defaults to {} :type headers: dict - :param socket_options: use custom tcp socket options, If not specified, then defaults are loaded from + :param socket_options: use custom tcp socket options, + If not specified, then defaults are loaded from ``HTTPConnection.default_socket_options`` :type socket_options: list @@ -134,7 +135,7 @@ def __init__(self, session = requests.Session() self._session = session - adapter = SocketOptionsAdapter( + adapter = _SocketOptionsAdapter( pool_connections=int(pool_size), pool_maxsize=int(pool_size), socket_options=socket_options @@ -1258,12 +1259,14 @@ def _msgpack_parse_hook(code, data): return msgpack.ExtType(code, data) -class 
SocketOptionsAdapter(HTTPAdapter): +class _SocketOptionsAdapter(HTTPAdapter): + """_SocketOptionsAdapter injects socket_options into HTTP Adapter.""" + def __init__(self, *args, **kwargs): self.socket_options = kwargs.pop("socket_options", None) - super(SocketOptionsAdapter, self).__init__(*args, **kwargs) + super(_SocketOptionsAdapter, self).__init__(*args, **kwargs) def init_poolmanager(self, *args, **kwargs): if self.socket_options is not None: kwargs["socket_options"] = self.socket_options - super(SocketOptionsAdapter, self).init_poolmanager(*args, **kwargs) + super(_SocketOptionsAdapter, self).init_poolmanager(*args, **kwargs) diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py index 6b65249c..115fbc48 100644 --- a/influxdb/tests/client_test.py +++ b/influxdb/tests/client_test.py @@ -1500,28 +1500,38 @@ def test_auth_token(self): "my-token") def test_custom_socket_options(self): - test_socket_options = HTTPConnection.default_socket_options + [(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1), - (socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 60), - (socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 15)] + """Test custom socket options.""" + test_socket_options = HTTPConnection.default_socket_options + \ + [(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1), + (socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 60), + (socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 15)] - cli = InfluxDBClient(username=None, password=None, socket_options=test_socket_options) + cli = InfluxDBClient(username=None, password=None, + socket_options=test_socket_options) - self.assertEquals(cli._session.adapters.get("http://").socket_options, test_socket_options) - self.assertEquals(cli._session.adapters.get("http://").poolmanager.connection_pool_kw.get("socket_options"), + self.assertEquals(cli._session.adapters.get("http://").socket_options, + test_socket_options) + self.assertEquals(cli._session.adapters.get("http://").poolmanager. 
+ connection_pool_kw.get("socket_options"), test_socket_options) - connection_pool = cli._session.adapters.get("http://").poolmanager.connection_from_url( + connection_pool = cli._session.adapters.get("http://").poolmanager \ + .connection_from_url( url="http://localhost:8086") new_connection = connection_pool._new_conn() self.assertEquals(new_connection.socket_options, test_socket_options) def test_none_socket_options(self): + """Test default socket options.""" cli = InfluxDBClient(username=None, password=None) - self.assertEquals(cli._session.adapters.get("http://").socket_options, None) - connection_pool = cli._session.adapters.get("http://").poolmanager.connection_from_url( + self.assertEquals(cli._session.adapters.get("http://").socket_options, + None) + connection_pool = cli._session.adapters.get("http://").poolmanager \ + .connection_from_url( url="http://localhost:8086") new_connection = connection_pool._new_conn() - self.assertEquals(new_connection.socket_options, HTTPConnection.default_socket_options) + self.assertEquals(new_connection.socket_options, + HTTPConnection.default_socket_options) class FakeClient(InfluxDBClient): From 7cb565698c88bfbf9f4804650231bd28d09e2e6d Mon Sep 17 00:00:00 2001 From: Grant7z Date: Thu, 9 Sep 2021 08:50:11 +0800 Subject: [PATCH 530/536] Prevent overwriting Authorization with basic auth (#901) --- influxdb/client.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/influxdb/client.py b/influxdb/client.py index 548c5772..adab4edc 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -336,7 +336,10 @@ def request(self, url, method='GET', params=None, data=None, stream=False, _try = 0 while retry: try: - auth = (self._username, self._password) + if "Authorization" in headers: + auth = (None, None) + else: + auth = (self._username, self._password) response = self._session.request( method=method, url=url, From 3d1f1ce32524e40f4db33b9d8f11faf9b4925bf2 Mon Sep 17 00:00:00 2001 From: Jamie Strandboge Date: 
Tue, 15 Nov 2022 15:25:21 -0600 Subject: [PATCH 531/536] README.md: archive repo --- README.rst | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/README.rst b/README.rst index 01e054e8..58bfcd1a 100644 --- a/README.rst +++ b/README.rst @@ -1,3 +1,13 @@ +The v1 client libraries for InfluxDB were typically developed and maintained by +community members. They have all now been succeeded by v2 client libraries. +They are being archived in favor of the v2 client library. See +https://github.com/influxdata/influxdb-python/issues/918. + +If there are still users of this v1 client library, and they or somebody else +are willing to keep them updated with security fixes at a minimum please reach +out on the [Community Forums](https://community.influxdata.com/) or +[InfluxData Slack](https://influxdata.com/slack). + InfluxDB-Python =============== From 1df9a816a93c561779b54bb82472c83ce990282b Mon Sep 17 00:00:00 2001 From: Jamie Strandboge Date: Tue, 18 Jul 2023 16:29:05 -0500 Subject: [PATCH 532/536] chore: update README.rst based on feedback from Product --- README.rst | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/README.rst b/README.rst index 58bfcd1a..d214d41a 100644 --- a/README.rst +++ b/README.rst @@ -1,12 +1,11 @@ The v1 client libraries for InfluxDB were typically developed and maintained by -community members. They have all now been succeeded by v2 client libraries. -They are being archived in favor of the v2 client library. See -https://github.com/influxdata/influxdb-python/issues/918. +community members. For InfluxDB 3.0 users, this library is succeeded by the +lightweight `v3 client library `_. If there are still users of this v1 client library, and they or somebody else are willing to keep them updated with security fixes at a minimum please reach -out on the [Community Forums](https://community.influxdata.com/) or -[InfluxData Slack](https://influxdata.com/slack).
+out on the `Community Forums `_ or +`InfluxData Slack `_. InfluxDB-Python =============== From b614474e82c621dc77f52d36a9d54b8be2af45c6 Mon Sep 17 00:00:00 2001 From: Josh Powers Date: Tue, 16 Apr 2024 08:29:25 -0600 Subject: [PATCH 533/536] fix: Correctly serialize nanosecond dataframe timestamps Co-authored-by: @bednar --- influxdb/_dataframe_client.py | 4 +- influxdb/tests/dataframe_client_test.py | 74 ++++++++++++++++++++++++- 2 files changed, 75 insertions(+), 3 deletions(-) diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py index e7ae9c17..907db2cb 100644 --- a/influxdb/_dataframe_client.py +++ b/influxdb/_dataframe_client.py @@ -372,10 +372,10 @@ def _convert_dataframe_to_lines(self, # Make array of timestamp ints if isinstance(dataframe.index, pd.PeriodIndex): - time = ((dataframe.index.to_timestamp().values.astype(np.int64) / + time = ((dataframe.index.to_timestamp().values.astype(np.int64) // precision_factor).astype(np.int64).astype(str)) else: - time = ((pd.to_datetime(dataframe.index).values.astype(np.int64) / + time = ((pd.to_datetime(dataframe.index).values.astype(np.int64) // precision_factor).astype(np.int64).astype(str)) # If tag columns exist, make an array of formatted tag keys and values diff --git a/influxdb/tests/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py index f6db3c22..87b8e0d8 100644 --- a/influxdb/tests/dataframe_client_test.py +++ b/influxdb/tests/dataframe_client_test.py @@ -877,7 +877,7 @@ def test_query_into_dataframe(self): {"measurement": "network", "tags": {"direction": ""}, "columns": ["time", "value"], - "values":[["2009-11-10T23:00:00Z", 23422]] + "values": [["2009-11-10T23:00:00Z", 23422]] }, {"measurement": "network", "tags": {"direction": "in"}, @@ -1274,3 +1274,75 @@ def test_query_custom_index(self): self.assertListEqual(["time", "host"], list(_data_frame.index.names)) + + def test_dataframe_nanosecond_precision(self): + """Test nanosecond precision.""" + for_df_dict = { + 
"nanFloats": [1.1, float('nan'), 3.3, 4.4], + "onlyFloats": [1.1, 2.2, 3.3, 4.4], + "strings": ['one_one', 'two_two', 'three_three', 'four_four'] + } + df = pd.DataFrame.from_dict(for_df_dict) + df['time'] = ['2019-10-04 06:27:19.850557111+00:00', + '2019-10-04 06:27:19.850557184+00:00', + '2019-10-04 06:27:42.251396864+00:00', + '2019-10-04 06:27:42.251396974+00:00'] + df['time'] = pd.to_datetime(df['time'], unit='ns') + df = df.set_index('time') + + expected = ( + b'foo nanFloats=1.1,onlyFloats=1.1,strings="one_one" 1570170439850557111\n' # noqa E501 line too long + b'foo onlyFloats=2.2,strings="two_two" 1570170439850557184\n' # noqa E501 line too long + b'foo nanFloats=3.3,onlyFloats=3.3,strings="three_three" 1570170462251396864\n' # noqa E501 line too long + b'foo nanFloats=4.4,onlyFloats=4.4,strings="four_four" 1570170462251396974\n' # noqa E501 line too long + ) + + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.POST, + "http://localhost:8086/write", + status_code=204 + ) + + cli = DataFrameClient(database='db') + cli.write_points(df, 'foo', time_precision='n') + + self.assertEqual(m.last_request.body, expected) + + def test_dataframe_nanosecond_precision_one_microsecond(self): + """Test nanosecond precision within one microsecond.""" + # 1 microsecond = 1000 nanoseconds + start = np.datetime64('2019-10-04T06:27:19.850557000') + end = np.datetime64('2019-10-04T06:27:19.850558000') + + # generate timestamps with nanosecond precision + timestamps = np.arange( + start, + end + np.timedelta64(1, 'ns'), + np.timedelta64(1, 'ns') + ) + # generate values + values = np.arange(0.0, len(timestamps)) + + df = pd.DataFrame({'value': values}, index=timestamps) + with requests_mock.Mocker() as m: + m.register_uri( + requests_mock.POST, + "http://localhost:8086/write", + status_code=204 + ) + + cli = DataFrameClient(database='db') + cli.write_points(df, 'foo', time_precision='n') + + lines = m.last_request.body.decode('utf-8').split('\n') + 
self.assertEqual(len(lines), 1002) + + for index, line in enumerate(lines): + if index == 1001: + self.assertEqual(line, '') + continue + self.assertEqual( + line, + f"foo value={index}.0 157017043985055{7000 + index:04}" + ) From 5ad04f696c71514967c9d7419bff457dbfbe8400 Mon Sep 17 00:00:00 2001 From: Joshua Powers Date: Tue, 16 Apr 2024 08:35:24 -0600 Subject: [PATCH 534/536] Update README.rst Add link to v2 library --- README.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.rst b/README.rst index d214d41a..b78e626d 100644 --- a/README.rst +++ b/README.rst @@ -1,6 +1,8 @@ The v1 client libraries for InfluxDB were typically developed and maintained by community members. For InfluxDB 3.0 users, this library is succeeded by the lightweight `v3 client library `_. +For InfluxDB 2.0 users, look at the `v2 client library +`_. If there are still users of this v1 client library, and they or somebody else are willing to keep them updated with security fixes at a minimum please reach From 37ff905fbefe33bc321e619ea970d015ccd8b434 Mon Sep 17 00:00:00 2001 From: Josh Powers Date: Wed, 17 Apr 2024 07:14:43 -0600 Subject: [PATCH 535/536] chore(CHANGELOG): Release v5.3.2 --- CHANGELOG.md | 7 ++++++- influxdb/__init__.py | 2 +- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f3e86086..bfd27d38 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,7 +4,12 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). 
-## [v5.3.1] - Unreleased +## [v5.3.2] - 2024-04-17 + +### Changed +- Correctly serialize nanosecond dataframe timestamps (#926) + +## [v5.3.1] - 2022-11-14 ### Added - Add support for custom headers in the InfluxDBClient (#710 thx @nathanielatom) diff --git a/influxdb/__init__.py b/influxdb/__init__.py index 59916c26..e66f80ea 100644 --- a/influxdb/__init__.py +++ b/influxdb/__init__.py @@ -18,4 +18,4 @@ ] -__version__ = '5.3.1' +__version__ = '5.3.2' From bbe80ed32cf57b252be131d3edcda5ca610fc223 Mon Sep 17 00:00:00 2001 From: Jason Stirnaman Date: Tue, 29 Oct 2024 10:54:14 -0500 Subject: [PATCH 536/536] Update docs (#929) * chore(docs): Update version advice and tox build requirements: - Updates the version advice and removes the succession statement to be consistent with the OSS 1.11 release. - Adds explicit relations to v2 and v3. - Removes redundancies. - Fixes formatting errors reported by the linter. - Updates dependencies for docs build. * fix(docs): mention 1.x compat endpoints in 2.x --- README.rst | 78 ++++++++++++++++++++++------------------ docs/source/conf.py | 3 +- docs/source/examples.rst | 2 +- influxdb/client.py | 7 ++-- pyproject.toml | 3 ++ requirements.txt | 4 +-- setup.py | 5 +++ tox.ini | 10 +++--- 8 files changed, 65 insertions(+), 47 deletions(-) create mode 100644 pyproject.toml diff --git a/README.rst b/README.rst index b78e626d..048db045 100644 --- a/README.rst +++ b/README.rst @@ -1,14 +1,3 @@ -The v1 client libraries for InfluxDB were typically developed and maintained by -community members. For InfluxDB 3.0 users, this library is succeeded by the -lightweight `v3 client library `_. -For InfluxDB 2.0 users, look at the `v2 client library -`_. - -If there are still users of this v1 client library, and they or somebody else -are willing to keep them updated with security fixes at a minimum please reach -out on the `Community Forums `_ or -`InfluxData Slack `_. 
- InfluxDB-Python =============== @@ -26,38 +15,45 @@ InfluxDB-Python :target: https://pypi.python.org/pypi/influxdb :alt: PyPI Status -InfluxDB-Python is a client for interacting with InfluxDB_. -**Note: This library is for use with InfluxDB 1.x. For connecting to InfluxDB 2.x instances, please use the the** `influxdb-client-python `_ **client.** +.. important:: -Development of this library is maintained by: + **This project is no longer in development** + + This v1 client library is for interacting with `InfluxDB 1.x `_ and 1.x-compatible endpoints in `InfluxDB 2.x `_. + Use it to: + + - Write data in line protocol. + - Query data with `InfluxQL `_. -+-----------+-------------------------------+ -| Github ID | URL | -+===========+===============================+ -| @aviau | (https://github.com/aviau) | -+-----------+-------------------------------+ -| @xginn8 | (https://github.com/xginn8) | -+-----------+-------------------------------+ -| @sebito91 | (https://github.com/sebito91) | -+-----------+-------------------------------+ + If you use `InfluxDB 2.x (TSM storage engine) `_ and `Flux `_, see the `v2 client library `_. + + If you use `InfluxDB 3.0 `_, see the `v3 client library `_. + + For new projects, consider using InfluxDB 3.0 and v3 client libraries. + +Description +=========== + +InfluxDB-python, the InfluxDB Python Client (1.x), is a client library for interacting with `InfluxDB 1.x `_ instances. .. _readme-about: -InfluxDB is an open-source distributed time series database, find more about InfluxDB_ at https://docs.influxdata.com/influxdb/latest +`InfluxDB`_ is the time series platform designed to handle high write and query loads. .. _installation: -InfluxDB pre v1.1.0 users -------------------------- -This module is tested with InfluxDB versions: v1.2.4, v1.3.9, v1.4.3, v1.5.4, v1.6.4, and 1.7.4. 
+For InfluxDB pre-v1.1.0 users +----------------------------- -Those users still on InfluxDB v0.8.x users may still use the legacy client by importing ``from influxdb.influxdb08 import InfluxDBClient``. +This module is tested with InfluxDB versions v1.2.4, v1.3.9, v1.4.3, v1.5.4, v1.6.4, and 1.7.4. -Installation ------------- +Users on InfluxDB v0.8.x may still use the legacy client by importing ``from influxdb.influxdb08 import InfluxDBClient``. + +For InfluxDB v1.1+ users +------------------------ Install, upgrade and uninstall influxdb-python with these commands:: @@ -165,21 +161,33 @@ We are also lurking on the following: Development ----------- +The v1 client libraries for InfluxDB 1.x were typically developed and maintained by InfluxDB community members. If you are an InfluxDB v1 user interested in maintaining this client library (at a minimum, keeping it updated with security patches) please contact the InfluxDB team at on the `Community Forums `_ or +`InfluxData Slack `_. + All development is done on Github_. Use Issues_ to report problems or submit contributions. .. _Github: https://github.com/influxdb/influxdb-python/ .. _Issues: https://github.com/influxdb/influxdb-python/issues -Please note that we WILL get to your questions/issues/concerns as quickly as possible. We maintain many -software repositories and sometimes things may get pushed to the backburner. Please don't take offense, -we will do our best to reply as soon as possible! +Please note that we will answer you question as quickly as possible. 
+Maintainers: + ++-----------+-------------------------------+ +| Github ID | URL | ++===========+===============================+ +| @aviau | (https://github.com/aviau) | ++-----------+-------------------------------+ +| @xginn8 | (https://github.com/xginn8) | ++-----------+-------------------------------+ +| @sebito91 | (https://github.com/sebito91) | ++-----------+-------------------------------+ Source code ----------- -The source code is currently available on Github: https://github.com/influxdata/influxdb-python +The source code for the InfluxDB Python Client (1.x) is currently available on Github: https://github.com/influxdata/influxdb-python TODO @@ -188,6 +196,6 @@ TODO The TODO/Roadmap can be found in Github bug tracker: https://github.com/influxdata/influxdb-python/issues -.. _InfluxDB: https://influxdata.com/time-series-platform/influxdb/ +.. _InfluxDB: https://influxdata.com/ .. _Sphinx: http://sphinx.pocoo.org/ .. _Tox: https://tox.readthedocs.org diff --git a/docs/source/conf.py b/docs/source/conf.py index 231c776c..efc22f88 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -117,7 +117,8 @@ # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] -html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] +# Calling get_html_theme_path is deprecated. +# html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". diff --git a/docs/source/examples.rst b/docs/source/examples.rst index b4ada447..841ad8b1 100644 --- a/docs/source/examples.rst +++ b/docs/source/examples.rst @@ -33,7 +33,7 @@ Tutorials - UDP :language: python Tutorials - Authorization by Token -=============== +================================== .. 
literalinclude:: ../../examples/tutorial_authorization.py :language: python diff --git a/influxdb/client.py b/influxdb/client.py index adab4edc..c535a3f1 100644 --- a/influxdb/client.py +++ b/influxdb/client.py @@ -395,7 +395,7 @@ def write(self, data, params=None, expected_response_code=204, :param data: the data to be written :type data: (if protocol is 'json') dict (if protocol is 'line') sequence of line protocol strings - or single string + or single string :param params: additional parameters for the request, defaults to None :type params: dict :param expected_response_code: the expected response code of the write @@ -571,8 +571,9 @@ def write_points(self, :param points: the list of points to be written in the database :type points: list of dictionaries, each dictionary represents a point :type points: (if protocol is 'json') list of dicts, where each dict - represents a point. - (if protocol is 'line') sequence of line protocol strings. + represents a point. + (if protocol is 'line') sequence of line protocol strings. + :param time_precision: Either 's', 'm', 'ms' or 'u', defaults to None :type time_precision: str :param database: the database to write the points to. 
Defaults to diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..1b68d94e --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,3 @@ +[build-system] +requires = ["setuptools>=42", "wheel"] +build-backend = "setuptools.build_meta" \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index 548b17c8..a3df3154 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ python-dateutil>=2.6.0 -pytz +pytz>=2016.10 requests>=2.17.0 six>=1.10.0 -msgpack +msgpack>=0.5.0 diff --git a/setup.py b/setup.py index d44875f6..8ac7d1a7 100755 --- a/setup.py +++ b/setup.py @@ -23,6 +23,11 @@ with open('requirements.txt', 'r') as f: requires = [x.strip() for x in f if x.strip()] +# Debugging: Print the requires values +print("install_requires values:") +for req in requires: + print(f"- {req}") + with open('test-requirements.txt', 'r') as f: test_requires = [x.strip() for x in f if x.strip()] diff --git a/tox.ini b/tox.ini index 1e59b415..a1005abb 100644 --- a/tox.ini +++ b/tox.ini @@ -12,8 +12,8 @@ deps = -r{toxinidir}/requirements.txt py35: numpy==1.14.6 py36: pandas==0.23.4 py36: numpy==1.15.4 - py37: pandas==0.24.2 - py37: numpy==1.16.2 + py37: pandas>=0.24.2 + py37: numpy>=1.16.2 # Only install pandas with non-pypy interpreters # Testing all combinations would be too expensive commands = nosetests -v --with-doctest {posargs} @@ -38,9 +38,9 @@ commands = nosetests -v --with-coverage --cover-html --cover-package=influxdb [testenv:docs] deps = -r{toxinidir}/requirements.txt - pandas==0.24.2 - numpy==1.16.2 - Sphinx==1.8.5 + pandas>=0.24.2 + numpy>=1.16.2 + Sphinx>=1.8.5 sphinx_rtd_theme commands = sphinx-build -b html docs/source docs/build