diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
new file mode 100644
index 00000000..0248ade1
--- /dev/null
+++ b/.github/CODEOWNERS
@@ -0,0 +1 @@
+* @aviau @sebito91 @xginn8
diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md
new file mode 100644
index 00000000..7a7927c1
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE.md
@@ -0,0 +1,4 @@
+- **InfluxDB version:** e.g. 1.7.7 (output of the `influx version` command)
+- **InfluxDB-python version:** e.g. 5.2.2 (output of the `python -c "from __future__ import print_function; import influxdb; print(influxdb.__version__)"` command)
+- **Python version:** e.g. 3.7.4 (output of the `python --version` command)
+- **Operating system version:** e.g. Windows 10, Ubuntu 18.04, macOS 10.14.5
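The same details can be gathered from one Python session; a minimal sketch (the last line assumes a running, reachable InfluxDB server)::

    from __future__ import print_function

    import platform
    import sys

    import influxdb
    from influxdb import InfluxDBClient

    print("InfluxDB-python version:", influxdb.__version__)
    print("Python version:", sys.version.split()[0])
    print("Operating system:", platform.platform())
    # ping() returns the server's version string
    print("InfluxDB version:", InfluxDBClient().ping())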
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 00000000..84729d17
--- /dev/null
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,5 @@
+---
+##### Contributor checklist
+
+- [ ] Builds are passing
+- [ ] New tests have been added (for feature additions)
diff --git a/.gitignore b/.gitignore
index 7720b658..d970c44c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -7,6 +7,7 @@ build/
mock*/
nose*/
.pybuild/
+.mypy_cache/
debian/files
debian/python-influxdb.debhelper.log
debian/python-influxdb.postinst.debhelper
diff --git a/.travis.yml b/.travis.yml
index 5c2be658..9d45f19b 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,33 +1,62 @@
language: python
+
+python:
+ - "2.7"
+ - "3.5"
+ - "3.6"
+ - "3.7"
+ - "pypy"
+ - "pypy3"
+
env:
- - TOX_ENV=py27
- - TOX_ENV=py32
- - TOX_ENV=py33
- - TOX_ENV=py34
- - TOX_ENV=pypy
- - TOX_ENV=pypy3
- - TOX_ENV=docs
- - TOX_ENV=flake8
- - TOX_ENV=coverage
+ - INFLUXDB_VER=1.2.4 # 2017-05-08
+ - INFLUXDB_VER=1.3.9 # 2018-01-19
+ - INFLUXDB_VER=1.4.3 # 2018-01-30
+ - INFLUXDB_VER=1.5.4 # 2018-06-22
+ - INFLUXDB_VER=1.6.4 # 2018-10-24
+ - INFLUXDB_VER=1.7.4 # 2019-02-14
+
+addons:
+ apt:
+ packages:
+ - wget
+
+matrix:
+ include:
+ - python: 3.7
+ env: TOX_ENV=pep257
+ - python: 3.7
+ env: TOX_ENV=docs
+ - python: 3.7
+ env: TOX_ENV=flake8
+ - python: 3.7
+ env: TOX_ENV=coverage
+ - python: 3.7
+ env: TOX_ENV=mypy
+
install:
- - pip install tox
+ - pip install tox-travis
+ - pip install setuptools
- pip install coveralls
- - mkdir influxdb_install
- - wget https://s3.amazonaws.com/influxdb/influxdb_0.9.4.1_amd64.deb
- - dpkg -x influxdb_*_amd64.deb influxdb_install
+ - mkdir -p "influxdb_install/${INFLUXDB_VER}"
+ - if [ -n "${INFLUXDB_VER}" ] ; then wget "https://dl.influxdata.com/influxdb/releases/influxdb_${INFLUXDB_VER}_amd64.deb" ; fi
+ - if [ -n "${INFLUXDB_VER}" ] ; then dpkg -x influxdb*.deb "influxdb_install/${INFLUXDB_VER}" ; fi
+
script:
- - export INFLUXDB_PYTHON_INFLUXD_PATH=$(pwd)/influxdb_install/opt/influxdb/versions/0.9.4.1/influxd
- - travis_wait 30 tox -e $TOX_ENV
+ - export "INFLUXDB_PYTHON_INFLUXD_PATH=$(pwd)/influxdb_install/${INFLUXDB_VER}/usr/bin/influxd"
+ - if [ -n "${TOX_ENV}" ]; then tox -e "${TOX_ENV}"; else tox; fi
+
after_success:
- - if [ "$TOX_ENV" == "coverage" ] ; then coveralls; fi
+ - if [ "${TOX_ENV}" == "coverage" ] ; then coveralls; fi
+
notifications:
email: false
sudo: false
# Travis caching
-cache:
- directories:
- - $HOME/.cache/pip
-before_cache:
- - rm -f $HOME/.cache/pip/log/debug.log
+cache: false
+# directories:
+# - $HOME/.cache/pip
+#before_cache:
+# - rm -f $HOME/.cache/pip/log/debug.log
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 00000000..bfd27d38
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,375 @@
+# Changelog
+All notable changes to this project will be documented in this file.
+
+The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
+and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
+
+## [v5.3.2] - 2024-04-17
+
+### Changed
+- Correctly serialize nanosecond dataframe timestamps (#926)
+
+## [v5.3.1] - 2022-11-14
+
+### Added
+- Add support for custom headers in the InfluxDBClient (#710 thx @nathanielatom)
+- Add support for custom indexes for query in the DataFrameClient (#785)
+
+### Changed
+- Amend retry to avoid sleep after last retry before raising exception (#790 thx @krzysbaranski)
+- Remove msgpack pinning for requirements (#818 thx @prometheanfire)
+- Update support for HTTP headers in the InfluxDBClient (#851 thx @bednar)
+
+### Removed
+
+## [v5.3.0] - 2020-04-10
+
+### Added
+- Add mypy testing framework (#756)
+- Add support for messagepack (#734 thx @lovasoa)
+- Add support for 'show series' (#357 thx @gaker)
+- Add support for custom request session in InfluxDBClient (#360 thx @dschien)
+- Add support for handling np.nan and np.inf values in DataFrameClient (#436 thx @nmerket)
+- Add support for optional `time_precision` in the SeriesHelper (#502 && #719 thx @appunni-dishq && @klDen)
+- Add ability to specify retention policy in SeriesHelper (#723 thx @csanz91)
+- Add gzip compression for post and response data (#732 thx @KEClaytor)
+- Add support for chunked responses in ResultSet (#753 and #538 thx @hrbonz && @psy0rz)
+- Add support for empty string fields (#766 thx @gregschrock)
+- Add support for context managers to InfluxDBClient (#721 thx @JustusAdam)
+
+### Changed
+- Clean up stale CI config (#755)
+- Add legacy client test (#752 & #318 thx @oldmantaiter & @sebito91)
+- Update make_lines section in line_protocol.py to split out core function (#375 thx @aisbaa)
+- Fix nanosecond time resolution for points (#407 thx @AndreCAndersen && @clslgrnc)
+- Fix import of distutils.spawn (#805 thx @Hawk777)
+- Update repr of float values including properly handling of boolean (#488 thx @ghost)
+- Update DataFrameClient to fix faulty empty tags (#770 thx @michelfripiat)
+- Update DataFrameClient to properly return `dropna` values (#778 thx @jgspiro)
+- Update DataFrameClient to test for pd.DatetimeIndex before blind conversion (#623 thx @testforvin)
+- Update client to type-set UDP port to int (#651 thx @yifeikong)
+- Update batched writing support for all iterables (#746 thx @JayH5)
+- Update SeriesHelper to enable class instantiation when not initialized (#772 thx @ocworld)
+- Update UDP test case to add proper timestamp to datapoints (#808 thx @shantanoo-desai)
+
+### Removed
+
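Two of the v5.3.0 additions above are easiest to see in code: context-manager support (#721) and gzip compression (#732). A minimal sketch, assuming a local server::

    from influxdb import InfluxDBClient

    # gzip=True compresses request/response bodies; the with-block
    # closes the underlying session when it exits.
    with InfluxDBClient(host='localhost', port=8086, gzip=True) as client:
        print(client.ping())
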
+## [v5.2.3] - 2019-08-19
+
+### Added
+- Add consistency param to InfluxDBClient.write_points (#643 thx @RonRothman)
+- Add UDP example (#648 thx @shantanoo-desai)
+- Add consistency parameter to `write_points` (#664 thx @RonRothman)
+- The query() function now accepts a bind_params argument for parameter binding (#678 thx @clslgrnc)
+- Add `get_list_continuous_queries`, `drop_continuous_query`, and `create_continuous_query` management methods for
+ continuous queries (#681 thx @lukaszdudek-silvair && @smolse)
+- Mutual TLS authentication (#702 thx @LloydW93)
+
+### Changed
+- Update test suite to add support for Python 3.7 and InfluxDB v1.6.4 and 1.7.4 (#692 thx @clslgrnc)
+- Update supported versions of influxdb + python (#693 thx @clslgrnc)
+- Fix for the line protocol issue with leading comma (#694 thx @d3banjan)
+- Update classifiers tuple to list in setup.py (#697 thx @Hanaasagi)
+- Update documentation for empty `delete_series` confusion (#699 thx @xginn8)
+- Fix newline character issue in tag value (#716 thx @syhan)
+- Update tests/tutorials_pandas.py to reference the `line` protocol, due to a bug with `json` (#737 thx @Aeium)
+
+### Removed
+
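A minimal sketch of the parameter binding added in #678 above; ``$host`` is substituted from ``bind_params`` rather than interpolated into the query string, which avoids injection from untrusted values::

    from influxdb import InfluxDBClient

    client = InfluxDBClient(host='localhost', port=8086, database='example')
    result = client.query(
        'SELECT * FROM cpu_load_short WHERE host = $host',
        bind_params={'host': 'server01'})
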
+## [v5.2.2] - 2019-03-14
+### Added
+
+### Changed
+- Fix 'TypeError: Already tz-aware' introduced with recent versions of pandas (#671, #676, thx @f4bsch @clslgrnc)
+
+## [v5.2.1] - 2018-12-07
+### Added
+
+### Changed
+- Pass through the "method" kwarg to DataFrameClient queries
+
+### Removed
+
+## [v5.2.0] - 2018-07-10
+### Added
+- Finally add a CHANGELOG.md to communicate breaking changes (#598)
+- Test multiple versions of InfluxDB in travis
+- Add SHARD DURATION parameter to retention policy create/alter
+### Changed
+- Update POST/GET requests to follow verb guidelines from InfluxDB documentation
+- Update test suite to support InfluxDB v1.3.9, v1.4.2, and v1.5.4
+- Fix performance degradation when removing NaN values via line protocol (#592)
+### Removed
+- Dropped support for Python3.4
+
+## [v5.1.0] - 2018-06-26
+### Added
+- Connect to InfluxDB running under a path on the server (#556 thx @gladhorn)
+- Escape measurement names in DataFrameClient (#542 thx @tzonghao)
+- Escape tags that end with a backslash (#537 thx @vaniakov)
+- Add back mistakenly-dropped database parameter (#540)
+- Add PyPI status to README.md
+### Changed
+- Fix bad session mount scheme (#571 thx @vaniakov)
+- Fixed issue with DataFrameClient calling to_datetime function (#593 thx @dragoshenron)
+- Escape columns in DataFrameClient for line protocol (#584 thx @dmuiruri)
+- Convert DataFrameClient times from int to np.int64 (#495 thx @patrickhoebeke)
+- Updated pandas tutorial (#547 thx @techaddicted)
+- Explicitly set numpy version for tox (#563)
+### Removed
+- Removed UDP precision restrictions on timestamp (#557 thx @mdhausman)
+
+## [v5.0.0] - 2017-11-20
+### Added
+- Add pool size parameter to client constructor (#534 thx @vaniakov)
+- Add ping method to client for checking connectivity (#409 thx @pmenglund)
+- Add retry logic & exponential backoff when a connection fails (#508)
+- Declare which setuptools version is required in PyPy env
+- Functions for drop_measurement and get_list_measurements in InfluxDBClient (#402 thx @Vic020)
+- Allow single string as data argument in write (#492 thx @baftek)
+- Support chunked queries in DataFrameClient (#439 thx @gusutabopb)
+- Add close method to InfluxDBClient (#465 thx @Linux-oiD)
+- PEP257 linting & code compliance (#473)
+### Changed
+- Fix broken tags filtering on a ResultSet (#511)
+- Improve retry codepath for connecting to InfluxDB (#536 thx @swails)
+- Clean up imports using six instead of sys.version (#536 thx @swails)
+- Replace references to dataframe.ix with dataframe.iloc (#528)
+- Improve performance of tag processing when converting DataFrameClient to line protocol (#503 thx @tzonghao)
+- Typo in Content-Type header (#513 thx @milancermak)
+- Clean up README.md formatting
+- Catch TypeError when casting to float to return False with objects (#475 thx @BenHewins)
+- Improve efficiency of tag appending in DataFrameClient when converting to line protocol (#486 thx @maxdolle)
+### Removed
+- Drop requirement for all fields in SeriesHelper (#518 thx @spott)
+- use_udp and udp_port are now private properties in InfluxDBClient
+
+## [v4.1.1] - 2017-06-06
+### Added
+### Changed
+### Removed
+
+## [v4.1.0] - 2017-04-12
+### Added
+### Changed
+### Removed
+
+## [v4.0.0] - 2016-12-07
+### Added
+### Changed
+### Removed
+
+## [v3.0.0] - 2016-06-26
+### Added
+### Changed
+### Removed
+
+## [v2.12.0] - 2016-01-29
+### Added
+### Changed
+### Removed
+
+## [v2.11.0] - 2016-01-11
+### Added
+### Changed
+### Removed
+
+## [v2.10.0] - 2015-11-13
+### Added
+### Changed
+### Removed
+
+## [v2.9.3] - 2015-10-30
+### Added
+### Changed
+### Removed
+
+## [v2.9.2] - 2015-10-07
+### Added
+### Changed
+### Removed
+
+## [v2.9.1] - 2015-08-30
+### Added
+### Changed
+### Removed
+
+## [v2.9.0] - 2015-08-28
+### Added
+### Changed
+### Removed
+
+## [v2.8.0] - 2015-08-06
+### Added
+### Changed
+### Removed
+
+## [v2.7.3] - 2015-07-31
+### Added
+### Changed
+### Removed
+
+## [v2.7.2] - 2015-07-31
+### Added
+### Changed
+### Removed
+
+## [v2.7.1] - 2015-07-26
+### Added
+### Changed
+### Removed
+
+## [v2.7.0] - 2015-07-23
+### Added
+### Changed
+### Removed
+
+## [v2.6.0] - 2015-06-16
+### Added
+### Changed
+### Removed
+
+## [v2.5.1] - 2015-06-15
+### Added
+### Changed
+### Removed
+
+## [v2.5.0] - 2015-06-15
+### Added
+### Changed
+### Removed
+
+## [v2.4.0] - 2015-06-12
+### Added
+### Changed
+### Removed
+
+## [v2.3.0] - 2015-05-13
+### Added
+### Changed
+### Removed
+
+## [v2.2.0] - 2015-05-05
+### Added
+### Changed
+### Removed
+
+## [v2.1.0] - 2015-04-24
+### Added
+### Changed
+### Removed
+
+## [v2.0.2] - 2015-04-22
+### Added
+### Changed
+### Removed
+
+## [v2.0.1] - 2015-04-17
+### Added
+### Changed
+### Removed
+
+## [v2.0.0] - 2015-04-17
+### Added
+### Changed
+### Removed
+
+## [v1.0.1] - 2015-03-30
+### Added
+### Changed
+### Removed
+
+## [v1.0.0] - 2015-03-20
+### Added
+### Changed
+### Removed
+
+## [v0.4.1] - 2015-03-18
+### Added
+### Changed
+### Removed
+
+## [v0.4.0] - 2015-03-17
+### Added
+### Changed
+### Removed
+
+## [v0.3.1] - 2015-02-23
+### Added
+### Changed
+### Removed
+
+## [v0.3.0] - 2015-02-17
+### Added
+### Changed
+### Removed
+
+## [v0.2.0] - 2015-01-23
+### Added
+### Changed
+### Removed
+
+## [v0.1.13] - 2014-11-12
+### Added
+### Changed
+### Removed
+
+## [v0.1.12] - 2014-08-22
+### Added
+### Changed
+### Removed
+
+## [v0.1.11] - 2014-06-20
+### Added
+### Changed
+### Removed
+
+## [v0.1.10] - 2014-06-09
+### Added
+### Changed
+### Removed
+
+## [v0.1.9] - 2014-06-06
+### Added
+### Changed
+### Removed
+
+## [v0.1.8] - 2014-06-06
+### Added
+### Changed
+### Removed
+
+## [v0.1.7] - 2014-05-21
+### Added
+### Changed
+### Removed
+
+## [v0.1.6] - 2014-04-02
+### Added
+### Changed
+### Removed
+
+## [v0.1.5] - 2014-03-25
+### Added
+### Changed
+### Removed
+
+## [v0.1.4] - 2014-03-03
+### Added
+### Changed
+### Removed
+
+## [v0.1.3] - 2014-02-11
+### Added
+### Changed
+### Removed
+
+## [v0.1.2] - 2013-12-09
+### Added
+### Changed
+### Removed
+
+## [v0.1.1] - 2013-11-14
+### Added
+### Changed
+### Removed
diff --git a/LICENSE b/LICENSE
index 38ee2491..a49a5410 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,6 +1,6 @@
The MIT License (MIT)
-Copyright (c) 2013 InfluxDB
+Copyright (c) 2020 InfluxDB
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
diff --git a/MANIFEST.in b/MANIFEST.in
index 18b23910..76466bf2 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -2,3 +2,4 @@ include requirements.txt
include test-requirements.txt
include dev-requirements.txt
include LICENSE
+include README.rst
diff --git a/README.rst b/README.rst
index 5b1ba3f5..048db045 100644
--- a/README.rst
+++ b/README.rst
@@ -1,50 +1,61 @@
+InfluxDB-Python
+===============
-
-InfluxDB-Python is a client for interacting with InfluxDB_.
-
-.. image:: https://travis-ci.org/influxdb/influxdb-python.svg?branch=master
- :target: https://travis-ci.org/influxdb/influxdb-python
-
+.. image:: https://travis-ci.org/influxdata/influxdb-python.svg?branch=master
+ :target: https://travis-ci.org/influxdata/influxdb-python
.. image:: https://readthedocs.org/projects/influxdb-python/badge/?version=latest&style
:target: http://influxdb-python.readthedocs.org/
:alt: Documentation Status
-.. image:: https://img.shields.io/coveralls/influxdb/influxdb-python.svg
- :target: https://coveralls.io/r/influxdb/influxdb-python
+.. image:: https://img.shields.io/coveralls/influxdata/influxdb-python.svg
+ :target: https://coveralls.io/r/influxdata/influxdb-python
:alt: Coverage
-.. image:: https://pypip.in/download/influxdb/badge.svg
- :target: https://pypi.python.org/pypi//influxdb/
- :alt: Downloads
+.. image:: https://img.shields.io/pypi/v/influxdb.svg
+ :target: https://pypi.python.org/pypi/influxdb
+ :alt: PyPI Status
+
+
+.. important::
+
+ **This project is no longer in development**
+
+ This v1 client library is for interacting with `InfluxDB 1.x `_ and 1.x-compatible endpoints in `InfluxDB 2.x `_.
+ Use it to:
+
+ - Write data in line protocol.
+ - Query data with `InfluxQL `_.
+
+ If you use `InfluxDB 2.x (TSM storage engine) `_ and `Flux `_, see the `v2 client library `_.
-.. image:: https://pypip.in/version/influxdb/badge.svg
- :target: https://pypi.python.org/pypi/influxdb/
- :alt: Latest Version
+ If you use `InfluxDB 3.0 `_, see the `v3 client library `_.
-.. image:: https://pypip.in/py_versions/influxdb/badge.svg
- :target: https://pypi.python.org/pypi/influxdb/
- :alt: Supported Python versions
+ For new projects, consider using InfluxDB 3.0 and v3 client libraries.
-.. image:: https://pypip.in/license/influxdb/badge.svg
- :target: https://pypi.python.org/pypi/influxdb/
- :alt: License
+Description
+===========
+
+InfluxDB-python, the InfluxDB Python Client (1.x), is a client library for interacting with `InfluxDB 1.x `_ instances.
.. _readme-about:
-InfluxDB is an open-source distributed time series database, find more about InfluxDB_ at http://influxdb.com/
+`InfluxDB`_ is the time series platform designed to handle high write and query loads.
.. _installation:
-InfluxDB v0.8.X users
-=====================
-InfluxDB 0.9 was released and it is the new recommended version. However, InfluxDB 0.8.x users may still use the legacy client by using ``from influxdb.influxdb08 import InfluxDBClient`` instead.
+For InfluxDB pre-v1.1.0 users
+-----------------------------
+
+This module is tested with InfluxDB versions v1.2.4, v1.3.9, v1.4.3, v1.5.4, v1.6.4, and v1.7.4.
+
+Users on InfluxDB v0.8.x may still use the legacy client by importing ``from influxdb.influxdb08 import InfluxDBClient``.
-Installation
-============
+For InfluxDB v1.1+ users
+------------------------
-Install, upgrade and uninstall InfluxDB-Python with these commands::
+Install, upgrade and uninstall influxdb-python with these commands::
$ pip install influxdb
$ pip install --upgrade influxdb
@@ -55,9 +66,11 @@ On Debian/Ubuntu, you can install it with this command::
$ sudo apt-get install python-influxdb
Dependencies
-============
+------------
-The InfluxDB-Python distribution is supported and tested on Python 2.7, 3.2, 3.3, 3.4, PyPy and PyPy3.
+The influxdb-python distribution is supported and tested on Python 2.7, 3.5, 3.6, 3.7, PyPy and PyPy3.
+
+**Note:** Python versions <3.5 are currently untested. See ``.travis.yml``.
Main dependency is:
@@ -73,9 +86,9 @@ Additional dependencies are:
Documentation
-=============
+-------------
-InfluxDB-Python documentation is available at http://influxdb-python.readthedocs.org
+Documentation is available at https://influxdb-python.readthedocs.io/en/latest/.
You will need Sphinx_ installed to generate the documentation.
@@ -88,7 +101,7 @@ Generated documentation can be found in the *docs/build/html/* directory.
Examples
-========
+--------
Here's a basic example (for more see the examples directory)::
@@ -120,23 +133,9 @@ Here's a basic example (for more see the examples directory)::
>>> print("Result: {0}".format(result))
-If you want to connect to a cluster, you could initialize a ``InfluxDBClusterClient``::
-
- $ python
-
- >>> from influxdb import InfluxDBClusterClient
-
- >>> cc = InfluxDBClusterClient(hosts = [('192.168.0.1', 8086),
- ('192.168.0.2', 8086),
- ('192.168.0.3', 8086)],
- username='root',
- password='root',
- database='example')
-
-``InfluxDBClusterClient`` has the same methods as ``InfluxDBClient``, it basically is a proxy to multiple InfluxDBClients.
Testing
-=======
+-------
Make sure you have tox by running the following::
@@ -148,14 +147,22 @@ To test influxdb-python with multiple version of Python, you can use Tox_::
Support
-=======
+-------
For issues with, questions about, or feedback for InfluxDB_, please look into
our community page: http://influxdb.com/community/.
+We are also lurking on the following:
+
+- #influxdb on irc.freenode.net
+- #influxdb on gophers.slack.com
+
Development
-===========
+-----------
+
+The v1 client libraries for InfluxDB 1.x were typically developed and maintained by InfluxDB community members. If you are an InfluxDB v1 user interested in maintaining this client library (at a minimum, keeping it updated with security patches), please contact the InfluxDB team on the `Community Forums `_ or
+`InfluxData Slack `_.
All development is done on Github_. Use Issues_ to report
problems or submit contributions.
@@ -163,19 +170,32 @@ problems or submit contributions.
.. _Github: https://github.com/influxdb/influxdb-python/
.. _Issues: https://github.com/influxdb/influxdb-python/issues
+Please note that we will answer your question as quickly as possible.
-TODO
-====
-
-The TODO/Roadmap can be found in Github bug tracker: https://github.com/influxdb/influxdb-python/issues
+Maintainers:
++-----------+-------------------------------+
+| Github ID | URL |
++===========+===============================+
+| @aviau | (https://github.com/aviau) |
++-----------+-------------------------------+
+| @xginn8 | (https://github.com/xginn8) |
++-----------+-------------------------------+
+| @sebito91 | (https://github.com/sebito91) |
++-----------+-------------------------------+
Source code
-===========
+-----------
+
+The source code for the InfluxDB Python Client (1.x) is currently available on Github: https://github.com/influxdata/influxdb-python
+
+
+TODO
+----
-The source code is currently available on Github: https://github.com/influxdb/influxdb-python
+The TODO/Roadmap can be found in the GitHub bug tracker: https://github.com/influxdata/influxdb-python/issues
-.. _InfluxDB: http://influxdb.com/
+.. _InfluxDB: https://influxdata.com/
.. _Sphinx: http://sphinx.pocoo.org/
.. _Tox: https://tox.readthedocs.org
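The basic example referenced in the README's Examples section is truncated in this diff; a minimal sketch of the same flow, assuming a local server::

    from influxdb import InfluxDBClient

    client = InfluxDBClient(host='localhost', port=8086,
                            username='root', password='root',
                            database='example')
    client.create_database('example')

    client.write_points([{
        "measurement": "cpu_load_short",
        "tags": {"host": "server01", "region": "us-west"},
        "time": "2009-11-10T23:00:00Z",
        "fields": {"value": 0.64},
    }])

    result = client.query('select value from cpu_load_short')
    print("Result: {0}".format(result))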
diff --git a/dev-requirements.txt b/dev-requirements.txt
index 78d40a24..bc7b4c87 100644
--- a/dev-requirements.txt
+++ b/dev-requirements.txt
@@ -1,8 +1,8 @@
-requests
+requests>=2.17.0
nose
mock
-pandas
-Sphinx==1.2.3
+pandas==0.20.1
+Sphinx==1.5.5
sphinx_rtd_theme
wheel
twine
diff --git a/docs/source/api-documentation.rst b/docs/source/api-documentation.rst
index c6178fed..35fdb291 100644
--- a/docs/source/api-documentation.rst
+++ b/docs/source/api-documentation.rst
@@ -30,7 +30,7 @@ These clients are initiated in the same way as the
client = DataFrameClient(host='127.0.0.1', port=8086, username='root', password='root', database='dbname')
-.. note:: Only when using UDP (use_udp=True) the connections is established.
+.. note:: The connection is only established when using UDP (use_udp=True).
.. _InfluxDBClient-api:
@@ -45,16 +45,6 @@ These clients are initiated in the same way as the
:members:
:undoc-members:
-------------------------------
-:class:`InfluxDBClusterClient`
-------------------------------
-
-
-.. currentmodule:: influxdb.InfluxDBClusterClient
-.. autoclass:: influxdb.InfluxDBClusterClient
- :members:
- :undoc-members:
-
------------------------
:class:`DataFrameClient`
------------------------
diff --git a/docs/source/conf.py b/docs/source/conf.py
index f55684d5..efc22f88 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -1,5 +1,7 @@
# -*- coding: utf-8 -*-
#
+"""InfluxDB documentation build configuration file."""
+
# InfluxDB documentation build configuration file, created by
# sphinx-quickstart on Thu Oct 16 00:33:06 2014.
#
@@ -115,7 +117,8 @@
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
-html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
+# Calling get_html_theme_path is deprecated.
+# html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# The name for this set of Sphinx documents. If None, it defaults to
# " v documentation".
diff --git a/docs/source/examples.rst b/docs/source/examples.rst
index 2c85fbda..841ad8b1 100644
--- a/docs/source/examples.rst
+++ b/docs/source/examples.rst
@@ -25,3 +25,15 @@ Tutorials - SeriesHelper
.. literalinclude:: ../../examples/tutorial_serieshelper.py
:language: python
+
+Tutorials - UDP
+===============
+
+.. literalinclude:: ../../examples/tutorial_udp.py
+ :language: python
+
+Tutorials - Authorization by Token
+==================================
+
+.. literalinclude:: ../../examples/tutorial_authorization.py
+ :language: python
diff --git a/docs/source/resultset.rst b/docs/source/resultset.rst
index 6c36463f..b1c3206f 100644
--- a/docs/source/resultset.rst
+++ b/docs/source/resultset.rst
@@ -18,7 +18,7 @@ Using ``rs.get_points()`` will return a generator for all the points in the Resu
Filtering by measurement
------------------------
-Using ``rs.get_points('cpu')`` will return a generator for all the points that are in a serie with measurement name ``cpu``, no matter the tags.
+Using ``rs.get_points('cpu')`` will return a generator for all the points that are in a series with measurement name ``cpu``, no matter the tags.
::
rs = cli.query("SELECT * from cpu")
@@ -36,7 +36,7 @@ Using ``rs.get_points(tags={'host_name': 'influxdb.com'})`` will return a genera
Filtering by measurement and tags
---------------------------------
-Using measurement name and tags will return a generator for all the points that are in a serie with the specified measurement name AND whose tags match the given tags.
+Using measurement name and tags will return a generator for all the points that are in a series with the specified measurement name AND whose tags match the given tags.
::
rs = cli.query("SELECT * from cpu")
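The two filters above combine; a minimal sketch::

    rs = cli.query("SELECT * from cpu")
    points = rs.get_points(measurement='cpu',
                           tags={'host_name': 'influxdb.com'})
    for point in points:
        print(point)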
diff --git a/examples/tutorial.py b/examples/tutorial.py
index 64e95778..12cd49c1 100644
--- a/examples/tutorial.py
+++ b/examples/tutorial.py
@@ -1,15 +1,21 @@
+# -*- coding: utf-8 -*-
+"""Tutorial on using the InfluxDB client."""
+
import argparse
from influxdb import InfluxDBClient
def main(host='localhost', port=8086):
+ """Instantiate a connection to the InfluxDB."""
user = 'root'
password = 'root'
dbname = 'example'
dbuser = 'smly'
dbuser_password = 'my_secret_password'
- query = 'select value from cpu_load_short;'
+ query = 'select Float_value from cpu_load_short;'
+ query_where = 'select Int_value from cpu_load_short where host=$host;'
+ bind_params = {'host': 'server01'}
json_body = [
{
"measurement": "cpu_load_short",
@@ -19,7 +25,10 @@ def main(host='localhost', port=8086):
},
"time": "2009-11-10T23:00:00Z",
"fields": {
- "value": 0.64
+ "Float_value": 0.64,
+ "Int_value": 3,
+ "String_value": "Text",
+ "Bool_value": True
}
}
]
@@ -38,11 +47,16 @@ def main(host='localhost', port=8086):
print("Write points: {0}".format(json_body))
client.write_points(json_body)
- print("Queying data: " + query)
+ print("Querying data: " + query)
result = client.query(query)
print("Result: {0}".format(result))
+ print("Querying data: " + query_where)
+ result = client.query(query_where, bind_params=bind_params)
+
+ print("Result: {0}".format(result))
+
print("Switch user: " + user)
client.switch_user(user, password)
@@ -51,9 +65,11 @@ def main(host='localhost', port=8086):
def parse_args():
+ """Parse the args."""
parser = argparse.ArgumentParser(
description='example code to play with InfluxDB')
- parser.add_argument('--host', type=str, required=False, default='localhost',
+ parser.add_argument('--host', type=str, required=False,
+ default='localhost',
help='hostname of InfluxDB http API')
parser.add_argument('--port', type=int, required=False, default=8086,
help='port of InfluxDB http API')
diff --git a/examples/tutorial_authorization.py b/examples/tutorial_authorization.py
new file mode 100644
index 00000000..9d9a800f
--- /dev/null
+++ b/examples/tutorial_authorization.py
@@ -0,0 +1,32 @@
+# -*- coding: utf-8 -*-
+"""Tutorial how to authorize InfluxDB client by custom Authorization token."""
+
+import argparse
+from influxdb import InfluxDBClient
+
+
+def main(token='my-token'):
+ """Instantiate a connection to the InfluxDB."""
+ client = InfluxDBClient(username=None, password=None,
+ headers={"Authorization": token})
+
+ print("Use authorization token: " + token)
+
+ version = client.ping()
+ print("Successfully connected to InfluxDB: " + version)
+ pass
+
+
+def parse_args():
+ """Parse the args from main."""
+ parser = argparse.ArgumentParser(
+ description='example code to play with InfluxDB')
+ parser.add_argument('--token', type=str, required=False,
+ default='my-token',
+                        help='Authorization token for the proxy in front of InfluxDB.')
+ return parser.parse_args()
+
+
+if __name__ == '__main__':
+ args = parse_args()
+ main(token=args.token)
diff --git a/examples/tutorial_pandas.py b/examples/tutorial_pandas.py
index c9a09fde..13e72f8c 100644
--- a/examples/tutorial_pandas.py
+++ b/examples/tutorial_pandas.py
@@ -1,3 +1,6 @@
+# -*- coding: utf-8 -*-
+"""Tutorial for using pandas and the InfluxDB client."""
+
import argparse
import pandas as pd
@@ -5,34 +8,38 @@
def main(host='localhost', port=8086):
+ """Instantiate the connection to the InfluxDB client."""
user = 'root'
password = 'root'
- dbname = 'example'
+ dbname = 'demo'
+ protocol = 'line'
client = DataFrameClient(host, port, user, password, dbname)
print("Create pandas DataFrame")
df = pd.DataFrame(data=list(range(30)),
index=pd.date_range(start='2014-11-16',
- periods=30, freq='H'))
+ periods=30, freq='H'), columns=['0'])
print("Create database: " + dbname)
client.create_database(dbname)
print("Write DataFrame")
- client.write_points(df, 'demo')
+ client.write_points(df, 'demo', protocol=protocol)
print("Write DataFrame with Tags")
- client.write_points(df, 'demo', {'k1': 'v1', 'k2': 'v2'})
+ client.write_points(df, 'demo',
+ {'k1': 'v1', 'k2': 'v2'}, protocol=protocol)
print("Read DataFrame")
client.query("select * from demo")
print("Delete database: " + dbname)
- client.delete_database(dbname)
+ client.drop_database(dbname)
def parse_args():
+ """Parse the args from main."""
parser = argparse.ArgumentParser(
description='example code to play with InfluxDB')
parser.add_argument('--host', type=str, required=False,
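A note on reading data back: ``DataFrameClient.query`` returns a dict of pandas DataFrames keyed by measurement (and tag set, for grouped results); a minimal sketch following the tutorial above::

    result = client.query("select * from demo")
    df = result['demo']   # DataFrame with a UTC DatetimeIndex
    print(df.head())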
diff --git a/examples/tutorial_serieshelper.py b/examples/tutorial_serieshelper.py
index d7bd27c9..72b80bb5 100644
--- a/examples/tutorial_serieshelper.py
+++ b/examples/tutorial_serieshelper.py
@@ -1,6 +1,5 @@
-"""
-Tutorial/Example how to use the class helper `SeriesHelper`
-"""
+# -*- coding: utf-8 -*-
+"""Tutorial how to use the class helper `SeriesHelper`."""
from influxdb import InfluxDBClient
from influxdb import SeriesHelper
@@ -20,18 +19,28 @@
class MySeriesHelper(SeriesHelper):
- # Meta class stores time series helper configuration.
+ """Instantiate SeriesHelper to write points to the backend."""
+
class Meta:
+ """Meta class stores time series helper configuration."""
+
# The client should be an instance of InfluxDBClient.
client = myclient
- # The series name must be a string. Add dependent fields/tags in curly brackets.
+
+ # The series name must be a string. Add dependent fields/tags
+ # in curly brackets.
series_name = 'events.stats.{server_name}'
+
# Defines all the fields in this time series.
fields = ['some_stat', 'other_stat']
+
# Defines all the tags for the series.
tags = ['server_name']
- # Defines the number of data points to store prior to writing on the wire.
+
+ # Defines the number of data points to store prior to writing
+ # on the wire.
bulk_size = 5
+
# autocommit must be set to True when using bulk_size
autocommit = True
@@ -42,7 +51,8 @@ class Meta:
MySeriesHelper(server_name='us.east-1', some_stat=159, other_stat=10)
MySeriesHelper(server_name='us.east-1', some_stat=158, other_stat=20)
MySeriesHelper(server_name='us.east-1', some_stat=157, other_stat=30)
-MySeriesHelper(server_name='us.east-1', some_stat=156, other_stat=40)
+MySeriesHelper(server_name='us.east-1', some_stat=156, other_stat=30)
+MySeriesHelper(server_name='us.east-1', some_stat=156)
MySeriesHelper(server_name='us.east-1', some_stat=155, other_stat=50)
# To manually submit data points which are not yet written, call commit:
diff --git a/examples/tutorial_server_data.py b/examples/tutorial_server_data.py
index b5b26d1c..cb903fad 100644
--- a/examples/tutorial_server_data.py
+++ b/examples/tutorial_server_data.py
@@ -1,11 +1,15 @@
+# -*- coding: utf-8 -*-
+"""Tutorial on using the server functions."""
+
+from __future__ import print_function
import argparse
-from influxdb import InfluxDBClient
-from influxdb.client import InfluxDBClientError
import datetime
import random
import time
+from influxdb import InfluxDBClient
+from influxdb.client import InfluxDBClientError
USER = 'root'
PASSWORD = 'root'
@@ -13,7 +17,7 @@
def main(host='localhost', port=8086, nb_day=15):
-
+ """Instantiate a connection to the backend."""
nb_day = 15 # number of day to generate time series
timeinterval_min = 5 # create an event every x minutes
total_minutes = 1440 * nb_day
@@ -28,17 +32,18 @@ def main(host='localhost', port=8086, nb_day=15):
hostName = "server-%d" % random.randint(1, 5)
# pointValues = [int(past_date.strftime('%s')), value, hostName]
pointValues = {
- "time": int(past_date.strftime('%s')),
- "measurement": metric,
- 'fields': {
- 'value': value,
- },
- 'tags': {
- "hostName": hostName,
- },
- }
+ "time": int(past_date.strftime('%s')),
+ "measurement": metric,
+ "fields": {
+ "value": value,
+ },
+ "tags": {
+ "hostName": hostName,
+ },
+ }
series.append(pointValues)
- print series
+
+ print(series)
client = InfluxDBClient(host, port, USER, PASSWORD, DBNAME)
@@ -51,7 +56,7 @@ def main(host='localhost', port=8086, nb_day=15):
client.create_database(DBNAME)
print("Create a retention policy")
- retention_policy = 'awesome_policy'
+ retention_policy = 'server_data'
client.create_retention_policy(retention_policy, '3d', 3, default=True)
print("Write points #: {0}".format(total_records))
@@ -59,19 +64,22 @@ def main(host='localhost', port=8086, nb_day=15):
time.sleep(2)
- query = "SELECT MEAN(value) FROM %s WHERE time > now() - 10d GROUP BY time(500m)" % (metric)
- result = client.query(query, database=DBNAME, raw=False)
- print (result)
+ query = "SELECT MEAN(value) FROM {} WHERE \
+ time > now() - 10d GROUP BY time(500m)".format(metric)
+ result = client.query(query, database=DBNAME)
+ print(result)
print("Result: {0}".format(result))
- print("Drop database: " + DBNAME)
+ print("Drop database: {}".format(DBNAME))
client.drop_database(DBNAME)
def parse_args():
+ """Parse the args."""
parser = argparse.ArgumentParser(
description='example code to play with InfluxDB')
- parser.add_argument('--host', type=str, required=False, default='localhost',
+ parser.add_argument('--host', type=str, required=False,
+ default='localhost',
help='hostname influxdb http API')
parser.add_argument('--port', type=int, required=False, default=8086,
help='port influxdb http API')
diff --git a/examples/tutorial_sine_wave.py b/examples/tutorial_sine_wave.py
index 414dd10d..5dfebf3c 100644
--- a/examples/tutorial_sine_wave.py
+++ b/examples/tutorial_sine_wave.py
@@ -1,10 +1,13 @@
+# -*- coding: utf-8 -*-
+"""Tutorial using all elements to define a sine wave."""
+
import argparse
-from influxdb import InfluxDBClient
import math
import datetime
import time
+from influxdb import InfluxDBClient
USER = 'root'
PASSWORD = 'root'
@@ -12,9 +15,7 @@
def main(host='localhost', port=8086):
- """
- main function to generate the sin wave
- """
+ """Define function to generate the sin wave."""
now = datetime.datetime.today()
points = []
@@ -36,20 +37,20 @@ def main(host='localhost', port=8086):
client.create_database(DBNAME)
client.switch_database(DBNAME)
- #Write points
+ # Write points
client.write_points(points)
time.sleep(3)
query = 'SELECT * FROM foobar'
- print("Queying data: " + query)
+ print("Querying data: " + query)
result = client.query(query, database=DBNAME)
print("Result: {0}".format(result))
"""
- You might want to comment the delete and plot the result on InfluxDB Interface
- Connect on InfluxDB Interface at http://127.0.0.1:8083/
- Select the database tutorial -> Explore Data
+    You might want to comment out the database drop and plot the result
+    in the InfluxDB interface: connect at http://127.0.0.1:8083/,
+    select the database tutorial -> Explore Data
Then run the following query:
@@ -61,9 +62,11 @@ def main(host='localhost', port=8086):
def parse_args():
+ """Parse the args."""
parser = argparse.ArgumentParser(
description='example code to play with InfluxDB')
- parser.add_argument('--host', type=str, required=False, default='localhost',
+ parser.add_argument('--host', type=str, required=False,
+ default='localhost',
help='hostname influxdb http API')
parser.add_argument('--port', type=int, required=False, default=8086,
help='port influxdb http API')
diff --git a/examples/tutorial_udp.py b/examples/tutorial_udp.py
new file mode 100644
index 00000000..93b923d7
--- /dev/null
+++ b/examples/tutorial_udp.py
@@ -0,0 +1,67 @@
+# -*- coding: utf-8 -*-
+"""Example for sending batch information to InfluxDB via UDP."""
+
+"""
+INFO: In order to use UDP, one should enable the UDP service from the
+`influxdb.conf` under section
+ [[udp]]
+ enabled = true
+ bind-address = ":8089" # port number for sending data via UDP
+ database = "udp1" # name of database to be stored
+ [[udp]]
+ enabled = true
+ bind-address = ":8090"
+ database = "udp2"
+"""
+
+
+import argparse
+
+from influxdb import InfluxDBClient
+
+
+def main(uport):
+ """Instantiate connection to the InfluxDB."""
+ # NOTE: structure of the UDP packet is different than that of information
+ # sent via HTTP
+ json_body = {
+ "tags": {
+ "host": "server01",
+ "region": "us-west"
+ },
+ "points": [{
+ "measurement": "cpu_load_short",
+ "fields": {
+ "value": 0.64
+ },
+ "time": "2009-11-10T23:00:00Z",
+ },
+ {
+ "measurement": "cpu_load_short",
+ "fields": {
+ "value": 0.67
+ },
+ "time": "2009-11-10T23:05:00Z"
+ }]
+ }
+
+ # make `use_udp` True and add `udp_port` number from `influxdb.conf` file
+ # no need to mention the database name since it is already configured
+ client = InfluxDBClient(use_udp=True, udp_port=uport)
+
+ # Instead of `write_points` use `send_packet`
+ client.send_packet(json_body)
+
+
+def parse_args():
+ """Parse the args."""
+ parser = argparse.ArgumentParser(
+ description='example code to play with InfluxDB along with UDP Port')
+ parser.add_argument('--uport', type=int, required=True,
+                        help='UDP port of InfluxDB')
+ return parser.parse_args()
+
+
+if __name__ == '__main__':
+ args = parse_args()
+ main(uport=args.uport)
diff --git a/influxdb/__init__.py b/influxdb/__init__.py
index dab1c004..e66f80ea 100644
--- a/influxdb/__init__.py
+++ b/influxdb/__init__.py
@@ -1,16 +1,21 @@
# -*- coding: utf-8 -*-
+"""Initialize the influxdb package."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
from .client import InfluxDBClient
-from .client import InfluxDBClusterClient
from .dataframe_client import DataFrameClient
from .helper import SeriesHelper
__all__ = [
'InfluxDBClient',
- 'InfluxDBClusterClient',
'DataFrameClient',
'SeriesHelper',
]
-__version__ = '2.10.0'
+__version__ = '5.3.2'
diff --git a/influxdb/_dataframe_client.py b/influxdb/_dataframe_client.py
index 8b4b505a..907db2cb 100644
--- a/influxdb/_dataframe_client.py
+++ b/influxdb/_dataframe_client.py
@@ -1,12 +1,19 @@
# -*- coding: utf-8 -*-
-"""
-DataFrame client for InfluxDB
-"""
+"""DataFrame client for InfluxDB."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
import math
+from collections import defaultdict
import pandas as pd
+import numpy as np
from .client import InfluxDBClient
+from .line_protocol import _escape_tag
def _pandas_time_unit(time_precision):
@@ -21,8 +28,13 @@ def _pandas_time_unit(time_precision):
return unit
+def _escape_pandas_series(s):
+ return s.apply(lambda v: _escape_tag(v))
+
+
class DataFrameClient(InfluxDBClient):
- """
+ """DataFrameClient instantiates InfluxDBClient to connect to the backend.
+
The ``DataFrameClient`` object holds information necessary to connect
to InfluxDB. Requests can be made to InfluxDB directly through the client.
The client reads and writes from pandas DataFrames.
@@ -30,77 +42,176 @@ class DataFrameClient(InfluxDBClient):
EPOCH = pd.Timestamp('1970-01-01 00:00:00.000+00:00')
- def write_points(self, dataframe, measurement, tags=None,
- time_precision=None, database=None, retention_policy=None,
- batch_size=None):
- """
- Write to multiple time series names.
+ def write_points(self,
+ dataframe,
+ measurement,
+ tags=None,
+ tag_columns=None,
+ field_columns=None,
+ time_precision=None,
+ database=None,
+ retention_policy=None,
+ batch_size=None,
+ protocol='line',
+ numeric_precision=None):
+ """Write to multiple time series names.
:param dataframe: data points in a DataFrame
:param measurement: name of measurement
:param tags: dictionary of tags, with string key-values
+ :param tag_columns: [Optional, default None] List of data tag names
+        :param field_columns: [Optional, default None] List of data field names
:param time_precision: [Optional, default None] Either 's', 'ms', 'u'
or 'n'.
:param batch_size: [Optional] Value to write the points in batches
instead of all at one time. Useful for when doing data dumps from
one database to another or when doing a massive write operation
:type batch_size: int
-
+ :param protocol: Protocol for writing data. Either 'line' or 'json'.
+ :param numeric_precision: Precision for floating point values.
+ Either None, 'full' or some int, where int is the desired decimal
+ precision. 'full' preserves full precision for int and float
+ datatypes. Defaults to None, which preserves 14-15 significant
+ figures for float and all significant figures for int datatypes.
"""
+ if tag_columns is None:
+ tag_columns = []
+
+ if field_columns is None:
+ field_columns = []
+
if batch_size:
- number_batches = int(math.ceil(
- len(dataframe) / float(batch_size)))
+ number_batches = int(math.ceil(len(dataframe) / float(batch_size)))
+
for batch in range(number_batches):
start_index = batch * batch_size
end_index = (batch + 1) * batch_size
- points = self._convert_dataframe_to_json(
- dataframe.ix[start_index:end_index].copy(),
- measurement, tags, time_precision
- )
+
+ if protocol == 'line':
+ points = self._convert_dataframe_to_lines(
+ dataframe.iloc[start_index:end_index].copy(),
+ measurement=measurement,
+ global_tags=tags,
+ time_precision=time_precision,
+ tag_columns=tag_columns,
+ field_columns=field_columns,
+ numeric_precision=numeric_precision)
+ else:
+ points = self._convert_dataframe_to_json(
+ dataframe.iloc[start_index:end_index].copy(),
+ measurement=measurement,
+ tags=tags,
+ time_precision=time_precision,
+ tag_columns=tag_columns,
+ field_columns=field_columns)
+
super(DataFrameClient, self).write_points(
- points, time_precision, database, retention_policy)
+ points,
+ time_precision,
+ database,
+ retention_policy,
+ protocol=protocol)
+
return True
+
+ if protocol == 'line':
+ points = self._convert_dataframe_to_lines(
+ dataframe,
+ measurement=measurement,
+ global_tags=tags,
+ tag_columns=tag_columns,
+ field_columns=field_columns,
+ time_precision=time_precision,
+ numeric_precision=numeric_precision)
else:
points = self._convert_dataframe_to_json(
- dataframe, measurement, tags, time_precision
- )
- super(DataFrameClient, self).write_points(
- points, time_precision, database, retention_policy)
- return True
+ dataframe,
+ measurement=measurement,
+ tags=tags,
+ time_precision=time_precision,
+ tag_columns=tag_columns,
+ field_columns=field_columns)
+
+ super(DataFrameClient, self).write_points(
+ points,
+ time_precision,
+ database,
+ retention_policy,
+ protocol=protocol)
- def query(self, query, chunked=False, database=None):
+ return True
+
+ def query(self,
+ query,
+ params=None,
+ bind_params=None,
+ epoch=None,
+ expected_response_code=200,
+ database=None,
+ raise_errors=True,
+ chunked=False,
+ chunk_size=0,
+ method="GET",
+ dropna=True,
+ data_frame_index=None):
"""
- Quering data into a DataFrame.
+ Query data into a DataFrame.
- :param chunked: [Optional, default=False] True if the data shall be
- retrieved in chunks, False otherwise.
+ .. danger::
+ In order to avoid injection vulnerabilities (similar to `SQL
+ injection `_
+ vulnerabilities), do not directly include untrusted data into the
+ ``query`` parameter, use ``bind_params`` instead.
+ :param query: the actual query string
+ :param params: additional parameters for the request, defaults to {}
+ :param bind_params: bind parameters for the query:
+ any variable in the query written as ``'$var_name'`` will be
+ replaced with ``bind_params['var_name']``. Only works in the
+ ``WHERE`` clause and takes precedence over ``params['params']``
+ :param epoch: response timestamps to be in epoch format either 'h',
+            'm', 's', 'ms', 'u', or 'ns', defaults to `None`, which is
+ RFC3339 UTC format with nanosecond precision
+ :param expected_response_code: the expected status code of response,
+ defaults to 200
+ :param database: database to query, defaults to None
+ :param raise_errors: Whether or not to raise exceptions when InfluxDB
+ returns errors, defaults to True
+ :param chunked: Enable to use chunked responses from InfluxDB.
+ With ``chunked`` enabled, one ResultSet is returned per chunk
+ containing all results within that chunk
+ :param chunk_size: Size of each chunk to tell InfluxDB to use.
+        :param method: the HTTP method to use for the request,
+            defaults to 'GET'
+        :param dropna: drop columns where all values are missing
+ :param data_frame_index: the list of columns that
+ are used as DataFrame index
+ :returns: the queried data
+ :rtype: :class:`~.ResultSet`
"""
- results = super(DataFrameClient, self).query(query, database=database)
- if query.upper().startswith("SELECT"):
+ query_args = dict(params=params,
+ bind_params=bind_params,
+ epoch=epoch,
+ expected_response_code=expected_response_code,
+ raise_errors=raise_errors,
+ chunked=chunked,
+ database=database,
+ method=method,
+ chunk_size=chunk_size)
+ results = super(DataFrameClient, self).query(query, **query_args)
+ if query.strip().upper().startswith("SELECT"):
if len(results) > 0:
- return self._to_dataframe(results)
+ return self._to_dataframe(results, dropna,
+ data_frame_index=data_frame_index)
else:
return {}
else:
return results
- def get_list_series(self, database=None):
- """
- Get the list of series, in DataFrame
+ def _to_dataframe(self, rs, dropna=True, data_frame_index=None):
+ result = defaultdict(list)
+ if isinstance(rs, list):
+ return map(self._to_dataframe, rs,
+ [dropna for _ in range(len(rs))])
- """
- results = super(DataFrameClient, self)\
- .query("SHOW SERIES", database=database)
- if len(results):
- return dict(
- (key[0], pd.DataFrame(data)) for key, data in results.items()
- )
- else:
- return {}
-
- def _to_dataframe(self, rs):
- result = {}
for key, data in rs.items():
name, tags = key
if tags is None:
@@ -109,24 +220,51 @@ def _to_dataframe(self, rs):
key = (name, tuple(sorted(tags.items())))
df = pd.DataFrame(data)
df.time = pd.to_datetime(df.time)
- df.set_index('time', inplace=True)
- df.index = df.index.tz_localize('UTC')
- df.index.name = None
+
+ if data_frame_index:
+ df.set_index(data_frame_index, inplace=True)
+ else:
+ df.set_index('time', inplace=True)
+ if df.index.tzinfo is None:
+ df.index = df.index.tz_localize('UTC')
+ df.index.name = None
+
+ result[key].append(df)
+ for key, data in result.items():
+ df = pd.concat(data).sort_index()
+ if dropna:
+ df.dropna(how='all', axis=1, inplace=True)
result[key] = df
+
return result
- def _convert_dataframe_to_json(self, dataframe, measurement, tags=None,
+ @staticmethod
+ def _convert_dataframe_to_json(dataframe,
+ measurement,
+ tags=None,
+ tag_columns=None,
+ field_columns=None,
time_precision=None):
if not isinstance(dataframe, pd.DataFrame):
- raise TypeError('Must be DataFrame, but type was: {}.'
+ raise TypeError('Must be DataFrame, but type was: {0}.'
.format(type(dataframe)))
- if not (isinstance(dataframe.index, pd.tseries.period.PeriodIndex) or
- isinstance(dataframe.index, pd.tseries.index.DatetimeIndex)):
- raise TypeError('Must be DataFrame with DatetimeIndex or \
- PeriodIndex.')
+ if not (isinstance(dataframe.index, pd.PeriodIndex) or
+ isinstance(dataframe.index, pd.DatetimeIndex)):
+ raise TypeError('Must be DataFrame with DatetimeIndex or '
+ 'PeriodIndex.')
- dataframe.index = dataframe.index.to_datetime()
+ # Make sure tags and tag columns are correctly typed
+ tag_columns = tag_columns if tag_columns is not None else []
+ field_columns = field_columns if field_columns is not None else []
+ tags = tags if tags is not None else {}
+ # Assume field columns are all columns not included in tag columns
+ if not field_columns:
+ field_columns = list(
+ set(dataframe.columns).difference(set(tag_columns)))
+
+ if not isinstance(dataframe.index, pd.DatetimeIndex):
+ dataframe.index = pd.to_datetime(dataframe.index)
if dataframe.index.tzinfo is None:
dataframe.index = dataframe.index.tz_localize('UTC')
@@ -145,15 +283,204 @@ def _convert_dataframe_to_json(self, dataframe, measurement, tags=None,
"h": 1e9 * 3600,
}.get(time_precision, 1)
+ if not tag_columns:
+ points = [
+ {'measurement': measurement,
+ 'fields':
+ rec.replace([np.inf, -np.inf], np.nan).dropna().to_dict(),
+ 'time': np.int64(ts.value / precision_factor)}
+ for ts, (_, rec) in zip(
+ dataframe.index,
+ dataframe[field_columns].iterrows()
+ )
+ ]
+
+ return points
+
points = [
{'measurement': measurement,
- 'tags': tags if tags else {},
- 'fields': rec,
- 'time': int(ts.value / precision_factor)
- }
- for ts, rec in zip(dataframe.index, dataframe.to_dict('record'))]
+ 'tags': dict(list(tag.items()) + list(tags.items())),
+ 'fields':
+ rec.replace([np.inf, -np.inf], np.nan).dropna().to_dict(),
+ 'time': np.int64(ts.value / precision_factor)}
+ for ts, tag, (_, rec) in zip(
+ dataframe.index,
+ dataframe[tag_columns].to_dict('record'),
+ dataframe[field_columns].iterrows()
+ )
+ ]
+
return points
+ def _convert_dataframe_to_lines(self,
+ dataframe,
+ measurement,
+ field_columns=None,
+ tag_columns=None,
+ global_tags=None,
+ time_precision=None,
+ numeric_precision=None):
+
+ dataframe = dataframe.dropna(how='all').copy()
+ if len(dataframe) == 0:
+ return []
+
+ if not isinstance(dataframe, pd.DataFrame):
+ raise TypeError('Must be DataFrame, but type was: {0}.'
+ .format(type(dataframe)))
+ if not (isinstance(dataframe.index, pd.PeriodIndex) or
+ isinstance(dataframe.index, pd.DatetimeIndex)):
+ raise TypeError('Must be DataFrame with DatetimeIndex or '
+ 'PeriodIndex.')
+
+ dataframe = dataframe.rename(
+ columns={item: _escape_tag(item) for item in dataframe.columns})
+ # Create a Series of columns for easier indexing
+ column_series = pd.Series(dataframe.columns)
+
+ if field_columns is None:
+ field_columns = []
+
+ if tag_columns is None:
+ tag_columns = []
+
+ if global_tags is None:
+ global_tags = {}
+
+ # Make sure field_columns and tag_columns are lists
+ field_columns = list(field_columns) if list(field_columns) else []
+ tag_columns = list(tag_columns) if list(tag_columns) else []
+
+ # If field columns but no tag columns, assume rest of columns are tags
+ if field_columns and (not tag_columns):
+ tag_columns = list(column_series[~column_series.isin(
+ field_columns)])
+
+ # If no field columns, assume non-tag columns are fields
+ if not field_columns:
+ field_columns = list(column_series[~column_series.isin(
+ tag_columns)])
+
+ precision_factor = {
+ "n": 1,
+ "u": 1e3,
+ "ms": 1e6,
+ "s": 1e9,
+ "m": 1e9 * 60,
+ "h": 1e9 * 3600,
+ }.get(time_precision, 1)
+
+ # Make array of timestamp ints
+ if isinstance(dataframe.index, pd.PeriodIndex):
+ time = ((dataframe.index.to_timestamp().values.astype(np.int64) //
+ precision_factor).astype(np.int64).astype(str))
+ else:
+ time = ((pd.to_datetime(dataframe.index).values.astype(np.int64) //
+ precision_factor).astype(np.int64).astype(str))
+
+ # If tag columns exist, make an array of formatted tag keys and values
+ if tag_columns:
+
+ # Make global_tags as tag_columns
+ if global_tags:
+ for tag in global_tags:
+ dataframe[tag] = global_tags[tag]
+ tag_columns.append(tag)
+
+ tag_df = dataframe[tag_columns]
+ tag_df = tag_df.fillna('') # replace NA with empty string
+ tag_df = tag_df.sort_index(axis=1)
+ tag_df = self._stringify_dataframe(
+ tag_df, numeric_precision, datatype='tag')
+
+ # join prepended tags, leaving None values out
+ tags = tag_df.apply(
+ lambda s: [',' + s.name + '=' + v if v else '' for v in s])
+ tags = tags.sum(axis=1)
+
+ del tag_df
+ elif global_tags:
+ tag_string = ''.join(
+ [",{}={}".format(k, _escape_tag(v))
+ if v not in [None, ''] else ""
+ for k, v in sorted(global_tags.items())]
+ )
+ tags = pd.Series(tag_string, index=dataframe.index)
+ else:
+ tags = ''
+
+ # Make an array of formatted field keys and values
+ field_df = dataframe[field_columns].replace([np.inf, -np.inf], np.nan)
+ nans = pd.isnull(field_df)
+
+ field_df = self._stringify_dataframe(field_df,
+ numeric_precision,
+ datatype='field')
+
+ field_df = (field_df.columns.values + '=').tolist() + field_df
+ field_df[field_df.columns[1:]] = ',' + field_df[field_df.columns[1:]]
+ field_df[nans] = ''
+
+ fields = field_df.sum(axis=1).map(lambda x: x.lstrip(','))
+ del field_df
+
+ # Generate line protocol string
+ measurement = _escape_tag(measurement)
+ points = (measurement + tags + ' ' + fields + ' ' + time).tolist()
+ return points
+
+ @staticmethod
+ def _stringify_dataframe(dframe, numeric_precision, datatype='field'):
+
+ # Prevent modification of input dataframe
+ dframe = dframe.copy()
+
+ # Find int and string columns for field-type data
+ int_columns = dframe.select_dtypes(include=['integer']).columns
+ string_columns = dframe.select_dtypes(include=['object']).columns
+
+ # Convert dframe to string
+ if numeric_precision is None:
+ # If no precision specified, convert directly to string (fast)
+ dframe = dframe.astype(str)
+ elif numeric_precision == 'full':
+ # If full precision, use repr to get full float precision
+ float_columns = (dframe.select_dtypes(
+ include=['floating']).columns)
+ nonfloat_columns = dframe.columns[~dframe.columns.isin(
+ float_columns)]
+ dframe[float_columns] = dframe[float_columns].applymap(repr)
+ dframe[nonfloat_columns] = (dframe[nonfloat_columns].astype(str))
+ elif isinstance(numeric_precision, int):
+ # If precision is specified, round to appropriate precision
+ float_columns = (dframe.select_dtypes(
+ include=['floating']).columns)
+ nonfloat_columns = dframe.columns[~dframe.columns.isin(
+ float_columns)]
+ dframe[float_columns] = (dframe[float_columns].round(
+ numeric_precision))
+
+ # If desired precision is > 10 decimal places, need to use repr
+ if numeric_precision > 10:
+ dframe[float_columns] = (dframe[float_columns].applymap(repr))
+ dframe[nonfloat_columns] = (dframe[nonfloat_columns]
+ .astype(str))
+ else:
+ dframe = dframe.astype(str)
+ else:
+ raise ValueError('Invalid numeric precision.')
+
+ if datatype == 'field':
+ # If dealing with fields, format ints and strings correctly
+ dframe[int_columns] += 'i'
+ dframe[string_columns] = '"' + dframe[string_columns] + '"'
+ elif datatype == 'tag':
+ dframe = dframe.apply(_escape_pandas_series)
+
+ dframe.columns = dframe.columns.astype(str)
+
+ return dframe
+
def _datetime_to_epoch(self, datetime, time_precision='s'):
seconds = (datetime - self.EPOCH).total_seconds()
if time_precision == 'h':
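A minimal sketch of the expanded ``write_points`` signature above, splitting DataFrame columns into tags and fields (assumes a local server; names are illustrative)::

    import pandas as pd
    from influxdb import DataFrameClient

    client = DataFrameClient(host='localhost', port=8086, database='demo')

    df = pd.DataFrame({'host': ['server01', 'server02'],
                       'value': [0.64, 0.67]},
                      index=pd.date_range('2014-11-16', periods=2, freq='H'))

    # 'host' is written as a tag, 'value' as a field, one line-protocol
    # point per row.
    client.write_points(df, 'cpu_load_short',
                        tag_columns=['host'],
                        field_columns=['value'],
                        protocol='line')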
diff --git a/influxdb/chunked_json.py b/influxdb/chunked_json.py
index 50d304f1..4e40f01a 100644
--- a/influxdb/chunked_json.py
+++ b/influxdb/chunked_json.py
@@ -1,17 +1,23 @@
# -*- coding: utf-8 -*-
+"""Module to generate chunked JSON replies."""
#
# Author: Adrian Sampson
# Source: https://gist.github.com/sampsyo/920215
#
-import json
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
-_decoder = json.JSONDecoder()
+import json
def loads(s):
- """A generator reading a sequence of JSON values from a string."""
+ """Generate a sequence of JSON values from a string."""
+ _decoder = json.JSONDecoder()
+
while s:
s = s.strip()
obj, pos = _decoder.raw_decode(s)
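``loads`` yields one decoded object per concatenated JSON value::

    from influxdb.chunked_json import loads

    for obj in loads('{"a": 1} {"b": 2}'):
        print(obj)  # {'a': 1}, then {'b': 2}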
diff --git a/influxdb/client.py b/influxdb/client.py
index 83d04697..c535a3f1 100644
--- a/influxdb/client.py
+++ b/influxdb/client.py
@@ -1,38 +1,44 @@
# -*- coding: utf-8 -*-
-"""
-Python client for InfluxDB
-"""
+"""Python client for InfluxDB."""
-from functools import wraps
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+import datetime
+import gzip
+import itertools
+import io
import json
-import socket
-import threading
import random
+import socket
+import struct
+import time
+from itertools import chain, islice
+
+import msgpack
import requests
import requests.exceptions
-from sys import version_info
+from requests.adapters import HTTPAdapter
+from six.moves.urllib.parse import urlparse
-from influxdb.line_protocol import make_lines
+from influxdb.line_protocol import make_lines, quote_ident, quote_literal
from influxdb.resultset import ResultSet
from .exceptions import InfluxDBClientError
from .exceptions import InfluxDBServerError
-try:
- xrange
-except NameError:
- xrange = range
-
-if version_info[0] == 3:
- from urllib.parse import urlparse
-else:
- from urlparse import urlparse
-
class InfluxDBClient(object):
- """The :class:`~.InfluxDBClient` object holds information necessary to
+ """InfluxDBClient primary client object to connect InfluxDB.
+
+ The :class:`~.InfluxDBClient` object holds information necessary to
connect to InfluxDB. Requests can be made to InfluxDB directly through
the client.
+    The client supports use as a context manager.
+
:param host: hostname to connect to InfluxDB, defaults to 'localhost'
:type host: str
:param port: port to connect to InfluxDB, defaults to 8086
@@ -41,6 +47,8 @@ class InfluxDBClient(object):
:type username: str
:param password: password of the user, defaults to 'root'
:type password: str
+ :param pool_size: urllib3 connection pool size, defaults to 10.
+ :type pool_size: int
:param database: database name to connect to, defaults to None
:type database: str
:param ssl: use https instead of http to connect to InfluxDB, defaults to
@@ -52,12 +60,40 @@ class InfluxDBClient(object):
:param timeout: number of seconds Requests will wait for your client to
establish a connection, defaults to None
:type timeout: int
+ :param retries: number of attempts your client will make before aborting,
+ defaults to 3
+ 0 - try until success
+ 1 - attempt only once (without retry)
+ 2 - maximum two attempts (including one retry)
+ 3 - maximum three attempts (default option)
+ :type retries: int
:param use_udp: use UDP to connect to InfluxDB, defaults to False
- :type use_udp: int
+ :type use_udp: bool
:param udp_port: UDP port to connect to InfluxDB, defaults to 4444
:type udp_port: int
:param proxies: HTTP(S) proxy to use for Requests, defaults to {}
:type proxies: dict
+    :param path: path of InfluxDB on the server to connect to, defaults to ''
+ :type path: str
+ :param cert: Path to client certificate information to use for mutual TLS
+ authentication. You can specify a local cert to use
+ as a single file containing the private key and the certificate, or as
+ a tuple of both files’ paths, defaults to None
+ :type cert: str
+ :param gzip: use gzip content encoding to compress requests
+ :type gzip: bool
+ :param session: allow for the new client request to use an existing
+ requests Session, defaults to None
+ :type session: requests.Session
+ :param headers: headers to add to Requests, will add 'Content-Type'
+ and 'Accept' unless these are already present, defaults to {}
+ :type headers: dict
+    :param socket_options: use custom TCP socket options.
+ If not specified, then defaults are loaded from
+ ``HTTPConnection.default_socket_options``
+ :type socket_options: list
+
+ :raises ValueError: if cert is provided but ssl is disabled (set to False)
"""
def __init__(self,
@@ -69,65 +105,123 @@ def __init__(self,
ssl=False,
verify_ssl=False,
timeout=None,
+ retries=3,
use_udp=False,
udp_port=4444,
proxies=None,
+ pool_size=10,
+ path='',
+ cert=None,
+ gzip=False,
+ session=None,
+ headers=None,
+ socket_options=None,
):
"""Construct a new InfluxDBClient object."""
self.__host = host
- self._port = port
+ self.__port = int(port)
self._username = username
self._password = password
self._database = database
self._timeout = timeout
+ self._retries = retries
self._verify_ssl = verify_ssl
- self.use_udp = use_udp
- self.udp_port = udp_port
- self._session = requests.Session()
+ self.__use_udp = use_udp
+ self.__udp_port = int(udp_port)
+
+ if not session:
+ session = requests.Session()
+
+ self._session = session
+ adapter = _SocketOptionsAdapter(
+ pool_connections=int(pool_size),
+ pool_maxsize=int(pool_size),
+ socket_options=socket_options
+ )
+
if use_udp:
self.udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+ if not path:
+ self.__path = ''
+ elif path[0] == '/':
+ self.__path = path
+ else:
+ self.__path = '/' + path
+
self._scheme = "http"
if ssl is True:
self._scheme = "https"
+ self._session.mount(self._scheme + '://', adapter)
+
if proxies is None:
self._proxies = {}
else:
self._proxies = proxies
- self.__baseurl = "{0}://{1}:{2}".format(
+ if cert:
+ if not ssl:
+ raise ValueError(
+ "Client certificate provided but ssl is disabled."
+ )
+ else:
+ self._session.cert = cert
+
+ self.__baseurl = "{0}://{1}:{2}{3}".format(
self._scheme,
self._host,
- self._port)
+ self._port,
+ self._path)
- self._headers = {
- 'Content-type': 'application/json',
- 'Accept': 'text/plain'
- }
+ if headers is None:
+ headers = {}
+ headers.setdefault('Content-Type', 'application/json')
+ headers.setdefault('Accept', 'application/x-msgpack')
+ self._headers = headers
+
+ self._gzip = gzip
+
+ def __enter__(self):
+ """Enter function as used by context manager."""
+ return self
+
+ def __exit__(self, _exc_type, _exc_value, _traceback):
+ """Exit function as used by context manager."""
+ self.close()
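# A minimal usage sketch: with __enter__/__exit__ defined above, the client
# can be used as a context manager so the underlying requests.Session is
# always closed. Host and database names here are placeholders.
with InfluxDBClient(host='localhost', port=8086, database='mydb') as cli:
    cli.ping()
# the session is closed automatically once the block exits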
- # _baseurl and _host are properties to allow InfluxDBClusterClient
- # to override them with thread-local variables
@property
def _baseurl(self):
- return self._get_baseurl()
-
- def _get_baseurl(self):
return self.__baseurl
@property
def _host(self):
- return self._get_host()
-
- def _get_host(self):
return self.__host
- @staticmethod
- def from_DSN(dsn, **kwargs):
- """Return an instance of :class:`~.InfluxDBClient` from the provided
+ @property
+ def _port(self):
+ return self.__port
+
+ @property
+ def _path(self):
+ return self.__path
+
+ @property
+ def _udp_port(self):
+ return self.__udp_port
+
+ @property
+ def _use_udp(self):
+ return self.__use_udp
+
+ @classmethod
+ def from_dsn(cls, dsn, **kwargs):
+ r"""Generate an instance of InfluxDBClient from given data source name.
+
+ Return an instance of :class:`~.InfluxDBClient` from the provided
data source name. Supported schemes are "influxdb", "https+influxdb"
and "udp+influxdb". Parameters for the :class:`~.InfluxDBClient`
constructor may also be passed to this method.
@@ -142,12 +236,12 @@ def from_DSN(dsn, **kwargs):
::
- >> cli = InfluxDBClient.from_DSN('influxdb://username:password@\
-localhost:8086/databasename', timeout=5)
+ >> cli = InfluxDBClient.from_dsn('influxdb://username:password@\
+ localhost:8086/databasename', timeout=5)
>> type(cli)
- >> cli = InfluxDBClient.from_DSN('udp+influxdb://username:pass@\
-localhost:8086/databasename', timeout=5, udp_port=159)
+ >> cli = InfluxDBClient.from_dsn('udp+influxdb://username:pass@\
+ localhost:8086/databasename', timeout=5, udp_port=159)
            >> print('{0._baseurl} - {0._use_udp} {0._udp_port}'.format(cli))
http://localhost:8086 - True 159
@@ -156,14 +250,13 @@ def from_DSN(dsn, **kwargs):
be used for the TCP connection; specify the UDP port with the
additional `udp_port` parameter (cf. examples).
"""
-
- init_args = parse_dsn(dsn)
+ init_args = _parse_dsn(dsn)
host, port = init_args.pop('hosts')[0]
init_args['host'] = host
init_args['port'] = port
init_args.update(kwargs)
- return InfluxDBClient(**init_args)
+ return cls(**init_args)
def switch_database(self, database):
"""Change the client's database.
@@ -184,7 +277,7 @@ def switch_user(self, username, password):
self._username = username
self._password = password
- def request(self, url, method='GET', params=None, data=None,
+ def request(self, url, method='GET', params=None, data=None, stream=False,
expected_response_code=200, headers=None):
"""Make a HTTP request to the InfluxDB API.
@@ -196,9 +289,13 @@ def request(self, url, method='GET', params=None, data=None,
:type params: dict
:param data: the data of the request, defaults to None
:type data: str
+ :param stream: True if a query uses chunked responses
+ :type stream: bool
:param expected_response_code: the expected response code of
the request, defaults to 200
:type expected_response_code: int
+ :param headers: headers to add to the request
+ :type headers: dict
:returns: the response from the request
:rtype: :class:`requests.Response`
:raises InfluxDBServerError: if the response code is any server error
@@ -217,82 +314,174 @@ def request(self, url, method='GET', params=None, data=None,
if isinstance(data, (dict, list)):
data = json.dumps(data)
- # Try to send the request a maximum of three times. (see #103)
- # TODO (aviau): Make this configurable.
- for i in range(0, 3):
+ if self._gzip:
+ # Receive and send compressed data
+ headers.update({
+ 'Accept-Encoding': 'gzip',
+ 'Content-Encoding': 'gzip',
+ })
+ if data is not None:
+                # For Python 2.7 compatibility use GzipFile
+ compressed = io.BytesIO()
+ with gzip.GzipFile(
+ compresslevel=9,
+ fileobj=compressed,
+ mode='w'
+ ) as f:
+ f.write(data)
+ data = compressed.getvalue()
+
+ # Try to send the request more than once by default (see #103)
+ retry = True
+ _try = 0
+ while retry:
try:
+ if "Authorization" in headers:
+ auth = (None, None)
+ else:
+ auth = (self._username, self._password)
response = self._session.request(
method=method,
url=url,
- auth=(self._username, self._password),
+ auth=auth if None not in auth else None,
params=params,
data=data,
+ stream=stream,
headers=headers,
proxies=self._proxies,
verify=self._verify_ssl,
timeout=self._timeout
)
break
- except requests.exceptions.ConnectionError as e:
- if i < 2:
- continue
- else:
- raise e
+ except (requests.exceptions.ConnectionError,
+ requests.exceptions.HTTPError,
+ requests.exceptions.Timeout):
+ _try += 1
+ if self._retries != 0:
+ retry = _try < self._retries
+ if not retry:
+ raise
+ if method == "POST":
+ time.sleep((2 ** _try) * random.random() / 100.0)
+
+ type_header = response.headers and response.headers.get("Content-Type")
+ if type_header == "application/x-msgpack" and response.content:
+ response._msgpack = msgpack.unpackb(
+ packed=response.content,
+ ext_hook=_msgpack_parse_hook,
+ raw=False)
+ else:
+ response._msgpack = None
+
+ def reformat_error(response):
+ if response._msgpack:
+ return json.dumps(response._msgpack, separators=(',', ':'))
+ else:
+ return response.content
- if response.status_code >= 500 and response.status_code < 600:
- raise InfluxDBServerError(response.content)
+        # if there's no error, there must have been a successful response
+ if 500 <= response.status_code < 600:
+ raise InfluxDBServerError(reformat_error(response))
elif response.status_code == expected_response_code:
return response
else:
- raise InfluxDBClientError(response.content, response.status_code)
+ err_msg = reformat_error(response)
+ raise InfluxDBClientError(err_msg, response.status_code)
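# A standalone sketch of what the gzip=True branch of request() does to a
# body before sending it (Python 3 shown for brevity; the client itself
# sticks to GzipFile for Python 2.7 compatibility):
import gzip
import io

body = b'cpu_load,host=server01 value=0.64'
buf = io.BytesIO()
with gzip.GzipFile(compresslevel=9, fileobj=buf, mode='w') as f:
    f.write(body)
assert gzip.decompress(buf.getvalue()) == body  # lossless round-trip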
- def write(self, data, params=None, expected_response_code=204):
+ def write(self, data, params=None, expected_response_code=204,
+ protocol='json'):
"""Write data to InfluxDB.
:param data: the data to be written
- :type data: dict
+ :type data: (if protocol is 'json') dict
+ (if protocol is 'line') sequence of line protocol strings
+ or single string
:param params: additional parameters for the request, defaults to None
:type params: dict
:param expected_response_code: the expected response code of the write
operation, defaults to 204
:type expected_response_code: int
+ :param protocol: protocol of input data, either 'json' or 'line'
+ :type protocol: str
:returns: True, if the write operation is successful
:rtype: bool
"""
-
- headers = self._headers
- headers['Content-type'] = 'application/octet-stream'
+ headers = self._headers.copy()
+ headers['Content-Type'] = 'application/octet-stream'
if params:
precision = params.get('precision')
else:
precision = None
+ if protocol == 'json':
+ data = make_lines(data, precision).encode('utf-8')
+ elif protocol == 'line':
+ if isinstance(data, str):
+ data = [data]
+ data = ('\n'.join(data) + '\n').encode('utf-8')
+
self.request(
url="write",
method='POST',
params=params,
- data=make_lines(data, precision).encode('utf-8'),
+ data=data,
expected_response_code=expected_response_code,
headers=headers
)
return True
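# Usage sketch for write() with the new protocol='line' option (database
# name and points are placeholders): a single string or a sequence of line
# protocol strings is accepted and newline-joined before the POST.
cli = InfluxDBClient(database='mydb')
cli.write(
    ['cpu_load,host=server01 value=0.64',
     'cpu_load,host=server02 value=0.71'],
    params={'db': 'mydb'},
    protocol='line',
)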
+ @staticmethod
+ def _read_chunked_response(response, raise_errors=True):
+ for line in response.iter_lines():
+ if isinstance(line, bytes):
+ line = line.decode('utf-8')
+ data = json.loads(line)
+ result_set = {}
+ for result in data.get('results', []):
+ for _key in result:
+ if isinstance(result[_key], list):
+ result_set.setdefault(
+ _key, []).extend(result[_key])
+ yield ResultSet(result_set, raise_errors=raise_errors)
+
def query(self,
query,
params=None,
+ bind_params=None,
epoch=None,
expected_response_code=200,
database=None,
- raise_errors=True):
+ raise_errors=True,
+ chunked=False,
+ chunk_size=0,
+ method="GET"):
"""Send a query to InfluxDB.
+ .. danger::
+            In order to avoid injection vulnerabilities (similar to SQL
+            injection vulnerabilities), do not directly include untrusted
+            data into the
+ ``query`` parameter, use ``bind_params`` instead.
+
:param query: the actual query string
:type query: str
- :param params: additional parameters for the request, defaults to {}
+ :param params: additional parameters for the request,
+ defaults to {}
:type params: dict
+ :param bind_params: bind parameters for the query:
+ any variable in the query written as ``'$var_name'`` will be
+ replaced with ``bind_params['var_name']``. Only works in the
+ ``WHERE`` clause and takes precedence over ``params['params']``
+ :type bind_params: dict
+
+        :param epoch: response timestamps to be in epoch format, one of 'h',
+            'm', 's', 'ms', 'u', or 'ns'; defaults to `None`, which is
+ RFC3339 UTC format with nanosecond precision
+ :type epoch: str
+
:param expected_response_code: the expected status code of response,
defaults to 200
:type expected_response_code: int
@@ -304,27 +493,56 @@ def query(self,
returns errors, defaults to True
:type raise_errors: bool
+ :param chunked: Enable to use chunked responses from InfluxDB.
+ With ``chunked`` enabled, one ResultSet is returned per chunk
+ containing all results within that chunk
+ :type chunked: bool
+
+ :param chunk_size: Size of each chunk to tell InfluxDB to use.
+ :type chunk_size: int
+
+ :param method: the HTTP method for the request, defaults to GET
+ :type method: str
+
:returns: the queried data
:rtype: :class:`~.ResultSet`
"""
if params is None:
params = {}
+ if bind_params is not None:
+ params_dict = json.loads(params.get('params', '{}'))
+ params_dict.update(bind_params)
+ params['params'] = json.dumps(params_dict)
+
params['q'] = query
params['db'] = database or self._database
if epoch is not None:
params['epoch'] = epoch
+ if chunked:
+ params['chunked'] = 'true'
+ if chunk_size > 0:
+ params['chunk_size'] = chunk_size
+
+ if query.lower().startswith("select ") and " into " in query.lower():
+ method = "POST"
+
response = self.request(
url="query",
- method='GET',
+ method=method,
params=params,
data=None,
+ stream=chunked,
expected_response_code=expected_response_code
)
- data = response.json()
+ data = response._msgpack
+ if not data:
+ if chunked:
+ return self._read_chunked_response(response)
+ data = response.json()
results = [
ResultSet(result, raise_errors=raise_errors)
@@ -335,8 +553,8 @@ def query(self,
# TODO(aviau): Always return a list. (This would be a breaking change)
if len(results) == 1:
return results[0]
- else:
- return results
+
+ return results
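# Usage sketch for the new query() options, with placeholder names and cli
# an InfluxDBClient as above: bind_params keeps untrusted values out of the
# query string itself, and chunked=True yields one ResultSet per chunk.
rs = cli.query(
    'SELECT * FROM "cpu_load" WHERE "host" = $host',
    bind_params={'host': 'server01'},
)
for chunk in cli.query('SELECT * FROM "cpu_load"',
                       chunked=True, chunk_size=10000):
    pass  # each chunk arrives as its own ResultSet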
def write_points(self,
points,
@@ -345,11 +563,17 @@ def write_points(self,
retention_policy=None,
tags=None,
batch_size=None,
+ protocol='json',
+ consistency=None
):
"""Write to multiple time series names.
:param points: the list of points to be written in the database
-        :type points: list of dictionaries, each dictionary represents a point
+ :type points: (if protocol is 'json') list of dicts, where each dict
+ represents a point.
+ (if protocol is 'line') sequence of line protocol strings.
+
:param time_precision: Either 's', 'm', 'ms' or 'u', defaults to None
:type time_precision: str
:param database: the database to write the points to. Defaults to
@@ -367,72 +591,110 @@ def write_points(self,
one database to another or when doing a massive write operation,
defaults to None
:type batch_size: int
+ :param protocol: Protocol for writing data. Either 'line' or 'json'.
+ :type protocol: str
+ :param consistency: Consistency for the points.
+ One of {'any','one','quorum','all'}.
+ :type consistency: str
:returns: True, if the operation is successful
:rtype: bool
.. note:: if no retention policy is specified, the default retention
policy for the database is used
"""
-
if batch_size and batch_size > 0:
for batch in self._batches(points, batch_size):
self._write_points(points=batch,
time_precision=time_precision,
database=database,
retention_policy=retention_policy,
- tags=tags)
+ tags=tags, protocol=protocol,
+ consistency=consistency)
return True
- else:
- return self._write_points(points=points,
- time_precision=time_precision,
- database=database,
- retention_policy=retention_policy,
- tags=tags)
- def _batches(self, iterable, size):
- for i in xrange(0, len(iterable), size):
- yield iterable[i:i + size]
+ return self._write_points(points=points,
+ time_precision=time_precision,
+ database=database,
+ retention_policy=retention_policy,
+ tags=tags, protocol=protocol,
+ consistency=consistency)
+
+ def ping(self):
+ """Check connectivity to InfluxDB.
+
+ :returns: The version of the InfluxDB the client is connected to
+ """
+ response = self.request(
+ url="ping",
+ method='GET',
+ expected_response_code=204
+ )
+
+ return response.headers['X-Influxdb-Version']
+
+ @staticmethod
+ def _batches(iterable, size):
+ # Iterate over an iterable producing iterables of batches. Based on:
+ # http://code.activestate.com/recipes/303279-getting-items-in-batches/
+ iterator = iter(iterable)
+ while True:
+            try:  # Try to get the first element in the iterator...
+ head = (next(iterator),)
+ except StopIteration:
+ return # ...so that we can stop if there isn't one
+ # Otherwise, lazily slice the rest of the batch
+ rest = islice(iterator, size - 1)
+ yield chain(head, rest)
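# Behaviour sketch for the lazy _batches() helper above: unlike the old
# xrange-based slicing it accepts any iterable, generators included.
batches = InfluxDBClient._batches(iter(range(5)), 2)
assert [list(b) for b in batches] == [[0, 1], [2, 3], [4]]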
def _write_points(self,
points,
time_precision,
database,
retention_policy,
- tags):
+ tags,
+ protocol='json',
+ consistency=None):
if time_precision not in ['n', 'u', 'ms', 's', 'm', 'h', None]:
raise ValueError(
"Invalid time precision is given. "
"(use 'n', 'u', 'ms', 's', 'm' or 'h')")
- if self.use_udp and time_precision and time_precision != 's':
- raise ValueError(
- "InfluxDB only supports seconds precision for udp writes"
- )
+ if consistency not in ['any', 'one', 'quorum', 'all', None]:
+ raise ValueError('Invalid consistency: {}'.format(consistency))
- data = {
- 'points': points
- }
+ if protocol == 'json':
+ data = {
+ 'points': points
+ }
- if tags is not None:
- data['tags'] = tags
+ if tags is not None:
+ data['tags'] = tags
+ else:
+ data = points
params = {
'db': database or self._database
}
+ if consistency is not None:
+ params['consistency'] = consistency
+
if time_precision is not None:
params['precision'] = time_precision
if retention_policy is not None:
params['rp'] = retention_policy
- if self.use_udp:
- self.send_packet(data)
+ if self._use_udp:
+ self.send_packet(
+ data, protocol=protocol, time_precision=time_precision
+ )
else:
self.write(
data=data,
params=params,
- expected_response_code=204
+ expected_response_code=204,
+ protocol=protocol
)
return True
@@ -453,13 +715,48 @@ def get_list_database(self):
"""
return list(self.query("SHOW DATABASES").get_points())
+ def get_list_series(self, database=None, measurement=None, tags=None):
+ """
+ Query SHOW SERIES returns the distinct series in your database.
+
+ FROM and WHERE clauses are optional.
+
+        :param measurement: Show all series from a measurement
+        :type measurement: str
+        :param tags: Show all series that match given tags
+        :type tags: dict
+        :param database: the database from which the series should be
+            shown, defaults to client's current database
+ :type database: str
+ """
+ database = database or self._database
+ query_str = 'SHOW SERIES'
+
+ if measurement:
+ query_str += ' FROM "{0}"'.format(measurement)
+
+ if tags:
+ query_str += ' WHERE ' + ' and '.join(["{0}='{1}'".format(k, v)
+ for k, v in tags.items()])
+
+ return list(
+ itertools.chain.from_iterable(
+ [
+ x.values()
+ for x in (self.query(query_str, database=database)
+ .get_points())
+ ]
+ )
+ )
+
def create_database(self, dbname):
"""Create a new database in InfluxDB.
:param dbname: the name of the database to create
:type dbname: str
"""
- self.query("CREATE DATABASE %s" % dbname)
+ self.query("CREATE DATABASE {0}".format(quote_ident(dbname)),
+ method="POST")
def drop_database(self, dbname):
"""Drop a database from InfluxDB.
@@ -467,10 +764,39 @@ def drop_database(self, dbname):
:param dbname: the name of the database to drop
:type dbname: str
"""
- self.query("DROP DATABASE %s" % dbname)
+ self.query("DROP DATABASE {0}".format(quote_ident(dbname)),
+ method="POST")
+
+ def get_list_measurements(self):
+ """Get the list of measurements in InfluxDB.
+
+ :returns: all measurements in InfluxDB
+ :rtype: list of dictionaries
+
+ :Example:
+
+ ::
+
+ >> dbs = client.get_list_measurements()
+ >> dbs
+ [{u'name': u'measurements1'},
+ {u'name': u'measurements2'},
+ {u'name': u'measurements3'}]
+ """
+ return list(self.query("SHOW MEASUREMENTS").get_points())
+
+ def drop_measurement(self, measurement):
+ """Drop a measurement from InfluxDB.
+
+ :param measurement: the name of the measurement to drop
+ :type measurement: str
+ """
+ self.query("DROP MEASUREMENT {0}".format(quote_ident(measurement)),
+ method="POST")
def create_retention_policy(self, name, duration, replication,
- database=None, default=False):
+ database=None,
+ default=False, shard_duration="0s"):
"""Create a retention policy for a database.
:param name: the name of the new retention policy
@@ -478,8 +804,8 @@ def create_retention_policy(self, name, duration, replication,
:param duration: the duration of the new retention policy.
Durations such as 1h, 90m, 12h, 7d, and 4w, are all supported
            and mean 1 hour, 90 minutes, 12 hours, 7 days, and 4 weeks,
- respectively. For infinite retention – meaning the data will
- never be deleted – use 'INF' for duration.
+ respectively. For infinite retention - meaning the data will
+ never be deleted - use 'INF' for duration.
The minimum retention period is 1 hour.
:type duration: str
:param replication: the replication of the retention policy
@@ -489,20 +815,31 @@ def create_retention_policy(self, name, duration, replication,
:type database: str
:param default: whether or not to set the policy as default
:type default: bool
+ :param shard_duration: the shard duration of the retention policy.
+ Durations such as 1h, 90m, 12h, 7d, and 4w, are all supported and
+            mean 1 hour, 90 minutes, 12 hours, 7 days, and 4 weeks,
+ respectively. Infinite retention is not supported. As a workaround,
+ specify a "1000w" duration to achieve an extremely long shard group
+ duration. Defaults to "0s", which is interpreted by the database
+ to mean the default value given the duration.
+ The minimum shard group duration is 1 hour.
+ :type shard_duration: str
"""
query_string = \
- "CREATE RETENTION POLICY %s ON %s " \
- "DURATION %s REPLICATION %s" % \
- (name, database or self._database, duration, replication)
+ "CREATE RETENTION POLICY {0} ON {1} " \
+ "DURATION {2} REPLICATION {3} SHARD DURATION {4}".format(
+ quote_ident(name), quote_ident(database or self._database),
+ duration, replication, shard_duration)
if default is True:
query_string += " DEFAULT"
- self.query(query_string)
+ self.query(query_string, method="POST")
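# Usage sketch (placeholder names): with the new shard_duration argument,
# the call below issues
#   CREATE RETENTION POLICY "short" ON "mydb"
#   DURATION 3d REPLICATION 1 SHARD DURATION 30m DEFAULT
cli = InfluxDBClient(database='mydb')
cli.create_retention_policy('short', '3d', 1,
                            default=True, shard_duration='30m')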
def alter_retention_policy(self, name, database=None,
- duration=None, replication=None, default=None):
- """Mofidy an existing retention policy for a database.
+ duration=None, replication=None,
+ default=None, shard_duration=None):
+ """Modify an existing retention policy for a database.
:param name: the name of the retention policy to modify
:type name: str
@@ -512,30 +849,55 @@ def alter_retention_policy(self, name, database=None,
:param duration: the new duration of the existing retention policy.
Durations such as 1h, 90m, 12h, 7d, and 4w, are all supported
            and mean 1 hour, 90 minutes, 12 hours, 7 days, and 4 weeks,
- respectively. For infinite retention – meaning the data will
- never be deleted – use 'INF' for duration.
+ respectively. For infinite retention, meaning the data will
+ never be deleted, use 'INF' for duration.
The minimum retention period is 1 hour.
:type duration: str
:param replication: the new replication of the existing
retention policy
- :type replication: str
+ :type replication: int
:param default: whether or not to set the modified policy as default
:type default: bool
+ :param shard_duration: the shard duration of the retention policy.
+ Durations such as 1h, 90m, 12h, 7d, and 4w, are all supported and
+            mean 1 hour, 90 minutes, 12 hours, 7 days, and 4 weeks,
+ respectively. Infinite retention is not supported. As a workaround,
+ specify a "1000w" duration to achieve an extremely long shard group
+ duration.
+ The minimum shard group duration is 1 hour.
+ :type shard_duration: str
.. note:: at least one of duration, replication, or default flag
should be set. Otherwise the operation will fail.
"""
query_string = (
- "ALTER RETENTION POLICY {} ON {}"
- ).format(name, database or self._database)
+ "ALTER RETENTION POLICY {0} ON {1}"
+ ).format(quote_ident(name),
+ quote_ident(database or self._database))
if duration:
- query_string += " DURATION {}".format(duration)
+ query_string += " DURATION {0}".format(duration)
+ if shard_duration:
+ query_string += " SHARD DURATION {0}".format(shard_duration)
if replication:
- query_string += " REPLICATION {}".format(replication)
+ query_string += " REPLICATION {0}".format(replication)
if default is True:
query_string += " DEFAULT"
- self.query(query_string)
+ self.query(query_string, method="POST")
+
+ def drop_retention_policy(self, name, database=None):
+ """Drop an existing retention policy for a database.
+
+ :param name: the name of the retention policy to drop
+ :type name: str
+ :param database: the database for which the retention policy is
+ dropped. Defaults to current client's database
+ :type database: str
+ """
+ query_string = (
+ "DROP RETENTION POLICY {0} ON {1}"
+ ).format(quote_ident(name), quote_ident(database or self._database))
+ self.query(query_string, method="POST")
def get_list_retention_policies(self, database=None):
"""Get the list of retention policies for a database.
@@ -556,41 +918,18 @@ def get_list_retention_policies(self, database=None):
u'duration': u'0',
u'name': u'default',
u'replicaN': 1}]
- """
+ """
+ if not (database or self._database):
+ raise InfluxDBClientError(
+ "get_list_retention_policies() requires a database as a "
+ "parameter or the client to be using a database")
+
rsp = self.query(
- "SHOW RETENTION POLICIES ON %s" % (database or self._database)
+ "SHOW RETENTION POLICIES ON {0}".format(
+ quote_ident(database or self._database))
)
return list(rsp.get_points())
- def get_list_series(self, database=None):
- """Get the list of series for a database.
-
- :param database: the name of the database, defaults to the client's
- current database
- :type database: str
- :returns: all series in the specified database
- :rtype: list of dictionaries
-
- :Example:
-
- >> series = client.get_list_series('my_database')
- >> series
- [{'name': u'cpu_usage',
- 'tags': [{u'_id': 1,
- u'host': u'server01',
- u'region': u'us-west'}]}]
- """
- rsp = self.query("SHOW SERIES", database=database)
- series = []
- for serie in rsp.items():
- series.append(
- {
- "name": serie[0][0],
- "tags": list(serie[1])
- }
- )
- return series
-
def get_list_users(self):
"""Get the list of all users in InfluxDB.
@@ -610,7 +949,7 @@ def get_list_users(self):
return list(self.query("SHOW USERS").get_points())
def create_user(self, username, password, admin=False):
- """Create a new user in InfluxDB
+ """Create a new user in InfluxDB.
:param username: the new username to create
:type username: str
@@ -620,19 +959,20 @@ def create_user(self, username, password, admin=False):
privileges or not
:type admin: boolean
"""
- text = "CREATE USER {} WITH PASSWORD '{}'".format(username, password)
+ text = "CREATE USER {0} WITH PASSWORD {1}".format(
+ quote_ident(username), quote_literal(password))
if admin:
text += ' WITH ALL PRIVILEGES'
- self.query(text)
+ self.query(text, method="POST")
def drop_user(self, username):
- """Drop an user from InfluxDB.
+ """Drop a user from InfluxDB.
:param username: the username to drop
:type username: str
"""
- text = "DROP USER {}".format(username)
- self.query(text)
+ text = "DROP USER {0}".format(quote_ident(username))
+ self.query(text, method="POST")
def set_user_password(self, username, password):
"""Change the password of an existing user.
@@ -642,33 +982,50 @@ def set_user_password(self, username, password):
:param password: the new password for the user
:type password: str
"""
- text = "SET PASSWORD FOR {} = '{}'".format(username, password)
+ text = "SET PASSWORD FOR {0} = {1}".format(
+ quote_ident(username), quote_literal(password))
self.query(text)
def delete_series(self, database=None, measurement=None, tags=None):
- """Delete series from a database. Series can be filtered by
- measurement and tags.
+ """Delete series from a database.
+
+        Series must be filtered by measurement, tags, or both.
+        This method cannot be used to delete all series; use
+ `drop_database` instead.
- :param measurement: Delete all series from a measurement
- :type id: string
- :param tags: Delete all series that match given tags
- :type id: dict
:param database: the database from which the series should be
deleted, defaults to client's current database
:type database: str
+ :param measurement: Delete all series from a measurement
+ :type measurement: str
+ :param tags: Delete all series that match given tags
+ :type tags: dict
"""
database = database or self._database
query_str = 'DROP SERIES'
if measurement:
- query_str += ' FROM "{}"'.format(measurement)
+ query_str += ' FROM {0}'.format(quote_ident(measurement))
if tags:
- query_str += ' WHERE ' + ' and '.join(["{}='{}'".format(k, v)
- for k, v in tags.items()])
- self.query(query_str, database=database)
+ tag_eq_list = ["{0}={1}".format(quote_ident(k), quote_literal(v))
+ for k, v in tags.items()]
+ query_str += ' WHERE ' + ' AND '.join(tag_eq_list)
+ self.query(query_str, database=database, method="POST")
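# Usage sketch (placeholder names): thanks to quote_ident/quote_literal the
# call below emits identifiers and values safely quoted as
#   DROP SERIES FROM "cpu_load" WHERE "host"='server01'
cli = InfluxDBClient(database='mydb')
cli.delete_series(measurement='cpu_load', tags={'host': 'server01'})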
+
+ def grant_admin_privileges(self, username):
+ """Grant cluster administration privileges to a user.
+
+ :param username: the username to grant privileges to
+ :type username: str
+
+ .. note:: Only a cluster administrator can create/drop databases
+ and manage users.
+ """
+ text = "GRANT ALL PRIVILEGES TO {0}".format(quote_ident(username))
+ self.query(text, method="POST")
def revoke_admin_privileges(self, username):
- """Revoke cluster administration privileges from an user.
+ """Revoke cluster administration privileges from a user.
:param username: the username to revoke privileges from
:type username: str
@@ -676,11 +1033,11 @@ def revoke_admin_privileges(self, username):
.. note:: Only a cluster administrator can create/ drop databases
and manage users.
"""
- text = "REVOKE ALL PRIVILEGES FROM {}".format(username)
- self.query(text)
+ text = "REVOKE ALL PRIVILEGES FROM {0}".format(quote_ident(username))
+ self.query(text, method="POST")
def grant_privilege(self, privilege, database, username):
- """Grant a privilege on a database to an user.
+ """Grant a privilege on a database to a user.
:param privilege: the privilege to grant, one of 'read', 'write'
or 'all'. The string is case-insensitive
@@ -690,13 +1047,13 @@ def grant_privilege(self, privilege, database, username):
:param username: the username to grant the privilege to
:type username: str
"""
- text = "GRANT {} ON {} TO {}".format(privilege,
- database,
- username)
- self.query(text)
+ text = "GRANT {0} ON {1} TO {2}".format(privilege,
+ quote_ident(database),
+ quote_ident(username))
+ self.query(text, method="POST")
def revoke_privilege(self, privilege, database, username):
- """Revoke a privilege on a database from an user.
+ """Revoke a privilege on a database from a user.
:param privilege: the privilege to revoke, one of 'read', 'write'
or 'all'. The string is case-insensitive
@@ -706,164 +1063,154 @@ def revoke_privilege(self, privilege, database, username):
:param username: the username to revoke the privilege from
:type username: str
"""
- text = "REVOKE {} ON {} FROM {}".format(privilege,
- database,
- username)
- self.query(text)
+ text = "REVOKE {0} ON {1} FROM {2}".format(privilege,
+ quote_ident(database),
+ quote_ident(username))
+ self.query(text, method="POST")
- def send_packet(self, packet):
- """Send an UDP packet.
+ def get_list_privileges(self, username):
+ """Get the list of all privileges granted to given user.
- :param packet: the packet to be sent
- :type packet: dict
- """
- data = make_lines(packet).encode('utf-8')
- self.udp_socket.sendto(data, (self._host, self.udp_port))
-
-
-class InfluxDBClusterClient(object):
- """The :class:`~.InfluxDBClusterClient` is the client for connecting
- to a cluster of InfluxDB servers. Each query hits different host from the
- list of hosts.
-
- :param hosts: all hosts to be included in the cluster, each of which
- should be in the format (address, port),
- e.g. [('127.0.0.1', 8086), ('127.0.0.1', 9096)]. Defaults to
- [('localhost', 8086)]
- :type hosts: list of tuples
- :param shuffle: whether the queries should hit servers evenly(randomly),
- defaults to True
- :type shuffle: bool
- :param client_base_class: the base class for the cluster client.
- This parameter is used to enable the support of different client
- types. Defaults to :class:`~.InfluxDBClient`
- """
+ :param username: the username to get privileges of
+ :type username: str
- def __init__(self,
- hosts=[('localhost', 8086)],
- username='root',
- password='root',
- database=None,
- ssl=False,
- verify_ssl=False,
- timeout=None,
- use_udp=False,
- udp_port=4444,
- shuffle=True,
- client_base_class=InfluxDBClient,
- ):
- self.clients = [self] # Keep it backwards compatible
- self.hosts = hosts
- self.bad_hosts = [] # Corresponding server has failures in history
- self.shuffle = shuffle
- host, port = self.hosts[0]
- self._hosts_lock = threading.Lock()
- self._thread_local = threading.local()
- self._client = client_base_class(host=host,
- port=port,
- username=username,
- password=password,
- database=database,
- ssl=ssl,
- verify_ssl=verify_ssl,
- timeout=timeout,
- use_udp=use_udp,
- udp_port=udp_port)
- for method in dir(client_base_class):
- orig_attr = getattr(client_base_class, method, '')
- if method.startswith('_') or not callable(orig_attr):
- continue
-
- setattr(self, method, self._make_func(orig_attr))
-
- self._client._get_host = self._get_host
- self._client._get_baseurl = self._get_baseurl
- self._update_client_host(self.hosts[0])
+ :returns: all privileges granted to given user
+ :rtype: list of dictionaries
- @staticmethod
- def from_DSN(dsn, client_base_class=InfluxDBClient,
- shuffle=True, **kwargs):
- """Same as :meth:`~.InfluxDBClient.from_DSN`, but supports
- multiple servers.
-
- :param shuffle: whether the queries should hit servers
- evenly(randomly), defaults to True
- :type shuffle: bool
- :param client_base_class: the base class for all clients in the
- cluster. This parameter is used to enable the support of
- different client types. Defaults to :class:`~.InfluxDBClient`
+ :Example:
+
+ ::
+
+ >> privileges = client.get_list_privileges('user1')
+ >> privileges
+ [{u'privilege': u'WRITE', u'database': u'db1'},
+ {u'privilege': u'ALL PRIVILEGES', u'database': u'db2'},
+ {u'privilege': u'NO PRIVILEGES', u'database': u'db3'}]
+ """
+ text = "SHOW GRANTS FOR {0}".format(quote_ident(username))
+ return list(self.query(text).get_points())
+
+ def get_list_continuous_queries(self):
+ """Get the list of continuous queries in InfluxDB.
+
+ :return: all CQs in InfluxDB
+ :rtype: list of dictionaries
:Example:
::
- >> cluster = InfluxDBClusterClient.from_DSN('influxdb://usr:pwd\
-@host1:8086,usr:pwd@host2:8086/db_name', timeout=5)
- >> type(cluster)
-
- >> cluster.hosts
- [('host1', 8086), ('host2', 8086)]
- >> cluster._client
- ]
- """
- init_args = parse_dsn(dsn)
- init_args.update(**kwargs)
- init_args['shuffle'] = shuffle
- init_args['client_base_class'] = client_base_class
- cluster_client = InfluxDBClusterClient(**init_args)
- return cluster_client
-
- def _update_client_host(self, host):
- self._thread_local.host, self._thread_local.port = host
- self._thread_local.baseurl = "{0}://{1}:{2}".format(
- self._client._scheme,
- self._client._host,
- self._client._port
- )
+            >> cqs = client.get_list_continuous_queries()
+ >> cqs
+ [
+ {
+ u'db1': []
+ },
+ {
+ u'db2': [
+ {
+ u'name': u'vampire',
+ u'query': u'CREATE CONTINUOUS QUERY vampire ON '
+ 'mydb BEGIN SELECT count(dracula) INTO '
+ 'mydb.autogen.all_of_them FROM '
+ 'mydb.autogen.one GROUP BY time(5m) END'
+ }
+ ]
+ }
+ ]
+ """
+ query_string = "SHOW CONTINUOUS QUERIES"
+ return [{sk[0]: list(p)} for sk, p in self.query(query_string).items()]
+
+ def create_continuous_query(self, name, select, database=None,
+ resample_opts=None):
+ r"""Create a continuous query for a database.
+
+ :param name: the name of continuous query to create
+ :type name: str
+ :param select: select statement for the continuous query
+ :type select: str
+ :param database: the database for which the continuous query is
+ created. Defaults to current client's database
+ :type database: str
+ :param resample_opts: resample options
+ :type resample_opts: str
+
+ :Example:
+
+ ::
+
+ >> select_clause = 'SELECT mean("value") INTO "cpu_mean" ' \
+ ... 'FROM "cpu" GROUP BY time(1m)'
+ >> client.create_continuous_query(
+ ... 'cpu_mean', select_clause, 'db_name', 'EVERY 10s FOR 2m'
+ ... )
+ >> client.get_list_continuous_queries()
+ [
+ {
+ 'db_name': [
+ {
+ 'name': 'cpu_mean',
+ 'query': 'CREATE CONTINUOUS QUERY "cpu_mean" '
+ 'ON "db_name" '
+ 'RESAMPLE EVERY 10s FOR 2m '
+ 'BEGIN SELECT mean("value") '
+ 'INTO "cpu_mean" FROM "cpu" '
+ 'GROUP BY time(1m) END'
+ }
+ ]
+ }
+ ]
+ """
+ query_string = (
+ "CREATE CONTINUOUS QUERY {0} ON {1}{2} BEGIN {3} END"
+ ).format(quote_ident(name), quote_ident(database or self._database),
+ ' RESAMPLE ' + resample_opts if resample_opts else '', select)
+ self.query(query_string)
+
+ def drop_continuous_query(self, name, database=None):
+ """Drop an existing continuous query for a database.
+
+ :param name: the name of continuous query to drop
+ :type name: str
+ :param database: the database for which the continuous query is
+ dropped. Defaults to current client's database
+ :type database: str
+ """
+ query_string = (
+ "DROP CONTINUOUS QUERY {0} ON {1}"
+ ).format(quote_ident(name), quote_ident(database or self._database))
+ self.query(query_string)
- def _get_baseurl(self):
- return self._thread_local.baseurl
-
- def _get_host(self):
- return self._thread_local.host
-
- def _make_func(self, orig_func):
-
- @wraps(orig_func)
- def func(*args, **kwargs):
- with self._hosts_lock:
- if self.shuffle:
- random.shuffle(self.hosts)
-
- hosts = self.hosts + self.bad_hosts
-
- for h in hosts:
- bad_host = False
- try:
- self._update_client_host(h)
- return orig_func(self._client, *args, **kwargs)
- except InfluxDBClientError as e:
- # Errors caused by user's requests, re-raise
- raise e
- except Exception as e:
- # Errors that might caused by server failure, try another
- bad_host = True
- with self._hosts_lock:
- if h in self.hosts:
- self.hosts.remove(h)
- self.bad_hosts.append(h)
- finally:
- with self._hosts_lock:
- if not bad_host and h in self.bad_hosts:
- self.bad_hosts.remove(h)
- self.hosts.append(h)
-
- raise InfluxDBServerError("InfluxDB: no viable server!")
-
- return func
-
-
-def parse_dsn(dsn):
+ def send_packet(self, packet, protocol='json', time_precision=None):
+ """Send an UDP packet.
+
+ :param packet: the packet to be sent
+ :type packet: (if protocol is 'json') dict
+ (if protocol is 'line') list of line protocol strings
+ :param protocol: protocol of input data, either 'json' or 'line'
+ :type protocol: str
+ :param time_precision: Either 's', 'm', 'ms' or 'u', defaults to None
+ :type time_precision: str
+ """
+ if protocol == 'json':
+ data = make_lines(packet, time_precision).encode('utf-8')
+ elif protocol == 'line':
+ data = ('\n'.join(packet) + '\n').encode('utf-8')
+ self.udp_socket.sendto(data, (self._host, self._udp_port))
+
+ def close(self):
+ """Close http session."""
+ if isinstance(self._session, requests.Session):
+ self._session.close()
+
+
+def _parse_dsn(dsn):
+ """Parse data source name.
+
+ This is a helper function to split the data source name provided in
+    the from_dsn classmethod.
+ """
conn_params = urlparse(dsn)
init_args = {}
scheme_info = conn_params.scheme.split('+')
@@ -874,7 +1221,7 @@ def parse_dsn(dsn):
modifier, scheme = scheme_info
if scheme != 'influxdb':
- raise ValueError('Unknown scheme "{}".'.format(scheme))
+ raise ValueError('Unknown scheme "{0}".'.format(scheme))
if modifier:
if modifier == 'udp':
@@ -882,7 +1229,7 @@ def parse_dsn(dsn):
elif modifier == 'https':
init_args['ssl'] = True
else:
- raise ValueError('Unknown modifier "{}".'.format(modifier))
+ raise ValueError('Unknown modifier "{0}".'.format(modifier))
netlocs = conn_params.netloc.split(',')
@@ -900,13 +1247,30 @@ def parse_dsn(dsn):
def _parse_netloc(netloc):
- import re
- parsed = re.findall(r'(\w*):(\w*)@([a-zA-Z0-9_\.]*):(\d*)', netloc)
- if not parsed:
- raise ValueError('Invalid netloc "{}".'.format(netloc))
-
- info = parsed[0]
- return {'username': info[0] or None,
- 'password': info[1] or None,
- 'host': info[2] or 'localhost',
- 'port': info[3] or 8086}
+ info = urlparse("http://{0}".format(netloc))
+ return {'username': info.username or None,
+ 'password': info.password or None,
+ 'host': info.hostname or 'localhost',
+ 'port': info.port or 8086}
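# Behaviour sketch for _parse_netloc() above: delegating to urlparse instead
# of the old regex also handles missing parts, falling back to the defaults
# shown.
assert _parse_netloc('user:pass@example.org:9999') == {
    'username': 'user', 'password': 'pass',
    'host': 'example.org', 'port': 9999,
}
assert _parse_netloc('example.org')['port'] == 8086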
+
+
+def _msgpack_parse_hook(code, data):
+ if code == 5:
+ (epoch_s, epoch_ns) = struct.unpack(">QI", data)
+ timestamp = datetime.datetime.utcfromtimestamp(epoch_s)
+ timestamp += datetime.timedelta(microseconds=(epoch_ns / 1000))
+ return timestamp.isoformat() + 'Z'
+ return msgpack.ExtType(code, data)
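# Behaviour sketch for the ext hook above: InfluxDB's msgpack responses
# encode timestamps as ext type 5, a big-endian 8-byte seconds plus 4-byte
# nanoseconds pair, which the hook turns into an RFC3339-style string.
packed = struct.pack(">QI", 1557934200, 500000000)
assert _msgpack_parse_hook(5, packed) == '2019-05-15T15:30:00.500000Z'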
+
+
+class _SocketOptionsAdapter(HTTPAdapter):
+ """_SocketOptionsAdapter injects socket_options into HTTP Adapter."""
+
+ def __init__(self, *args, **kwargs):
+ self.socket_options = kwargs.pop("socket_options", None)
+ super(_SocketOptionsAdapter, self).__init__(*args, **kwargs)
+
+ def init_poolmanager(self, *args, **kwargs):
+ if self.socket_options is not None:
+ kwargs["socket_options"] = self.socket_options
+ super(_SocketOptionsAdapter, self).init_poolmanager(*args, **kwargs)
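# Usage sketch (placeholder values): socket options handed to the client
# flow through this adapter into urllib3's connection pool, e.g. to turn on
# TCP keepalive for long-lived connections.
import socket
from urllib3.connection import HTTPConnection

cli = InfluxDBClient(
    socket_options=HTTPConnection.default_socket_options + [
        (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
    ],
)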
diff --git a/influxdb/dataframe_client.py b/influxdb/dataframe_client.py
index 41a6796c..babfe0dd 100644
--- a/influxdb/dataframe_client.py
+++ b/influxdb/dataframe_client.py
@@ -1,7 +1,10 @@
# -*- coding: utf-8 -*-
-"""
-DataFrame client for InfluxDB
-"""
+"""DataFrame client for InfluxDB."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
__all__ = ['DataFrameClient']
@@ -12,8 +15,14 @@
from .client import InfluxDBClient
class DataFrameClient(InfluxDBClient):
+ """DataFrameClient default class instantiation."""
+
+ err = err
+
def __init__(self, *a, **kw):
+ """Initialize the default DataFrameClient."""
+ super(DataFrameClient, self).__init__()
raise ImportError("DataFrameClient requires Pandas "
- "which couldn't be imported: %s" % err)
+ "which couldn't be imported: %s" % self.err)
else:
- from ._dataframe_client import DataFrameClient
+ from ._dataframe_client import DataFrameClient # type: ignore
diff --git a/influxdb/exceptions.py b/influxdb/exceptions.py
index 33c909b6..bd71d301 100644
--- a/influxdb/exceptions.py
+++ b/influxdb/exceptions.py
@@ -1,8 +1,19 @@
+# -*- coding: utf-8 -*-
+"""Exception handler for InfluxDBClient."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+
class InfluxDBClientError(Exception):
"""Raised when an error occurs in the request."""
+
def __init__(self, content, code=None):
+ """Initialize the InfluxDBClientError handler."""
if isinstance(content, type(b'')):
- content = content.decode('UTF-8', errors='replace')
+ content = content.decode('UTF-8', 'replace')
if code is not None:
message = "%s: %s" % (code, content)
@@ -18,5 +29,7 @@ def __init__(self, content, code=None):
class InfluxDBServerError(Exception):
"""Raised when a server error occurs."""
+
def __init__(self, content):
+ """Initialize the InfluxDBServerError handler."""
super(InfluxDBServerError, self).__init__(content)
diff --git a/influxdb/helper.py b/influxdb/helper.py
index 941aa4b8..138cf6e8 100644
--- a/influxdb/helper.py
+++ b/influxdb/helper.py
@@ -1,20 +1,27 @@
# -*- coding: utf-8 -*-
-"""
-Helper class for InfluxDB
-"""
+"""Helper class for InfluxDB."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
from collections import namedtuple, defaultdict
+from datetime import datetime
from warnings import warn
import six
class SeriesHelper(object):
+ """Subclass this helper eases writing data points in bulk.
- """
- Subclassing this helper eases writing data points in bulk.
- All data points are immutable, insuring they do not get overwritten.
+ All data points are immutable, ensuring they do not get overwritten.
Each subclass can write to its own database.
The time series names can also be based on one or more defined fields.
+ The field "time" can be specified when creating a point, and may be any of
+ the time types supported by the client (i.e. str, datetime, int).
+    If the time is not specified, the current system time (UTC) will be used.
Annotated example::
@@ -34,13 +41,19 @@ class Meta:
# Only applicable if autocommit is True.
autocommit = True
# If True and no bulk_size, then will set bulk_size to 1.
+ retention_policy = 'your_retention_policy'
+ # Specify the retention policy for the data points
+        time_precision = "h"|"m"|"s"|"ms"|"u"|"ns"
+ # Default is ns (nanoseconds)
+ # Setting time precision while writing point
+ # You should also make sure time is set in the given precision
"""
+
__initialized__ = False
def __new__(cls, *args, **kwargs):
- """
- Initializes class attributes for subsequent constructor calls.
+ """Initialize class attributes for subsequent constructor calls.
:note: *args and **kwargs are not explicitly used in this function,
but needed for Python 2 compatibility.
@@ -51,7 +64,7 @@ def __new__(cls, *args, **kwargs):
_meta = getattr(cls, 'Meta')
except AttributeError:
raise AttributeError(
- 'Missing Meta class in {}.'.format(
+ 'Missing Meta class in {0}.'.format(
cls.__name__))
for attr in ['series_name', 'fields', 'tags']:
@@ -59,23 +72,32 @@ def __new__(cls, *args, **kwargs):
setattr(cls, '_' + attr, getattr(_meta, attr))
except AttributeError:
raise AttributeError(
- 'Missing {} in {} Meta class.'.format(
+ 'Missing {0} in {1} Meta class.'.format(
attr,
cls.__name__))
cls._autocommit = getattr(_meta, 'autocommit', False)
+ cls._time_precision = getattr(_meta, 'time_precision', None)
+
+ allowed_time_precisions = ['h', 'm', 's', 'ms', 'u', 'ns', None]
+ if cls._time_precision not in allowed_time_precisions:
+ raise AttributeError(
+                'In {0}, time_precision is set but invalid; use any of {1}.'
+                .format(cls.__name__,
+                        ', '.join(p for p in allowed_time_precisions if p)))
+
+ cls._retention_policy = getattr(_meta, 'retention_policy', None)
cls._client = getattr(_meta, 'client', None)
if cls._autocommit and not cls._client:
raise AttributeError(
- 'In {}, autocommit is set to True, but no client is set.'
+ 'In {0}, autocommit is set to True, but no client is set.'
.format(cls.__name__))
try:
cls._bulk_size = getattr(_meta, 'bulk_size')
if cls._bulk_size < 1 and cls._autocommit:
warn(
- 'Definition of bulk_size in {} forced to 1, '
+ 'Definition of bulk_size in {0} forced to 1, '
'was less than 1.'.format(cls.__name__))
cls._bulk_size = 1
except AttributeError:
@@ -83,30 +105,43 @@ def __new__(cls, *args, **kwargs):
else:
if not cls._autocommit:
warn(
- 'Definition of bulk_size in {} has no affect because'
+                    'Definition of bulk_size in {0} has no effect because'
' autocommit is false.'.format(cls.__name__))
cls._datapoints = defaultdict(list)
- cls._type = namedtuple(cls.__name__, cls._fields + cls._tags)
+
+ if 'time' in cls._fields:
+ cls._fields.remove('time')
+ cls._type = namedtuple(cls.__name__,
+ ['time'] + cls._tags + cls._fields)
+ cls._type.__new__.__defaults__ = (None,) * len(cls._fields)
return super(SeriesHelper, cls).__new__(cls)
def __init__(self, **kw):
- """
- Constructor call creates a new data point. All fields must be present.
+ """Call to constructor creates a new data point.
:note: Data points written when `bulk_size` is reached per Helper.
:warning: Data points are *immutable* (`namedtuples`).
"""
cls = self.__class__
+ timestamp = kw.pop('time', self._current_timestamp())
+ tags = set(cls._tags)
+ fields = set(cls._fields)
+ keys = set(kw.keys())
- if sorted(cls._fields + cls._tags) != sorted(kw.keys()):
+        # all tags should be passed, and the remaining keys must be a
+        # subset of fields
+ if not (tags <= keys):
raise NameError(
- 'Expected {0}, got {1}.'.format(
- sorted(cls._fields + cls._tags),
- kw.keys()))
+ 'Expected arguments to contain all tags {0}, instead got {1}.'
+ .format(cls._tags, kw.keys()))
+ if not (keys - tags <= fields):
+ raise NameError('Got arguments not in tags or fields: {0}'
+ .format(keys - tags - fields))
- cls._datapoints[cls._series_name.format(**kw)].append(cls._type(**kw))
+ cls._datapoints[cls._series_name.format(**kw)].append(
+ cls._type(time=timestamp, **kw)
+ )
if cls._autocommit and \
sum(len(series) for series in cls._datapoints.values()) \
@@ -115,8 +150,7 @@ def __init__(self, **kw):
@classmethod
def commit(cls, client=None):
- """
- Commit everything from datapoints via the client.
+ """Commit everything from datapoints via the client.
:param client: InfluxDBClient instance for writing points to InfluxDB.
:attention: any provided client will supersede the class client.
@@ -124,26 +158,37 @@ def commit(cls, client=None):
"""
if not client:
client = cls._client
- rtn = client.write_points(cls._json_body_())
+
+ rtn = client.write_points(
+ cls._json_body_(),
+ time_precision=cls._time_precision,
+ retention_policy=cls._retention_policy)
+ # will be None if not set and will default to ns
cls._reset_()
return rtn
@classmethod
def _json_body_(cls):
- """
+ """Return the JSON body of given datapoints.
+
:return: JSON body of these datapoints.
"""
json = []
+ if not cls.__initialized__:
+ cls._reset_()
for series_name, data in six.iteritems(cls._datapoints):
for point in data:
json_point = {
"measurement": series_name,
"fields": {},
"tags": {},
+ "time": getattr(point, "time")
}
for field in cls._fields:
- json_point['fields'][field] = getattr(point, field)
+ value = getattr(point, field)
+ if value is not None:
+ json_point['fields'][field] = value
for tag in cls._tags:
json_point['tags'][tag] = getattr(point, tag)
@@ -153,7 +198,9 @@ def _json_body_(cls):
@classmethod
def _reset_(cls):
- """
- Reset data storage.
- """
+ """Reset data storage."""
cls._datapoints = defaultdict(list)
+
+ @staticmethod
+ def _current_timestamp():
+ return datetime.utcnow()
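# Usage sketch tying the helper changes above together ('my_client' is an
# assumed, pre-existing InfluxDBClient; all other names are placeholders):
# 'time' is now accepted as an optional argument defaulting to utcnow(),
# and fields left unset are simply omitted from the JSON body.
class CPUHelper(SeriesHelper):
    class Meta:
        client = my_client
        series_name = 'cpu.{host}'
        fields = ['time', 'value']  # 'time' is moved out of the fields
        tags = ['host']
        bulk_size = 2
        autocommit = True

CPUHelper(host='server01', value=0.64)  # time defaults to utcnow()
CPUHelper(host='server02', value=0.71, time=datetime(2019, 5, 15))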
diff --git a/influxdb/influxdb08/__init__.py b/influxdb/influxdb08/__init__.py
index 6ba218e3..f4e6c082 100644
--- a/influxdb/influxdb08/__init__.py
+++ b/influxdb/influxdb08/__init__.py
@@ -1,4 +1,11 @@
# -*- coding: utf-8 -*-
+"""Define the influxdb08 package."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
from .client import InfluxDBClient
from .dataframe_client import DataFrameClient
from .helper import SeriesHelper
diff --git a/influxdb/influxdb08/chunked_json.py b/influxdb/influxdb08/chunked_json.py
index 50d304f1..d6847de1 100644
--- a/influxdb/influxdb08/chunked_json.py
+++ b/influxdb/influxdb08/chunked_json.py
@@ -1,17 +1,23 @@
# -*- coding: utf-8 -*-
+"""Module to generate chunked JSON replies for influxdb08."""
#
# Author: Adrian Sampson
# Source: https://gist.github.com/sampsyo/920215
#
-import json
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
-_decoder = json.JSONDecoder()
+import json
def loads(s):
- """A generator reading a sequence of JSON values from a string."""
+ """Generate a sequence of JSON values from a string."""
+ _decoder = json.JSONDecoder()
+
while s:
s = s.strip()
obj, pos = _decoder.raw_decode(s)
diff --git a/influxdb/influxdb08/client.py b/influxdb/influxdb08/client.py
index 5d8f59c5..40c58145 100644
--- a/influxdb/influxdb08/client.py
+++ b/influxdb/influxdb08/client.py
@@ -1,32 +1,25 @@
# -*- coding: utf-8 -*-
-"""
-Python client for InfluxDB
-"""
+"""Python client for InfluxDB v0.8."""
+
+import warnings
+
import json
import socket
import requests
import requests.exceptions
-import warnings
-from sys import version_info
+from six.moves import xrange
+from six.moves.urllib.parse import urlparse
from influxdb import chunked_json
-try:
- xrange
-except NameError:
- xrange = range
-
-if version_info[0] == 3:
- from urllib.parse import urlparse
-else:
- from urlparse import urlparse
-
session = requests.Session()
class InfluxDBClientError(Exception):
- """Raised when an error occurs in the request"""
+ """Raised when an error occurs in the request."""
+
def __init__(self, content, code=-1):
+ """Initialize an InfluxDBClientError handler."""
super(InfluxDBClientError, self).__init__(
"{0}: {1}".format(code, content))
self.content = content
@@ -34,8 +27,8 @@ def __init__(self, content, code=-1):
class InfluxDBClient(object):
+ """Define the standard InfluxDBClient for influxdb v0.8.
- """
The ``InfluxDBClient`` object holds information necessary to connect
to InfluxDB. Requests can be made to InfluxDB directly through the client.
@@ -55,6 +48,9 @@ class InfluxDBClient(object):
    :param verify_ssl: verify SSL certificates for HTTPS requests, defaults to
False
:type verify_ssl: boolean
+ :param retries: number of retries your client will try before aborting,
+ defaults to 3. 0 indicates try until success
+ :type retries: int
:param timeout: number of seconds Requests will wait for your client to
establish a connection, defaults to None
:type timeout: int
@@ -73,22 +69,22 @@ def __init__(self,
ssl=False,
verify_ssl=False,
timeout=None,
+ retries=3,
use_udp=False,
udp_port=4444):
- """
- Construct a new InfluxDBClient object.
- """
+ """Construct a new InfluxDBClient object."""
self._host = host
self._port = port
self._username = username
self._password = password
self._database = database
self._timeout = timeout
+ self._retries = retries
self._verify_ssl = verify_ssl
- self.use_udp = use_udp
- self.udp_port = udp_port
+ self._use_udp = use_udp
+ self._udp_port = udp_port
if use_udp:
self.udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
@@ -107,19 +103,20 @@ def __init__(self,
'Accept': 'text/plain'}
@staticmethod
- def from_DSN(dsn, **kwargs):
- """
+ def from_dsn(dsn, **kwargs):
+ r"""Return an instaance of InfluxDBClient from given data source name.
+
Returns an instance of InfluxDBClient from the provided data source
name. Supported schemes are "influxdb", "https+influxdb",
"udp+influxdb". Parameters for the InfluxDBClient constructor may be
also be passed to this function.
Examples:
- >> cli = InfluxDBClient.from_DSN('influxdb://username:password@\
+ >> cli = InfluxDBClient.from_dsn('influxdb://username:password@\
... localhost:8086/databasename', timeout=5)
>> type(cli)
- >> cli = InfluxDBClient.from_DSN('udp+influxdb://username:pass@\
+ >> cli = InfluxDBClient.from_dsn('udp+influxdb://username:pass@\
... localhost:8086/databasename', timeout=5, udp_port=159)
        >> print('{0._baseurl} - {0._use_udp} {0._udp_port}'.format(cli))
http://localhost:8086 - True 159
@@ -133,11 +130,12 @@ def from_DSN(dsn, **kwargs):
used for the TCP connection; specify the udp port with the additional
udp_port parameter (cf. examples).
:raise ValueError: if the provided DSN has any unexpected value.
- """
+ """
init_args = {}
conn_params = urlparse(dsn)
scheme_info = conn_params.scheme.split('+')
+
if len(scheme_info) == 1:
scheme = scheme_info[0]
modifier = None
@@ -145,14 +143,15 @@ def from_DSN(dsn, **kwargs):
modifier, scheme = scheme_info
if scheme != 'influxdb':
- raise ValueError('Unknown scheme "{}".'.format(scheme))
+ raise ValueError('Unknown scheme "{0}".'.format(scheme))
+
if modifier:
if modifier == 'udp':
init_args['use_udp'] = True
elif modifier == 'https':
init_args['ssl'] = True
else:
- raise ValueError('Unknown modifier "{}".'.format(modifier))
+ raise ValueError('Unknown modifier "{0}".'.format(modifier))
if conn_params.hostname:
init_args['host'] = conn_params.hostname
@@ -172,10 +171,7 @@ def from_DSN(dsn, **kwargs):
# Change member variables
def switch_database(self, database):
- """
- switch_database()
-
- Change client database.
+ """Change client database.
:param database: the new database name to switch to
:type database: string
@@ -183,9 +179,9 @@ def switch_database(self, database):
self._database = database
def switch_db(self, database):
- """
- DEPRECATED. Change client database.
+ """Change client database.
+ DEPRECATED.
"""
warnings.warn(
"switch_db is deprecated, and will be removed "
@@ -195,10 +191,7 @@ def switch_db(self, database):
return self.switch_database(database)
def switch_user(self, username, password):
- """
- switch_user()
-
- Change client username.
+ """Change client username.
:param username: the new username to switch to
:type username: string
@@ -210,9 +203,7 @@ def switch_user(self, username, password):
def request(self, url, method='GET', params=None, data=None,
expected_response_code=200):
- """
- Make a http request to API
- """
+ """Make a http request to API."""
url = "{0}/{1}".format(self._baseurl, url)
if params is None:
@@ -228,9 +219,10 @@ def request(self, url, method='GET', params=None, data=None,
if data is not None and not isinstance(data, str):
data = json.dumps(data)
- # Try to send the request a maximum of three times. (see #103)
- # TODO (aviau): Make this configurable.
- for i in range(0, 3):
+ retry = True
+ _try = 0
+ # Try to send the request more than once by default (see #103)
+ while retry:
try:
response = session.request(
method=method,
@@ -243,11 +235,12 @@ def request(self, url, method='GET', params=None, data=None,
)
break
except (requests.exceptions.ConnectionError,
- requests.exceptions.Timeout) as e:
- if i < 2:
- continue
- else:
- raise e
+ requests.exceptions.Timeout):
+ _try += 1
+ if self._retries != 0:
+ retry = _try < self._retries
+ else:
+ raise requests.exceptions.ConnectionError
if response.status_code == expected_response_code:
return response
@@ -255,7 +248,7 @@ def request(self, url, method='GET', params=None, data=None,
raise InfluxDBClientError(response.content, response.status_code)
def write(self, data):
- """ Provided as convenience for influxdb v0.9.0, this may change. """
+ """Provide as convenience for influxdb v0.9.0, this may change."""
self.request(
url="write",
method='POST',
@@ -272,8 +265,9 @@ def write(self, data):
# with a JSON body of points.
def write_points(self, data, time_precision='s', *args, **kwargs):
- """
- Write to multiple time series names. An example data blob is:
+ """Write to multiple time series names.
+
+ An example data blob is:
data = [
{
@@ -296,13 +290,12 @@ def write_points(self, data, time_precision='s', *args, **kwargs):
instead of all at one time. Useful for when doing data dumps from
one database to another or when doing a massive write operation
:type batch_size: int
- """
- def list_chunks(l, n):
- """ Yield successive n-sized chunks from l.
- """
- for i in xrange(0, len(l), n):
- yield l[i:i + n]
+ """
+ def list_chunks(data_list, n):
+ """Yield successive n-sized chunks from l."""
+ for i in xrange(0, len(data_list), n):
+ yield data_list[i:i + n]
batch_size = kwargs.get('batch_size')
if batch_size and batch_size > 0:
@@ -321,14 +314,14 @@ def list_chunks(l, n):
data=item,
time_precision=time_precision)
return True
- else:
- return self._write_points(data=data,
- time_precision=time_precision)
+
+ return self._write_points(data=data,
+ time_precision=time_precision)
def write_points_with_precision(self, data, time_precision='s'):
- """
- DEPRECATED. Write to multiple time series names
+ """Write to multiple time series names.
+ DEPRECATED.
"""
warnings.warn(
"write_points_with_precision is deprecated, and will be removed "
@@ -342,7 +335,7 @@ def _write_points(self, data, time_precision):
raise Exception(
"Invalid time precision is given. (use 's', 'm', 'ms' or 'u')")
- if self.use_udp and time_precision != 's':
+ if self._use_udp and time_precision != 's':
raise Exception(
"InfluxDB only supports seconds precision for udp writes"
)
@@ -353,7 +346,7 @@ def _write_points(self, data, time_precision):
'time_precision': time_precision
}
- if self.use_udp:
+ if self._use_udp:
self.send_packet(data)
else:
self.request(
@@ -369,9 +362,7 @@ def _write_points(self, data, time_precision):
# One Time Deletes
def delete_points(self, name):
- """
- Delete an entire series
- """
+ """Delete an entire series."""
url = "db/{0}/series/{1}".format(self._database, name)
self.request(
@@ -385,12 +376,12 @@ def delete_points(self, name):
# Regularly Scheduled Deletes
def create_scheduled_delete(self, json_body):
- """
- TODO: Create scheduled delete
+ """Create schedule delete from database.
2013-11-08: This endpoint has not been implemented yet in ver0.0.8,
but it is documented in http://influxdb.org/docs/api/http.html.
See also: src/api/http/api.go:l57
+
"""
raise NotImplementedError()
@@ -401,28 +392,27 @@ def create_scheduled_delete(self, json_body):
# curl -X DELETE http://localhost:8086/db/site_dev/scheduled_deletes/:id
def get_list_scheduled_delete(self):
- """
- TODO: Get list of scheduled deletes
+ """Get list of scheduled deletes.
2013-11-08: This endpoint has not been implemented yet in ver0.0.8,
but it is documented in http://influxdb.org/docs/api/http.html.
See also: src/api/http/api.go:l57
+
"""
raise NotImplementedError()
def remove_scheduled_delete(self, delete_id):
- """
- TODO: Remove scheduled delete
+ """Remove scheduled delete.
2013-11-08: This endpoint has not been implemented yet in ver0.0.8,
but it is documented in http://influxdb.org/docs/api/http.html.
See also: src/api/http/api.go:l57
+
"""
raise NotImplementedError()
def query(self, query, time_precision='s', chunked=False):
- """
- Quering data
+ """Query data from the influxdb v0.8 database.
:param time_precision: [Optional, default 's'] Either 's', 'm', 'ms'
or 'u'.
@@ -445,7 +435,7 @@ def _query(self, query, time_precision='s', chunked=False):
else:
chunked_param = 'false'
- # Build the URL of the serie to query
+ # Build the URL of the series to query
url = "db/{0}/series".format(self._database)
params = {
@@ -462,15 +452,14 @@ def _query(self, query, time_precision='s', chunked=False):
)
if chunked:
- decoded = {}
try:
decoded = chunked_json.loads(response.content.decode())
except UnicodeDecodeError:
decoded = chunked_json.loads(response.content.decode('utf-8'))
- finally:
- return list(decoded)
- else:
- return response.json()
+
+ return list(decoded)
+
+ return response.json()
# Creating and Dropping Databases
#
@@ -481,10 +470,7 @@ def _query(self, query, time_precision='s', chunked=False):
# curl -X DELETE http://localhost:8086/db/site_development
def create_database(self, database):
- """
- create_database()
-
- Create a database on the InfluxDB server.
+ """Create a database on the InfluxDB server.
:param database: the name of the database to create
:type database: string
@@ -504,10 +490,7 @@ def create_database(self, database):
return True
def delete_database(self, database):
- """
- delete_database()
-
- Drop a database on the InfluxDB server.
+ """Drop a database on the InfluxDB server.
:param database: the name of the database to delete
:type database: string
@@ -527,9 +510,7 @@ def delete_database(self, database):
# curl -X GET http://localhost:8086/db
def get_list_database(self):
- """
- Get the list of databases
- """
+ """Get the list of databases."""
url = "db"
response = self.request(
@@ -541,9 +522,9 @@ def get_list_database(self):
return response.json()
def get_database_list(self):
- """
- DEPRECATED. Get the list of databases
+ """Get the list of databases.
+ DEPRECATED.
"""
warnings.warn(
"get_database_list is deprecated, and will be removed "
@@ -553,10 +534,7 @@ def get_database_list(self):
return self.get_list_database()
def delete_series(self, series):
- """
- delete_series()
-
- Drop a series on the InfluxDB server.
+ """Drop a series on the InfluxDB server.
:param series: the name of the series to delete
:type series: string
@@ -576,29 +554,14 @@ def delete_series(self, series):
return True
def get_list_series(self):
- """
- Get a list of all time series in a database
- """
-
+ """Get a list of all time series in a database."""
response = self._query('list series')
-
- series_list = []
- for series in response[0]['points']:
- series_list.append(series[1])
-
- return series_list
+ return [series[1] for series in response[0]['points']]
def get_list_continuous_queries(self):
- """
- Get a list of continuous queries
- """
-
+ """Get a list of continuous queries."""
response = self._query('list continuous queries')
- queries_list = []
- for query in response[0]['points']:
- queries_list.append(query[2])
-
- return queries_list
+ return [query[2] for query in response[0]['points']]
# Security
# get list of cluster admins
@@ -632,9 +595,7 @@ def get_list_continuous_queries(self):
# http://localhost:8086/db/site_dev/admins/paul?u=root&p=root
def get_list_cluster_admins(self):
- """
- Get list of cluster admins
- """
+ """Get list of cluster admins."""
response = self.request(
url="cluster_admins",
method='GET',
@@ -644,9 +605,7 @@ def get_list_cluster_admins(self):
return response.json()
def add_cluster_admin(self, new_username, new_password):
- """
- Add cluster admin
- """
+ """Add cluster admin."""
data = {
'name': new_username,
'password': new_password
@@ -662,9 +621,7 @@ def add_cluster_admin(self, new_username, new_password):
return True
def update_cluster_admin_password(self, username, new_password):
- """
- Update cluster admin password
- """
+ """Update cluster admin password."""
url = "cluster_admins/{0}".format(username)
data = {
@@ -681,9 +638,7 @@ def update_cluster_admin_password(self, username, new_password):
return True
def delete_cluster_admin(self, username):
- """
- Delete cluster admin
- """
+ """Delete cluster admin."""
url = "cluster_admins/{0}".format(username)
self.request(
@@ -695,18 +650,15 @@ def delete_cluster_admin(self, username):
return True
def set_database_admin(self, username):
- """
- Set user as database admin
- """
+ """Set user as database admin."""
return self.alter_database_admin(username, True)
def unset_database_admin(self, username):
- """
- Unset user as database admin
- """
+ """Unset user as database admin."""
return self.alter_database_admin(username, False)
def alter_database_admin(self, username, is_admin):
+ """Alter the database admin."""
url = "db/{0}/users/{1}".format(self._database, username)
data = {'admin': is_admin}
@@ -721,42 +673,42 @@ def alter_database_admin(self, username, is_admin):
return True
def get_list_database_admins(self):
- """
- TODO: Get list of database admins
+ """Get list of database admins.
2013-11-08: This endpoint has not been implemented yet in ver0.0.8,
but it is documented in http://influxdb.org/docs/api/http.html.
See also: src/api/http/api.go:l57
+
"""
raise NotImplementedError()
def add_database_admin(self, new_username, new_password):
- """
- TODO: Add cluster admin
+ """Add cluster admin.
2013-11-08: This endpoint has not been implemented yet in ver0.0.8,
but it is documented in http://influxdb.org/docs/api/http.html.
See also: src/api/http/api.go:l57
+
"""
raise NotImplementedError()
def update_database_admin_password(self, username, new_password):
- """
- TODO: Update database admin password
+ """Update database admin password.
2013-11-08: This endpoint has not been implemented yet in ver0.0.8,
but it is documented in http://influxdb.org/docs/api/http.html.
See also: src/api/http/api.go:l57
+
"""
raise NotImplementedError()
def delete_database_admin(self, username):
- """
- TODO: Delete database admin
+ """Delete database admin.
2013-11-08: This endpoint has not been implemented yet in ver0.0.8,
but it is documented in http://influxdb.org/docs/api/http.html.
See also: src/api/http/api.go:l57
+
"""
raise NotImplementedError()
@@ -779,9 +731,7 @@ def delete_database_admin(self, username):
# curl -X DELETE http://localhost:8086/db/site_dev/users/paul?u=root&p=root
def get_database_users(self):
- """
- Get list of database users
- """
+ """Get list of database users."""
url = "db/{0}/users".format(self._database)
response = self.request(
@@ -793,8 +743,7 @@ def get_database_users(self):
return response.json()
def add_database_user(self, new_username, new_password, permissions=None):
- """
- Add database user
+ """Add database user.
:param permissions: A ``(readFrom, writeTo)`` tuple
"""
@@ -823,14 +772,12 @@ def add_database_user(self, new_username, new_password, permissions=None):
return True
def update_database_user_password(self, username, new_password):
- """
- Update password
- """
+ """Update password."""
return self.alter_database_user(username, new_password)
def alter_database_user(self, username, password=None, permissions=None):
- """
- Alters a database user and/or their permissions.
+ """Alter a database user and/or their permissions.
+
:param permissions: A ``(readFrom, writeTo)`` tuple
:raise TypeError: if permissions cannot be read.
:raise ValueError: if neither password nor permissions provided.
@@ -838,7 +785,7 @@ def alter_database_user(self, username, password=None, permissions=None):
url = "db/{0}/users/{1}".format(self._database, username)
if not password and not permissions:
- raise ValueError("Nothing to alter for user {}.".format(username))
+ raise ValueError("Nothing to alter for user {0}.".format(username))
data = {}
@@ -866,9 +813,7 @@ def alter_database_user(self, username, password=None, permissions=None):
return True
def delete_database_user(self, username):
- """
- Delete database user
- """
+ """Delete database user."""
url = "db/{0}/users/{1}".format(self._database, username)
self.request(
@@ -882,16 +827,17 @@ def delete_database_user(self, username):
# update the user by POSTing to db/site_dev/users/paul
def update_permission(self, username, json_body):
- """
- TODO: Update read/write permission
+ """Update read/write permission.
2013-11-08: This endpoint has not been implemented yet in ver0.0.8,
but it is documented in http://influxdb.org/docs/api/http.html.
See also: src/api/http/api.go:l57
+
"""
raise NotImplementedError()
def send_packet(self, packet):
+ """Send a UDP packet along the wire."""
data = json.dumps(packet)
byte = data.encode('utf-8')
- self.udp_socket.sendto(byte, (self._host, self.udp_port))
+ self.udp_socket.sendto(byte, (self._host, self._udp_port))
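
The hunks above give the v0.8 client a configurable `retries` count and rename `from_DSN` to `from_dsn`. A minimal usage sketch, not part of the patch (host, port, and credentials are placeholders):

```python
from influxdb.influxdb08 import InfluxDBClient

# retries=3 is the new default; per the docstring above, retries=0
# keeps retrying until the request succeeds.
cli = InfluxDBClient(host='localhost', port=8086,
                     username='root', password='root',
                     database='mydb', retries=3)

# from_DSN was renamed to from_dsn; the scheme modifier selects the
# transport ('udp+' for UDP, 'https+' for TLS).
udp_cli = InfluxDBClient.from_dsn(
    'udp+influxdb://root:root@localhost:8086/mydb', udp_port=4444)
```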
diff --git a/influxdb/influxdb08/dataframe_client.py b/influxdb/influxdb08/dataframe_client.py
index c1665b3e..2867125d 100644
--- a/influxdb/influxdb08/dataframe_client.py
+++ b/influxdb/influxdb08/dataframe_client.py
@@ -1,7 +1,11 @@
# -*- coding: utf-8 -*-
-"""
-DataFrame client for InfluxDB
-"""
+"""DataFrame client for InfluxDB v0.8."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
import math
import warnings
@@ -9,13 +13,15 @@
class DataFrameClient(InfluxDBClient):
- """
+ """Primary defintion of the DataFrameClient for v0.8.
+
The ``DataFrameClient`` object holds information necessary to connect
to InfluxDB. Requests can be made to InfluxDB directly through the client.
The client reads and writes from pandas DataFrames.
"""
def __init__(self, ignore_nan=True, *args, **kwargs):
+ """Initialize an instance of the DataFrameClient."""
super(DataFrameClient, self).__init__(*args, **kwargs)
try:
@@ -24,12 +30,12 @@ def __init__(self, ignore_nan=True, *args, **kwargs):
except ImportError as ex:
raise ImportError('DataFrameClient requires Pandas, '
'"{ex}" problem importing'.format(ex=str(ex)))
+
self.EPOCH = pd.Timestamp('1970-01-01 00:00:00.000+00:00')
self.ignore_nan = ignore_nan
def write_points(self, data, *args, **kwargs):
- """
- Write to multiple time series names.
+ """Write to multiple time series names.
:param data: A dictionary mapping series names to pandas DataFrames
:param time_precision: [Optional, default 's'] Either 's', 'm', 'ms'
@@ -39,7 +45,6 @@ def write_points(self, data, *args, **kwargs):
one database to another or when doing a massive write operation
:type batch_size: int
"""
-
batch_size = kwargs.get('batch_size')
time_precision = kwargs.get('time_precision', 's')
if batch_size:
@@ -50,22 +55,25 @@ def write_points(self, data, *args, **kwargs):
for batch in range(number_batches):
start_index = batch * batch_size
end_index = (batch + 1) * batch_size
- data = [self._convert_dataframe_to_json(
- name=key,
- dataframe=data_frame.ix[start_index:end_index].copy(),
- time_precision=time_precision)]
- InfluxDBClient.write_points(self, data, *args, **kwargs)
+ outdata = [
+ self._convert_dataframe_to_json(
+ name=key,
+ dataframe=data_frame
+ .iloc[start_index:end_index].copy(),
+ time_precision=time_precision)]
+ InfluxDBClient.write_points(self, outdata, *args, **kwargs)
return True
- else:
- data = [self._convert_dataframe_to_json(
- name=key, dataframe=dataframe, time_precision=time_precision)
- for key, dataframe in data.items()]
- return InfluxDBClient.write_points(self, data, *args, **kwargs)
+
+ outdata = [
+ self._convert_dataframe_to_json(name=key, dataframe=dataframe,
+ time_precision=time_precision)
+ for key, dataframe in data.items()]
+ return InfluxDBClient.write_points(self, outdata, *args, **kwargs)
def write_points_with_precision(self, data, time_precision='s'):
- """
- DEPRECATED. Write to multiple time series names
+ """Write to multiple time series names.
+ DEPRECATED.
"""
warnings.warn(
"write_points_with_precision is deprecated, and will be removed "
@@ -75,8 +83,7 @@ def write_points_with_precision(self, data, time_precision='s'):
return self.write_points(data, time_precision='s')
def query(self, query, time_precision='s', chunked=False):
- """
- Quering data into DataFrames.
+ """Query data into DataFrames.
Returns a DataFrame for a single time series and a map for multiple
time series with the time series as value and its name as key.
@@ -85,7 +92,6 @@ def query(self, query, time_precision='s', chunked=False):
or 'u'.
:param chunked: [Optional, default=False] True if the data shall be
retrieved in chunks, False otherwise.
-
"""
result = InfluxDBClient.query(self, query=query,
time_precision=time_precision,
@@ -101,18 +107,21 @@ def query(self, query, time_precision='s', chunked=False):
time_precision)
return ret
- def _to_dataframe(self, json_result, time_precision):
+ @staticmethod
+ def _to_dataframe(json_result, time_precision):
dataframe = pd.DataFrame(data=json_result['points'],
columns=json_result['columns'])
if 'sequence_number' in dataframe.keys():
dataframe.sort_values(['time', 'sequence_number'], inplace=True)
else:
dataframe.sort_values(['time'], inplace=True)
+
pandas_time_unit = time_precision
if time_precision == 'm':
pandas_time_unit = 'ms'
elif time_precision == 'u':
pandas_time_unit = 'us'
+
dataframe.index = pd.to_datetime(list(dataframe['time']),
unit=pandas_time_unit,
utc=True)
@@ -121,13 +130,18 @@ def _to_dataframe(self, json_result, time_precision):
def _convert_dataframe_to_json(self, dataframe, name, time_precision='s'):
if not isinstance(dataframe, pd.DataFrame):
- raise TypeError('Must be DataFrame, but type was: {}.'
+ raise TypeError('Must be DataFrame, but type was: {0}.'
.format(type(dataframe)))
- if not (isinstance(dataframe.index, pd.tseries.period.PeriodIndex) or
- isinstance(dataframe.index, pd.tseries.index.DatetimeIndex)):
+ if not (isinstance(dataframe.index, pd.PeriodIndex) or
+ isinstance(dataframe.index, pd.DatetimeIndex)):
raise TypeError('Must be DataFrame with DatetimeIndex or \
PeriodIndex.')
- dataframe.index = dataframe.index.to_datetime()
+
+ if isinstance(dataframe.index, pd.PeriodIndex):
+ dataframe.index = dataframe.index.to_timestamp()
+ else:
+ dataframe.index = pd.to_datetime(dataframe.index)
+
if dataframe.index.tzinfo is None:
dataframe.index = dataframe.index.tz_localize('UTC')
dataframe['time'] = [self._datetime_to_epoch(dt, time_precision)
@@ -144,13 +158,14 @@ def _convert_array(self, array):
except ImportError as ex:
raise ImportError('DataFrameClient requires Numpy, '
'"{ex}" problem importing'.format(ex=str(ex)))
+
if self.ignore_nan:
number_types = (int, float, np.number)
condition = (all(isinstance(el, number_types) for el in array) and
np.isnan(array))
return list(np.where(condition, None, array))
- else:
- return list(array)
+
+ return list(array)
def _datetime_to_epoch(self, datetime, time_precision='s'):
seconds = (datetime - self.EPOCH).total_seconds()
diff --git a/influxdb/influxdb08/helper.py b/influxdb/influxdb08/helper.py
index 391e9ccd..5f2d4614 100644
--- a/influxdb/influxdb08/helper.py
+++ b/influxdb/influxdb08/helper.py
@@ -1,7 +1,11 @@
# -*- coding: utf-8 -*-
-"""
-Helper class for InfluxDB
-"""
+"""Helper class for InfluxDB for v0.8."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
from collections import namedtuple, defaultdict
from warnings import warn
@@ -9,10 +13,10 @@
class SeriesHelper(object):
+ """Define the SeriesHelper object for InfluxDB v0.8.
- """
Subclassing this helper eases writing data points in bulk.
- All data points are immutable, insuring they do not get overwritten.
+ All data points are immutable, ensuring they do not get overwritten.
Each subclass can write to its own database.
The time series names can also be based on one or more defined fields.
@@ -36,11 +40,11 @@ class Meta:
# If True and no bulk_size, then will set bulk_size to 1.
"""
+
__initialized__ = False
def __new__(cls, *args, **kwargs):
- """
- Initializes class attributes for subsequent constructor calls.
+ """Initialize class attributes for subsequent constructor calls.
:note: *args and **kwargs are not explicitly used in this function,
but needed for Python 2 compatibility.
@@ -51,7 +55,7 @@ def __new__(cls, *args, **kwargs):
_meta = getattr(cls, 'Meta')
except AttributeError:
raise AttributeError(
- 'Missing Meta class in {}.'.format(
+ 'Missing Meta class in {0}.'.format(
cls.__name__))
for attr in ['series_name', 'fields']:
@@ -59,7 +63,7 @@ def __new__(cls, *args, **kwargs):
setattr(cls, '_' + attr, getattr(_meta, attr))
except AttributeError:
raise AttributeError(
- 'Missing {} in {} Meta class.'.format(
+ 'Missing {0} in {1} Meta class.'.format(
attr,
cls.__name__))
@@ -68,14 +72,14 @@ def __new__(cls, *args, **kwargs):
cls._client = getattr(_meta, 'client', None)
if cls._autocommit and not cls._client:
raise AttributeError(
- 'In {}, autocommit is set to True, but no client is set.'
+ 'In {0}, autocommit is set to True, but no client is set.'
.format(cls.__name__))
try:
cls._bulk_size = getattr(_meta, 'bulk_size')
if cls._bulk_size < 1 and cls._autocommit:
warn(
- 'Definition of bulk_size in {} forced to 1, '
+ 'Definition of bulk_size in {0} forced to 1, '
'was less than 1.'.format(cls.__name__))
cls._bulk_size = 1
except AttributeError:
@@ -83,7 +87,7 @@ def __new__(cls, *args, **kwargs):
else:
if not cls._autocommit:
warn(
- 'Definition of bulk_size in {} has no affect because'
+ 'Definition of bulk_size in {0} has no effect because'
' autocommit is false.'.format(cls.__name__))
cls._datapoints = defaultdict(list)
@@ -92,8 +96,9 @@ def __new__(cls, *args, **kwargs):
return super(SeriesHelper, cls).__new__(cls)
def __init__(self, **kw):
- """
- Constructor call creates a new data point. All fields must be present.
+ """Create a new data point.
+
+ All fields must be present.
:note: Data points written when `bulk_size` is reached per Helper.
:warning: Data points are *immutable* (`namedtuples`).
@@ -115,8 +120,7 @@ def __init__(self, **kw):
@classmethod
def commit(cls, client=None):
- """
- Commit everything from datapoints via the client.
+ """Commit everything from datapoints via the client.
:param client: InfluxDBClient instance for writing points to InfluxDB.
:attention: any provided client will supersede the class client.
@@ -130,10 +134,13 @@ def commit(cls, client=None):
@classmethod
def _json_body_(cls):
- """
- :return: JSON body of these datapoints.
+ """Return JSON body of the datapoints.
+
+ :return: JSON body of the datapoints.
"""
json = []
+ if not cls.__initialized__:
+ cls._reset_()
for series_name, data in six.iteritems(cls._datapoints):
json.append({'name': series_name,
'columns': cls._fields,
@@ -144,7 +151,5 @@ def _json_body_(cls):
@classmethod
def _reset_(cls):
- """
- Reset data storage.
- """
+ """Reset data storage."""
cls._datapoints = defaultdict(list)
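
For illustration, a hypothetical `SeriesHelper` subclass wired up with the Meta attributes this diff reads (`client`, `series_name`, `fields`, `bulk_size`, `autocommit`):

```python
from influxdb.influxdb08.client import InfluxDBClient
from influxdb.influxdb08.helper import SeriesHelper

my_client = InfluxDBClient(database='mydb')


class CPUSeriesHelper(SeriesHelper):
    """Hypothetical helper writing cpu points in bulk."""

    class Meta:
        client = my_client       # required because autocommit is True
        series_name = 'cpu.load.short'
        fields = ['time', 'server_name', 'value']
        bulk_size = 5            # forced to 1 if set below 1
        autocommit = True


# Each instantiation records one immutable point; points are flushed
# automatically once bulk_size is reached, or explicitly via commit().
CPUSeriesHelper(time=1257894000, server_name='server01', value=0.64)
CPUSeriesHelper.commit()
```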
diff --git a/influxdb/line_protocol.py b/influxdb/line_protocol.py
index 74430a28..25dd2ad7 100644
--- a/influxdb/line_protocol.py
+++ b/influxdb/line_protocol.py
@@ -1,37 +1,58 @@
# -*- coding: utf-8 -*-
+"""Define the line_protocol handler."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
from __future__ import unicode_literals
-from calendar import timegm
-from copy import copy
from datetime import datetime
from numbers import Integral
+from pytz import UTC
from dateutil.parser import parse
-from six import binary_type, text_type, integer_types
+from six import binary_type, text_type, integer_types, PY2
+
+EPOCH = UTC.localize(datetime.utcfromtimestamp(0))
+
+
+def _to_nanos(timestamp):
+ delta = timestamp - EPOCH
+ nanos_in_days = delta.days * 86400 * 10 ** 9
+ nanos_in_seconds = delta.seconds * 10 ** 9
+ nanos_in_micros = delta.microseconds * 10 ** 3
+ return nanos_in_days + nanos_in_seconds + nanos_in_micros
def _convert_timestamp(timestamp, precision=None):
if isinstance(timestamp, Integral):
return timestamp # assume precision is correct if timestamp is int
+
if isinstance(_get_unicode(timestamp), text_type):
timestamp = parse(timestamp)
+
if isinstance(timestamp, datetime):
- ns = (
- timegm(timestamp.utctimetuple()) * 1e9 +
- timestamp.microsecond * 1e3
- )
+ if not timestamp.tzinfo:
+ timestamp = UTC.localize(timestamp)
+
+ ns = _to_nanos(timestamp)
if precision is None or precision == 'n':
return ns
- elif precision == 'u':
- return ns / 1e3
- elif precision == 'ms':
- return ns / 1e6
- elif precision == 's':
- return ns / 1e9
- elif precision == 'm':
- return ns / 1e9 / 60
- elif precision == 'h':
- return ns / 1e9 / 3600
+
+ if precision == 'u':
+ return ns / 10**3
+
+ if precision == 'ms':
+ return ns / 10**6
+
+ if precision == 's':
+ return ns / 10**9
+
+ if precision == 'm':
+ return ns / 10**9 / 60
+
+ if precision == 'h':
+ return ns / 10**9 / 3600
raise ValueError(timestamp)
@@ -46,93 +67,144 @@ def _escape_tag(tag):
",", "\\,"
).replace(
"=", "\\="
+ ).replace(
+ "\n", "\\n"
)
+def _escape_tag_value(value):
+ ret = _escape_tag(value)
+ if ret.endswith('\\'):
+ ret += ' '
+ return ret
+
+
+def quote_ident(value):
+ """Indent the quotes."""
+ return "\"{}\"".format(value
+ .replace("\\", "\\\\")
+ .replace("\"", "\\\"")
+ .replace("\n", "\\n"))
+
+
+def quote_literal(value):
+ """Quote provided literal."""
+ return "'{}'".format(value
+ .replace("\\", "\\\\")
+ .replace("'", "\\'"))
+
+
+def _is_float(value):
+ try:
+ float(value)
+ except (TypeError, ValueError):
+ return False
+
+ return True
+
+
def _escape_value(value):
+ if value is None:
+ return ''
+
value = _get_unicode(value)
- if isinstance(value, text_type) and value != '':
- return "\"{}\"".format(
- value.replace(
- "\"", "\\\""
- ).replace(
- "\n", "\\n"
- )
- )
- elif isinstance(value, integer_types) and not isinstance(value, bool):
+ if isinstance(value, text_type):
+ return quote_ident(value)
+
+ if isinstance(value, integer_types) and not isinstance(value, bool):
return str(value) + 'i'
- else:
+
+ if isinstance(value, bool):
return str(value)
+ if _is_float(value):
+ return repr(float(value))
+
+ return str(value)
+
def _get_unicode(data, force=False):
- """
- Try to return a text aka unicode object from the given data.
- """
+ """Try to return a text aka unicode object from the given data."""
if isinstance(data, binary_type):
return data.decode('utf-8')
- elif data is None:
+
+ if data is None:
return ''
- elif force:
+
+ if force:
+ if PY2:
+ return unicode(data)
return str(data)
- else:
- return data
+
+ return data
+
+
+def make_line(measurement, tags=None, fields=None, time=None, precision=None):
+ """Extract the actual point from a given measurement line."""
+ tags = tags or {}
+ fields = fields or {}
+
+ line = _escape_tag(_get_unicode(measurement))
+
+ # tags should be sorted client-side to take load off server
+ tag_list = []
+ for tag_key in sorted(tags.keys()):
+ key = _escape_tag(tag_key)
+ value = _escape_tag(tags[tag_key])
+
+ if key != '' and value != '':
+ tag_list.append(
+ "{key}={value}".format(key=key, value=value)
+ )
+
+ if tag_list:
+ line += ',' + ','.join(tag_list)
+
+ field_list = []
+ for field_key in sorted(fields.keys()):
+ key = _escape_tag(field_key)
+ value = _escape_value(fields[field_key])
+
+ if key != '' and value != '':
+ field_list.append("{key}={value}".format(
+ key=key,
+ value=value
+ ))
+
+ if field_list:
+ line += ' ' + ','.join(field_list)
+
+ if time is not None:
+ timestamp = _get_unicode(str(int(
+ _convert_timestamp(time, precision)
+ )))
+ line += ' ' + timestamp
+
+ return line
def make_lines(data, precision=None):
- """
+ """Extract points from given dict.
+
Extracts the points from the given dict and returns a Unicode string
matching the line protocol introduced in InfluxDB 0.9.0.
"""
lines = []
- static_tags = data.get('tags', None)
+ static_tags = data.get('tags')
for point in data['points']:
- elements = []
-
- # add measurement name
- measurement = _escape_tag(_get_unicode(
- point.get('measurement', data.get('measurement'))
- ))
- key_values = [measurement]
-
- # add tags
- if static_tags is None:
- tags = point.get('tags', {})
+ if static_tags:
+ tags = dict(static_tags) # make a copy, since we'll modify
+ tags.update(point.get('tags') or {})
else:
- tags = copy(static_tags)
- tags.update(point.get('tags', {}))
-
- # tags should be sorted client-side to take load off server
- for tag_key in sorted(tags.keys()):
- key = _escape_tag(tag_key)
- value = _escape_tag(tags[tag_key])
-
- if key != '' and value != '':
- key_values.append("{key}={value}".format(key=key, value=value))
- key_values = ','.join(key_values)
- elements.append(key_values)
-
- # add fields
- field_values = []
- for field_key in sorted(point['fields'].keys()):
- key = _escape_tag(field_key)
- value = _escape_value(point['fields'][field_key])
- if key != '' and value != '':
- field_values.append("{key}={value}".format(
- key=key,
- value=value
- ))
- field_values = ','.join(field_values)
- elements.append(field_values)
-
- # add timestamp
- if 'time' in point:
- timestamp = _get_unicode(str(int(
- _convert_timestamp(point['time'], precision)
- )))
- elements.append(timestamp)
-
- line = ' '.join(elements)
+ tags = point.get('tags') or {}
+
+ line = make_line(
+ point.get('measurement', data.get('measurement')),
+ tags=tags,
+ fields=point.get('fields'),
+ precision=precision,
+ time=point.get('time')
+ )
lines.append(line)
- lines = '\n'.join(lines)
- return lines + '\n'
+
+ return '\n'.join(lines) + '\n'
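
A sketch of the refactored line protocol builder in use; the expected output follows from the sorting, escaping, and timestamp rules above:

```python
from influxdb.line_protocol import make_lines

data = {
    "tags": {"region": "us-west"},  # static tags, merged into each point
    "points": [{
        "measurement": "cpu_load_short",
        "tags": {"host": "server01"},
        "time": "2009-11-10T23:00:00Z",
        "fields": {"value": 0.64},
    }],
}

# Tags are sorted client-side; naive timestamps are localized to UTC
# and emitted in nanoseconds by default.
print(make_lines(data))
# cpu_load_short,host=server01,region=us-west value=0.64 1257894000000000000
```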
diff --git a/influxdb/resultset.py b/influxdb/resultset.py
index de7b1a3a..ba4f3c13 100644
--- a/influxdb/resultset.py
+++ b/influxdb/resultset.py
@@ -1,4 +1,10 @@
# -*- coding: utf-8 -*-
+"""Module to prepare the resultset."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
import warnings
@@ -8,18 +14,19 @@
class ResultSet(object):
- """A wrapper around a single InfluxDB query result"""
+ """A wrapper around a single InfluxDB query result."""
def __init__(self, series, raise_errors=True):
+ """Initialize the ResultSet."""
self._raw = series
- self._error = self.raw.get('error', None)
+ self._error = self._raw.get('error', None)
if self.error is not None and raise_errors is True:
raise InfluxDBClientError(self.error)
@property
def raw(self):
- """Raw JSON from InfluxDB"""
+ """Raw JSON from InfluxDB."""
return self._raw
@raw.setter
@@ -28,24 +35,24 @@ def raw(self, value):
@property
def error(self):
- """Error returned by InfluxDB"""
+ """Error returned by InfluxDB."""
return self._error
def __getitem__(self, key):
- """
- :param key: Either a serie name, or a tags_dict, or
- a 2-tuple(serie_name, tags_dict).
- If the serie name is None (or not given) then any serie
+ """Retrieve the series name or specific set based on key.
+
+ :param key: Either a series name, or a tags_dict, or
+ a 2-tuple(series_name, tags_dict).
If the series name is None (or not given) then any series
matching the eventual given tags will be given its points
one after the other.
- To get the points of every serie in this resultset then
+ To get the points of every series in this resultset then
you have to provide None as key.
:return: A generator yielding `Point`s matching the given key.
NB:
The order in which the points are yielded is actually undefined but
it might change..
"""
-
warnings.warn(
("ResultSet's ``__getitem__`` method will be deprecated. Use"
"``get_points`` instead."),
@@ -53,10 +60,12 @@ def __getitem__(self, key):
)
if isinstance(key, tuple):
- if 2 != len(key):
+ if len(key) != 2:
raise TypeError('only 2-tuples allowed')
+
name = key[0]
tags = key[1]
+
if not isinstance(tags, dict) and tags is not None:
raise TypeError('tags should be a dict')
elif isinstance(key, dict):
@@ -69,8 +78,7 @@ def __getitem__(self, key):
return self.get_points(name, tags)
def get_points(self, measurement=None, tags=None):
- """
- Returns a generator for all the points that match the given filters.
+ """Return a generator for all the points that match the given filters.
:param measurement: The measurement name
:type measurement: str
@@ -80,37 +88,34 @@ def get_points(self, measurement=None, tags=None):
:return: Points generator
"""
-
# Raise error if measurement is not str or bytes
if not isinstance(measurement,
(bytes, type(b''.decode()), type(None))):
raise TypeError('measurement must be an str or None')
- for serie in self._get_series():
- serie_name = serie.get('measurement', serie.get('name', 'results'))
- if serie_name is None:
+ for series in self._get_series():
+ series_name = series.get('measurement',
+ series.get('name', 'results'))
+ if series_name is None:
# this is a "system" query or a query which
# doesn't return a name attribute.
# like 'show retention policies' ..
if tags is None:
- for point in serie['values']:
- yield self.point_from_cols_vals(
- serie['columns'],
- point
- )
+ for item in self._get_points_for_series(series):
+ yield item
- elif measurement in (None, serie_name):
+ elif measurement in (None, series_name):
# by default if no tags was provided then
- # we will matches every returned serie
- serie_tags = serie.get('tags', {})
- if tags is None or self._tag_matches(serie_tags, tags):
- for point in serie.get('values', []):
- yield self.point_from_cols_vals(
- serie['columns'],
- point
- )
+ # we will match every returned series
+ series_tags = series.get('tags', {})
+ for item in self._get_points_for_series(series):
+ if tags is None or \
+ self._tag_matches(item, tags) or \
+ self._tag_matches(series_tags, tags):
+ yield item
def __repr__(self):
+ """Representation of ResultSet object."""
items = []
for item in self.items():
@@ -119,60 +124,76 @@ def __repr__(self):
return "ResultSet({%s})" % ", ".join(items)
def __iter__(self):
- """ Iterating a ResultSet will yield one dict instance per serie result.
- """
+ """Yield one dict instance per series result."""
for key in self.keys():
yield list(self.__getitem__(key))
- def _tag_matches(self, tags, filter):
- """Checks if all key/values in filter match in tags"""
+ @staticmethod
+ def _tag_matches(tags, filter):
+ """Check if all key/values in filter match in tags."""
for tag_name, tag_value in filter.items():
# using _sentinel as I'm not sure that "None"
# could be used, because it could be a valid
- # serie_tags value : when a serie has no such tag
+ # series_tags value : when a series has no such tag
# then I think it's set to /null/None/.. TBC..
- serie_tag_value = tags.get(tag_name, _sentinel)
- if serie_tag_value != tag_value:
+ series_tag_value = tags.get(tag_name, _sentinel)
+ if series_tag_value != tag_value:
return False
+
return True
def _get_series(self):
- """Returns all series"""
+ """Return all series."""
return self.raw.get('series', [])
def __len__(self):
+ """Return the len of the keys in the ResultSet."""
return len(self.keys())
def keys(self):
- """
- :return: List of keys. Keys are tuples (serie_name, tags)
+ """Return the list of keys in the ResultSet.
+
+ :return: List of keys. Keys are tuples (series_name, tags)
"""
keys = []
- for serie in self._get_series():
+ for series in self._get_series():
keys.append(
- (serie.get('measurement',
- serie.get('name', 'results')),
- serie.get('tags', None))
+ (series.get('measurement',
+ series.get('name', 'results')),
+ series.get('tags', None))
)
return keys
def items(self):
- """
+ """Return the set of items from the ResultSet.
+
:return: List of tuples, (key, generator)
"""
items = []
- for serie in self._get_series():
- serie_key = (serie.get('measurement',
- serie.get('name', 'results')),
- serie.get('tags', None))
+ for series in self._get_series():
+ series_key = (series.get('measurement',
+ series.get('name', 'results')),
+ series.get('tags', None))
items.append(
- (serie_key, self[serie_key])
+ (series_key, self._get_points_for_series(series))
)
return items
+ def _get_points_for_series(self, series):
+ """Return generator of dict from columns and values of a series.
+
+ :param series: One series
+ :return: Generator of dicts
+ """
+ for point in series.get('values', []):
+ yield self.point_from_cols_vals(
+ series['columns'],
+ point
+ )
+
@staticmethod
def point_from_cols_vals(cols, vals):
- """ Creates a dict from columns and values lists
+ """Create a dict from columns and values lists.
:param cols: List of columns
:param vals: List of values
@@ -181,4 +202,5 @@ def point_from_cols_vals(cols, vals):
point = {}
for col_index, col_name in enumerate(cols):
point[col_name] = vals[col_index]
+
return point
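
A small sketch of the reworked `ResultSet` filtering; the raw payload is a hand-built stand-in for a query response:

```python
from influxdb.resultset import ResultSet

raw = {'series': [{
    'name': 'cpu_load_short',
    'tags': {'host': 'server01'},
    'columns': ['time', 'value'],
    'values': [['2009-11-10T23:00:00Z', 0.64]],
}]}

rs = ResultSet(raw)

# Tags are now matched against the series tags and the point itself.
for point in rs.get_points(measurement='cpu_load_short',
                           tags={'host': 'server01'}):
    print(point)  # {'time': '2009-11-10T23:00:00Z', 'value': 0.64}
```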
diff --git a/influxdb/tests/__init__.py b/influxdb/tests/__init__.py
index 680c1eaf..f7c5dfb9 100644
--- a/influxdb/tests/__init__.py
+++ b/influxdb/tests/__init__.py
@@ -1,13 +1,21 @@
# -*- coding: utf-8 -*-
-import unittest
+"""Configure the tests package for InfluxDBClient."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
import sys
import os
+import unittest
+
using_pypy = hasattr(sys, "pypy_version_info")
-skipIfPYpy = unittest.skipIf(using_pypy, "Skipping this test on pypy.")
+skip_if_pypy = unittest.skipIf(using_pypy, "Skipping this test on pypy.")
_skip_server_tests = os.environ.get(
'INFLUXDB_PYTHON_SKIP_SERVER_TESTS',
None) == 'True'
-skipServerTests = unittest.skipIf(_skip_server_tests,
- "Skipping server tests...")
+skip_server_tests = unittest.skipIf(_skip_server_tests,
+ "Skipping server tests...")
diff --git a/influxdb/tests/chunked_json_test.py b/influxdb/tests/chunked_json_test.py
index a9a11b69..f633bcb1 100644
--- a/influxdb/tests/chunked_json_test.py
+++ b/influxdb/tests/chunked_json_test.py
@@ -1,4 +1,10 @@
# -*- coding: utf-8 -*-
+"""Chunked JSON test."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
import unittest
@@ -6,26 +12,25 @@
class TestChunkJson(unittest.TestCase):
+ """Set up the TestChunkJson object."""
@classmethod
def setUpClass(cls):
+ """Initialize the TestChunkJson object."""
super(TestChunkJson, cls).setUpClass()
def test_load(self):
- """
- Tests reading a sequence of JSON values from a string
- """
+ """Test reading a sequence of JSON values from a string."""
example_response = \
'{"results": [{"series": [{"measurement": "sdfsdfsdf", ' \
'"columns": ["time", "value"], "values": ' \
'[["2009-11-10T23:00:00Z", 0.64]]}]}, {"series": ' \
- '[{"measurement": "cpu_load_short", "columns": ["time", "value"], ' \
+ '[{"measurement": "cpu_load_short", "columns": ["time", "value"],'\
'"values": [["2009-11-10T23:00:00Z", 0.64]]}]}]}'
res = list(chunked_json.loads(example_response))
# import ipdb; ipdb.set_trace()
- # self.assertTrue(res)
self.assertListEqual(
[
{
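
In isolation, what `chunked_json.loads` does with a stream of concatenated JSON documents (the stream below is a hand-built stand-in):

```python
from influxdb import chunked_json

# Two concatenated JSON documents in a single string.
stream = '{"results": [1]}{"results": [2]}'

print(list(chunked_json.loads(stream)))
# [{'results': [1]}, {'results': [2]}]
```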
diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py
index 8df4bd64..115fbc48 100644
--- a/influxdb/tests/client_test.py
+++ b/influxdb/tests/client_test.py
@@ -1,8 +1,7 @@
# -*- coding: utf-8 -*-
-"""
-unit tests for the InfluxDBClient.
+"""Unit tests for the InfluxDBClient.
-NB/WARNING :
+NB/WARNING:
This module implements tests for the InfluxDBClient class
but does so
+ without any server instance running
@@ -14,20 +13,30 @@
See client_test_with_server.py for tests against a running server instance.
"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+import random
+import socket
+import unittest
+import warnings
+
+import io
+import gzip
import json
+import mock
import requests
import requests.exceptions
-import socket
-import unittest
import requests_mock
-import random
+
from nose.tools import raises
-from mock import patch
-import warnings
-import mock
+from urllib3.connection import HTTPConnection
-from influxdb import InfluxDBClient, InfluxDBClusterClient
-from influxdb.client import InfluxDBServerError
+from influxdb import InfluxDBClient
+from influxdb.resultset import ResultSet
def _build_response_object(status_code=200, content=""):
@@ -38,10 +47,10 @@ def _build_response_object(status_code=200, content=""):
def _mocked_session(cli, method="GET", status_code=200, content=""):
-
method = method.upper()
def request(*args, **kwargs):
+ """Request content from the mocked session."""
c = content
# Check method
@@ -65,18 +74,14 @@ def request(*args, **kwargs):
return _build_response_object(status_code=status_code, content=c)
- mocked = patch.object(
- cli._session,
- 'request',
- side_effect=request
- )
-
- return mocked
+ return mock.patch.object(cli._session, 'request', side_effect=request)
class TestInfluxDBClient(unittest.TestCase):
+ """Set up the TestInfluxDBClient object."""
def setUp(self):
+ """Initialize an instance of TestInfluxDBClient object."""
# By default, raise exceptions on warnings
warnings.simplefilter('error', FutureWarning)
@@ -98,6 +103,7 @@ def setUp(self):
self.dsn_string = 'influxdb://uSr:pWd@my.host.fr:1886/db'
def test_scheme(self):
+ """Set up the test schema for TestInfluxDBClient object."""
cli = InfluxDBClient('host', 8086, 'username', 'password', 'database')
self.assertEqual('http://host:8086', cli._baseurl)
@@ -106,36 +112,69 @@ def test_scheme(self):
)
self.assertEqual('https://host:8086', cli._baseurl)
+ cli = InfluxDBClient(
+ 'host', 8086, 'username', 'password', 'database', ssl=True,
+ path="somepath"
+ )
+ self.assertEqual('https://host:8086/somepath', cli._baseurl)
+
+ cli = InfluxDBClient(
+ 'host', 8086, 'username', 'password', 'database', ssl=True,
+ path=None
+ )
+ self.assertEqual('https://host:8086', cli._baseurl)
+
+ cli = InfluxDBClient(
+ 'host', 8086, 'username', 'password', 'database', ssl=True,
+ path="/somepath"
+ )
+ self.assertEqual('https://host:8086/somepath', cli._baseurl)
+
def test_dsn(self):
- cli = InfluxDBClient.from_DSN(self.dsn_string)
+ """Set up the test datasource name for TestInfluxDBClient object."""
+ cli = InfluxDBClient.from_dsn('influxdb://192.168.0.1:1886')
+ self.assertEqual('http://192.168.0.1:1886', cli._baseurl)
+
+ cli = InfluxDBClient.from_dsn(self.dsn_string)
self.assertEqual('http://my.host.fr:1886', cli._baseurl)
self.assertEqual('uSr', cli._username)
self.assertEqual('pWd', cli._password)
self.assertEqual('db', cli._database)
- self.assertFalse(cli.use_udp)
+ self.assertFalse(cli._use_udp)
- cli = InfluxDBClient.from_DSN('udp+' + self.dsn_string)
- self.assertTrue(cli.use_udp)
+ cli = InfluxDBClient.from_dsn('udp+' + self.dsn_string)
+ self.assertTrue(cli._use_udp)
- cli = InfluxDBClient.from_DSN('https+' + self.dsn_string)
+ cli = InfluxDBClient.from_dsn('https+' + self.dsn_string)
self.assertEqual('https://my.host.fr:1886', cli._baseurl)
- cli = InfluxDBClient.from_DSN('https+' + self.dsn_string,
+ cli = InfluxDBClient.from_dsn('https+' + self.dsn_string,
**{'ssl': False})
self.assertEqual('http://my.host.fr:1886', cli._baseurl)
+ def test_cert(self):
+ """Test mutual TLS authentication for TestInfluxDBClient object."""
+ cli = InfluxDBClient(ssl=True, cert='/etc/pki/tls/private/dummy.crt')
+ self.assertEqual(cli._session.cert, '/etc/pki/tls/private/dummy.crt')
+
+ with self.assertRaises(ValueError):
+ cli = InfluxDBClient(cert='/etc/pki/tls/private/dummy.crt')
+
def test_switch_database(self):
+ """Test switch database in TestInfluxDBClient object."""
cli = InfluxDBClient('host', 8086, 'username', 'password', 'database')
cli.switch_database('another_database')
self.assertEqual('another_database', cli._database)
def test_switch_user(self):
+ """Test switch user in TestInfluxDBClient object."""
cli = InfluxDBClient('host', 8086, 'username', 'password', 'database')
cli.switch_user('another_username', 'another_password')
self.assertEqual('another_username', cli._username)
self.assertEqual('another_password', cli._password)
def test_write(self):
+ """Test write in TestInfluxDBClient object."""
with requests_mock.Mocker() as m:
m.register_uri(
requests_mock.POST,
@@ -160,6 +199,7 @@ def test_write(self):
)
def test_write_points(self):
+ """Test write points for TestInfluxDBClient object."""
with requests_mock.Mocker() as m:
m.register_uri(
requests_mock.POST,
@@ -177,7 +217,73 @@ def test_write_points(self):
m.last_request.body.decode('utf-8'),
)
+ def test_write_gzip(self):
+ """Test write in TestInfluxDBClient object."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204
+ )
+
+ cli = InfluxDBClient(database='db', gzip=True)
+ cli.write(
+ {"database": "mydb",
+ "retentionPolicy": "mypolicy",
+ "points": [{"measurement": "cpu_load_short",
+ "tags": {"host": "server01",
+ "region": "us-west"},
+ "time": "2009-11-10T23:00:00Z",
+ "fields": {"value": 0.64}}]}
+ )
+
+ compressed = io.BytesIO()
+ with gzip.GzipFile(
+ compresslevel=9,
+ fileobj=compressed,
+ mode='w'
+ ) as f:
+ f.write(
+ b"cpu_load_short,host=server01,region=us-west "
+ b"value=0.64 1257894000000000000\n"
+ )
+
+ self.assertEqual(
+ m.last_request.body,
+ compressed.getvalue(),
+ )
+
+ def test_write_points_gzip(self):
+ """Test write points for TestInfluxDBClient object."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204
+ )
+
+ cli = InfluxDBClient(database='db', gzip=True)
+ cli.write_points(
+ self.dummy_points,
+ )
+
+ compressed = io.BytesIO()
+ with gzip.GzipFile(
+ compresslevel=9,
+ fileobj=compressed,
+ mode='w'
+ ) as f:
+ f.write(
+ b'cpu_load_short,host=server01,region=us-west '
+ b'value=0.64 1257894000123456000\n'
+ )
+ self.assertEqual(
+ m.last_request.body,
+ compressed.getvalue(),
+ )
+
def test_write_points_toplevel_attributes(self):
+ """Test write points attrs for TestInfluxDBClient object."""
with requests_mock.Mocker() as m:
m.register_uri(
requests_mock.POST,
@@ -199,6 +305,7 @@ def test_write_points_toplevel_attributes(self):
)
def test_write_points_batch(self):
+ """Test write points batch for TestInfluxDBClient object."""
dummy_points = [
{"measurement": "cpu_usage", "tags": {"unit": "percent"},
"time": "2009-11-10T23:00:00Z", "fields": {"value": 12.34}},
@@ -226,7 +333,38 @@ def test_write_points_batch(self):
self.assertEqual(expected_last_body,
m.last_request.body.decode('utf-8'))
+ def test_write_points_batch_generator(self):
+ """Test write points batch from a generator for TestInfluxDBClient."""
+ dummy_points = [
+ {"measurement": "cpu_usage", "tags": {"unit": "percent"},
+ "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.34}},
+ {"measurement": "network", "tags": {"direction": "in"},
+ "time": "2009-11-10T23:00:00Z", "fields": {"value": 123.00}},
+ {"measurement": "network", "tags": {"direction": "out"},
+ "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.00}}
+ ]
+ dummy_points_generator = (point for point in dummy_points)
+ expected_last_body = (
+ "network,direction=out,host=server01,region=us-west "
+ "value=12.0 1257894000000000000\n"
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+ cli = InfluxDBClient(database='db')
+ cli.write_points(points=dummy_points_generator,
+ database='db',
+ tags={"host": "server01",
+ "region": "us-west"},
+ batch_size=2)
+ self.assertEqual(m.call_count, 2)
+ self.assertEqual(expected_last_body,
+ m.last_request.body.decode('utf-8'))
+
def test_write_points_udp(self):
+ """Test write points UDP for TestInfluxDBClient object."""
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
port = random.randint(4000, 8000)
s.bind(('0.0.0.0', port))
@@ -245,28 +383,15 @@ def test_write_points_udp(self):
received_data.decode()
)
- def test_write_bad_precision_udp(self):
- cli = InfluxDBClient(
- 'localhost', 8086, 'root', 'root',
- 'test', use_udp=True, udp_port=4444
- )
-
- with self.assertRaisesRegexp(
- Exception,
- "InfluxDB only supports seconds precision for udp writes"
- ):
- cli.write_points(
- self.dummy_points,
- time_precision='ms'
- )
-
@raises(Exception)
def test_write_points_fails(self):
+ """Test write points fail for TestInfluxDBClient object."""
cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
with _mocked_session(cli, 'post', 500):
cli.write_points([])
def test_write_points_with_precision(self):
+ """Test write points with precision for TestInfluxDBClient object."""
with requests_mock.Mocker() as m:
m.register_uri(
requests_mock.POST,
@@ -318,7 +443,84 @@ def test_write_points_with_precision(self):
m.last_request.body,
)
+ def test_write_points_with_consistency(self):
+ """Test write points with consistency for TestInfluxDBClient object."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ 'http://localhost:8086/write',
+ status_code=204
+ )
+
+ cli = InfluxDBClient(database='db')
+
+ cli.write_points(self.dummy_points, consistency='any')
+ self.assertEqual(
+ m.last_request.qs,
+ {'db': ['db'], 'consistency': ['any']}
+ )
+
+ def test_write_points_with_precision_udp(self):
+ """Test write points with precision for TestInfluxDBClient object."""
+ s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+ port = random.randint(4000, 8000)
+ s.bind(('0.0.0.0', port))
+
+ cli = InfluxDBClient(
+ 'localhost', 8086, 'root', 'root',
+ 'test', use_udp=True, udp_port=port
+ )
+
+ cli.write_points(self.dummy_points, time_precision='n')
+ received_data, addr = s.recvfrom(1024)
+ self.assertEqual(
+ b'cpu_load_short,host=server01,region=us-west '
+ b'value=0.64 1257894000123456000\n',
+ received_data,
+ )
+
+ cli.write_points(self.dummy_points, time_precision='u')
+ received_data, addr = s.recvfrom(1024)
+ self.assertEqual(
+ b'cpu_load_short,host=server01,region=us-west '
+ b'value=0.64 1257894000123456\n',
+ received_data,
+ )
+
+ cli.write_points(self.dummy_points, time_precision='ms')
+ received_data, addr = s.recvfrom(1024)
+ self.assertEqual(
+ b'cpu_load_short,host=server01,region=us-west '
+ b'value=0.64 1257894000123\n',
+ received_data,
+ )
+
+ cli.write_points(self.dummy_points, time_precision='s')
+ received_data, addr = s.recvfrom(1024)
+ self.assertEqual(
+ b"cpu_load_short,host=server01,region=us-west "
+ b"value=0.64 1257894000\n",
+ received_data,
+ )
+
+ cli.write_points(self.dummy_points, time_precision='m')
+ received_data, addr = s.recvfrom(1024)
+ self.assertEqual(
+ b'cpu_load_short,host=server01,region=us-west '
+ b'value=0.64 20964900\n',
+ received_data,
+ )
+
+ cli.write_points(self.dummy_points, time_precision='h')
+ received_data, addr = s.recvfrom(1024)
+ self.assertEqual(
+ b'cpu_load_short,host=server01,region=us-west '
+ b'value=0.64 349415\n',
+ received_data,
+ )
+
def test_write_points_bad_precision(self):
+ """Test write points w/bad precision TestInfluxDBClient object."""
cli = InfluxDBClient()
with self.assertRaisesRegexp(
Exception,
@@ -330,13 +532,24 @@ def test_write_points_bad_precision(self):
time_precision='g'
)
+ def test_write_points_bad_consistency(self):
+ """Test write points w/bad consistency value."""
+ cli = InfluxDBClient()
+ with self.assertRaises(ValueError):
+ cli.write_points(
+ self.dummy_points,
+ consistency='boo'
+ )
+
@raises(Exception)
def test_write_points_with_precision_fails(self):
+ """Test write points w/precision fail for TestInfluxDBClient object."""
cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
with _mocked_session(cli, 'post', 500):
cli.write_points_with_precision([])
def test_query(self):
+ """Test query method for TestInfluxDBClient object."""
example_response = (
'{"results": [{"series": [{"measurement": "sdfsdfsdf", '
'"columns": ["time", "value"], "values": '
@@ -358,8 +571,55 @@ def test_query(self):
[{'value': 0.64, 'time': '2009-11-10T23:00:00Z'}]
)
+ def test_query_msgpack(self):
+ """Test query method with a messagepack response."""
+ example_response = bytes(bytearray.fromhex(
+ "81a7726573756c74739182ac73746174656d656e745f696400a673657269"
+ "65739183a46e616d65a161a7636f6c756d6e7392a474696d65a176a67661"
+ "6c7565739192c70c05000000005d26178a019096c8cb3ff0000000000000"
+ ))
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/query",
+ request_headers={"Accept": "application/x-msgpack"},
+ headers={"Content-Type": "application/x-msgpack"},
+ content=example_response
+ )
+ rs = self.cli.query('select * from a')
+
+ self.assertListEqual(
+ list(rs.get_points()),
+ [{'v': 1.0, 'time': '2019-07-10T16:51:22.026253Z'}]
+ )
+
+ def test_select_into_post(self):
+ """Test SELECT.*INTO is POSTed."""
+ example_response = (
+ '{"results": [{"series": [{"measurement": "sdfsdfsdf", '
+ '"columns": ["time", "value"], "values": '
+ '[["2009-11-10T23:00:00Z", 0.64]]}]}, {"series": '
+ '[{"measurement": "cpu_load_short", "columns": ["time", "value"], '
+ '"values": [["2009-11-10T23:00:00Z", 0.64]]}]}]}'
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text=example_response
+ )
+ rs = self.cli.query('select * INTO newmeas from foo')
+
+ self.assertListEqual(
+ list(rs[0].get_points()),
+ [{'value': 0.64, 'time': '2009-11-10T23:00:00Z'}]
+ )
+
@unittest.skip('Not implemented for 0.9')
def test_query_chunked(self):
+ """Test chunked query for TestInfluxDBClient object."""
cli = InfluxDBClient(database='db')
example_object = {
'points': [
@@ -393,47 +653,100 @@ def test_query_chunked(self):
@raises(Exception)
def test_query_fail(self):
+ """Test query failed for TestInfluxDBClient object."""
with _mocked_session(self.cli, 'get', 401):
self.cli.query('select column_one from foo;')
- def test_create_database(self):
+ def test_ping(self):
+ """Test ping querying InfluxDB version."""
with requests_mock.Mocker() as m:
m.register_uri(
requests_mock.GET,
+ "http://localhost:8086/ping",
+ status_code=204,
+ headers={'X-Influxdb-Version': '1.2.3'}
+ )
+ version = self.cli.ping()
+ self.assertEqual(version, '1.2.3')
+
+ def test_create_database(self):
+ """Test create database for TestInfluxDBClient object."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
"http://localhost:8086/query",
text='{"results":[{}]}'
)
self.cli.create_database('new_db')
self.assertEqual(
m.last_request.qs['q'][0],
- 'create database new_db'
+ 'create database "new_db"'
+ )
+
+ def test_create_numeric_named_database(self):
+ """Test create db w/numeric name for TestInfluxDBClient object."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text='{"results":[{}]}'
+ )
+ self.cli.create_database('123')
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'create database "123"'
)
@raises(Exception)
def test_create_database_fails(self):
+ """Test create database fail for TestInfluxDBClient object."""
with _mocked_session(self.cli, 'post', 401):
self.cli.create_database('new_db')
def test_drop_database(self):
+ """Test drop database for TestInfluxDBClient object."""
with requests_mock.Mocker() as m:
m.register_uri(
- requests_mock.GET,
+ requests_mock.POST,
"http://localhost:8086/query",
text='{"results":[{}]}'
)
self.cli.drop_database('new_db')
self.assertEqual(
m.last_request.qs['q'][0],
- 'drop database new_db'
+ 'drop database "new_db"'
)
- @raises(Exception)
- def test_drop_database_fails(self):
- cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
- with _mocked_session(cli, 'delete', 401):
- cli.drop_database('old_db')
+ def test_drop_measurement(self):
+ """Test drop measurement for TestInfluxDBClient object."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text='{"results":[{}]}'
+ )
+ self.cli.drop_measurement('new_measurement')
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'drop measurement "new_measurement"'
+ )
+
+ def test_drop_numeric_named_database(self):
+ """Test drop numeric db for TestInfluxDBClient object."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text='{"results":[{}]}'
+ )
+ self.cli.drop_database('123')
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'drop database "123"'
+ )
def test_get_list_database(self):
+ """Test get list of databases for TestInfluxDBClient object."""
data = {'results': [
{'series': [
{'name': 'databases',
@@ -451,37 +764,96 @@ def test_get_list_database(self):
@raises(Exception)
def test_get_list_database_fails(self):
+ """Test get list of dbs fail for TestInfluxDBClient object."""
cli = InfluxDBClient('host', 8086, 'username', 'password')
with _mocked_session(cli, 'get', 401):
cli.get_list_database()
- def test_get_list_series(self):
- example_response = \
- '{"results": [{"series": [{"name": "cpu_load_short", "columns": ' \
- '["_id", "host", "region"], "values": ' \
- '[[1, "server01", "us-west"]]}]}]}'
+ def test_get_list_measurements(self):
+ """Test get list of measurements for TestInfluxDBClient object."""
+ data = {
+ "results": [{
+ "series": [
+ {"name": "measurements",
+ "columns": ["name"],
+ "values": [["cpu"], ["disk"]
+ ]}]}
+ ]
+ }
- with requests_mock.Mocker() as m:
- m.register_uri(
- requests_mock.GET,
- "http://localhost:8086/query",
- text=example_response
+ with _mocked_session(self.cli, 'get', 200, json.dumps(data)):
+ self.assertListEqual(
+ self.cli.get_list_measurements(),
+ [{'name': 'cpu'}, {'name': 'disk'}]
)
+ def test_get_list_series(self):
+ """Test get a list of series from the database."""
+ data = {'results': [
+ {'series': [
+ {
+ 'values': [
+ ['cpu_load_short,host=server01,region=us-west'],
+ ['memory_usage,host=server02,region=us-east']],
+ 'columns': ['key']
+ }
+ ]}
+ ]}
+
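+        # SHOW SERIES returns each series key in line-protocol form:
+        # "measurement,tag=value,...".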
+ with _mocked_session(self.cli, 'get', 200, json.dumps(data)):
self.assertListEqual(
self.cli.get_list_series(),
- [{'name': 'cpu_load_short',
- 'tags': [
- {'host': 'server01', '_id': 1, 'region': 'us-west'}
- ]}]
- )
+ ['cpu_load_short,host=server01,region=us-west',
+ 'memory_usage,host=server02,region=us-east'])
+
+ def test_get_list_series_with_measurement(self):
+ """Test get a list of series from the database by filter."""
+ data = {'results': [
+ {'series': [
+ {
+ 'values': [
+ ['cpu_load_short,host=server01,region=us-west']],
+ 'columns': ['key']
+ }
+ ]}
+ ]}
+
+ with _mocked_session(self.cli, 'get', 200, json.dumps(data)):
+ self.assertListEqual(
+ self.cli.get_list_series(measurement='cpu_load_short'),
+ ['cpu_load_short,host=server01,region=us-west'])
+
+ def test_get_list_series_with_tags(self):
+ """Test get a list of series from the database by tags."""
+ data = {'results': [
+ {'series': [
+ {
+ 'values': [
+ ['cpu_load_short,host=server01,region=us-west']],
+ 'columns': ['key']
+ }
+ ]}
+ ]}
+
+ with _mocked_session(self.cli, 'get', 200, json.dumps(data)):
+ self.assertListEqual(
+ self.cli.get_list_series(tags={'region': 'us-west'}),
+ ['cpu_load_short,host=server01,region=us-west'])
+
+ @raises(Exception)
+ def test_get_list_series_fails(self):
+ """Test get a list of series from the database but fail."""
+ cli = InfluxDBClient('host', 8086, 'username', 'password')
+ with _mocked_session(cli, 'get', 401):
+ cli.get_list_series()
def test_create_retention_policy_default(self):
+ """Test create default ret policy for TestInfluxDBClient object."""
example_response = '{"results":[{}]}'
with requests_mock.Mocker() as m:
m.register_uri(
- requests_mock.GET,
+ requests_mock.POST,
"http://localhost:8086/query",
text=example_response
)
@@ -491,16 +863,17 @@ def test_create_retention_policy_default(self):
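+        # "shard duration 0s" lets the server apply its default shard
+        # group duration to the policy.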
self.assertEqual(
m.last_request.qs['q'][0],
- 'create retention policy somename on '
- 'db duration 1d replication 4 default'
+ 'create retention policy "somename" on '
+ '"db" duration 1d replication 4 shard duration 0s default'
)
def test_create_retention_policy(self):
+ """Test create retention policy for TestInfluxDBClient object."""
example_response = '{"results":[{}]}'
with requests_mock.Mocker() as m:
m.register_uri(
- requests_mock.GET,
+ requests_mock.POST,
"http://localhost:8086/query",
text=example_response
)
@@ -510,16 +883,60 @@ def test_create_retention_policy(self):
self.assertEqual(
m.last_request.qs['q'][0],
- 'create retention policy somename on '
- 'db duration 1d replication 4'
+ 'create retention policy "somename" on '
+ '"db" duration 1d replication 4 shard duration 0s'
+ )
+
+ def test_create_retention_policy_shard_duration(self):
+ """Test create retention policy with a custom shard duration."""
+ example_response = '{"results":[{}]}'
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text=example_response
+ )
+ self.cli.create_retention_policy(
+ 'somename2', '1d', 4, database='db',
+ shard_duration='1h'
+ )
+
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'create retention policy "somename2" on '
+ '"db" duration 1d replication 4 shard duration 1h'
+ )
+
+ def test_create_retention_policy_shard_duration_default(self):
+ """Test create retention policy with a default shard duration."""
+ example_response = '{"results":[{}]}'
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text=example_response
+ )
+ self.cli.create_retention_policy(
+ 'somename3', '1d', 4, database='db',
+ shard_duration='1h', default=True
+ )
+
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'create retention policy "somename3" on '
+ '"db" duration 1d replication 4 shard duration 1h '
+ 'default'
)
def test_alter_retention_policy(self):
+ """Test alter retention policy for TestInfluxDBClient object."""
example_response = '{"results":[{}]}'
with requests_mock.Mocker() as m:
m.register_uri(
- requests_mock.GET,
+ requests_mock.POST,
"http://localhost:8086/query",
text=example_response
)
@@ -528,14 +945,22 @@ def test_alter_retention_policy(self):
duration='4d')
self.assertEqual(
m.last_request.qs['q'][0],
- 'alter retention policy somename on db duration 4d'
+ 'alter retention policy "somename" on "db" duration 4d'
)
# Test alter replication
self.cli.alter_retention_policy('somename', 'db',
replication=4)
self.assertEqual(
m.last_request.qs['q'][0],
- 'alter retention policy somename on db replication 4'
+ 'alter retention policy "somename" on "db" replication 4'
+ )
+
+ # Test alter shard duration
+ self.cli.alter_retention_policy('somename', 'db',
+ shard_duration='1h')
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'alter retention policy "somename" on "db" shard duration 1h'
)
# Test alter default
@@ -543,16 +968,41 @@ def test_alter_retention_policy(self):
default=True)
self.assertEqual(
m.last_request.qs['q'][0],
- 'alter retention policy somename on db default'
+ 'alter retention policy "somename" on "db" default'
)
@raises(Exception)
def test_alter_retention_policy_invalid(self):
+ """Test invalid alter ret policy for TestInfluxDBClient object."""
cli = InfluxDBClient('host', 8086, 'username', 'password')
with _mocked_session(cli, 'get', 400):
self.cli.alter_retention_policy('somename', 'db')
+ def test_drop_retention_policy(self):
+ """Test drop retention policy for TestInfluxDBClient object."""
+ example_response = '{"results":[{}]}'
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text=example_response
+ )
+ self.cli.drop_retention_policy('somename', 'db')
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'drop retention policy "somename" on "db"'
+ )
+
+ @raises(Exception)
+ def test_drop_retention_policy_fails(self):
+ """Test failed drop ret policy for TestInfluxDBClient object."""
+ cli = InfluxDBClient('host', 8086, 'username', 'password')
+ with _mocked_session(cli, 'delete', 401):
+ cli.drop_retention_policy('default', 'db')
+
def test_get_list_retention_policies(self):
+ """Test get retention policies for TestInfluxDBClient object."""
example_response = \
'{"results": [{"series": [{"values": [["fsfdsdf", "24h0m0s", 2]],'\
' "columns": ["name", "duration", "replicaN"]}]}]}'
@@ -564,27 +1014,30 @@ def test_get_list_retention_policies(self):
text=example_response
)
self.assertListEqual(
- self.cli.get_list_retention_policies(),
+ self.cli.get_list_retention_policies("db"),
[{'duration': '24h0m0s',
'name': 'fsfdsdf', 'replicaN': 2}]
)
@mock.patch('requests.Session.request')
def test_request_retry(self, mock_request):
- """Tests that two connection errors will be handled"""
-
+ """Test that two connection errors will be handled."""
class CustomMock(object):
- i = 0
+ """Create custom mock object for test."""
+
+ def __init__(self):
+ self.i = 0
def connection_error(self, *args, **kwargs):
+ """Handle a connection error for the CustomMock object."""
self.i += 1
if self.i < 3:
raise requests.exceptions.ConnectionError
- else:
- r = requests.Response()
- r.status_code = 204
- return r
+
+ r = requests.Response()
+ r.status_code = 204
+ return r
mock_request.side_effect = CustomMock().connection_error
@@ -595,16 +1048,19 @@ def connection_error(self, *args, **kwargs):
@mock.patch('requests.Session.request')
def test_request_retry_raises(self, mock_request):
- """Tests that three connection errors will not be handled"""
-
+        """Test that three consecutive HTTP errors are not handled."""
class CustomMock(object):
- i = 0
+ """Create custom mock object for test."""
+
+ def __init__(self):
+ self.i = 0
def connection_error(self, *args, **kwargs):
+ """Handle a connection error for the CustomMock object."""
self.i += 1
if self.i < 4:
- raise requests.exceptions.ConnectionError
+ raise requests.exceptions.HTTPError
else:
r = requests.Response()
r.status_code = 200
@@ -614,10 +1070,67 @@ def connection_error(self, *args, **kwargs):
cli = InfluxDBClient(database='db')
+ with self.assertRaises(requests.exceptions.HTTPError):
+ cli.write_points(self.dummy_points)
+
+ @mock.patch('requests.Session.request')
+ def test_random_request_retry(self, mock_request):
+ """Test that a random number of connection errors will be handled."""
+ class CustomMock(object):
+ """Create custom mock object for test."""
+
+ def __init__(self, retries):
+ self.i = 0
+ self.retries = retries
+
+ def connection_error(self, *args, **kwargs):
+ """Handle a connection error for the CustomMock object."""
+ self.i += 1
+
+ if self.i < self.retries:
+ raise requests.exceptions.ConnectionError
+ else:
+ r = requests.Response()
+ r.status_code = 204
+ return r
+
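+        # With retries=N the client makes up to N attempts, so it tolerates
+        # N - 1 consecutive connection errors before succeeding.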
+ retries = random.randint(1, 5)
+ mock_request.side_effect = CustomMock(retries).connection_error
+
+ cli = InfluxDBClient(database='db', retries=retries)
+ cli.write_points(self.dummy_points)
+
+ @mock.patch('requests.Session.request')
+ def test_random_request_retry_raises(self, mock_request):
+ """Test a random number of conn errors plus one will not be handled."""
+ class CustomMock(object):
+ """Create custom mock object for test."""
+
+ def __init__(self, retries):
+ self.i = 0
+ self.retries = retries
+
+ def connection_error(self, *args, **kwargs):
+ """Handle a connection error for the CustomMock object."""
+ self.i += 1
+
+ if self.i < self.retries + 1:
+ raise requests.exceptions.ConnectionError
+ else:
+ r = requests.Response()
+ r.status_code = 200
+ return r
+
+ retries = random.randint(1, 5)
+ mock_request.side_effect = CustomMock(retries).connection_error
+
+ cli = InfluxDBClient(database='db', retries=retries)
+
with self.assertRaises(requests.exceptions.ConnectionError):
cli.write_points(self.dummy_points)
def test_get_list_users(self):
+ """Test get users for TestInfluxDBClient object."""
example_response = (
'{"results":[{"series":[{"columns":["user","admin"],'
'"values":[["test",false]]}]}]}'
@@ -636,6 +1149,7 @@ def test_get_list_users(self):
)
def test_get_list_users_empty(self):
+ """Test get empty userlist for TestInfluxDBClient object."""
example_response = (
'{"results":[{"series":[{"columns":["user","admin"]}]}]}'
)
@@ -648,12 +1162,37 @@ def test_get_list_users_empty(self):
self.assertListEqual(self.cli.get_list_users(), [])
+ def test_grant_admin_privileges(self):
+ """Test grant admin privs for TestInfluxDBClient object."""
+ example_response = '{"results":[{}]}'
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text=example_response
+ )
+ self.cli.grant_admin_privileges('test')
+
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'grant all privileges to "test"'
+ )
+
+ @raises(Exception)
+ def test_grant_admin_privileges_invalid(self):
+ """Test grant invalid admin privs for TestInfluxDBClient object."""
+ cli = InfluxDBClient('host', 8086, 'username', 'password')
+ with _mocked_session(cli, 'get', 400):
+            cli.grant_admin_privileges('')
+
def test_revoke_admin_privileges(self):
+ """Test revoke admin privs for TestInfluxDBClient object."""
example_response = '{"results":[{}]}'
with requests_mock.Mocker() as m:
m.register_uri(
- requests_mock.GET,
+ requests_mock.POST,
"http://localhost:8086/query",
text=example_response
)
@@ -661,21 +1200,23 @@ def test_revoke_admin_privileges(self):
self.assertEqual(
m.last_request.qs['q'][0],
- 'revoke all privileges from test'
+ 'revoke all privileges from "test"'
)
@raises(Exception)
def test_revoke_admin_privileges_invalid(self):
+ """Test revoke invalid admin privs for TestInfluxDBClient object."""
cli = InfluxDBClient('host', 8086, 'username', 'password')
with _mocked_session(cli, 'get', 400):
self.cli.revoke_admin_privileges('')
def test_grant_privilege(self):
+ """Test grant privs for TestInfluxDBClient object."""
example_response = '{"results":[{}]}'
with requests_mock.Mocker() as m:
m.register_uri(
- requests_mock.GET,
+ requests_mock.POST,
"http://localhost:8086/query",
text=example_response
)
@@ -683,21 +1224,23 @@ def test_grant_privilege(self):
self.assertEqual(
m.last_request.qs['q'][0],
- 'grant read on testdb to test'
+ 'grant read on "testdb" to "test"'
)
@raises(Exception)
def test_grant_privilege_invalid(self):
+ """Test grant invalid privs for TestInfluxDBClient object."""
cli = InfluxDBClient('host', 8086, 'username', 'password')
with _mocked_session(cli, 'get', 400):
self.cli.grant_privilege('', 'testdb', 'test')
def test_revoke_privilege(self):
+ """Test revoke privs for TestInfluxDBClient object."""
example_response = '{"results":[{}]}'
with requests_mock.Mocker() as m:
m.register_uri(
- requests_mock.GET,
+ requests_mock.POST,
"http://localhost:8086/query",
text=example_response
)
@@ -705,26 +1248,305 @@ def test_revoke_privilege(self):
self.assertEqual(
m.last_request.qs['q'][0],
- 'revoke read on testdb from test'
+ 'revoke read on "testdb" from "test"'
)
@raises(Exception)
def test_revoke_privilege_invalid(self):
+ """Test revoke invalid privs for TestInfluxDBClient object."""
cli = InfluxDBClient('host', 8086, 'username', 'password')
with _mocked_session(cli, 'get', 400):
self.cli.revoke_privilege('', 'testdb', 'test')
+ def test_get_list_privileges(self):
+ """Test get list of privs for TestInfluxDBClient object."""
+ data = {'results': [
+ {'series': [
+ {'columns': ['database', 'privilege'],
+ 'values': [
+ ['db1', 'READ'],
+ ['db2', 'ALL PRIVILEGES'],
+ ['db3', 'NO PRIVILEGES']]}
+ ]}
+ ]}
+
+ with _mocked_session(self.cli, 'get', 200, json.dumps(data)):
+ self.assertListEqual(
+ self.cli.get_list_privileges('test'),
+ [{'database': 'db1', 'privilege': 'READ'},
+ {'database': 'db2', 'privilege': 'ALL PRIVILEGES'},
+ {'database': 'db3', 'privilege': 'NO PRIVILEGES'}]
+ )
+
+ @raises(Exception)
+ def test_get_list_privileges_fails(self):
+ """Test failed get list of privs for TestInfluxDBClient object."""
+ cli = InfluxDBClient('host', 8086, 'username', 'password')
+ with _mocked_session(cli, 'get', 401):
+ cli.get_list_privileges('test')
+
+ def test_get_list_continuous_queries(self):
+ """Test getting a list of continuous queries."""
+ data = {
+ "results": [
+ {
+ "statement_id": 0,
+ "series": [
+ {
+ "name": "testdb01",
+ "columns": ["name", "query"],
+ "values": [["testname01", "testquery01"],
+ ["testname02", "testquery02"]]
+ },
+ {
+ "name": "testdb02",
+ "columns": ["name", "query"],
+ "values": [["testname03", "testquery03"]]
+ },
+ {
+ "name": "testdb03",
+ "columns": ["name", "query"]
+ }
+ ]
+ }
+ ]
+ }
+
+ with _mocked_session(self.cli, 'get', 200, json.dumps(data)):
+ self.assertListEqual(
+ self.cli.get_list_continuous_queries(),
+ [
+ {
+ 'testdb01': [
+ {'name': 'testname01', 'query': 'testquery01'},
+ {'name': 'testname02', 'query': 'testquery02'}
+ ]
+ },
+ {
+ 'testdb02': [
+ {'name': 'testname03', 'query': 'testquery03'}
+ ]
+ },
+ {
+ 'testdb03': []
+ }
+ ]
+ )
+
+ @raises(Exception)
+ def test_get_list_continuous_queries_fails(self):
+ """Test failing to get a list of continuous queries."""
+ with _mocked_session(self.cli, 'get', 400):
+ self.cli.get_list_continuous_queries()
+
+ def test_create_continuous_query(self):
+ """Test continuous query creation."""
+ data = {"results": [{}]}
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/query",
+ text=json.dumps(data)
+ )
+ query = 'SELECT count("value") INTO "6_months"."events" FROM ' \
+ '"events" GROUP BY time(10m)'
+ self.cli.create_continuous_query('cq_name', query, 'db_name')
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'create continuous query "cq_name" on "db_name" begin select '
+ 'count("value") into "6_months"."events" from "events" group '
+ 'by time(10m) end'
+ )
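+            # A fourth argument becomes the continuous query's RESAMPLE
+            # clause, as the next assertion shows.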
+ self.cli.create_continuous_query('cq_name', query, 'db_name',
+ 'EVERY 10s FOR 2m')
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'create continuous query "cq_name" on "db_name" resample '
+ 'every 10s for 2m begin select count("value") into '
+ '"6_months"."events" from "events" group by time(10m) end'
+ )
+
+ @raises(Exception)
+ def test_create_continuous_query_fails(self):
+ """Test failing to create a continuous query."""
+ with _mocked_session(self.cli, 'get', 400):
+ self.cli.create_continuous_query('cq_name', 'select', 'db_name')
+
+ def test_drop_continuous_query(self):
+ """Test dropping a continuous query."""
+ data = {"results": [{}]}
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/query",
+ text=json.dumps(data)
+ )
+ self.cli.drop_continuous_query('cq_name', 'db_name')
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'drop continuous query "cq_name" on "db_name"'
+ )
+
+ @raises(Exception)
+ def test_drop_continuous_query_fails(self):
+ """Test failing to drop a continuous query."""
+ with _mocked_session(self.cli, 'get', 400):
+ self.cli.drop_continuous_query('cq_name', 'db_name')
+
+ def test_invalid_port_fails(self):
+ """Test invalid port fail for TestInfluxDBClient object."""
+ with self.assertRaises(ValueError):
+ InfluxDBClient('host', '80/redir', 'username', 'password')
+
+ def test_chunked_response(self):
+ """Test chunked response for TestInfluxDBClient object."""
+ example_response = \
+ u'{"results":[{"statement_id":0,"series":[{"columns":["key"],' \
+ '"values":[["cpu"],["memory"],["iops"],["network"]],"partial":' \
+ 'true}],"partial":true}]}\n{"results":[{"statement_id":0,' \
+ '"series":[{"columns":["key"],"values":[["qps"],["uptime"],' \
+ '["df"],["mount"]]}]}]}\n'
+
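+        # Chunked responses arrive as newline-delimited JSON documents; the
+        # "partial" flags mark chunks continued in the next document.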
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/query",
+ text=example_response
+ )
+ response = self.cli.query('show series',
+ chunked=True, chunk_size=4)
+ res = list(response)
+ self.assertTrue(len(res) == 2)
+ self.assertEqual(res[0].__repr__(), ResultSet(
+ {'series': [{
+ 'columns': ['key'],
+ 'values': [['cpu'], ['memory'], ['iops'], ['network']]
+ }]}).__repr__())
+ self.assertEqual(res[1].__repr__(), ResultSet(
+ {'series': [{
+ 'columns': ['key'],
+ 'values': [['qps'], ['uptime'], ['df'], ['mount']]
+ }]}).__repr__())
+
+ def test_auth_default(self):
+ """Test auth with default settings."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/ping",
+ status_code=204,
+ headers={'X-Influxdb-Version': '1.2.3'}
+ )
+
+ cli = InfluxDBClient()
+ cli.ping()
+
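+            # "cm9vdDpyb290" is base64 for "root:root", the client's
+            # default credentials.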
+ self.assertEqual(m.last_request.headers["Authorization"],
+ "Basic cm9vdDpyb290")
+
+ def test_auth_username_password(self):
+ """Test auth with custom username and password."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/ping",
+ status_code=204,
+ headers={'X-Influxdb-Version': '1.2.3'}
+ )
+
+ cli = InfluxDBClient(username='my-username',
+ password='my-password')
+ cli.ping()
+
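+            # base64("my-username:my-password")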
+ self.assertEqual(m.last_request.headers["Authorization"],
+ "Basic bXktdXNlcm5hbWU6bXktcGFzc3dvcmQ=")
+
+ def test_auth_username_password_none(self):
+ """Test auth with not defined username or password."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/ping",
+ status_code=204,
+ headers={'X-Influxdb-Version': '1.2.3'}
+ )
+
+ cli = InfluxDBClient(username=None, password=None)
+ cli.ping()
+ self.assertFalse('Authorization' in m.last_request.headers)
+
+ cli = InfluxDBClient(username=None)
+ cli.ping()
+ self.assertFalse('Authorization' in m.last_request.headers)
+
+ cli = InfluxDBClient(password=None)
+ cli.ping()
+ self.assertFalse('Authorization' in m.last_request.headers)
+
+ def test_auth_token(self):
+ """Test auth with custom authorization header."""
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/ping",
+ status_code=204,
+ headers={'X-Influxdb-Version': '1.2.3'}
+ )
+
+ cli = InfluxDBClient(username=None, password=None,
+ headers={"Authorization": "my-token"})
+ cli.ping()
+ self.assertEqual(m.last_request.headers["Authorization"],
+ "my-token")
+
+ def test_custom_socket_options(self):
+ """Test custom socket options."""
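+        # SO_KEEPALIVE enables TCP keepalive; KEEPINTVL and KEEPCNT set the
+        # probe interval (seconds) and probe count before dropping the peer.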
+ test_socket_options = HTTPConnection.default_socket_options + \
+ [(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
+ (socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 60),
+ (socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 15)]
+
+ cli = InfluxDBClient(username=None, password=None,
+ socket_options=test_socket_options)
+
+        self.assertEqual(cli._session.adapters.get("http://").socket_options,
+                         test_socket_options)
+        self.assertEqual(cli._session.adapters.get("http://").poolmanager.
+                         connection_pool_kw.get("socket_options"),
+                         test_socket_options)
+
+ connection_pool = cli._session.adapters.get("http://").poolmanager \
+ .connection_from_url(
+ url="http://localhost:8086")
+ new_connection = connection_pool._new_conn()
+        self.assertEqual(new_connection.socket_options, test_socket_options)
+
+ def test_none_socket_options(self):
+ """Test default socket options."""
+ cli = InfluxDBClient(username=None, password=None)
+        self.assertEqual(cli._session.adapters.get("http://").socket_options,
+                         None)
+ connection_pool = cli._session.adapters.get("http://").poolmanager \
+ .connection_from_url(
+ url="http://localhost:8086")
+ new_connection = connection_pool._new_conn()
+        self.assertEqual(new_connection.socket_options,
+                         HTTPConnection.default_socket_options)
+
class FakeClient(InfluxDBClient):
+ """Set up a fake client instance of InfluxDBClient."""
def __init__(self, *args, **kwargs):
+ """Initialize an instance of the FakeClient object."""
super(FakeClient, self).__init__(*args, **kwargs)
def query(self,
query,
- params={},
+ params=None,
expected_response_code=200,
database=None):
+ """Query data from the FakeClient object."""
if query == 'Fail':
raise Exception("Fail")
elif query == 'Fail once' and self._host == 'host1':
@@ -733,107 +1555,3 @@ def query(self,
raise Exception("Fail Twice")
else:
return "Success"
-
-
-class TestInfluxDBClusterClient(unittest.TestCase):
-
- def setUp(self):
- # By default, raise exceptions on warnings
- warnings.simplefilter('error', FutureWarning)
-
- self.hosts = [('host1', 8086), ('host2', 8086), ('host3', 8086)]
- self.dsn_string = 'influxdb://uSr:pWd@host1:8086,uSr:pWd@host2:8086/db'
-
- def test_init(self):
- cluster = InfluxDBClusterClient(hosts=self.hosts,
- username='username',
- password='password',
- database='database',
- shuffle=False,
- client_base_class=FakeClient)
- self.assertEqual(3, len(cluster.hosts))
- self.assertEqual(0, len(cluster.bad_hosts))
- self.assertIn((cluster._client._host,
- cluster._client._port), cluster.hosts)
-
- def test_one_server_fails(self):
- cluster = InfluxDBClusterClient(hosts=self.hosts,
- database='database',
- shuffle=False,
- client_base_class=FakeClient)
- self.assertEqual('Success', cluster.query('Fail once'))
- self.assertEqual(2, len(cluster.hosts))
- self.assertEqual(1, len(cluster.bad_hosts))
-
- def test_two_servers_fail(self):
- cluster = InfluxDBClusterClient(hosts=self.hosts,
- database='database',
- shuffle=False,
- client_base_class=FakeClient)
- self.assertEqual('Success', cluster.query('Fail twice'))
- self.assertEqual(1, len(cluster.hosts))
- self.assertEqual(2, len(cluster.bad_hosts))
-
- def test_all_fail(self):
- cluster = InfluxDBClusterClient(hosts=self.hosts,
- database='database',
- shuffle=True,
- client_base_class=FakeClient)
- with self.assertRaises(InfluxDBServerError):
- cluster.query('Fail')
- self.assertEqual(0, len(cluster.hosts))
- self.assertEqual(3, len(cluster.bad_hosts))
-
- def test_all_good(self):
- cluster = InfluxDBClusterClient(hosts=self.hosts,
- database='database',
- shuffle=True,
- client_base_class=FakeClient)
- self.assertEqual('Success', cluster.query(''))
- self.assertEqual(3, len(cluster.hosts))
- self.assertEqual(0, len(cluster.bad_hosts))
-
- def test_recovery(self):
- cluster = InfluxDBClusterClient(hosts=self.hosts,
- database='database',
- shuffle=True,
- client_base_class=FakeClient)
- with self.assertRaises(InfluxDBServerError):
- cluster.query('Fail')
- self.assertEqual('Success', cluster.query(''))
- self.assertEqual(1, len(cluster.hosts))
- self.assertEqual(2, len(cluster.bad_hosts))
-
- def test_dsn(self):
- cli = InfluxDBClusterClient.from_DSN(self.dsn_string)
- self.assertEqual([('host1', 8086), ('host2', 8086)], cli.hosts)
- self.assertEqual('http://host1:8086', cli._client._baseurl)
- self.assertEqual('uSr', cli._client._username)
- self.assertEqual('pWd', cli._client._password)
- self.assertEqual('db', cli._client._database)
- self.assertFalse(cli._client.use_udp)
-
- cli = InfluxDBClusterClient.from_DSN('udp+' + self.dsn_string)
- self.assertTrue(cli._client.use_udp)
-
- cli = InfluxDBClusterClient.from_DSN('https+' + self.dsn_string)
- self.assertEqual('https://host1:8086', cli._client._baseurl)
-
- cli = InfluxDBClusterClient.from_DSN('https+' + self.dsn_string,
- **{'ssl': False})
- self.assertEqual('http://host1:8086', cli._client._baseurl)
-
- def test_dsn_password_caps(self):
- cli = InfluxDBClusterClient.from_DSN(
- 'https+influxdb://usr:pWd@host:8086/db')
- self.assertEqual('pWd', cli._client._password)
-
- def test_dsn_mixed_scheme_case(self):
- cli = InfluxDBClusterClient.from_DSN(
- 'hTTps+inFLUxdb://usr:pWd@host:8086/db')
- self.assertEqual('pWd', cli._client._password)
- self.assertEqual('https://host:8086', cli._client._baseurl)
-
- cli = InfluxDBClusterClient.from_DSN(
- 'uDP+influxdb://usr:pwd@host1:8086,usr:pwd@host2:8086/db')
- self.assertTrue(cli._client.use_udp)
diff --git a/influxdb/tests/dataframe_client_test.py b/influxdb/tests/dataframe_client_test.py
index 0583ffba..87b8e0d8 100644
--- a/influxdb/tests/dataframe_client_test.py
+++ b/influxdb/tests/dataframe_client_test.py
@@ -1,39 +1,142 @@
# -*- coding: utf-8 -*-
-"""
-unit tests for misc module
-"""
-from .client_test import _mocked_session
+"""Unit tests for misc module."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from datetime import timedelta
-import unittest
import json
+import unittest
+import warnings
import requests_mock
+
from nose.tools import raises
-from datetime import timedelta
-from influxdb.tests import skipIfPYpy, using_pypy
-import warnings
+from influxdb.tests import skip_if_pypy, using_pypy
+
+from .client_test import _mocked_session
if not using_pypy:
import pandas as pd
from pandas.util.testing import assert_frame_equal
from influxdb import DataFrameClient
+ import numpy as np
-@skipIfPYpy
+@skip_if_pypy
class TestDataFrameClient(unittest.TestCase):
+ """Set up a test DataFrameClient object."""
def setUp(self):
+ """Instantiate a TestDataFrameClient object."""
# By default, raise exceptions on warnings
warnings.simplefilter('error', FutureWarning)
def test_write_points_from_dataframe(self):
+ """Test write points from df in TestDataFrameClient object."""
now = pd.Timestamp('1970-01-01 00:00+00:00')
dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
index=[now, now + timedelta(hours=1)],
columns=["column_one", "column_two",
"column_three"])
expected = (
- b"foo column_one=\"1\",column_three=1.0,column_two=1i 0\n"
- b"foo column_one=\"2\",column_three=2.0,column_two=2i "
+ b"foo column_one=\"1\",column_two=1i,column_three=1.0 0\n"
+ b"foo column_one=\"2\",column_two=2i,column_three=2.0 "
+ b"3600000000000\n"
+ )
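+        # Line protocol is "measurement fields timestamp": strings quoted,
+        # ints suffixed with "i", timestamps in nanoseconds by default.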
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+
+ cli = DataFrameClient(database='db')
+
+ cli.write_points(dataframe, 'foo')
+ self.assertEqual(m.last_request.body, expected)
+
+ cli.write_points(dataframe, 'foo', tags=None)
+ self.assertEqual(m.last_request.body, expected)
+
+ def test_dataframe_write_points_with_whitespace_measurement(self):
+ """write_points should escape white space in measurements."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
+ index=[now, now + timedelta(hours=1)],
+ columns=["column_one", "column_two",
+ "column_three"])
+ expected = (
+ b"meas\\ with\\ space "
+ b"column_one=\"1\",column_two=1i,column_three=1.0 0\n"
+ b"meas\\ with\\ space "
+ b"column_one=\"2\",column_two=2i,column_three=2.0 "
+ b"3600000000000\n"
+ )
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+ cli = DataFrameClient(database='db')
+ cli.write_points(dataframe, 'meas with space')
+ self.assertEqual(m.last_request.body, expected)
+
+ def test_dataframe_write_points_with_whitespace_in_column_names(self):
+ """write_points should escape white space in column names."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
+ index=[now, now + timedelta(hours=1)],
+ columns=["column one", "column two",
+ "column three"])
+ expected = (
+ b"foo column\\ one=\"1\",column\\ two=1i,column\\ three=1.0 0\n"
+ b"foo column\\ one=\"2\",column\\ two=2i,column\\ three=2.0 "
+ b"3600000000000\n"
+ )
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+ cli = DataFrameClient(database='db')
+ cli.write_points(dataframe, 'foo')
+ self.assertEqual(m.last_request.body, expected)
+
+ def test_write_points_from_dataframe_with_none(self):
+ """Test write points from df in TestDataFrameClient object."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.DataFrame(data=[["1", None, 1.0], ["2", 2.0, 2.0]],
+ index=[now, now + timedelta(hours=1)],
+ columns=["column_one", "column_two",
+ "column_three"])
+ expected = (
+ b"foo column_one=\"1\",column_three=1.0 0\n"
+ b"foo column_one=\"2\",column_two=2.0,column_three=2.0 "
+ b"3600000000000\n"
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+
+ cli = DataFrameClient(database='db')
+
+ cli.write_points(dataframe, 'foo')
+ self.assertEqual(m.last_request.body, expected)
+
+ cli.write_points(dataframe, 'foo', tags=None)
+ self.assertEqual(m.last_request.body, expected)
+
+ def test_write_points_from_dataframe_with_line_of_none(self):
+ """Test write points from df in TestDataFrameClient object."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.DataFrame(data=[[None, None, None], ["2", 2.0, 2.0]],
+ index=[now, now + timedelta(hours=1)],
+ columns=["column_one", "column_two",
+ "column_three"])
+ expected = (
+ b"foo column_one=\"2\",column_two=2.0,column_three=2.0 "
b"3600000000000\n"
)
@@ -50,7 +153,32 @@ def test_write_points_from_dataframe(self):
cli.write_points(dataframe, 'foo', tags=None)
self.assertEqual(m.last_request.body, expected)
+ def test_write_points_from_dataframe_with_all_none(self):
+ """Test write points from df in TestDataFrameClient object."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.DataFrame(data=[[None, None, None], [None, None, None]],
+ index=[now, now + timedelta(hours=1)],
+ columns=["column_one", "column_two",
+ "column_three"])
+ expected = (
+ b"\n"
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+
+ cli = DataFrameClient(database='db')
+
+ cli.write_points(dataframe, 'foo')
+ self.assertEqual(m.last_request.body, expected)
+
+ cli.write_points(dataframe, 'foo', tags=None)
+ self.assertEqual(m.last_request.body, expected)
+
def test_write_points_from_dataframe_in_batches(self):
+ """Test write points in batch from df in TestDataFrameClient object."""
now = pd.Timestamp('1970-01-01 00:00+00:00')
dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
index=[now, now + timedelta(hours=1)],
@@ -64,7 +192,183 @@ def test_write_points_from_dataframe_in_batches(self):
cli = DataFrameClient(database='db')
self.assertTrue(cli.write_points(dataframe, "foo", batch_size=1))
+ def test_write_points_from_dataframe_with_tag_columns(self):
+ """Test write points from df w/tag in TestDataFrameClient object."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.DataFrame(data=[['blue', 1, "1", 1, 1.0],
+ ['red', 0, "2", 2, 2.0]],
+ index=[now, now + timedelta(hours=1)],
+ columns=["tag_one", "tag_two", "column_one",
+ "column_two", "column_three"])
+ expected = (
+ b"foo,tag_one=blue,tag_two=1 "
+ b"column_one=\"1\",column_two=1i,column_three=1.0 "
+ b"0\n"
+ b"foo,tag_one=red,tag_two=0 "
+ b"column_one=\"2\",column_two=2i,column_three=2.0 "
+ b"3600000000000\n"
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+
+ cli = DataFrameClient(database='db')
+
+ cli.write_points(dataframe, 'foo',
+ tag_columns=['tag_one', 'tag_two'])
+ self.assertEqual(m.last_request.body, expected)
+
+ cli.write_points(dataframe, 'foo',
+ tag_columns=['tag_one', 'tag_two'], tags=None)
+ self.assertEqual(m.last_request.body, expected)
+
+ def test_write_points_from_dataframe_with_tag_cols_and_global_tags(self):
+ """Test write points from df w/tag + cols in TestDataFrameClient."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.DataFrame(data=[['blue', 1, "1", 1, 1.0],
+ ['red', 0, "2", 2, 2.0]],
+ index=[now, now + timedelta(hours=1)],
+ columns=["tag_one", "tag_two", "column_one",
+ "column_two", "column_three"])
+ expected = (
+ b"foo,global_tag=value,tag_one=blue,tag_two=1 "
+ b"column_one=\"1\",column_two=1i,column_three=1.0 "
+ b"0\n"
+ b"foo,global_tag=value,tag_one=red,tag_two=0 "
+ b"column_one=\"2\",column_two=2i,column_three=2.0 "
+ b"3600000000000\n"
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+
+ cli = DataFrameClient(database='db')
+
+ cli.write_points(dataframe, 'foo',
+ tag_columns=['tag_one', 'tag_two'],
+ tags={'global_tag': 'value'})
+ self.assertEqual(m.last_request.body, expected)
+
+ def test_write_points_from_dataframe_with_tag_cols_and_defaults(self):
+ """Test default write points from df w/tag in TestDataFrameClient."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.DataFrame(data=[['blue', 1, "1", 1, 1.0, 'hot'],
+ ['red', 0, "2", 2, 2.0, 'cold']],
+ index=[now, now + timedelta(hours=1)],
+ columns=["tag_one", "tag_two", "column_one",
+ "column_two", "column_three",
+ "tag_three"])
+ expected_tags_and_fields = (
+ b"foo,tag_one=blue "
+ b"column_one=\"1\",column_two=1i "
+ b"0\n"
+ b"foo,tag_one=red "
+ b"column_one=\"2\",column_two=2i "
+ b"3600000000000\n"
+ )
+
+ expected_tags_no_fields = (
+ b"foo,tag_one=blue,tag_two=1 "
+ b"column_one=\"1\",column_two=1i,column_three=1.0,"
+ b"tag_three=\"hot\" 0\n"
+ b"foo,tag_one=red,tag_two=0 "
+ b"column_one=\"2\",column_two=2i,column_three=2.0,"
+ b"tag_three=\"cold\" 3600000000000\n"
+ )
+
+ expected_fields_no_tags = (
+ b"foo,tag_one=blue,tag_three=hot,tag_two=1 "
+ b"column_one=\"1\",column_two=1i,column_three=1.0 "
+ b"0\n"
+ b"foo,tag_one=red,tag_three=cold,tag_two=0 "
+ b"column_one=\"2\",column_two=2i,column_three=2.0 "
+ b"3600000000000\n"
+ )
+
+ expected_no_tags_no_fields = (
+ b"foo "
+ b"tag_one=\"blue\",tag_two=1i,column_one=\"1\","
+ b"column_two=1i,column_three=1.0,tag_three=\"hot\" "
+ b"0\n"
+ b"foo "
+ b"tag_one=\"red\",tag_two=0i,column_one=\"2\","
+ b"column_two=2i,column_three=2.0,tag_three=\"cold\" "
+ b"3600000000000\n"
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+
+ cli = DataFrameClient(database='db')
+
+ cli.write_points(dataframe, 'foo',
+ field_columns=['column_one', 'column_two'],
+ tag_columns=['tag_one'])
+ self.assertEqual(m.last_request.body, expected_tags_and_fields)
+
+ cli.write_points(dataframe, 'foo',
+ tag_columns=['tag_one', 'tag_two'])
+ self.assertEqual(m.last_request.body, expected_tags_no_fields)
+
+ cli.write_points(dataframe, 'foo',
+ field_columns=['column_one', 'column_two',
+ 'column_three'])
+ self.assertEqual(m.last_request.body, expected_fields_no_tags)
+
+ cli.write_points(dataframe, 'foo')
+ self.assertEqual(m.last_request.body, expected_no_tags_no_fields)
+
+ def test_write_points_from_dataframe_with_tag_escaped(self):
+ """Test write points from df w/escaped tag in TestDataFrameClient."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.DataFrame(
+ data=[
+ ['blue orange', "1", 1, 'hot=cold'], # space, equal
+ ['red,green', "2", 2, r'cold\fire'], # comma, backslash
+ ['some', "2", 2, ''], # skip empty
+ ['some', "2", 2, None], # skip None
+ ['', "2", 2, None], # all tags empty
+ ],
+ index=pd.period_range(now, freq='H', periods=5),
+ columns=["tag_one", "column_one", "column_two", "tag_three"]
+ )
+
+ expected_escaped_tags = (
+ b"foo,tag_one=blue\\ orange,tag_three=hot\\=cold "
+ b"column_one=\"1\",column_two=1i "
+ b"0\n"
+ b"foo,tag_one=red\\,green,tag_three=cold\\\\fire "
+ b"column_one=\"2\",column_two=2i "
+ b"3600000000000\n"
+ b"foo,tag_one=some "
+ b"column_one=\"2\",column_two=2i "
+ b"7200000000000\n"
+ b"foo,tag_one=some "
+ b"column_one=\"2\",column_two=2i "
+ b"10800000000000\n"
+ b"foo "
+ b"column_one=\"2\",column_two=2i "
+ b"14400000000000\n"
+ )
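+        # Tag escaping: spaces, commas and "=" in tag values are
+        # backslash-escaped; empty or None tags are dropped entirely.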
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+ cli = DataFrameClient(database='db')
+ cli.write_points(dataframe, 'foo',
+ field_columns=['column_one', 'column_two'],
+ tag_columns=['tag_one', 'tag_three'])
+ self.assertEqual(m.last_request.body, expected_escaped_tags)
+
def test_write_points_from_dataframe_with_numeric_column_names(self):
+ """Test write points from df with numeric cols."""
now = pd.Timestamp('1970-01-01 00:00+00:00')
# df with numeric column names
dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
@@ -85,15 +389,136 @@ def test_write_points_from_dataframe_with_numeric_column_names(self):
self.assertEqual(m.last_request.body, expected)
+ def test_write_points_from_dataframe_with_leading_none_column(self):
+        """write_points must not emit a leading comma for null first field."""
+ dataframe = pd.DataFrame(
+ dict(
+ first=[1, None, None, 8, 9],
+ second=[2, None, None, None, 10],
+ third=[3, 4.1, None, None, 11],
+ first_tag=["one", None, None, "eight", None],
+ second_tag=["two", None, None, None, None],
+ third_tag=["three", "four", None, None, None],
+ comment=[
+ "All columns filled",
+ "First two of three empty",
+ "All empty",
+ "Last two of three empty",
+ "Empty tags with values",
+ ]
+ ),
+ index=pd.date_range(
+ start=pd.to_datetime('2018-01-01'),
+ freq='1D',
+ periods=5,
+ )
+ )
+ expected = (
+ b'foo,first_tag=one,second_tag=two,third_tag=three'
+ b' comment="All columns filled",first=1.0,second=2.0,third=3.0'
+ b' 1514764800000000000\n'
+ b'foo,third_tag=four'
+ b' comment="First two of three empty",third=4.1'
+ b' 1514851200000000000\n'
+ b'foo comment="All empty" 1514937600000000000\n'
+ b'foo,first_tag=eight'
+ b' comment="Last two of three empty",first=8.0'
+ b' 1515024000000000000\n'
+ b'foo'
+ b' comment="Empty tags with values",first=9.0,second=10.0'
+ b',third=11.0'
+ b' 1515110400000000000\n'
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+
+ cli = DataFrameClient(database='db')
+
+ colnames = [
+ "first_tag",
+ "second_tag",
+ "third_tag",
+ "comment",
+ "first",
+ "second",
+ "third"
+ ]
+ cli.write_points(dataframe.loc[:, colnames], 'foo',
+ tag_columns=[
+ "first_tag",
+ "second_tag",
+ "third_tag"])
+
+ self.assertEqual(m.last_request.body, expected)
+
+ def test_write_points_from_dataframe_with_numeric_precision(self):
+ """Test write points from df with numeric precision."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ # df with numeric column names
+ dataframe = pd.DataFrame(data=[["1", 1, 1.1111111111111],
+ ["2", 2, 2.2222222222222]],
+ index=[now, now + timedelta(hours=1)])
+
+ if np.lib.NumpyVersion(np.__version__) <= '1.13.3':
+ expected_default_precision = (
+ b'foo,hello=there 0=\"1\",1=1i,2=1.11111111111 0\n'
+ b'foo,hello=there 0=\"2\",1=2i,2=2.22222222222 3600000000000\n'
+ )
+ else:
+ expected_default_precision = (
+ b'foo,hello=there 0=\"1\",1=1i,2=1.1111111111111 0\n'
+ b'foo,hello=there 0=\"2\",1=2i,2=2.2222222222222 3600000000000\n' # noqa E501 line too long
+ )
+
+ expected_specified_precision = (
+ b'foo,hello=there 0=\"1\",1=1i,2=1.1111 0\n'
+ b'foo,hello=there 0=\"2\",1=2i,2=2.2222 3600000000000\n'
+ )
+
+ expected_full_precision = (
+ b'foo,hello=there 0=\"1\",1=1i,2=1.1111111111111 0\n'
+ b'foo,hello=there 0=\"2\",1=2i,2=2.2222222222222 3600000000000\n'
+ )
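+        # An integer numeric_precision caps the decimal digits written,
+        # while 'full' keeps full precision, per the bodies above.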
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+
+ cli = DataFrameClient(database='db')
+ cli.write_points(dataframe, "foo", {"hello": "there"})
+
+ self.assertEqual(m.last_request.body, expected_default_precision)
+
+ cli = DataFrameClient(database='db')
+ cli.write_points(dataframe, "foo", {"hello": "there"},
+ numeric_precision=4)
+
+ self.assertEqual(m.last_request.body, expected_specified_precision)
+
+ cli = DataFrameClient(database='db')
+ cli.write_points(dataframe, "foo", {"hello": "there"},
+ numeric_precision='full')
+
+ self.assertEqual(m.last_request.body, expected_full_precision)
+
def test_write_points_from_dataframe_with_period_index(self):
+ """Test write points from df with period index."""
dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
index=[pd.Period('1970-01-01'),
pd.Period('1970-01-02')],
columns=["column_one", "column_two",
"column_three"])
+
expected = (
- b"foo column_one=\"1\",column_three=1.0,column_two=1i 0\n"
- b"foo column_one=\"2\",column_three=2.0,column_two=2i "
+ b"foo column_one=\"1\",column_two=1i,column_three=1.0 0\n"
+ b"foo column_one=\"2\",column_two=2i,column_three=2.0 "
b"86400000000000\n"
)
@@ -108,6 +533,7 @@ def test_write_points_from_dataframe_with_period_index(self):
self.assertEqual(m.last_request.body, expected)
def test_write_points_from_dataframe_with_time_precision(self):
+ """Test write points from df with time precision."""
now = pd.Timestamp('1970-01-01 00:00+00:00')
dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
index=[now, now + timedelta(hours=1)],
@@ -125,54 +551,55 @@ def test_write_points_from_dataframe_with_time_precision(self):
cli.write_points(dataframe, measurement, time_precision='h')
self.assertEqual(m.last_request.qs['precision'], ['h'])
self.assertEqual(
- b'foo column_one="1",column_three=1.0,column_two=1i 0\nfoo '
- b'column_one="2",column_three=2.0,column_two=2i 1\n',
+ b'foo column_one="1",column_two=1i,column_three=1.0 0\nfoo '
+ b'column_one="2",column_two=2i,column_three=2.0 1\n',
m.last_request.body,
)
cli.write_points(dataframe, measurement, time_precision='m')
self.assertEqual(m.last_request.qs['precision'], ['m'])
self.assertEqual(
- b'foo column_one="1",column_three=1.0,column_two=1i 0\nfoo '
- b'column_one="2",column_three=2.0,column_two=2i 60\n',
+ b'foo column_one="1",column_two=1i,column_three=1.0 0\nfoo '
+ b'column_one="2",column_two=2i,column_three=2.0 60\n',
m.last_request.body,
)
cli.write_points(dataframe, measurement, time_precision='s')
self.assertEqual(m.last_request.qs['precision'], ['s'])
self.assertEqual(
- b'foo column_one="1",column_three=1.0,column_two=1i 0\nfoo '
- b'column_one="2",column_three=2.0,column_two=2i 3600\n',
+ b'foo column_one="1",column_two=1i,column_three=1.0 0\nfoo '
+ b'column_one="2",column_two=2i,column_three=2.0 3600\n',
m.last_request.body,
)
cli.write_points(dataframe, measurement, time_precision='ms')
self.assertEqual(m.last_request.qs['precision'], ['ms'])
self.assertEqual(
- b'foo column_one="1",column_three=1.0,column_two=1i 0\nfoo '
- b'column_one="2",column_three=2.0,column_two=2i 3600000\n',
+ b'foo column_one="1",column_two=1i,column_three=1.0 0\nfoo '
+ b'column_one="2",column_two=2i,column_three=2.0 3600000\n',
m.last_request.body,
)
cli.write_points(dataframe, measurement, time_precision='u')
self.assertEqual(m.last_request.qs['precision'], ['u'])
self.assertEqual(
- b'foo column_one="1",column_three=1.0,column_two=1i 0\nfoo '
- b'column_one="2",column_three=2.0,column_two=2i 3600000000\n',
+ b'foo column_one="1",column_two=1i,column_three=1.0 0\nfoo '
+ b'column_one="2",column_two=2i,column_three=2.0 3600000000\n',
m.last_request.body,
)
cli.write_points(dataframe, measurement, time_precision='n')
self.assertEqual(m.last_request.qs['precision'], ['n'])
self.assertEqual(
- b'foo column_one="1",column_three=1.0,column_two=1i 0\n'
- b'foo column_one="2",column_three=2.0,column_two=2i '
+ b'foo column_one="1",column_two=1i,column_three=1.0 0\n'
+ b'foo column_one="2",column_two=2i,column_three=2.0 '
b'3600000000000\n',
m.last_request.body,
)
@raises(TypeError)
def test_write_points_from_dataframe_fails_without_time_index(self):
+ """Test failed write points from df without time index."""
dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
columns=["column_one", "column_two",
"column_three"])
@@ -187,6 +614,7 @@ def test_write_points_from_dataframe_fails_without_time_index(self):
@raises(TypeError)
def test_write_points_from_dataframe_fails_with_series(self):
+ """Test failed write points from df with series."""
now = pd.Timestamp('1970-01-01 00:00+00:00')
dataframe = pd.Series(data=[1.0, 2.0],
index=[now, now + timedelta(hours=1)])
@@ -199,14 +627,257 @@ def test_write_points_from_dataframe_fails_with_series(self):
cli = DataFrameClient(database='db')
cli.write_points(dataframe, "foo")
+ def test_create_database(self):
+        """Test create database for TestDataFrameClient object."""
+ cli = DataFrameClient(database='db')
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text='{"results":[{}]}'
+ )
+ cli.create_database('new_db')
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'create database "new_db"'
+ )
+
+ def test_create_numeric_named_database(self):
+        """Test create db w/numeric name for TestDataFrameClient object."""
+ cli = DataFrameClient(database='db')
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text='{"results":[{}]}'
+ )
+ cli.create_database('123')
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'create database "123"'
+ )
+
+ @raises(Exception)
+ def test_create_database_fails(self):
+        """Test create database fail for TestDataFrameClient object."""
+ cli = DataFrameClient(database='db')
+ with _mocked_session(cli, 'post', 401):
+ cli.create_database('new_db')
+
+ def test_drop_database(self):
+        """Test drop database for TestDataFrameClient object."""
+ cli = DataFrameClient(database='db')
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text='{"results":[{}]}'
+ )
+ cli.drop_database('new_db')
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'drop database "new_db"'
+ )
+
+ def test_drop_measurement(self):
+        """Test drop measurement for TestDataFrameClient object."""
+ cli = DataFrameClient(database='db')
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text='{"results":[{}]}'
+ )
+ cli.drop_measurement('new_measurement')
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'drop measurement "new_measurement"'
+ )
+
+ def test_drop_numeric_named_database(self):
+        """Test drop numeric db for TestDataFrameClient object."""
+ cli = DataFrameClient(database='db')
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text='{"results":[{}]}'
+ )
+ cli.drop_database('123')
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'drop database "123"'
+ )
+
+ @raises(Exception)
+ def test_get_list_database_fails(self):
+        """Test get list of dbs fail for TestDataFrameClient object."""
+ cli = DataFrameClient('host', 8086, 'username', 'password')
+ with _mocked_session(cli, 'get', 401):
+ cli.get_list_database()
+
+ def test_get_list_measurements(self):
+        """Test get list of measurements for TestDataFrameClient object."""
+ cli = DataFrameClient(database='db')
+ data = {
+ "results": [{
+ "series": [
+ {"name": "measurements",
+ "columns": ["name"],
+ "values": [["cpu"], ["disk"]
+ ]}]}
+ ]
+ }
+
+ with _mocked_session(cli, 'get', 200, json.dumps(data)):
+ self.assertListEqual(
+ cli.get_list_measurements(),
+ [{'name': 'cpu'}, {'name': 'disk'}]
+ )
+
+ def test_create_retention_policy_default(self):
+        """Test create default ret policy for TestDataFrameClient object."""
+ cli = DataFrameClient(database='db')
+ example_response = '{"results":[{}]}'
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text=example_response
+ )
+ cli.create_retention_policy(
+ 'somename', '1d', 4, default=True, database='db'
+ )
+
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'create retention policy "somename" on '
+ '"db" duration 1d replication 4 shard duration 0s default'
+ )
+
+ def test_create_retention_policy(self):
+        """Test create retention policy for TestDataFrameClient object."""
+ cli = DataFrameClient(database='db')
+ example_response = '{"results":[{}]}'
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text=example_response
+ )
+ cli.create_retention_policy(
+ 'somename', '1d', 4, database='db'
+ )
+
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'create retention policy "somename" on '
+ '"db" duration 1d replication 4 shard duration 0s'
+ )
+
+ def test_alter_retention_policy(self):
+        """Test alter retention policy for TestDataFrameClient object."""
+ cli = DataFrameClient(database='db')
+ example_response = '{"results":[{}]}'
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text=example_response
+ )
+ # Test alter duration
+ cli.alter_retention_policy('somename', 'db',
+ duration='4d')
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'alter retention policy "somename" on "db" duration 4d'
+ )
+ # Test alter replication
+ cli.alter_retention_policy('somename', 'db',
+ replication=4)
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'alter retention policy "somename" on "db" replication 4'
+ )
+
+ # Test alter shard duration
+ cli.alter_retention_policy('somename', 'db',
+ shard_duration='1h')
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'alter retention policy "somename" on "db" shard duration 1h'
+ )
+
+ # Test alter default
+ cli.alter_retention_policy('somename', 'db',
+ default=True)
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'alter retention policy "somename" on "db" default'
+ )
+
+ @raises(Exception)
+ def test_alter_retention_policy_invalid(self):
+        """Test invalid alter ret policy for TestDataFrameClient object."""
+ cli = DataFrameClient('host', 8086, 'username', 'password')
+ with _mocked_session(cli, 'get', 400):
+ cli.alter_retention_policy('somename', 'db')
+
+ def test_drop_retention_policy(self):
+        """Test drop retention policy for TestDataFrameClient object."""
+ cli = DataFrameClient(database='db')
+ example_response = '{"results":[{}]}'
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/query",
+ text=example_response
+ )
+ cli.drop_retention_policy('somename', 'db')
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'drop retention policy "somename" on "db"'
+ )
+
+ @raises(Exception)
+ def test_drop_retention_policy_fails(self):
+        """Test failed drop ret policy for TestDataFrameClient object."""
+ cli = DataFrameClient('host', 8086, 'username', 'password')
+ with _mocked_session(cli, 'delete', 401):
+ cli.drop_retention_policy('default', 'db')
+
+ def test_get_list_retention_policies(self):
+        """Test get retention policies for TestDataFrameClient object."""
+ cli = DataFrameClient(database='db')
+ example_response = \
+ '{"results": [{"series": [{"values": [["fsfdsdf", "24h0m0s", 2]],'\
+ ' "columns": ["name", "duration", "replicaN"]}]}]}'
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/query",
+ text=example_response
+ )
+ self.assertListEqual(
+ cli.get_list_retention_policies("db"),
+ [{'duration': '24h0m0s',
+ 'name': 'fsfdsdf', 'replicaN': 2}]
+ )
+
def test_query_into_dataframe(self):
+ """Test query into df for TestDataFrameClient object."""
data = {
"results": [{
"series": [
{"measurement": "network",
"tags": {"direction": ""},
"columns": ["time", "value"],
- "values":[["2009-11-10T23:00:00Z", 23422]]
+ "values": [["2009-11-10T23:00:00Z", 23422]]
},
{"measurement": "network",
"tags": {"direction": "in"},
@@ -222,13 +893,15 @@ def test_query_into_dataframe(self):
pd1 = pd.DataFrame(
[[23422]], columns=['value'],
index=pd.to_datetime(["2009-11-10T23:00:00Z"]))
- pd1.index = pd1.index.tz_localize('UTC')
+ if pd1.index.tzinfo is None:
+ pd1.index = pd1.index.tz_localize('UTC')
pd2 = pd.DataFrame(
[[23422], [23422], [23422]], columns=['value'],
index=pd.to_datetime(["2009-11-10T23:00:00Z",
"2009-11-10T23:00:00Z",
"2009-11-10T23:00:00Z"]))
- pd2.index = pd2.index.tz_localize('UTC')
+ if pd2.index.tzinfo is None:
+ pd2.index = pd2.index.tz_localize('UTC')
expected = {
('network', (('direction', ''),)): pd1,
('network', (('direction', 'in'),)): pd2
@@ -240,56 +913,162 @@ def test_query_into_dataframe(self):
for k in expected:
assert_frame_equal(expected[k], result[k])
- def test_query_with_empty_result(self):
+ def test_multiquery_into_dataframe(self):
+ """Test multiquery into df for TestDataFrameClient object."""
+ data = {
+ "results": [
+ {
+ "series": [
+ {
+ "name": "cpu_load_short",
+ "columns": ["time", "value"],
+ "values": [
+ ["2015-01-29T21:55:43.702900257Z", 0.55],
+ ["2015-01-29T21:55:43.702900257Z", 23422],
+ ["2015-06-11T20:46:02Z", 0.64]
+ ]
+ }
+ ]
+ }, {
+ "series": [
+ {
+ "name": "cpu_load_short",
+ "columns": ["time", "count"],
+ "values": [
+ ["1970-01-01T00:00:00Z", 3]
+ ]
+ }
+ ]
+ }
+ ]
+ }
+
+ pd1 = pd.DataFrame(
+ [[0.55], [23422.0], [0.64]], columns=['value'],
+ index=pd.to_datetime([
+ "2015-01-29 21:55:43.702900257+0000",
+ "2015-01-29 21:55:43.702900257+0000",
+ "2015-06-11 20:46:02+0000"]))
+ if pd1.index.tzinfo is None:
+ pd1.index = pd1.index.tz_localize('UTC')
+ pd2 = pd.DataFrame(
+ [[3]], columns=['count'],
+ index=pd.to_datetime(["1970-01-01 00:00:00+00:00"]))
+ if pd2.index.tzinfo is None:
+ pd2.index = pd2.index.tz_localize('UTC')
+ expected = [{'cpu_load_short': pd1}, {'cpu_load_short': pd2}]
+
cli = DataFrameClient('host', 8086, 'username', 'password', 'db')
- with _mocked_session(cli, 'GET', 200, {"results": [{}]}):
- result = cli.query('select column_one from foo;')
- self.assertEqual(result, {})
+ iql = "SELECT value FROM cpu_load_short WHERE region=$region;"\
+ "SELECT count(value) FROM cpu_load_short WHERE region=$region"
+ bind_params = {'region': 'us-west'}
+ with _mocked_session(cli, 'GET', 200, data):
+ result = cli.query(iql, bind_params=bind_params)
+ for r, e in zip(result, expected):
+ for k in e:
+ assert_frame_equal(e[k], r[k])
- def test_list_series(self):
- response = {
- 'results': [
- {'series': [
- {
- 'columns': ['host'],
- 'measurement': 'cpu',
- 'values': [
- ['server01']]
- },
- {
- 'columns': [
- 'host',
- 'region'
- ],
- 'measurement': 'network',
- 'values': [
- [
- 'server01',
- 'us-west'
- ],
- [
- 'server01',
- 'us-east'
+ def test_multiquery_into_dataframe_dropna(self):
+ """Test multiquery into df for TestDataFrameClient object."""
+ data = {
+ "results": [
+ {
+ "series": [
+ {
+ "name": "cpu_load_short",
+ "columns": ["time", "value", "value2", "value3"],
+ "values": [
+ ["2015-01-29T21:55:43.702900257Z",
+ 0.55, 0.254, np.NaN],
+ ["2015-01-29T21:55:43.702900257Z",
+ 23422, 122878, np.NaN],
+ ["2015-06-11T20:46:02Z",
+ 0.64, 0.5434, np.NaN]
+ ]
+ }
+ ]
+ }, {
+ "series": [
+ {
+ "name": "cpu_load_short",
+ "columns": ["time", "count"],
+ "values": [
+ ["1970-01-01T00:00:00Z", 3]
]
- ]
- }
- ]}
+ }
+ ]
+ }
]
}
- expected = {
- 'cpu': pd.DataFrame([['server01']], columns=['host']),
- 'network': pd.DataFrame(
- [['server01', 'us-west'], ['server01', 'us-east']],
- columns=['host', 'region'])}
+ pd1 = pd.DataFrame(
+ [[0.55, 0.254, np.NaN],
+ [23422.0, 122878, np.NaN],
+ [0.64, 0.5434, np.NaN]],
+ columns=['value', 'value2', 'value3'],
+ index=pd.to_datetime([
+ "2015-01-29 21:55:43.702900257+0000",
+ "2015-01-29 21:55:43.702900257+0000",
+ "2015-06-11 20:46:02+0000"]))
+
+ if pd1.index.tzinfo is None:
+ pd1.index = pd1.index.tz_localize('UTC')
+
+ pd1_dropna = pd.DataFrame(
+ [[0.55, 0.254], [23422.0, 122878], [0.64, 0.5434]],
+ columns=['value', 'value2'],
+ index=pd.to_datetime([
+ "2015-01-29 21:55:43.702900257+0000",
+ "2015-01-29 21:55:43.702900257+0000",
+ "2015-06-11 20:46:02+0000"]))
+
+ if pd1_dropna.index.tzinfo is None:
+ pd1_dropna.index = pd1_dropna.index.tz_localize('UTC')
+
+ pd2 = pd.DataFrame(
+ [[3]], columns=['count'],
+ index=pd.to_datetime(["1970-01-01 00:00:00+00:00"]))
+
+ if pd2.index.tzinfo is None:
+ pd2.index = pd2.index.tz_localize('UTC')
+
+ expected_dropna_true = [
+ {'cpu_load_short': pd1_dropna},
+ {'cpu_load_short': pd2}]
+ expected_dropna_false = [
+ {'cpu_load_short': pd1},
+ {'cpu_load_short': pd2}]
+
+ cli = DataFrameClient('host', 8086, 'username', 'password', 'db')
+ iql = "SELECT value FROM cpu_load_short WHERE region=$region;" \
+ "SELECT count(value) FROM cpu_load_short WHERE region=$region"
+ bind_params = {'region': 'us-west'}
+
+ for dropna in [True, False]:
+ with _mocked_session(cli, 'GET', 200, data):
+ result = cli.query(iql, bind_params=bind_params, dropna=dropna)
+ expected = \
+ expected_dropna_true if dropna else expected_dropna_false
+ for r, e in zip(result, expected):
+ for k in e:
+ assert_frame_equal(e[k], r[k])
+
+ # test default value (dropna = True)
+ with _mocked_session(cli, 'GET', 200, data):
+ result = cli.query(iql, bind_params=bind_params)
+ for r, e in zip(result, expected_dropna_true):
+ for k in e:
+ assert_frame_equal(e[k], r[k])
+ def test_query_with_empty_result(self):
+ """Test query with empty results in TestDataFrameClient object."""
cli = DataFrameClient('host', 8086, 'username', 'password', 'db')
- with _mocked_session(cli, 'GET', 200, response):
- series = cli.get_list_series()
- assert_frame_equal(series['cpu'], expected['cpu'])
- assert_frame_equal(series['network'], expected['network'])
+ with _mocked_session(cli, 'GET', 200, {"results": [{}]}):
+ result = cli.query('select column_one from foo;')
+ self.assertEqual(result, {})
def test_get_list_database(self):
+ """Test get list of databases in TestDataFrameClient object."""
data = {'results': [
{'series': [
{'measurement': 'databases',
@@ -307,6 +1086,7 @@ def test_get_list_database(self):
)
def test_datetime_to_epoch(self):
+ """Test convert datetime to epoch in TestDataFrameClient object."""
timestamp = pd.Timestamp('2013-01-01 00:00:00.000+00:00')
cli = DataFrameClient('host', 8086, 'username', 'password', 'db')
@@ -338,3 +1118,231 @@ def test_datetime_to_epoch(self):
cli._datetime_to_epoch(timestamp, time_precision='n'),
1356998400000000000.0
)
+
+ def test_dsn_constructor(self):
+ """Test data source name deconstructor in TestDataFrameClient."""
+ client = DataFrameClient.from_dsn('influxdb://localhost:8086')
+ self.assertIsInstance(client, DataFrameClient)
+ self.assertEqual('http://localhost:8086', client._baseurl)
+
+ def test_write_points_from_dataframe_with_nan_line(self):
+ """Test write points from dataframe with Nan lines."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.DataFrame(data=[["1", 1, np.inf], ["2", 2, np.nan]],
+ index=[now, now + timedelta(hours=1)],
+ columns=["column_one", "column_two",
+ "column_three"])
+ expected = (
+ b"foo column_one=\"1\",column_two=1i 0\n"
+ b"foo column_one=\"2\",column_two=2i "
+ b"3600000000000\n"
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+
+ cli = DataFrameClient(database='db')
+
+ cli.write_points(dataframe, 'foo', protocol='line')
+ self.assertEqual(m.last_request.body, expected)
+
+ cli.write_points(dataframe, 'foo', tags=None, protocol='line')
+ self.assertEqual(m.last_request.body, expected)
+
+ def test_write_points_from_dataframe_with_nan_json(self):
+ """Test write points from json with NaN lines."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.DataFrame(data=[["1", 1, np.inf], ["2", 2, np.nan]],
+ index=[now, now + timedelta(hours=1)],
+ columns=["column_one", "column_two",
+ "column_three"])
+ expected = (
+ b"foo column_one=\"1\",column_two=1i 0\n"
+ b"foo column_one=\"2\",column_two=2i "
+ b"3600000000000\n"
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+
+ cli = DataFrameClient(database='db')
+
+ cli.write_points(dataframe, 'foo', protocol='json')
+ self.assertEqual(m.last_request.body, expected)
+
+ cli.write_points(dataframe, 'foo', tags=None, protocol='json')
+ self.assertEqual(m.last_request.body, expected)
+
+ def test_write_points_from_dataframe_with_tags_and_nan_line(self):
+ """Test write points from dataframe with NaN lines and tags."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.DataFrame(data=[['blue', 1, "1", 1, np.inf],
+ ['red', 0, "2", 2, np.nan]],
+ index=[now, now + timedelta(hours=1)],
+ columns=["tag_one", "tag_two", "column_one",
+ "column_two", "column_three"])
+ expected = (
+ b"foo,tag_one=blue,tag_two=1 "
+ b"column_one=\"1\",column_two=1i "
+ b"0\n"
+ b"foo,tag_one=red,tag_two=0 "
+ b"column_one=\"2\",column_two=2i "
+ b"3600000000000\n"
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+
+ cli = DataFrameClient(database='db')
+
+ cli.write_points(dataframe, 'foo', protocol='line',
+ tag_columns=['tag_one', 'tag_two'])
+ self.assertEqual(m.last_request.body, expected)
+
+ cli.write_points(dataframe, 'foo', tags=None, protocol='line',
+ tag_columns=['tag_one', 'tag_two'])
+ self.assertEqual(m.last_request.body, expected)
+
+ def test_write_points_from_dataframe_with_tags_and_nan_json(self):
+ """Test write points from json with NaN lines and tags."""
+ now = pd.Timestamp('1970-01-01 00:00+00:00')
+ dataframe = pd.DataFrame(data=[['blue', 1, "1", 1, np.inf],
+ ['red', 0, "2", 2, np.nan]],
+ index=[now, now + timedelta(hours=1)],
+ columns=["tag_one", "tag_two", "column_one",
+ "column_two", "column_three"])
+ expected = (
+ b"foo,tag_one=blue,tag_two=1 "
+ b"column_one=\"1\",column_two=1i "
+ b"0\n"
+ b"foo,tag_one=red,tag_two=0 "
+ b"column_one=\"2\",column_two=2i "
+ b"3600000000000\n"
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204)
+
+ cli = DataFrameClient(database='db')
+
+ cli.write_points(dataframe, 'foo', protocol='json',
+ tag_columns=['tag_one', 'tag_two'])
+ self.assertEqual(m.last_request.body, expected)
+
+ cli.write_points(dataframe, 'foo', tags=None, protocol='json',
+ tag_columns=['tag_one', 'tag_two'])
+ self.assertEqual(m.last_request.body, expected)
+
+ def test_query_custom_index(self):
+ """Test query with custom indexes."""
+ data = {
+ "results": [
+ {
+ "series": [
+ {
+ "name": "cpu_load_short",
+ "columns": ["time", "value", "host"],
+ "values": [
+ [1, 0.55, "local"],
+ [2, 23422, "local"],
+ [3, 0.64, "local"]
+ ]
+ }
+ ]
+ }
+ ]
+ }
+
+ cli = DataFrameClient('host', 8086, 'username', 'password', 'db')
+ iql = "SELECT value FROM cpu_load_short WHERE region=$region;" \
+ "SELECT count(value) FROM cpu_load_short WHERE region=$region"
+ bind_params = {'region': 'us-west'}
+ with _mocked_session(cli, 'GET', 200, data):
+ result = cli.query(iql, bind_params=bind_params,
+ data_frame_index=["time", "host"])
+
+ _data_frame = result['cpu_load_short']
+ print(_data_frame)
+
+ self.assertListEqual(["time", "host"],
+ list(_data_frame.index.names))
+
+ def test_dataframe_nanosecond_precision(self):
+ """Test nanosecond precision."""
+ for_df_dict = {
+ "nanFloats": [1.1, float('nan'), 3.3, 4.4],
+ "onlyFloats": [1.1, 2.2, 3.3, 4.4],
+ "strings": ['one_one', 'two_two', 'three_three', 'four_four']
+ }
+ df = pd.DataFrame.from_dict(for_df_dict)
+ df['time'] = ['2019-10-04 06:27:19.850557111+00:00',
+ '2019-10-04 06:27:19.850557184+00:00',
+ '2019-10-04 06:27:42.251396864+00:00',
+ '2019-10-04 06:27:42.251396974+00:00']
+ df['time'] = pd.to_datetime(df['time'], unit='ns')
+ df = df.set_index('time')
+
+ expected = (
+ b'foo nanFloats=1.1,onlyFloats=1.1,strings="one_one" 1570170439850557111\n' # noqa E501 line too long
+ b'foo onlyFloats=2.2,strings="two_two" 1570170439850557184\n' # noqa E501 line too long
+ b'foo nanFloats=3.3,onlyFloats=3.3,strings="three_three" 1570170462251396864\n' # noqa E501 line too long
+ b'foo nanFloats=4.4,onlyFloats=4.4,strings="four_four" 1570170462251396974\n' # noqa E501 line too long
+ )
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204
+ )
+
+ cli = DataFrameClient(database='db')
+ cli.write_points(df, 'foo', time_precision='n')
+
+ self.assertEqual(m.last_request.body, expected)
+
+ def test_dataframe_nanosecond_precision_one_microsecond(self):
+ """Test nanosecond precision within one microsecond."""
+ # 1 microsecond = 1000 nanoseconds
+ start = np.datetime64('2019-10-04T06:27:19.850557000')
+ end = np.datetime64('2019-10-04T06:27:19.850558000')
+
+ # generate timestamps with nanosecond precision
+ timestamps = np.arange(
+ start,
+ end + np.timedelta64(1, 'ns'),
+ np.timedelta64(1, 'ns')
+ )
+ # generate values
+ values = np.arange(0.0, len(timestamps))
+
+ df = pd.DataFrame({'value': values}, index=timestamps)
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.POST,
+ "http://localhost:8086/write",
+ status_code=204
+ )
+
+ cli = DataFrameClient(database='db')
+ cli.write_points(df, 'foo', time_precision='n')
+
+ lines = m.last_request.body.decode('utf-8').split('\n')
+ self.assertEqual(len(lines), 1002)
+
+ for index, line in enumerate(lines):
+ if index == 1001:
+ self.assertEqual(line, '')
+ continue
+ self.assertEqual(
+ line,
+ f"foo value={index}.0 157017043985055{7000 + index:04}"
+ )
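
For orientation, the pattern these DataFrameClient tests repeat is: register a mocked /write endpoint, call write_points, then assert on the captured line-protocol body. A minimal self-contained sketch of that pattern, assuming influxdb 5.x with pandas and requests-mock installed (the measurement name 'foo' is illustrative):

import pandas as pd
import requests_mock
from influxdb import DataFrameClient

# Two points, one hour apart, on a timezone-aware index.
df = pd.DataFrame(
    data=[[0.5], [0.6]],
    index=pd.to_datetime(["1970-01-01 00:00:00+00:00",
                          "1970-01-01 01:00:00+00:00"]),
    columns=["value"])

with requests_mock.Mocker() as m:
    # Intercept the write endpoint so no real server is needed.
    m.register_uri(requests_mock.POST,
                   "http://localhost:8086/write",
                   status_code=204)
    cli = DataFrameClient(database='db')
    cli.write_points(df, 'foo', protocol='line')
    # The captured body is the line-protocol payload the client built,
    # here b"foo value=0.5 0\nfoo value=0.6 3600000000000\n".
    print(m.last_request.body)
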
diff --git a/influxdb/tests/helper_test.py b/influxdb/tests/helper_test.py
index 444b36ef..6737f921 100644
--- a/influxdb/tests/helper_test.py
+++ b/influxdb/tests/helper_test.py
@@ -1,4 +1,12 @@
# -*- coding: utf-8 -*-
+"""Set of series helper functions for test."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from datetime import datetime, timedelta
import unittest
import warnings
@@ -9,9 +17,11 @@
class TestSeriesHelper(unittest.TestCase):
+ """Define the SeriesHelper test object."""
@classmethod
def setUpClass(cls):
+ """Set up the TestSeriesHelper object."""
super(TestSeriesHelper, cls).setUpClass()
TestSeriesHelper.client = InfluxDBClient(
@@ -23,8 +33,11 @@ def setUpClass(cls):
)
class MySeriesHelper(SeriesHelper):
+ """Define a SeriesHelper object."""
class Meta:
+ """Define metadata for the SeriesHelper object."""
+
client = TestSeriesHelper.client
series_name = 'events.stats.{server_name}'
fields = ['some_stat']
@@ -34,13 +47,31 @@ class Meta:
TestSeriesHelper.MySeriesHelper = MySeriesHelper
+ def setUp(self):
+ """Check that MySeriesHelper has empty datapoints."""
+ super(TestSeriesHelper, self).setUp()
+ self.assertEqual(
+ TestSeriesHelper.MySeriesHelper._json_body_(),
+ [],
+ 'Resetting helper in teardown did not empty datapoints.')
+
+ def tearDown(self):
+ """Deconstruct the TestSeriesHelper object."""
+ super(TestSeriesHelper, self).tearDown()
+ TestSeriesHelper.MySeriesHelper._reset_()
+ self.assertEqual(
+ TestSeriesHelper.MySeriesHelper._json_body_(),
+ [],
+ 'Resetting helper did not empty datapoints.')
+
def test_auto_commit(self):
- """
- Tests that write_points is called after the right number of events
- """
+ """Test write_points called after valid number of events."""
class AutoCommitTest(SeriesHelper):
+ """Define a SeriesHelper instance to test autocommit."""
class Meta:
+ """Define metadata for AutoCommitTest."""
+
series_name = 'events.stats.{server_name}'
fields = ['some_stat']
tags = ['server_name', 'other_tag']
@@ -58,10 +89,10 @@ class Meta:
AutoCommitTest(server_name='us.east-1', some_stat=3443, other_tag='gg')
self.assertTrue(fake_write_points.called)
- def testSingleSeriesName(self):
- """
- Tests JSON conversion when there is only one series name.
- """
+ @mock.patch('influxdb.helper.SeriesHelper._current_timestamp')
+ def testSingleSeriesName(self, current_timestamp):
+ """Test JSON conversion when there is only one series name."""
+ current_timestamp.return_value = current_date = datetime.today()
TestSeriesHelper.MySeriesHelper(
server_name='us.east-1', other_tag='ello', some_stat=159)
TestSeriesHelper.MySeriesHelper(
@@ -80,6 +111,7 @@ def testSingleSeriesName(self):
"fields": {
"some_stat": 159
},
+ "time": current_date,
},
{
"measurement": "events.stats.us.east-1",
@@ -90,6 +122,7 @@ def testSingleSeriesName(self):
"fields": {
"some_stat": 158
},
+ "time": current_date,
},
{
"measurement": "events.stats.us.east-1",
@@ -100,6 +133,7 @@ def testSingleSeriesName(self):
"fields": {
"some_stat": 157
},
+ "time": current_date,
},
{
"measurement": "events.stats.us.east-1",
@@ -110,6 +144,7 @@ def testSingleSeriesName(self):
"fields": {
"some_stat": 156
},
+ "time": current_date,
}
]
@@ -117,17 +152,12 @@ def testSingleSeriesName(self):
self.assertTrue(all([el in expectation for el in rcvd]) and
all([el in rcvd for el in expectation]),
'Invalid JSON body of time series returned from '
- '_json_body_ for one series name: {}.'.format(rcvd))
- TestSeriesHelper.MySeriesHelper._reset_()
- self.assertEqual(
- TestSeriesHelper.MySeriesHelper._json_body_(),
- [],
- 'Resetting helper did not empty datapoints.')
+ '_json_body_ for one series name: {0}.'.format(rcvd))
- def testSeveralSeriesNames(self):
- '''
- Tests JSON conversion when there is only one series name.
- '''
+ @mock.patch('influxdb.helper.SeriesHelper._current_timestamp')
+ def testSeveralSeriesNames(self, current_timestamp):
+ """Test JSON conversion when there are multiple series names."""
+ current_timestamp.return_value = current_date = datetime.today()
TestSeriesHelper.MySeriesHelper(
server_name='us.east-1', some_stat=159, other_tag='ello')
TestSeriesHelper.MySeriesHelper(
@@ -145,7 +175,8 @@ def testSeveralSeriesNames(self):
'tags': {
'other_tag': 'ello',
'server_name': 'lu.lux'
- }
+ },
+ "time": current_date,
},
{
'fields': {
@@ -155,7 +186,8 @@ def testSeveralSeriesNames(self):
'tags': {
'other_tag': 'ello',
'server_name': 'uk.london'
- }
+ },
+ "time": current_date,
},
{
'fields': {
@@ -165,7 +197,8 @@ def testSeveralSeriesNames(self):
'tags': {
'other_tag': 'ello',
'server_name': 'fr.paris-10'
- }
+ },
+ "time": current_date,
},
{
'fields': {
@@ -175,7 +208,8 @@ def testSeveralSeriesNames(self):
'tags': {
'other_tag': 'ello',
'server_name': 'us.east-1'
- }
+ },
+ "time": current_date,
}
]
@@ -183,49 +217,127 @@ def testSeveralSeriesNames(self):
self.assertTrue(all([el in expectation for el in rcvd]) and
all([el in rcvd for el in expectation]),
'Invalid JSON body of time series returned from '
- '_json_body_ for several series names: {}.'
+ '_json_body_ for several series names: {0}.'
.format(rcvd))
- TestSeriesHelper.MySeriesHelper._reset_()
- self.assertEqual(
- TestSeriesHelper.MySeriesHelper._json_body_(),
- [],
- 'Resetting helper did not empty datapoints.')
+
+ @mock.patch('influxdb.helper.SeriesHelper._current_timestamp')
+ def testSeriesWithoutTimeField(self, current_timestamp):
+ """Test that time is optional on a series without a time field."""
+ current_date = datetime.today()
+ yesterday = current_date - timedelta(days=1)
+ current_timestamp.return_value = yesterday
+ TestSeriesHelper.MySeriesHelper(
+ server_name='us.east-1', other_tag='ello',
+ some_stat=159, time=current_date
+ )
+ TestSeriesHelper.MySeriesHelper(
+ server_name='us.east-1', other_tag='ello',
+ some_stat=158,
+ )
+ point1, point2 = TestSeriesHelper.MySeriesHelper._json_body_()
+ self.assertTrue('time' in point1 and 'time' in point2)
+ self.assertEqual(point1['time'], current_date)
+ self.assertEqual(point2['time'], yesterday)
+
+ def testSeriesWithoutAllTags(self):
+ """Test that creating a data point without a tag throws an error."""
+ class MyTimeFieldSeriesHelper(SeriesHelper):
+
+ class Meta:
+ client = TestSeriesHelper.client
+ series_name = 'events.stats.{server_name}'
+ fields = ['some_stat', 'time']
+ tags = ['server_name', 'other_tag']
+ bulk_size = 5
+ autocommit = True
+
+ self.assertRaises(NameError, MyTimeFieldSeriesHelper,
+ **{"server_name": 'us.east-1',
+ "some_stat": 158})
+
+ @mock.patch('influxdb.helper.SeriesHelper._current_timestamp')
+ def testSeriesWithTimeField(self, current_timestamp):
+ """Test that time is optional on a series with a time field."""
+ current_date = datetime.today()
+ yesterday = current_date - timedelta(days=1)
+ current_timestamp.return_value = yesterday
+
+ class MyTimeFieldSeriesHelper(SeriesHelper):
+
+ class Meta:
+ client = TestSeriesHelper.client
+ series_name = 'events.stats.{server_name}'
+ fields = ['some_stat', 'time']
+ tags = ['server_name', 'other_tag']
+ bulk_size = 5
+ autocommit = True
+
+ MyTimeFieldSeriesHelper(
+ server_name='us.east-1', other_tag='ello',
+ some_stat=159, time=current_date
+ )
+ MyTimeFieldSeriesHelper(
+ server_name='us.east-1', other_tag='ello',
+ some_stat=158,
+ )
+ point1, point2 = MyTimeFieldSeriesHelper._json_body_()
+ self.assertTrue('time' in point1 and 'time' in point2)
+ self.assertEqual(point1['time'], current_date)
+ self.assertEqual(point2['time'], yesterday)
def testInvalidHelpers(self):
- '''
- Tests errors in invalid helpers.
- '''
+ """Test errors in invalid helpers."""
class MissingMeta(SeriesHelper):
+ """Define instance of SeriesHelper for missing meta."""
+
pass
class MissingClient(SeriesHelper):
+ """Define SeriesHelper for missing client data."""
class Meta:
+ """Define metadat for MissingClient."""
+
series_name = 'events.stats.{server_name}'
fields = ['time', 'server_name']
autocommit = True
class MissingSeriesName(SeriesHelper):
+ """Define instance of SeriesHelper for missing series."""
class Meta:
+ """Define metadata for MissingSeriesName."""
+
fields = ['time', 'server_name']
class MissingFields(SeriesHelper):
+ """Define instance of SeriesHelper for missing fields."""
class Meta:
+ """Define metadata for MissingFields."""
+
series_name = 'events.stats.{server_name}'
+ class InvalidTimePrecision(SeriesHelper):
+ """Define instance of SeriesHelper for invalid time precision."""
+
+ class Meta:
+ """Define metadata for InvalidTimePrecision."""
+
+ series_name = 'events.stats.{server_name}'
+ time_precision = "ks"
+ fields = ['time', 'server_name']
+ autocommit = True
+
for cls in [MissingMeta, MissingClient, MissingFields,
- MissingSeriesName]:
+ MissingSeriesName, InvalidTimePrecision]:
self.assertRaises(
AttributeError, cls, **{'time': 159,
'server_name': 'us.east-1'})
@unittest.skip("Fails on py32")
def testWarnBulkSizeZero(self):
- """
- Tests warning for an invalid bulk size.
- """
+ """Test warning for an invalid bulk size."""
class WarnBulkSizeZero(SeriesHelper):
class Meta:
@@ -245,18 +357,19 @@ class Meta:
# the warning only.
pass
self.assertEqual(len(w), 1,
- '{} call should have generated one warning.'
+ '{0} call should have generated one warning.'
.format(WarnBulkSizeZero))
self.assertIn('forced to 1', str(w[-1].message),
'Warning message did not contain "forced to 1".')
def testWarnBulkSizeNoEffect(self):
- """
- Tests warning for a set bulk size but autocommit False.
- """
+ """Test warning for a set bulk size but autocommit False."""
class WarnBulkSizeNoEffect(SeriesHelper):
+ """Define SeriesHelper for warning on bulk size."""
class Meta:
+ """Define metadat for WarnBulkSizeNoEffect."""
+
series_name = 'events.stats.{server_name}'
fields = ['time', 'server_name']
bulk_size = 5
@@ -267,7 +380,58 @@ class Meta:
warnings.simplefilter("always")
WarnBulkSizeNoEffect(time=159, server_name='us.east-1')
self.assertEqual(len(w), 1,
- '{} call should have generated one warning.'
+ '{0} call should have generated one warning.'
.format(WarnBulkSizeNoEffect))
self.assertIn('has no affect', str(w[-1].message),
                      'Warning message did not contain "has no affect".')
+
+ def testSeriesWithRetentionPolicy(self):
+ """Test that the data is saved with the specified retention policy."""
+ my_policy = 'my_policy'
+
+ class RetentionPolicySeriesHelper(SeriesHelper):
+
+ class Meta:
+ client = InfluxDBClient()
+ series_name = 'events.stats.{server_name}'
+ fields = ['some_stat', 'time']
+ tags = ['server_name', 'other_tag']
+ bulk_size = 2
+ autocommit = True
+ retention_policy = my_policy
+
+ fake_write_points = mock.MagicMock()
+ RetentionPolicySeriesHelper(
+ server_name='us.east-1', some_stat=159, other_tag='gg')
+ RetentionPolicySeriesHelper._client.write_points = fake_write_points
+ RetentionPolicySeriesHelper(
+ server_name='us.east-1', some_stat=158, other_tag='aa')
+
+ kall = fake_write_points.call_args
+ args, kwargs = kall
+ self.assertTrue('retention_policy' in kwargs)
+ self.assertEqual(kwargs['retention_policy'], my_policy)
+
+ def testSeriesWithoutRetentionPolicy(self):
+ """Test that the data is saved without any retention policy."""
+ class NoRetentionPolicySeriesHelper(SeriesHelper):
+
+ class Meta:
+ client = InfluxDBClient()
+ series_name = 'events.stats.{server_name}'
+ fields = ['some_stat', 'time']
+ tags = ['server_name', 'other_tag']
+ bulk_size = 2
+ autocommit = True
+
+ fake_write_points = mock.MagicMock()
+ NoRetentionPolicySeriesHelper(
+ server_name='us.east-1', some_stat=159, other_tag='gg')
+ NoRetentionPolicySeriesHelper._client.write_points = fake_write_points
+ NoRetentionPolicySeriesHelper(
+ server_name='us.east-1', some_stat=158, other_tag='aa')
+
+ kall = fake_write_points.call_args
+ args, kwargs = kall
+ self.assertTrue('retention_policy' in kwargs)
+ self.assertEqual(kwargs['retention_policy'], None)
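
The retention-policy tests above follow the usual SeriesHelper pattern. A minimal sketch of that pattern, with write_points stubbed out by a mock exactly as the tests do so nothing touches the network; the helper and series names are illustrative, and the retention_policy Meta attribute is the one these tests introduce:

from unittest import mock  # the test suite itself uses the external 'mock' package

from influxdb import InfluxDBClient, SeriesHelper


class CPUHelper(SeriesHelper):
    class Meta:
        client = InfluxDBClient()         # target of autocommitted writes
        series_name = 'cpu.stats.{host}'  # formatted per data point
        fields = ['load']                 # 'time' is filled in when omitted
        tags = ['host']
        bulk_size = 2                     # flush every two points
        autocommit = True
        retention_policy = 'my_policy'    # forwarded to write_points


CPUHelper._client.write_points = mock.MagicMock()
CPUHelper(host='server01', load=0.64)
CPUHelper(host='server02', load=0.71)     # the second point triggers a flush
kwargs = CPUHelper._client.write_points.call_args[1]
assert kwargs['retention_policy'] == 'my_policy'
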
diff --git a/influxdb/tests/influxdb08/__init__.py b/influxdb/tests/influxdb08/__init__.py
index 40a96afc..0e79ed1c 100644
--- a/influxdb/tests/influxdb08/__init__.py
+++ b/influxdb/tests/influxdb08/__init__.py
@@ -1 +1,2 @@
# -*- coding: utf-8 -*-
+"""Define the influxdb08 test package."""
diff --git a/influxdb/tests/influxdb08/client_test.py b/influxdb/tests/influxdb08/client_test.py
index 343f1d22..39ab52d6 100644
--- a/influxdb/tests/influxdb08/client_test.py
+++ b/influxdb/tests/influxdb08/client_test.py
@@ -1,30 +1,33 @@
# -*- coding: utf-8 -*-
-"""
-unit tests
-"""
+"""Client unit tests."""
+
import json
-import requests
-import requests.exceptions
import socket
+import sys
import unittest
-import requests_mock
import random
-from nose.tools import raises
-from mock import patch
import warnings
+
import mock
+import requests
+import requests.exceptions
+import requests_mock
+
+from nose.tools import raises
+from mock import patch
from influxdb.influxdb08 import InfluxDBClient
from influxdb.influxdb08.client import session
-import sys
if sys.version < '3':
import codecs
def u(x):
+ """Test codec."""
return codecs.unicode_escape_decode(x)[0]
else:
def u(x):
+ """Test codec."""
return x
@@ -36,10 +39,10 @@ def _build_response_object(status_code=200, content=""):
def _mocked_session(method="GET", status_code=200, content=""):
-
method = method.upper()
def request(*args, **kwargs):
+ """Define a request for the _mocked_session."""
c = content
# Check method
@@ -73,8 +76,10 @@ def request(*args, **kwargs):
class TestInfluxDBClient(unittest.TestCase):
+ """Define a TestInfluxDBClient object."""
def setUp(self):
+ """Set up a TestInfluxDBClient object."""
# By default, raise exceptions on warnings
warnings.simplefilter('error', FutureWarning)
@@ -92,6 +97,7 @@ def setUp(self):
self.dsn_string = 'influxdb://uSr:pWd@host:1886/db'
def test_scheme(self):
+ """Test database scheme for TestInfluxDBClient object."""
cli = InfluxDBClient('host', 8086, 'username', 'password', 'database')
self.assertEqual(cli._baseurl, 'http://host:8086')
@@ -101,41 +107,46 @@ def test_scheme(self):
self.assertEqual(cli._baseurl, 'https://host:8086')
def test_dsn(self):
- cli = InfluxDBClient.from_DSN(self.dsn_string)
+ """Test datasource name for TestInfluxDBClient object."""
+ cli = InfluxDBClient.from_dsn(self.dsn_string)
self.assertEqual('http://host:1886', cli._baseurl)
self.assertEqual('uSr', cli._username)
self.assertEqual('pWd', cli._password)
self.assertEqual('db', cli._database)
- self.assertFalse(cli.use_udp)
+ self.assertFalse(cli._use_udp)
- cli = InfluxDBClient.from_DSN('udp+' + self.dsn_string)
- self.assertTrue(cli.use_udp)
+ cli = InfluxDBClient.from_dsn('udp+' + self.dsn_string)
+ self.assertTrue(cli._use_udp)
- cli = InfluxDBClient.from_DSN('https+' + self.dsn_string)
+ cli = InfluxDBClient.from_dsn('https+' + self.dsn_string)
self.assertEqual('https://host:1886', cli._baseurl)
- cli = InfluxDBClient.from_DSN('https+' + self.dsn_string,
+ cli = InfluxDBClient.from_dsn('https+' + self.dsn_string,
**{'ssl': False})
self.assertEqual('http://host:1886', cli._baseurl)
def test_switch_database(self):
+ """Test switch database for TestInfluxDBClient object."""
cli = InfluxDBClient('host', 8086, 'username', 'password', 'database')
cli.switch_database('another_database')
self.assertEqual(cli._database, 'another_database')
@raises(FutureWarning)
def test_switch_db_deprecated(self):
+ """Test deprecated switch database for TestInfluxDBClient object."""
cli = InfluxDBClient('host', 8086, 'username', 'password', 'database')
cli.switch_db('another_database')
self.assertEqual(cli._database, 'another_database')
def test_switch_user(self):
+ """Test switch user for TestInfluxDBClient object."""
cli = InfluxDBClient('host', 8086, 'username', 'password', 'database')
cli.switch_user('another_username', 'another_password')
self.assertEqual(cli._username, 'another_username')
self.assertEqual(cli._password, 'another_password')
def test_write(self):
+ """Test write to database for TestInfluxDBClient object."""
with requests_mock.Mocker() as m:
m.register_uri(
requests_mock.POST,
@@ -164,6 +175,7 @@ def test_write(self):
)
def test_write_points(self):
+ """Test write points for TestInfluxDBClient object."""
with requests_mock.Mocker() as m:
m.register_uri(
requests_mock.POST,
@@ -181,6 +193,7 @@ def test_write_points(self):
)
def test_write_points_string(self):
+ """Test write string points for TestInfluxDBClient object."""
with requests_mock.Mocker() as m:
m.register_uri(
requests_mock.POST,
@@ -198,6 +211,7 @@ def test_write_points_string(self):
)
def test_write_points_batch(self):
+ """Test write batch points for TestInfluxDBClient object."""
with requests_mock.Mocker() as m:
m.register_uri(requests_mock.POST,
"http://localhost:8086/db/db/series")
@@ -207,6 +221,7 @@ def test_write_points_batch(self):
self.assertEqual(1, m.call_count)
def test_write_points_batch_invalid_size(self):
+ """Test write batch points invalid size for TestInfluxDBClient."""
with requests_mock.Mocker() as m:
m.register_uri(requests_mock.POST,
"http://localhost:8086/db/db/series")
@@ -216,6 +231,7 @@ def test_write_points_batch_invalid_size(self):
self.assertEqual(1, m.call_count)
def test_write_points_batch_multiple_series(self):
+ """Test write points batch multiple series."""
dummy_points = [
{"points": [["1", 1, 1.0], ["2", 2, 2.0], ["3", 3, 3.0],
["4", 4, 4.0], ["5", 5, 5.0]],
@@ -240,6 +256,7 @@ def test_write_points_batch_multiple_series(self):
self.assertEqual(expected_last_body, m.request_history[4].json())
def test_write_points_udp(self):
+ """Test write points UDP for TestInfluxDBClient object."""
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
port = random.randint(4000, 8000)
s.bind(('0.0.0.0', port))
@@ -256,6 +273,7 @@ def test_write_points_udp(self):
json.loads(received_data.decode(), strict=True))
def test_write_bad_precision_udp(self):
+ """Test write UDP w/bad precision."""
cli = InfluxDBClient(
'localhost', 8086, 'root', 'root',
'test', use_udp=True, udp_port=4444
@@ -272,16 +290,19 @@ def test_write_bad_precision_udp(self):
@raises(Exception)
def test_write_points_fails(self):
+ """Test failed write points for TestInfluxDBClient object."""
with _mocked_session('post', 500):
cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
cli.write_points([])
def test_write_points_with_precision(self):
+ """Test write points with precision."""
with _mocked_session('post', 200, self.dummy_points):
cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
self.assertTrue(cli.write_points(self.dummy_points))
def test_write_points_bad_precision(self):
+ """Test write points with bad precision."""
cli = InfluxDBClient()
with self.assertRaisesRegexp(
Exception,
@@ -294,11 +315,13 @@ def test_write_points_bad_precision(self):
@raises(Exception)
def test_write_points_with_precision_fails(self):
+ """Test write points where precision fails."""
with _mocked_session('post', 500):
cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
cli.write_points_with_precision([])
def test_delete_points(self):
+ """Test delete points for TestInfluxDBClient object."""
with _mocked_session('delete', 204) as mocked:
cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
self.assertTrue(cli.delete_points("foo"))
@@ -312,26 +335,31 @@ def test_delete_points(self):
@raises(Exception)
def test_delete_points_with_wrong_name(self):
+ """Test delete points with wrong name."""
with _mocked_session('delete', 400):
cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
cli.delete_points("nonexist")
@raises(NotImplementedError)
def test_create_scheduled_delete(self):
+ """Test create scheduled deletes."""
cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
cli.create_scheduled_delete([])
@raises(NotImplementedError)
def test_get_list_scheduled_delete(self):
+ """Test get schedule list of deletes TestInfluxDBClient."""
cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
cli.get_list_scheduled_delete()
@raises(NotImplementedError)
def test_remove_scheduled_delete(self):
+ """Test remove scheduled delete TestInfluxDBClient."""
cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
cli.remove_scheduled_delete(1)
def test_query(self):
+ """Test query for TestInfluxDBClient object."""
data = [
{
"name": "foo",
@@ -348,6 +376,7 @@ def test_query(self):
self.assertEqual(len(result[0]['points']), 4)
def test_query_chunked(self):
+ """Test chunked query for TestInfluxDBClient object."""
cli = InfluxDBClient(database='db')
example_object = {
'points': [
@@ -380,6 +409,7 @@ def test_query_chunked(self):
)
def test_query_chunked_unicode(self):
+ """Test unicode chunked query for TestInfluxDBClient object."""
cli = InfluxDBClient(database='db')
example_object = {
'points': [
@@ -410,11 +440,13 @@ def test_query_chunked_unicode(self):
@raises(Exception)
def test_query_fail(self):
+ """Test failed query for TestInfluxDBClient."""
with _mocked_session('get', 401):
cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
cli.query('select column_one from foo;')
def test_query_bad_precision(self):
+ """Test query with bad precision for TestInfluxDBClient."""
cli = InfluxDBClient()
with self.assertRaisesRegexp(
Exception,
@@ -423,28 +455,33 @@ def test_query_bad_precision(self):
cli.query('select column_one from foo', time_precision='g')
def test_create_database(self):
+ """Test create database for TestInfluxDBClient."""
with _mocked_session('post', 201, {"name": "new_db"}):
cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
self.assertTrue(cli.create_database('new_db'))
@raises(Exception)
def test_create_database_fails(self):
+ """Test failed create database for TestInfluxDBClient."""
with _mocked_session('post', 401):
cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
cli.create_database('new_db')
def test_delete_database(self):
+ """Test delete database for TestInfluxDBClient."""
with _mocked_session('delete', 204):
cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
self.assertTrue(cli.delete_database('old_db'))
@raises(Exception)
def test_delete_database_fails(self):
+ """Test failed delete database for TestInfluxDBClient."""
with _mocked_session('delete', 401):
cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
cli.delete_database('old_db')
def test_get_list_database(self):
+ """Test get list of databases for TestInfluxDBClient."""
data = [
{"name": "a_db"}
]
@@ -455,12 +492,14 @@ def test_get_list_database(self):
@raises(Exception)
def test_get_list_database_fails(self):
+ """Test failed get list of databases for TestInfluxDBClient."""
with _mocked_session('get', 401):
cli = InfluxDBClient('host', 8086, 'username', 'password')
cli.get_list_database()
@raises(FutureWarning)
def test_get_database_list_deprecated(self):
+ """Test deprecated get database list for TestInfluxDBClient."""
data = [
{"name": "a_db"}
]
@@ -470,17 +509,20 @@ def test_get_database_list_deprecated(self):
self.assertEqual(cli.get_database_list()[0]['name'], 'a_db')
def test_delete_series(self):
+ """Test delete series for TestInfluxDBClient."""
with _mocked_session('delete', 204):
cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
cli.delete_series('old_series')
@raises(Exception)
def test_delete_series_fails(self):
+ """Test failed delete series for TestInfluxDBClient."""
with _mocked_session('delete', 401):
cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
cli.delete_series('old_series')
def test_get_series_list(self):
+ """Test get list of series for TestInfluxDBClient."""
cli = InfluxDBClient(database='db')
with requests_mock.Mocker() as m:
@@ -500,6 +542,7 @@ def test_get_series_list(self):
)
def test_get_continuous_queries(self):
+ """Test get continuous queries for TestInfluxDBClient."""
cli = InfluxDBClient(database='db')
with requests_mock.Mocker() as m:
@@ -532,9 +575,11 @@ def test_get_continuous_queries(self):
)
def test_get_list_cluster_admins(self):
+ """Test get list of cluster admins, not implemented."""
pass
def test_add_cluster_admin(self):
+ """Test add cluster admin for TestInfluxDBClient."""
with requests_mock.Mocker() as m:
m.register_uri(
requests_mock.POST,
@@ -556,6 +601,7 @@ def test_add_cluster_admin(self):
)
def test_update_cluster_admin_password(self):
+ """Test update cluster admin pass for TestInfluxDBClient."""
with requests_mock.Mocker() as m:
m.register_uri(
requests_mock.POST,
@@ -574,6 +620,7 @@ def test_update_cluster_admin_password(self):
)
def test_delete_cluster_admin(self):
+ """Test delete cluster admin for TestInfluxDBClient."""
with requests_mock.Mocker() as m:
m.register_uri(
requests_mock.DELETE,
@@ -587,12 +634,15 @@ def test_delete_cluster_admin(self):
self.assertIsNone(m.last_request.body)
def test_set_database_admin(self):
+ """Test set database admin for TestInfluxDBClient."""
pass
def test_unset_database_admin(self):
+ """Test unset database admin for TestInfluxDBClient."""
pass
def test_alter_database_admin(self):
+ """Test alter database admin for TestInfluxDBClient."""
with requests_mock.Mocker() as m:
m.register_uri(
requests_mock.POST,
@@ -614,25 +664,30 @@ def test_alter_database_admin(self):
@raises(NotImplementedError)
def test_get_list_database_admins(self):
+ """Test get list of database admins for TestInfluxDBClient."""
cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
cli.get_list_database_admins()
@raises(NotImplementedError)
def test_add_database_admin(self):
+ """Test add database admins for TestInfluxDBClient."""
cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
cli.add_database_admin('admin', 'admin_secret_password')
@raises(NotImplementedError)
def test_update_database_admin_password(self):
+ """Test update database admin pass for TestInfluxDBClient."""
cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
cli.update_database_admin_password('admin', 'admin_secret_password')
@raises(NotImplementedError)
def test_delete_database_admin(self):
+ """Test delete database admin for TestInfluxDBClient."""
cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
cli.delete_database_admin('admin')
def test_get_database_users(self):
+ """Test get database users for TestInfluxDBClient."""
cli = InfluxDBClient('localhost', 8086, 'username', 'password', 'db')
example_response = \
@@ -650,6 +705,7 @@ def test_get_database_users(self):
self.assertEqual(json.loads(example_response), users)
def test_add_database_user(self):
+ """Test add database user for TestInfluxDBClient."""
with requests_mock.Mocker() as m:
m.register_uri(
requests_mock.POST,
@@ -673,6 +729,7 @@ def test_add_database_user(self):
)
def test_add_database_user_bad_permissions(self):
+ """Test add database user with bad perms for TestInfluxDBClient."""
cli = InfluxDBClient()
with self.assertRaisesRegexp(
@@ -686,6 +743,7 @@ def test_add_database_user_bad_permissions(self):
)
def test_alter_database_user_password(self):
+ """Test alter database user pass for TestInfluxDBClient."""
with requests_mock.Mocker() as m:
m.register_uri(
requests_mock.POST,
@@ -706,6 +764,7 @@ def test_alter_database_user_password(self):
)
def test_alter_database_user_permissions(self):
+ """Test alter database user perms for TestInfluxDBClient."""
with requests_mock.Mocker() as m:
m.register_uri(
requests_mock.POST,
@@ -727,6 +786,7 @@ def test_alter_database_user_permissions(self):
)
def test_alter_database_user_password_and_permissions(self):
+ """Test alter database user pass and perms for TestInfluxDBClient."""
with requests_mock.Mocker() as m:
m.register_uri(
requests_mock.POST,
@@ -750,6 +810,7 @@ def test_alter_database_user_password_and_permissions(self):
)
def test_update_database_user_password_current_user(self):
+ """Test update database user pass for TestInfluxDBClient."""
cli = InfluxDBClient(
username='root',
password='hello',
@@ -769,6 +830,7 @@ def test_update_database_user_password_current_user(self):
self.assertEqual(cli._password, 'bye')
def test_delete_database_user(self):
+ """Test delete database user for TestInfluxDBClient."""
with requests_mock.Mocker() as m:
m.register_uri(
requests_mock.DELETE,
@@ -782,17 +844,21 @@ def test_delete_database_user(self):
@raises(NotImplementedError)
def test_update_permission(self):
+ """Test update permission for TestInfluxDBClient."""
cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
cli.update_permission('admin', [])
@mock.patch('requests.Session.request')
def test_request_retry(self, mock_request):
- """Tests that two connection errors will be handled"""
-
+ """Test that two connection errors will be handled."""
class CustomMock(object):
- i = 0
+ """Define CustomMock object."""
+
+ def __init__(self):
+ self.i = 0
def connection_error(self, *args, **kwargs):
+ """Test connection error in CustomMock."""
self.i += 1
if self.i < 3:
@@ -811,12 +877,16 @@ def connection_error(self, *args, **kwargs):
@mock.patch('requests.Session.request')
def test_request_retry_raises(self, mock_request):
- """Tests that three connection errors will not be handled"""
-
+ """Test that three connection errors will not be handled."""
class CustomMock(object):
- i = 0
+ """Define CustomMock object."""
+
+ def __init__(self):
+ """Initialize the object."""
+ self.i = 0
def connection_error(self, *args, **kwargs):
+ """Test the connection error for CustomMock."""
self.i += 1
if self.i < 4:
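
The CustomMock classes above keep the call counter on the instance rather than the class, so state cannot leak between tests. A standalone sketch of the same stateful side_effect idiom, using the stdlib unittest.mock in place of the external mock package:

import requests
from unittest import mock


class FlakyRequest(object):
    """Raise ConnectionError twice, then return a plain 200 response."""

    def __init__(self):
        self.calls = 0

    def __call__(self, *args, **kwargs):
        self.calls += 1
        if self.calls < 3:
            raise requests.exceptions.ConnectionError
        response = requests.Response()
        response.status_code = 200
        return response


mocked = mock.Mock(side_effect=FlakyRequest())
for _ in range(2):
    try:
        mocked()
    except requests.exceptions.ConnectionError:
        pass  # the client's retry loop is expected to absorb these
assert mocked().status_code == 200  # the third call succeeds
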
diff --git a/influxdb/tests/influxdb08/dataframe_client_test.py b/influxdb/tests/influxdb08/dataframe_client_test.py
index 63a10c93..0a766af0 100644
--- a/influxdb/tests/influxdb08/dataframe_client_test.py
+++ b/influxdb/tests/influxdb08/dataframe_client_test.py
@@ -1,32 +1,38 @@
# -*- coding: utf-8 -*-
-"""
-unit tests for misc module
-"""
-from .client_test import _mocked_session
+"""Unit tests for misc module."""
-import unittest
-import json
-import requests_mock
-from nose.tools import raises
from datetime import timedelta
-from influxdb.tests import skipIfPYpy, using_pypy
+
import copy
+import json
+import unittest
import warnings
+import requests_mock
+
+from nose.tools import raises
+
+from influxdb.tests import skip_if_pypy, using_pypy
+
+from .client_test import _mocked_session
+
if not using_pypy:
import pandas as pd
from pandas.util.testing import assert_frame_equal
from influxdb.influxdb08 import DataFrameClient
-@skipIfPYpy
+@skip_if_pypy
class TestDataFrameClient(unittest.TestCase):
+ """Define the DataFramClient test object."""
def setUp(self):
+ """Set up an instance of TestDataFrameClient object."""
# By default, raise exceptions on warnings
warnings.simplefilter('error', FutureWarning)
def test_write_points_from_dataframe(self):
+ """Test write points from dataframe."""
now = pd.Timestamp('1970-01-01 00:00+00:00')
dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
index=[now, now + timedelta(hours=1)],
@@ -53,6 +59,7 @@ def test_write_points_from_dataframe(self):
self.assertListEqual(json.loads(m.last_request.body), points)
def test_write_points_from_dataframe_with_float_nan(self):
+ """Test write points from dataframe with NaN float."""
now = pd.Timestamp('1970-01-01 00:00+00:00')
dataframe = pd.DataFrame(data=[[1, float("NaN"), 1.0], [2, 2, 2.0]],
index=[now, now + timedelta(hours=1)],
@@ -79,6 +86,7 @@ def test_write_points_from_dataframe_with_float_nan(self):
self.assertListEqual(json.loads(m.last_request.body), points)
def test_write_points_from_dataframe_in_batches(self):
+ """Test write points from dataframe in batches."""
now = pd.Timestamp('1970-01-01 00:00+00:00')
dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
index=[now, now + timedelta(hours=1)],
@@ -92,6 +100,7 @@ def test_write_points_from_dataframe_in_batches(self):
self.assertTrue(cli.write_points({"foo": dataframe}, batch_size=1))
def test_write_points_from_dataframe_with_numeric_column_names(self):
+ """Test write points from dataframe with numeric columns."""
now = pd.Timestamp('1970-01-01 00:00+00:00')
# df with numeric column names
dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
@@ -117,6 +126,7 @@ def test_write_points_from_dataframe_with_numeric_column_names(self):
self.assertListEqual(json.loads(m.last_request.body), points)
def test_write_points_from_dataframe_with_period_index(self):
+ """Test write points from dataframe with period index."""
dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
index=[pd.Period('1970-01-01'),
pd.Period('1970-01-02')],
@@ -143,6 +153,7 @@ def test_write_points_from_dataframe_with_period_index(self):
self.assertListEqual(json.loads(m.last_request.body), points)
def test_write_points_from_dataframe_with_time_precision(self):
+ """Test write points from dataframe with time precision."""
now = pd.Timestamp('1970-01-01 00:00+00:00')
dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
index=[now, now + timedelta(hours=1)],
@@ -182,6 +193,7 @@ def test_write_points_from_dataframe_with_time_precision(self):
@raises(TypeError)
def test_write_points_from_dataframe_fails_without_time_index(self):
+ """Test write points from dataframe that fails without time index."""
dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
columns=["column_one", "column_two",
"column_three"])
@@ -195,6 +207,7 @@ def test_write_points_from_dataframe_fails_without_time_index(self):
@raises(TypeError)
def test_write_points_from_dataframe_fails_with_series(self):
+ """Test failed write points from dataframe with series."""
now = pd.Timestamp('1970-01-01 00:00+00:00')
dataframe = pd.Series(data=[1.0, 2.0],
index=[now, now + timedelta(hours=1)])
@@ -207,6 +220,7 @@ def test_write_points_from_dataframe_fails_with_series(self):
cli.write_points({"foo": dataframe})
def test_query_into_dataframe(self):
+ """Test query into a dataframe."""
data = [
{
"name": "foo",
@@ -229,6 +243,7 @@ def test_query_into_dataframe(self):
assert_frame_equal(dataframe, result)
def test_query_multiple_time_series(self):
+ """Test query for multiple time series."""
data = [
{
"name": "series1",
@@ -269,12 +284,14 @@ def test_query_multiple_time_series(self):
assert_frame_equal(dataframes[key], result[key])
def test_query_with_empty_result(self):
+ """Test query with empty results."""
with _mocked_session('get', 200, []):
cli = DataFrameClient('host', 8086, 'username', 'password', 'db')
result = cli.query('select column_one from foo;')
self.assertEqual(result, [])
def test_list_series(self):
+ """Test list of series for dataframe object."""
response = [
{
'columns': ['time', 'name'],
@@ -288,6 +305,7 @@ def test_list_series(self):
self.assertEqual(series_list, ['seriesA', 'seriesB'])
def test_datetime_to_epoch(self):
+ """Test convert datetime to epoch."""
timestamp = pd.Timestamp('2013-01-01 00:00:00.000+00:00')
cli = DataFrameClient('host', 8086, 'username', 'password', 'db')
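
The epoch-conversion test above pins down specific numbers; since _datetime_to_epoch is a private helper, the sketch below mirrors the arithmetic it is expected to perform rather than its exact implementation:

import pandas as pd

timestamp = pd.Timestamp('2013-01-01 00:00:00.000+00:00')
epoch = pd.Timestamp('1970-01-01 00:00:00+00:00')

# Seconds since the epoch, scaled per time precision.
seconds = (timestamp - epoch).total_seconds()
assert seconds == 1356998400.0              # 's'
assert seconds * 1e3 == 1356998400000.0     # 'ms'
assert seconds * 1e6 == 1356998400000000.0  # 'u'
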
diff --git a/influxdb/tests/influxdb08/helper_test.py b/influxdb/tests/influxdb08/helper_test.py
index b0a7ef21..2e305f3f 100644
--- a/influxdb/tests/influxdb08/helper_test.py
+++ b/influxdb/tests/influxdb08/helper_test.py
@@ -1,4 +1,5 @@
# -*- coding: utf-8 -*-
+"""Define set of helper functions for the dataframe."""
import unittest
import warnings
@@ -9,9 +10,11 @@
class TestSeriesHelper(unittest.TestCase):
+ """Define the SeriesHelper for test."""
@classmethod
def setUpClass(cls):
+ """Set up an instance of the TestSerisHelper object."""
super(TestSeriesHelper, cls).setUpClass()
TestSeriesHelper.client = InfluxDBClient(
@@ -23,8 +26,11 @@ def setUpClass(cls):
)
class MySeriesHelper(SeriesHelper):
+ """Define a subset SeriesHelper instance."""
class Meta:
+ """Define metadata for the TestSeriesHelper object."""
+
client = TestSeriesHelper.client
series_name = 'events.stats.{server_name}'
fields = ['time', 'server_name']
@@ -34,12 +40,13 @@ class Meta:
TestSeriesHelper.MySeriesHelper = MySeriesHelper
def test_auto_commit(self):
- """
- Tests that write_points is called after the right number of events
- """
+ """Test that write_points called after the right number of events."""
class AutoCommitTest(SeriesHelper):
+ """Define an instance of SeriesHelper for AutoCommit test."""
class Meta:
+ """Define metadata AutoCommitTest object."""
+
series_name = 'events.stats.{server_name}'
fields = ['time', 'server_name']
bulk_size = 5
@@ -57,9 +64,7 @@ class Meta:
self.assertTrue(fake_write_points.called)
def testSingleSeriesName(self):
- """
- Tests JSON conversion when there is only one series name.
- """
+ """Test JSON conversion when there is only one series name."""
TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=159)
TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=158)
TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=157)
@@ -75,7 +80,7 @@ def testSingleSeriesName(self):
self.assertTrue(all([el in expectation for el in rcvd]) and
all([el in rcvd for el in expectation]),
'Invalid JSON body of time series returned from '
- '_json_body_ for one series name: {}.'.format(rcvd))
+ '_json_body_ for one series name: {0}.'.format(rcvd))
TestSeriesHelper.MySeriesHelper._reset_()
self.assertEqual(
TestSeriesHelper.MySeriesHelper._json_body_(),
@@ -83,9 +88,7 @@ def testSingleSeriesName(self):
'Resetting helper did not empty datapoints.')
def testSeveralSeriesNames(self):
- '''
- Tests JSON conversion when there is only one series name.
- '''
+ """Test JSON conversion when there is only one series name."""
TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=159)
TestSeriesHelper.MySeriesHelper(server_name='fr.paris-10', time=158)
TestSeriesHelper.MySeriesHelper(server_name='lu.lux', time=157)
@@ -107,7 +110,7 @@ def testSeveralSeriesNames(self):
self.assertTrue(all([el in expectation for el in rcvd]) and
all([el in rcvd for el in expectation]),
'Invalid JSON body of time series returned from '
- '_json_body_ for several series names: {}.'
+ '_json_body_ for several series names: {0}.'
.format(rcvd))
TestSeriesHelper.MySeriesHelper._reset_()
self.assertEqual(
@@ -116,27 +119,36 @@ def testSeveralSeriesNames(self):
'Resetting helper did not empty datapoints.')
def testInvalidHelpers(self):
- '''
- Tests errors in invalid helpers.
- '''
+ """Test errors in invalid helpers."""
class MissingMeta(SeriesHelper):
+ """Define SeriesHelper object for MissingMeta test."""
+
pass
class MissingClient(SeriesHelper):
+ """Define SeriesHelper object for MissingClient test."""
class Meta:
+ """Define metadata for MissingClient object."""
+
series_name = 'events.stats.{server_name}'
fields = ['time', 'server_name']
autocommit = True
class MissingSeriesName(SeriesHelper):
+ """Define SeriesHelper object for MissingSeries test."""
class Meta:
+ """Define metadata for MissingSeriesName object."""
+
fields = ['time', 'server_name']
class MissingFields(SeriesHelper):
+ """Define SeriesHelper for MissingFields test."""
class Meta:
+ """Define metadata for MissingFields object."""
+
series_name = 'events.stats.{server_name}'
for cls in [MissingMeta, MissingClient, MissingFields,
@@ -146,12 +158,13 @@ class Meta:
'server_name': 'us.east-1'})
def testWarnBulkSizeZero(self):
- """
- Tests warning for an invalid bulk size.
- """
+ """Test warning for an invalid bulk size."""
class WarnBulkSizeZero(SeriesHelper):
+ """Define SeriesHelper for WarnBulkSizeZero test."""
class Meta:
+ """Define metadata for WarnBulkSizeZero object."""
+
client = TestSeriesHelper.client
series_name = 'events.stats.{server_name}'
fields = ['time', 'server_name']
@@ -167,8 +180,8 @@ class Meta:
self.assertGreaterEqual(
len(rec_warnings), 1,
- '{} call should have generated one warning.'
- 'Actual generated warnings: {}'.format(
+            '{0} call should have generated one warning. '
+ 'Actual generated warnings: {1}'.format(
WarnBulkSizeZero, '\n'.join(map(str, rec_warnings))))
expected_msg = (
@@ -180,12 +193,13 @@ class Meta:
'Warning message did not contain "forced to 1".')
def testWarnBulkSizeNoEffect(self):
- """
- Tests warning for a set bulk size but autocommit False.
- """
+ """Test warning for a set bulk size but autocommit False."""
class WarnBulkSizeNoEffect(SeriesHelper):
+ """Define SeriesHelper for WarnBulkSizeNoEffect object."""
class Meta:
+ """Define metadata for WarnBulkSizeNoEffect object."""
+
series_name = 'events.stats.{server_name}'
fields = ['time', 'server_name']
bulk_size = 5
@@ -197,8 +211,8 @@ class Meta:
self.assertGreaterEqual(
len(rec_warnings), 1,
- '{} call should have generated one warning.'
- 'Actual generated warnings: {}'.format(
+            '{0} call should have generated one warning. '
+ 'Actual generated warnings: {1}'.format(
WarnBulkSizeNoEffect, '\n'.join(map(str, rec_warnings))))
expected_msg = (
diff --git a/influxdb/tests/misc.py b/influxdb/tests/misc.py
index 6a3857b0..324d13c4 100644
--- a/influxdb/tests/misc.py
+++ b/influxdb/tests/misc.py
@@ -1,10 +1,18 @@
# -*- coding: utf-8 -*-
+"""Define the misc handler for InfluxDBClient test."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
import socket
def get_free_ports(num_ports, ip='127.0.0.1'):
- """Get `num_ports` free/available ports on the interface linked to the `ip´
+ """Determine free ports on provided interface.
+
+ Get `num_ports` free/available ports on the interface linked to the `ip`
:param int num_ports: The number of free ports to get
:param str ip: The ip on which the ports have to be taken
:return: a set of ports number
@@ -31,6 +39,7 @@ def get_free_ports(num_ports, ip='127.0.0.1'):
def is_port_open(port, ip='127.0.0.1'):
+ """Check if given TCP port is open for connection."""
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
result = sock.connect_ex((ip, port))
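
For reference, is_port_open boils down to socket.connect_ex, which returns 0 on a successful TCP connect and an errno otherwise; a self-contained sketch:

import socket


def port_is_open(port, ip='127.0.0.1'):
    # connect_ex reports failure via its return value instead of raising.
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        return sock.connect_ex((ip, port)) == 0
    finally:
        sock.close()


print(port_is_open(8086))  # False unless a local service is listening
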
diff --git a/influxdb/tests/resultset_test.py b/influxdb/tests/resultset_test.py
index ce5fd41a..83faa4dd 100644
--- a/influxdb/tests/resultset_test.py
+++ b/influxdb/tests/resultset_test.py
@@ -1,4 +1,10 @@
# -*- coding: utf-8 -*-
+"""Define the resultset test package."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
import unittest
@@ -7,39 +13,48 @@
class TestResultSet(unittest.TestCase):
+ """Define the ResultSet test object."""
def setUp(self):
+ """Set up an instance of TestResultSet."""
self.query_response = {
"results": [
- {"series": [{"measurement": "cpu_load_short",
- "tags": {"host": "server01",
- "region": "us-west"},
- "columns": ["time", "value"],
- "values": [
- ["2015-01-29T21:51:28.968422294Z", 0.64]
- ]},
- {"measurement": "cpu_load_short",
- "tags": {"host": "server02",
- "region": "us-west"},
- "columns": ["time", "value"],
+ {"series": [{"name": "cpu_load_short",
+ "columns": ["time", "value", "host", "region"],
"values": [
- ["2015-01-29T21:51:28.968422294Z", 0.65]
+ ["2015-01-29T21:51:28.968422294Z",
+ 0.64,
+ "server01",
+ "us-west"],
+ ["2015-01-29T21:51:28.968422294Z",
+ 0.65,
+ "server02",
+ "us-west"],
]},
- {"measurement": "other_serie",
- "tags": {"host": "server01",
- "region": "us-west"},
- "columns": ["time", "value"],
+ {"name": "other_series",
+ "columns": ["time", "value", "host", "region"],
"values": [
- ["2015-01-29T21:51:28.968422294Z", 0.66]
+ ["2015-01-29T21:51:28.968422294Z",
+ 0.66,
+ "server01",
+ "us-west"],
]}]}
]
}
+
self.rs = ResultSet(self.query_response['results'][0])
def test_filter_by_name(self):
+ """Test filtering by name in TestResultSet object."""
expected = [
- {'value': 0.64, 'time': '2015-01-29T21:51:28.968422294Z'},
- {'value': 0.65, 'time': '2015-01-29T21:51:28.968422294Z'}
+ {'value': 0.64,
+ 'time': '2015-01-29T21:51:28.968422294Z',
+ 'host': 'server01',
+ 'region': 'us-west'},
+ {'value': 0.65,
+ 'time': '2015-01-29T21:51:28.968422294Z',
+ 'host': 'server02',
+ 'region': 'us-west'},
]
self.assertEqual(expected, list(self.rs['cpu_load_short']))
@@ -48,9 +63,16 @@ def test_filter_by_name(self):
measurement='cpu_load_short')))
def test_filter_by_tags(self):
+ """Test filter by tags in TestResultSet object."""
expected = [
- {'time': '2015-01-29T21:51:28.968422294Z', 'value': 0.64},
- {'time': '2015-01-29T21:51:28.968422294Z', 'value': 0.66}
+ {'value': 0.64,
+ 'time': '2015-01-29T21:51:28.968422294Z',
+ 'host': 'server01',
+ 'region': 'us-west'},
+ {'value': 0.66,
+ 'time': '2015-01-29T21:51:28.968422294Z',
+ 'host': 'server01',
+ 'region': 'us-west'},
]
self.assertEqual(
@@ -64,36 +86,48 @@ def test_filter_by_tags(self):
)
def test_filter_by_name_and_tags(self):
+ """Test filter by name and tags in TestResultSet object."""
self.assertEqual(
list(self.rs[('cpu_load_short', {"host": "server01"})]),
- [{'time': '2015-01-29T21:51:28.968422294Z', 'value': 0.64}]
+ [{'value': 0.64,
+ 'time': '2015-01-29T21:51:28.968422294Z',
+ 'host': 'server01',
+ 'region': 'us-west'}]
)
self.assertEqual(
list(self.rs[('cpu_load_short', {"region": "us-west"})]),
[
- {'value': 0.64, 'time': '2015-01-29T21:51:28.968422294Z'},
- {'value': 0.65, 'time': '2015-01-29T21:51:28.968422294Z'}
+ {'value': 0.64,
+ 'time': '2015-01-29T21:51:28.968422294Z',
+ 'host': 'server01',
+ 'region': 'us-west'},
+ {'value': 0.65,
+ 'time': '2015-01-29T21:51:28.968422294Z',
+ 'host': 'server02',
+ 'region': 'us-west'},
]
)
def test_keys(self):
+ """Test keys in TestResultSet object."""
self.assertEqual(
self.rs.keys(),
[
- ('cpu_load_short', {'host': 'server01', 'region': 'us-west'}),
- ('cpu_load_short', {'host': 'server02', 'region': 'us-west'}),
- ('other_serie', {'host': 'server01', 'region': 'us-west'})
+ ('cpu_load_short', None),
+ ('other_series', None),
]
)
def test_len(self):
+ """Test length in TestResultSet object."""
self.assertEqual(
len(self.rs),
- 3
+ 2
)
def test_items(self):
+ """Test items in TestResultSet object."""
items = list(self.rs.items())
items_lists = [(item[0], list(item[1])) for item in items]
@@ -101,24 +135,27 @@ def test_items(self):
items_lists,
[
(
- ('cpu_load_short',
- {'host': 'server01', 'region': 'us-west'}),
- [{'value': 0.64, 'time': '2015-01-29T21:51:28.968422294Z'}]
- ),
- (
- ('cpu_load_short',
- {'host': 'server02', 'region': 'us-west'}),
- [{'value': 0.65, 'time': '2015-01-29T21:51:28.968422294Z'}]
- ),
+ ('cpu_load_short', None),
+ [
+ {'time': '2015-01-29T21:51:28.968422294Z',
+ 'value': 0.64,
+ 'host': 'server01',
+ 'region': 'us-west'},
+ {'time': '2015-01-29T21:51:28.968422294Z',
+ 'value': 0.65,
+ 'host': 'server02',
+ 'region': 'us-west'}]),
(
- ('other_serie',
- {'host': 'server01', 'region': 'us-west'}),
- [{'value': 0.66, 'time': '2015-01-29T21:51:28.968422294Z'}]
- )
- ]
+ ('other_series', None),
+ [
+ {'time': '2015-01-29T21:51:28.968422294Z',
+ 'value': 0.66,
+ 'host': 'server01',
+ 'region': 'us-west'}])]
)
def test_point_from_cols_vals(self):
+ """Test points from columns in TestResultSet object."""
cols = ['col1', 'col2']
vals = [1, '2']
@@ -129,6 +166,7 @@ def test_point_from_cols_vals(self):
)
def test_system_query(self):
+ """Test system query capabilities in TestResultSet object."""
rs = ResultSet(
{'series': [
{'values': [['another', '48h0m0s', 3, False],
@@ -156,6 +194,7 @@ def test_system_query(self):
)
def test_resultset_error(self):
+ """Test returning error in TestResultSet object."""
with self.assertRaises(InfluxDBClientError):
ResultSet({
"series": [],
diff --git a/influxdb/tests/server_tests/__init__.py b/influxdb/tests/server_tests/__init__.py
index e69de29b..ce149ab4 100644
--- a/influxdb/tests/server_tests/__init__.py
+++ b/influxdb/tests/server_tests/__init__.py
@@ -0,0 +1 @@
+"""Define the server tests package."""
diff --git a/influxdb/tests/server_tests/base.py b/influxdb/tests/server_tests/base.py
index 7bd17eaf..45a9ec80 100644
--- a/influxdb/tests/server_tests/base.py
+++ b/influxdb/tests/server_tests/base.py
@@ -1,4 +1,10 @@
# -*- coding: utf-8 -*-
+"""Define the base module for server test."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
import sys
@@ -30,41 +36,103 @@ def _setup_influxdb_server(inst):
database='db')
+def _setup_gzip_client(inst):
+    """Set up an InfluxDBClient with gzip=True for the test instance."""
+ inst.cli = InfluxDBClient('localhost',
+ inst.influxd_inst.http_port,
+ 'root',
+ '',
+ database='db',
+ gzip=True)
+
+
def _teardown_influxdb_server(inst):
remove_tree = sys.exc_info() == (None, None, None)
inst.influxd_inst.close(remove_tree=remove_tree)
class SingleTestCaseWithServerMixin(object):
- ''' A mixin for unittest.TestCase to start an influxdb server instance
+ """Define the single testcase with server mixin.
+
+ A mixin for unittest.TestCase to start an influxdb server instance
in a temporary directory **for each test function/case**
- '''
+ """
# 'influxdb_template_conf' attribute must be set
# on the TestCase class or instance.
- setUp = _setup_influxdb_server
- tearDown = _teardown_influxdb_server
+ @classmethod
+ def setUp(cls):
+ """Set up an instance of the SingleTestCaseWithServerMixin."""
+ _setup_influxdb_server(cls)
+
+ @classmethod
+ def tearDown(cls):
+ """Tear down an instance of the SingleTestCaseWithServerMixin."""
+ _teardown_influxdb_server(cls)
class ManyTestCasesWithServerMixin(object):
- ''' Same than SingleTestCaseWithServerMixin
- but creates a single instance for the whole class.
- Also pre-creates a fresh database: 'db'.
- '''
+ """Define the many testcase with server mixin.
+
+ Same as the SingleTestCaseWithServerMixin but this module creates
+ a single instance for the whole class. Also pre-creates a fresh
+ database: 'db'.
+ """
# 'influxdb_template_conf' attribute must be set on the class itself !
@classmethod
def setUpClass(cls):
+ """Set up an instance of the ManyTestCasesWithServerMixin."""
_setup_influxdb_server(cls)
def setUp(self):
+ """Set up an instance of the ManyTestCasesWithServerMixin."""
self.cli.create_database('db')
@classmethod
def tearDownClass(cls):
+ """Deconstruct an instance of ManyTestCasesWithServerMixin."""
_teardown_influxdb_server(cls)
def tearDown(self):
+ """Deconstruct an instance of ManyTestCasesWithServerMixin."""
self.cli.drop_database('db')
+
+
+class SingleTestCaseWithServerGzipMixin(object):
+ """Define the single testcase with server with gzip client mixin.
+
+    Same as the SingleTestCaseWithServerMixin but the InfluxDBClient has
+ gzip=True
+ """
+
+ @classmethod
+ def setUp(cls):
+ """Set up an instance of the SingleTestCaseWithServerGzipMixin."""
+ _setup_influxdb_server(cls)
+ _setup_gzip_client(cls)
+
+ @classmethod
+ def tearDown(cls):
+ """Tear down an instance of the SingleTestCaseWithServerMixin."""
+ _teardown_influxdb_server(cls)
+
+
+class ManyTestCasesWithServerGzipMixin(object):
+ """Define the many testcase with server with gzip client mixin.
+
+ Same as the ManyTestCasesWithServerMixin but the InfluxDBClient has
+ gzip=True.
+ """
+
+ @classmethod
+ def setUpClass(cls):
+ """Set up an instance of the ManyTestCasesWithServerGzipMixin."""
+ _setup_influxdb_server(cls)
+ _setup_gzip_client(cls)
+
+ @classmethod
+ def tearDown(cls):
+ """Tear down an instance of the SingleTestCaseWithServerMixin."""
+ _teardown_influxdb_server(cls)
diff --git a/influxdb/tests/server_tests/client_test_with_server.py b/influxdb/tests/server_tests/client_test_with_server.py
index ee150fa5..a0263243 100644
--- a/influxdb/tests/server_tests/client_test_with_server.py
+++ b/influxdb/tests/server_tests/client_test_with_server.py
@@ -1,16 +1,18 @@
# -*- coding: utf-8 -*-
-"""
-unit tests for checking the good/expected interaction between :
+"""Unit tests for checking the InfluxDB server.
+
+The good/expected interaction between:
+ the python client (obviously)
+ and a *real* running server instance.
This basically duplicates what's in client_test.py
- but without mocking around every call.
-
+but without mocking around every call.
"""
-
+from __future__ import absolute_import
+from __future__ import division
from __future__ import print_function
+from __future__ import unicode_literals
from functools import partial
import os
@@ -18,15 +20,17 @@
import unittest
import warnings
-# By default, raise exceptions on warnings
-warnings.simplefilter('error', FutureWarning)
-
from influxdb import InfluxDBClient
from influxdb.exceptions import InfluxDBClientError
-from influxdb.tests import skipIfPYpy, using_pypy, skipServerTests
+from influxdb.tests import skip_if_pypy, using_pypy, skip_server_tests
from influxdb.tests.server_tests.base import ManyTestCasesWithServerMixin
from influxdb.tests.server_tests.base import SingleTestCaseWithServerMixin
+from influxdb.tests.server_tests.base import ManyTestCasesWithServerGzipMixin
+from influxdb.tests.server_tests.base import SingleTestCaseWithServerGzipMixin
+
+# By default, raise exceptions on warnings
+warnings.simplefilter('error', FutureWarning)
if not using_pypy:
import pandas as pd
@@ -36,12 +40,16 @@
THIS_DIR = os.path.abspath(os.path.dirname(__file__))
-def point(serie_name, timestamp=None, tags=None, **fields):
- res = {'measurement': serie_name}
+def point(series_name, timestamp=None, tags=None, **fields):
+ """Define what a point looks like."""
+ res = {'measurement': series_name}
+
if timestamp:
res['time'] = timestamp
+
if tags:
res['tags'] = tags
+
res['fields'] = fields
return res
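+
+
+# For example, point('cpu_load_short', tags={'host': 'server01'}, value=0.64)
+# yields {'measurement': 'cpu_load_short', 'tags': {'host': 'server01'},
+# 'fields': {'value': 0.64}}.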
@@ -76,7 +84,7 @@ def point(serie_name, timestamp=None, tags=None, **fields):
]
if not using_pypy:
- dummy_pointDF = {
+ dummy_point_df = {
"measurement": "cpu_load_short",
"tags": {"host": "server01",
"region": "us-west"},
@@ -84,7 +92,7 @@ def point(serie_name, timestamp=None, tags=None, **fields):
[[0.64]], columns=['value'],
index=pd.to_datetime(["2009-11-10T23:00:00Z"]))
}
- dummy_pointsDF = [{
+ dummy_points_df = [{
"measurement": "cpu_load_short",
"tags": {"host": "server01", "region": "us-west"},
"dataframe": pd.DataFrame(
@@ -114,16 +122,18 @@ def point(serie_name, timestamp=None, tags=None, **fields):
]
-@skipServerTests
-class SimpleTests(SingleTestCaseWithServerMixin,
- unittest.TestCase):
+@skip_server_tests
+class SimpleTests(SingleTestCaseWithServerMixin, unittest.TestCase):
+ """Define the class of simple tests."""
influxdb_template_conf = os.path.join(THIS_DIR, 'influxdb.conf.template')
def test_fresh_server_no_db(self):
+ """Test a fresh server without database."""
self.assertEqual([], self.cli.get_list_database())
def test_create_database(self):
+ """Test create a database."""
self.assertIsNone(self.cli.create_database('new_db_1'))
self.assertIsNone(self.cli.create_database('new_db_2'))
self.assertEqual(
@@ -131,67 +141,53 @@ def test_create_database(self):
[{'name': 'new_db_1'}, {'name': 'new_db_2'}]
)
- def test_create_database_fails(self):
- self.assertIsNone(self.cli.create_database('new_db'))
- with self.assertRaises(InfluxDBClientError) as ctx:
- self.cli.create_database('new_db')
- self.assertEqual('database already exists',
- ctx.exception.content)
-
- def test_get_list_series_empty(self):
- rsp = self.cli.get_list_series()
- self.assertEqual([], rsp)
-
- @unittest.skip("Broken as of 0.9.0")
- def test_get_list_series_empty_DF(self):
- rsp = self.cliDF.get_list_series()
- self.assertEqual({}, rsp)
-
def test_drop_database(self):
+ """Test drop a database."""
self.test_create_database()
self.assertIsNone(self.cli.drop_database('new_db_1'))
self.assertEqual([{'name': 'new_db_2'}], self.cli.get_list_database())
- def test_drop_database_fails(self):
- with self.assertRaises(InfluxDBClientError) as ctx:
- self.cli.drop_database('db')
- self.assertIn('database not found: db',
- ctx.exception.content)
-
def test_query_fail(self):
+ """Test that a query failed."""
with self.assertRaises(InfluxDBClientError) as ctx:
self.cli.query('select column_one from foo')
self.assertIn('database not found: db',
ctx.exception.content)
def test_query_fail_ignore_errors(self):
+ """Test query failed but ignore errors."""
result = self.cli.query('select column_one from foo',
raise_errors=False)
self.assertEqual(result.error, 'database not found: db')
def test_create_user(self):
+ """Test create user."""
self.cli.create_user('test_user', 'secret_password')
rsp = list(self.cli.query("SHOW USERS")['results'])
self.assertIn({'user': 'test_user', 'admin': False},
rsp)
def test_create_user_admin(self):
+ """Test create admin user."""
self.cli.create_user('test_user', 'secret_password', True)
rsp = list(self.cli.query("SHOW USERS")['results'])
self.assertIn({'user': 'test_user', 'admin': True},
rsp)
def test_create_user_blank_password(self):
+ """Test create user with a blank pass."""
self.cli.create_user('test_user', '')
rsp = list(self.cli.query("SHOW USERS")['results'])
self.assertIn({'user': 'test_user', 'admin': False},
rsp)
def test_get_list_users_empty(self):
+ """Test get list of users, but empty."""
rsp = self.cli.get_list_users()
self.assertEqual([], rsp)
def test_get_list_users(self):
+ """Test get list of users."""
self.cli.query("CREATE USER test WITH PASSWORD 'test'")
rsp = self.cli.get_list_users()
@@ -201,47 +197,31 @@ def test_get_list_users(self):
)
def test_create_user_blank_username(self):
+ """Test create blank username."""
with self.assertRaises(InfluxDBClientError) as ctx:
self.cli.create_user('', 'secret_password')
- self.assertEqual(400, ctx.exception.code)
- self.assertIn('{"error":"error parsing query: '
- 'found WITH, expected identifier',
- ctx.exception.content)
- rsp = list(self.cli.query("SHOW USERS")['results'])
- self.assertEqual(rsp, [])
-
- def test_create_user_invalid_username(self):
- with self.assertRaises(InfluxDBClientError) as ctx:
- self.cli.create_user('very invalid', 'secret_password')
- self.assertEqual(400, ctx.exception.code)
- self.assertIn('{"error":"error parsing query: '
- 'found invalid, expected WITH',
+ self.assertIn('username required',
ctx.exception.content)
rsp = list(self.cli.query("SHOW USERS")['results'])
self.assertEqual(rsp, [])
def test_drop_user(self):
+ """Test drop a user."""
self.cli.query("CREATE USER test WITH PASSWORD 'test'")
self.cli.drop_user('test')
users = list(self.cli.query("SHOW USERS")['results'])
self.assertEqual(users, [])
def test_drop_user_nonexisting(self):
+ """Test dropping a nonexistent user."""
with self.assertRaises(InfluxDBClientError) as ctx:
self.cli.drop_user('test')
self.assertIn('user not found',
ctx.exception.content)
- def test_drop_user_invalid(self):
- with self.assertRaises(InfluxDBClientError) as ctx:
- self.cli.drop_user('very invalid')
- self.assertEqual(400, ctx.exception.code)
- self.assertIn('{"error":"error parsing query: '
- 'found invalid, expected',
- ctx.exception.content)
-
@unittest.skip("Broken as of 0.9.0")
def test_revoke_admin_privileges(self):
+ """Test revoking admin privs, deprecated as of v0.9.0."""
self.cli.create_user('test', 'test', admin=True)
self.assertEqual([{'user': 'test', 'admin': True}],
self.cli.get_list_users())
@@ -249,20 +229,15 @@ def test_revoke_admin_privileges(self):
self.assertEqual([{'user': 'test', 'admin': False}],
self.cli.get_list_users())
- def test_revoke_admin_privileges_invalid(self):
- with self.assertRaises(InfluxDBClientError) as ctx:
- self.cli.revoke_admin_privileges('')
- self.assertEqual(400, ctx.exception.code)
- self.assertIn('{"error":"error parsing query: ',
- ctx.exception.content)
-
def test_grant_privilege(self):
+ """Test grant privs to user."""
self.cli.create_user('test', 'test')
self.cli.create_database('testdb')
self.cli.grant_privilege('all', 'testdb', 'test')
# TODO: when supported by InfluxDB, check if privileges are granted
def test_grant_privilege_invalid(self):
+ """Test grant invalid privs to user."""
self.cli.create_user('test', 'test')
self.cli.create_database('testdb')
with self.assertRaises(InfluxDBClientError) as ctx:
@@ -272,12 +247,14 @@ def test_grant_privilege_invalid(self):
ctx.exception.content)
def test_revoke_privilege(self):
+ """Test revoke privs from user."""
self.cli.create_user('test', 'test')
self.cli.create_database('testdb')
self.cli.revoke_privilege('all', 'testdb', 'test')
# TODO: when supported by InfluxDB, check if privileges are revoked
def test_revoke_privilege_invalid(self):
+ """Test revoke invalid privs from user."""
self.cli.create_user('test', 'test')
self.cli.create_database('testdb')
with self.assertRaises(InfluxDBClientError) as ctx:
@@ -286,20 +263,27 @@ def test_revoke_privilege_invalid(self):
self.assertIn('{"error":"error parsing query: ',
ctx.exception.content)
+ def test_invalid_port_fails(self):
+ """Test invalid port access fails."""
+ with self.assertRaises(ValueError):
+ InfluxDBClient('host', '80/redir', 'username', 'password')
+
-@skipServerTests
-class CommonTests(ManyTestCasesWithServerMixin,
- unittest.TestCase):
+@skip_server_tests
+class CommonTests(ManyTestCasesWithServerMixin, unittest.TestCase):
+ """Define a class to handle common tests for the server."""
influxdb_template_conf = os.path.join(THIS_DIR, 'influxdb.conf.template')
def test_write(self):
+ """Test write to the server."""
self.assertIs(True, self.cli.write(
{'points': dummy_point},
params={'db': 'db'},
))
def test_write_check_read(self):
+ """Test write and check read of data to server."""
self.test_write()
time.sleep(1)
rsp = self.cli.query('SELECT * FROM cpu_load_short', database='db')
@@ -308,28 +292,35 @@ def test_write_check_read(self):
list(rsp.get_points()))
def test_write_points(self):
+ """Test writing points to the server."""
self.assertIs(True, self.cli.write_points(dummy_point))
- @skipIfPYpy
+ @skip_if_pypy
def test_write_points_DF(self):
+ """Test writing points with dataframe."""
self.assertIs(
True,
self.cliDF.write_points(
- dummy_pointDF['dataframe'],
- dummy_pointDF['measurement'],
- dummy_pointDF['tags']
+ dummy_point_df['dataframe'],
+ dummy_point_df['measurement'],
+ dummy_point_df['tags']
)
)
def test_write_points_check_read(self):
+ """Test writing points and check read back."""
self.test_write_points()
time.sleep(1) # same as test_write_check_read()
rsp = self.cli.query('SELECT * FROM cpu_load_short')
self.assertEqual(
list(rsp),
- [[{'value': 0.64, 'time': '2009-11-10T23:00:00Z',
- "host": "server01", "region": "us-west"}]]
+ [[
+ {'value': 0.64,
+ 'time': '2009-11-10T23:00:00Z',
+ "host": "server01",
+ "region": "us-west"}
+ ]]
)
rsp2 = list(rsp.get_points())
@@ -338,19 +329,22 @@ def test_write_points_check_read(self):
self.assertEqual(
pt,
- {'time': '2009-11-10T23:00:00Z', 'value': 0.64,
- "host": "server01", "region": "us-west"}
+ {'time': '2009-11-10T23:00:00Z',
+ 'value': 0.64,
+ "host": "server01",
+ "region": "us-west"}
)
@unittest.skip("Broken as of 0.9.0")
def test_write_points_check_read_DF(self):
+ """Test write points and check back with dataframe."""
self.test_write_points_DF()
time.sleep(1) # same as test_write_check_read()
rsp = self.cliDF.query('SELECT * FROM cpu_load_short')
assert_frame_equal(
rsp['cpu_load_short'],
- dummy_pointDF['dataframe']
+ dummy_point_df['dataframe']
)
# Query with Tags
@@ -359,18 +353,23 @@ def test_write_points_check_read_DF(self):
assert_frame_equal(
rsp[('cpu_load_short',
(('host', 'server01'), ('region', 'us-west')))],
- dummy_pointDF['dataframe']
+ dummy_point_df['dataframe']
)
def test_write_multiple_points_different_series(self):
+ """Test write multiple points to different series."""
self.assertIs(True, self.cli.write_points(dummy_points))
time.sleep(1)
rsp = self.cli.query('SELECT * FROM cpu_load_short')
lrsp = list(rsp)
self.assertEqual(
- [[{'value': 0.64, 'time': '2009-11-10T23:00:00Z',
- "host": "server01", "region": "us-west"}]],
+ [[
+ {'value': 0.64,
+ 'time': '2009-11-10T23:00:00Z',
+ "host": "server01",
+ "region": "us-west"}
+ ]],
lrsp
)
@@ -378,33 +377,57 @@ def test_write_multiple_points_different_series(self):
self.assertEqual(
rsp,
- [[{'value': 33, 'time': '2009-11-10T23:01:35Z',
- "host": "server01", "region": "us-west"}]]
+ [[
+ {'value': 33,
+ 'time': '2009-11-10T23:01:35Z',
+ "host": "server01",
+ "region": "us-west"}
+ ]]
+ )
+
+ def test_select_into_as_post(self):
+ """Test SELECT INTO is POSTed."""
+ self.assertIs(True, self.cli.write_points(dummy_points))
+ time.sleep(1)
+ rsp = self.cli.query('SELECT * INTO "newmeas" FROM "memory"')
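+        # the server rejects SELECT ... INTO over GET, so the client is
+        # expected to detect the INTO clause and issue this query as a POST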
+ rsp = self.cli.query('SELECT * FROM "newmeas"')
+ lrsp = list(rsp)
+
+ self.assertEqual(
+ lrsp,
+ [[
+ {'value': 33,
+ 'time': '2009-11-10T23:01:35Z',
+ "host": "server01",
+ "region": "us-west"}
+ ]]
)
@unittest.skip("Broken as of 0.9.0")
def test_write_multiple_points_different_series_DF(self):
+ """Test write multiple points using dataframe to different series."""
for i in range(2):
self.assertIs(
True, self.cliDF.write_points(
- dummy_pointsDF[i]['dataframe'],
- dummy_pointsDF[i]['measurement'],
- dummy_pointsDF[i]['tags']))
+ dummy_points_df[i]['dataframe'],
+ dummy_points_df[i]['measurement'],
+ dummy_points_df[i]['tags']))
time.sleep(1)
rsp = self.cliDF.query('SELECT * FROM cpu_load_short')
assert_frame_equal(
rsp['cpu_load_short'],
- dummy_pointsDF[0]['dataframe']
+ dummy_points_df[0]['dataframe']
)
rsp = self.cliDF.query('SELECT * FROM memory')
assert_frame_equal(
rsp['memory'],
- dummy_pointsDF[1]['dataframe']
+ dummy_points_df[1]['dataframe']
)
def test_write_points_batch(self):
+ """Test writing points in a batch."""
dummy_points = [
{"measurement": "cpu_usage", "tags": {"unit": "percent"},
"time": "2009-11-10T23:00:00Z", "fields": {"value": 12.34}},
@@ -419,7 +442,36 @@ def test_write_points_batch(self):
batch_size=2)
time.sleep(5)
net_in = self.cli.query("SELECT value FROM network "
- "WHERE direction='in'").raw
+ "WHERE direction=$dir",
+ bind_params={'dir': 'in'}
+ ).raw
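+        # bind_params is sent out-of-band as JSON in the 'params' query-string
+        # argument rather than being interpolated into the InfluxQL text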
+ net_out = self.cli.query("SELECT value FROM network "
+ "WHERE direction='out'").raw
+ cpu = self.cli.query("SELECT value FROM cpu_usage").raw
+ self.assertIn(123, net_in['series'][0]['values'][0])
+ self.assertIn(12, net_out['series'][0]['values'][0])
+ self.assertIn(12.34, cpu['series'][0]['values'][0])
+
+ def test_write_points_batch_generator(self):
+ """Test writing points in a batch from a generator."""
+ dummy_points = [
+ {"measurement": "cpu_usage", "tags": {"unit": "percent"},
+ "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.34}},
+ {"measurement": "network", "tags": {"direction": "in"},
+ "time": "2009-11-10T23:00:00Z", "fields": {"value": 123.00}},
+ {"measurement": "network", "tags": {"direction": "out"},
+ "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.00}}
+ ]
+ dummy_points_generator = (point for point in dummy_points)
+ self.cli.write_points(points=dummy_points_generator,
+ tags={"host": "server01",
+ "region": "us-west"},
+ batch_size=2)
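+        # with batch_size=2 the three points above are written in two
+        # requests (two points, then one)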
+ time.sleep(5)
+ net_in = self.cli.query("SELECT value FROM network "
+ "WHERE direction=$dir",
+ bind_params={'dir': 'in'}
+ ).raw
net_out = self.cli.query("SELECT value FROM network "
"WHERE direction='out'").raw
cpu = self.cli.query("SELECT value FROM cpu_usage").raw
@@ -428,10 +480,12 @@ def test_write_points_batch(self):
self.assertIn(12.34, cpu['series'][0]['values'][0])
def test_query(self):
+ """Test querying data back from server."""
self.assertIs(True, self.cli.write_points(dummy_point))
@unittest.skip('Not implemented for 0.9')
def test_query_chunked(self):
+ """Test query for chunked response from server."""
cli = InfluxDBClient(database='db')
example_object = {
'points': [
@@ -452,130 +506,210 @@ def test_query_chunked(self):
del example_object
# TODO ?
- def test_get_list_series_and_delete(self):
- self.cli.write_points(dummy_point)
- rsp = self.cli.get_list_series()
- self.assertEqual(
- [
- {'name': 'cpu_load_short',
- 'tags': [
- {'host': 'server01',
- 'region': 'us-west',
- '_key':
- 'cpu_load_short,host=server01,region=us-west'}]}
- ],
- rsp
- )
-
def test_delete_series_invalid(self):
+ """Test delete invalid series."""
with self.assertRaises(InfluxDBClientError):
self.cli.delete_series()
- def test_delete_series(self):
- self.assertEqual(len(self.cli.get_list_series()), 0)
- self.cli.write_points(dummy_points)
- self.assertEqual(len(self.cli.get_list_series()), 2)
- self.cli.delete_series(measurement='cpu_load_short')
- self.assertEqual(len(self.cli.get_list_series()), 1)
- self.cli.delete_series(tags={'region': 'us-west'})
- self.assertEqual(len(self.cli.get_list_series()), 0)
-
- @unittest.skip("Broken as of 0.9.0")
- def test_get_list_series_DF(self):
- self.cli.write_points(dummy_point)
- rsp = self.cliDF.get_list_series()
-
- expected = pd.DataFrame(
- [[1, 'server01', 'us-west']],
- columns=['_id', 'host', 'region'])
- assert_frame_equal(rsp['cpu_load_short'], expected)
-
def test_default_retention_policy(self):
+ """Test add default retention policy."""
rsp = self.cli.get_list_retention_policies()
self.assertEqual(
[
- {'name': 'default',
- 'duration': '0',
+ {'name': 'autogen',
+ 'duration': '0s',
'replicaN': 1,
+ 'shardGroupDuration': u'168h0m0s',
'default': True}
],
rsp
)
def test_create_retention_policy_default(self):
+ """Test create a new default retention policy."""
self.cli.create_retention_policy('somename', '1d', 1, default=True)
self.cli.create_retention_policy('another', '2d', 1, default=False)
rsp = self.cli.get_list_retention_policies()
self.assertEqual(
[
- {'duration': '0',
+ {'duration': '0s',
'default': False,
'replicaN': 1,
- 'name': 'default'},
+ 'shardGroupDuration': u'168h0m0s',
+ 'name': 'autogen'},
{'duration': '24h0m0s',
'default': True,
'replicaN': 1,
+ 'shardGroupDuration': u'1h0m0s',
'name': 'somename'},
{'duration': '48h0m0s',
'default': False,
'replicaN': 1,
+ 'shardGroupDuration': u'24h0m0s',
'name': 'another'}
],
rsp
)
def test_create_retention_policy(self):
+ """Test creating a new retention policy, not default."""
self.cli.create_retention_policy('somename', '1d', 1)
+        # NB: creating a retention policy without specifying a shard group
+        # duration leads to a shard group duration of 1 hour
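+        # (the default shard group duration follows the RP duration:
+        # <2d -> 1h, 2d-6mo -> 1d, >6mo or infinite -> 7d, hence
+        # autogen's 168h0m0s above)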
+ rsp = self.cli.get_list_retention_policies()
+ self.assertEqual(
+ [
+ {'duration': '0s',
+ 'default': True,
+ 'replicaN': 1,
+ 'shardGroupDuration': u'168h0m0s',
+ 'name': 'autogen'},
+ {'duration': '24h0m0s',
+ 'default': False,
+ 'replicaN': 1,
+ 'shardGroupDuration': u'1h0m0s',
+ 'name': 'somename'}
+ ],
+ rsp
+ )
+
+ self.cli.drop_retention_policy('somename', 'db')
+ # recreate the RP
+ self.cli.create_retention_policy('somename', '1w', 1,
+ shard_duration='1h')
+
+ rsp = self.cli.get_list_retention_policies()
+ self.assertEqual(
+ [
+ {'duration': '0s',
+ 'default': True,
+ 'replicaN': 1,
+ 'shardGroupDuration': u'168h0m0s',
+ 'name': 'autogen'},
+ {'duration': '168h0m0s',
+ 'default': False,
+ 'replicaN': 1,
+ 'shardGroupDuration': u'1h0m0s',
+ 'name': 'somename'}
+ ],
+ rsp
+ )
+
+ self.cli.drop_retention_policy('somename', 'db')
+ # recreate the RP
+ self.cli.create_retention_policy('somename', '1w', 1)
+
rsp = self.cli.get_list_retention_policies()
self.assertEqual(
- [{'duration': '0', 'default': True,
- 'replicaN': 1, 'name': 'default'},
- {'duration': '24h0m0s', 'default': False,
- 'replicaN': 1, 'name': 'somename'}],
+ [
+ {'duration': '0s',
+ 'default': True,
+ 'replicaN': 1,
+ 'shardGroupDuration': u'168h0m0s',
+ 'name': 'autogen'},
+ {'duration': '168h0m0s',
+ 'default': False,
+ 'replicaN': 1,
+ 'shardGroupDuration': u'24h0m0s',
+ 'name': 'somename'}
+ ],
rsp
)
def test_alter_retention_policy(self):
+ """Test alter a retention policy, not default."""
self.cli.create_retention_policy('somename', '1d', 1)
# Test alter duration
self.cli.alter_retention_policy('somename', 'db',
- duration='4d')
+ duration='4d',
+ shard_duration='2h')
+ # NB: altering retention policy doesn't change shard group duration
rsp = self.cli.get_list_retention_policies()
self.assertEqual(
- [{'duration': '0', 'default': True,
- 'replicaN': 1, 'name': 'default'},
- {'duration': '96h0m0s', 'default': False,
- 'replicaN': 1, 'name': 'somename'}],
+ [
+ {'duration': '0s',
+ 'default': True,
+ 'replicaN': 1,
+ 'shardGroupDuration': u'168h0m0s',
+ 'name': 'autogen'},
+ {'duration': '96h0m0s',
+ 'default': False,
+ 'replicaN': 1,
+ 'shardGroupDuration': u'2h0m0s',
+ 'name': 'somename'}
+ ],
rsp
)
# Test alter replication
self.cli.alter_retention_policy('somename', 'db',
replication=4)
+
+ # NB: altering retention policy doesn't change shard group duration
rsp = self.cli.get_list_retention_policies()
self.assertEqual(
- [{'duration': '0', 'default': True,
- 'replicaN': 1, 'name': 'default'},
- {'duration': '96h0m0s', 'default': False,
- 'replicaN': 4, 'name': 'somename'}],
+ [
+ {'duration': '0s',
+ 'default': True,
+ 'replicaN': 1,
+ 'shardGroupDuration': u'168h0m0s',
+ 'name': 'autogen'},
+ {'duration': '96h0m0s',
+ 'default': False,
+ 'replicaN': 4,
+ 'shardGroupDuration': u'2h0m0s',
+ 'name': 'somename'}
+ ],
rsp
)
# Test alter default
self.cli.alter_retention_policy('somename', 'db',
default=True)
+ # NB: altering retention policy doesn't change shard group duration
rsp = self.cli.get_list_retention_policies()
self.assertEqual(
- [{'duration': '0', 'default': False,
- 'replicaN': 1, 'name': 'default'},
- {'duration': '96h0m0s', 'default': True,
- 'replicaN': 4, 'name': 'somename'}],
+ [
+ {'duration': '0s',
+ 'default': False,
+ 'replicaN': 1,
+ 'shardGroupDuration': u'168h0m0s',
+ 'name': 'autogen'},
+ {'duration': '96h0m0s',
+ 'default': True,
+ 'replicaN': 4,
+ 'shardGroupDuration': u'2h0m0s',
+ 'name': 'somename'}
+ ],
+ rsp
+ )
+
+ # Test alter shard_duration
+ self.cli.alter_retention_policy('somename', 'db',
+ shard_duration='4h')
+
+ rsp = self.cli.get_list_retention_policies()
+ self.assertEqual(
+ [
+ {'duration': '0s',
+ 'default': False,
+ 'replicaN': 1,
+ 'shardGroupDuration': u'168h0m0s',
+ 'name': 'autogen'},
+ {'duration': '96h0m0s',
+ 'default': True,
+ 'replicaN': 4,
+ 'shardGroupDuration': u'4h0m0s',
+ 'name': 'somename'}
+ ],
rsp
)
def test_alter_retention_policy_invalid(self):
+ """Test invalid alter retention policy."""
self.cli.create_retention_policy('somename', '1d', 1)
with self.assertRaises(InfluxDBClientError) as ctx:
self.cli.alter_retention_policy('somename', 'db')
@@ -584,15 +718,72 @@ def test_alter_retention_policy_invalid(self):
ctx.exception.content)
rsp = self.cli.get_list_retention_policies()
self.assertEqual(
- [{'duration': '0', 'default': True,
- 'replicaN': 1, 'name': 'default'},
- {'duration': '24h0m0s', 'default': False,
- 'replicaN': 1, 'name': 'somename'}],
+ [
+ {'duration': '0s',
+ 'default': True,
+ 'replicaN': 1,
+ 'shardGroupDuration': u'168h0m0s',
+ 'name': 'autogen'},
+ {'duration': '24h0m0s',
+ 'default': False,
+ 'replicaN': 1,
+ 'shardGroupDuration': u'1h0m0s',
+ 'name': 'somename'}
+ ],
rsp
)
+ def test_drop_retention_policy(self):
+ """Test drop a retention policy."""
+ self.cli.create_retention_policy('somename', '1d', 1)
+
+ # Test drop retention
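+        # tags come back as ordinary columns in this response, so the
+        # ResultSet keys below carry None instead of a tag dict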
+ self.cli.drop_retention_policy('somename', 'db')
+ rsp = self.cli.get_list_retention_policies()
+ self.assertEqual(
+ [
+ {'duration': '0s',
+ 'default': True,
+ 'replicaN': 1,
+ 'shardGroupDuration': u'168h0m0s',
+ 'name': 'autogen'}
+ ],
+ rsp
+ )
+
+ def test_create_continuous_query(self):
+ """Test continuous query creation."""
+ self.cli.create_retention_policy('some_rp', '1d', 1)
+ query = 'select count("value") into "some_rp"."events" from ' \
+ '"events" group by time(10m)'
+ self.cli.create_continuous_query('test_cq', query, 'db')
+ cqs = self.cli.get_list_continuous_queries()
+ expected_cqs = [
+ {
+ 'db': [
+ {
+ 'name': 'test_cq',
+ 'query': 'CREATE CONTINUOUS QUERY test_cq ON db '
+ 'BEGIN SELECT count(value) INTO '
+ 'db.some_rp.events FROM db.autogen.events '
+ 'GROUP BY time(10m) END'
+ }
+ ]
+ }
+ ]
+ self.assertEqual(cqs, expected_cqs)
+
+ def test_drop_continuous_query(self):
+ """Test continuous query drop."""
+ self.test_create_continuous_query()
+ self.cli.drop_continuous_query('test_cq', 'db')
+ cqs = self.cli.get_list_continuous_queries()
+ expected_cqs = [{'db': []}]
+ self.assertEqual(cqs, expected_cqs)
+
def test_issue_143(self):
- pt = partial(point, 'a_serie_name', timestamp='2015-03-30T16:16:37Z')
+ """Test for PR#143 from repo."""
+ pt = partial(point, 'a_series_name', timestamp='2015-03-30T16:16:37Z')
pts = [
pt(value=15),
pt(tags={'tag_1': 'value1'}, value=5),
@@ -600,19 +791,20 @@ def test_issue_143(self):
]
self.cli.write_points(pts)
time.sleep(1)
- rsp = list(self.cli.query('SELECT * FROM a_serie_name GROUP BY tag_1'))
+ rsp = list(self.cli.query('SELECT * FROM a_series_name \
+GROUP BY tag_1').get_points())
self.assertEqual(
[
- [{'value': 15, 'time': '2015-03-30T16:16:37Z'}],
- [{'value': 5, 'time': '2015-03-30T16:16:37Z'}],
- [{'value': 10, 'time': '2015-03-30T16:16:37Z'}]
+ {'time': '2015-03-30T16:16:37Z', 'value': 15},
+ {'time': '2015-03-30T16:16:37Z', 'value': 5},
+ {'time': '2015-03-30T16:16:37Z', 'value': 10}
],
rsp
)
# a slightly more complex one with 2 tags values:
- pt = partial(point, 'serie2', timestamp='2015-03-30T16:16:37Z')
+ pt = partial(point, 'series2', timestamp='2015-03-30T16:16:37Z')
pts = [
pt(tags={'tag1': 'value1', 'tag2': 'v1'}, value=0),
pt(tags={'tag1': 'value1', 'tag2': 'v2'}, value=5),
@@ -620,18 +812,18 @@ def test_issue_143(self):
]
self.cli.write_points(pts)
time.sleep(1)
- rsp = self.cli.query('SELECT * FROM serie2 GROUP BY tag1,tag2')
+ rsp = self.cli.query('SELECT * FROM series2 GROUP BY tag1,tag2')
self.assertEqual(
[
- [{'value': 0, 'time': '2015-03-30T16:16:37Z'}],
- [{'value': 5, 'time': '2015-03-30T16:16:37Z'}],
- [{'value': 10, 'time': '2015-03-30T16:16:37Z'}]
+ {'value': 0, 'time': '2015-03-30T16:16:37Z'},
+ {'value': 5, 'time': '2015-03-30T16:16:37Z'},
+ {'value': 10, 'time': '2015-03-30T16:16:37Z'}
],
- list(rsp)
+ list(rsp['series2'])
)
- all_tag2_equal_v1 = list(rsp[None, {'tag2': 'v1'}])
+ all_tag2_equal_v1 = list(rsp.get_points(tags={'tag2': 'v1'}))
self.assertEqual(
[{'value': 0, 'time': '2015-03-30T16:16:37Z'},
@@ -640,29 +832,89 @@ def test_issue_143(self):
)
def test_query_multiple_series(self):
- pt = partial(point, 'serie1', timestamp='2015-03-30T16:16:37Z')
+ """Test query for multiple series."""
+ pt = partial(point, 'series1', timestamp='2015-03-30T16:16:37Z')
pts = [
pt(tags={'tag1': 'value1', 'tag2': 'v1'}, value=0),
]
self.cli.write_points(pts)
- pt = partial(point, 'serie2', timestamp='1970-03-30T16:16:37Z')
+ pt = partial(point, 'series2', timestamp='1970-03-30T16:16:37Z')
pts = [
pt(tags={'tag1': 'value1', 'tag2': 'v1'},
value=0, data1=33, data2="bla"),
]
self.cli.write_points(pts)
+ def test_get_list_series(self):
+ """Test get a list of series from the database."""
+ dummy_points = [
+ {
+ "measurement": "cpu_load_short",
+ "tags": {
+ "host": "server01",
+ "region": "us-west"
+ },
+ "time": "2009-11-10T23:00:00.123456Z",
+ "fields": {
+ "value": 0.64
+ }
+ }
+ ]
-@skipServerTests
-class UdpTests(ManyTestCasesWithServerMixin,
- unittest.TestCase):
+ dummy_points_2 = [
+ {
+ "measurement": "memory_usage",
+ "tags": {
+ "host": "server02",
+ "region": "us-east"
+ },
+ "time": "2009-11-10T23:00:00.123456Z",
+ "fields": {
+ "value": 80
+ }
+ }
+ ]
+
+ self.cli.write_points(dummy_points)
+ self.cli.write_points(dummy_points_2)
+
+        self.assertEqual(
+ self.cli.get_list_series(),
+ ['cpu_load_short,host=server01,region=us-west',
+ 'memory_usage,host=server02,region=us-east']
+ )
+
+        self.assertEqual(
+ self.cli.get_list_series(measurement='memory_usage'),
+ ['memory_usage,host=server02,region=us-east']
+ )
+
+        self.assertEqual(
+ self.cli.get_list_series(tags={'host': 'server02'}),
+ ['memory_usage,host=server02,region=us-east'])
+
+        self.assertEqual(
+ self.cli.get_list_series(
+ measurement='cpu_load_short', tags={'host': 'server02'}),
+ [])
+
+
+@skip_server_tests
+class UdpTests(ManyTestCasesWithServerMixin, unittest.TestCase):
+ """Define a class to test UDP series."""
influxdb_udp_enabled = True
influxdb_template_conf = os.path.join(THIS_DIR,
'influxdb.conf.template')
def test_write_points_udp(self):
+ """Test write points UDP."""
cli = InfluxDBClient(
'localhost',
self.influxd_inst.http_port,
@@ -682,7 +934,33 @@ def test_write_points_udp(self):
self.assertEqual(
# this is dummy_points :
- [{'value': 0.64, 'time': '2009-11-10T23:00:00Z',
- "host": "server01", "region": "us-west"}],
+ [
+ {'value': 0.64,
+ 'time': '2009-11-10T23:00:00Z',
+ "host": "server01",
+ "region": "us-west"}
+ ],
list(rsp['cpu_load_short'])
)
+
+
+# Run the tests again, but with gzip enabled this time
+@skip_server_tests
+class GzipSimpleTests(SimpleTests, SingleTestCaseWithServerGzipMixin):
+ """Repeat the simple tests with InfluxDBClient where gzip=True."""
+
+ pass
+
+
+@skip_server_tests
+class GzipCommonTests(CommonTests, ManyTestCasesWithServerGzipMixin):
+ """Repeat the common tests with InfluxDBClient where gzip=True."""
+
+ pass
+
+
+@skip_server_tests
+class GzipUdpTests(UdpTests, ManyTestCasesWithServerGzipMixin):
+ """Repeat the UDP tests with InfluxDBClient where gzip=True."""
+
+ pass
diff --git a/influxdb/tests/server_tests/influxdb.conf.template b/influxdb/tests/server_tests/influxdb.conf.template
index 1e9ddf31..efcff78a 100644
--- a/influxdb/tests/server_tests/influxdb.conf.template
+++ b/influxdb/tests/server_tests/influxdb.conf.template
@@ -1,59 +1,19 @@
+bind-address = ":{global_port}"
+
[meta]
dir = "{meta_dir}"
hostname = "localhost"
bind-address = ":{meta_port}"
- retention-autocreate = true
- election-timeout = "1s"
- heartbeat-timeout = "1s"
- leader-lease-timeout = "500ms"
- commit-timeout = "50ms"
[data]
dir = "{data_dir}"
wal-dir = "{wal_dir}"
- retention-auto-create = true
- retention-check-enabled = true
- retention-check-period = "10m0s"
- retention-create-period = "45m0s"
-
-[cluster]
- shard-writer-timeout = "5s"
-
-[retention]
- enabled = true
- check-interval = "10m0s"
-
-[shard-precreation]
- enabled = true
- check-interval = "10m0s"
- advance-period = "30m0s"
-
-[admin]
- enabled = true
- bind-address = ":{admin_port}"
+ index-version = "tsi1"
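+  # tsi1 selects the disk-backed time series index instead of the
+  # default in-memory ("inmem") index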
[http]
enabled = true
bind-address = ":{http_port}"
auth-enabled = false
- log-enabled = true
- write-tracing = false
-
-[collectd]
- enabled = false
- bind-address = ":25826"
- database = "collectd"
- retention-policy = ""
- batch-size = 5000
- batch-timeout = "10s"
- typesdb = "/usr/share/collectd/types.db"
-
-[opentsdb]
- enabled = false
- bind-address = ":4242"
- database = "opentsdb"
- retention-policy = ""
- consistency-level = "one"
[[udp]]
enabled = {udp_enabled}
@@ -65,18 +25,5 @@
[monitor]
store-enabled = false
-[continuous_queries]
- enabled = true
- recompute-previous-n = 2
- recompute-no-older-than = "10m0s"
- compute-runs-per-interval = 10
- compute-no-more-than = "2m0s"
-
[hinted-handoff]
- enabled = false
dir = "{handoff_dir}"
- max-size = 1073741824
- max-age = "168h0m0s"
- retry-rate-limit = 0
- retry-interval = "1s"
-
diff --git a/influxdb/tests/server_tests/influxdb_instance.py b/influxdb/tests/server_tests/influxdb_instance.py
index 1f053956..2dd823ff 100644
--- a/influxdb/tests/server_tests/influxdb_instance.py
+++ b/influxdb/tests/server_tests/influxdb_instance.py
@@ -1,29 +1,54 @@
# -*- coding: utf-8 -*-
+"""Define the test module for an influxdb instance."""
+from __future__ import absolute_import
+from __future__ import division
from __future__ import print_function
+from __future__ import unicode_literals
import datetime
+import distutils.spawn
import os
import tempfile
-import distutils
-import time
import shutil
import subprocess
-import unittest
import sys
+import time
+import unittest
from influxdb.tests.misc import is_port_open, get_free_ports
+# hack in check_output if it's not defined, like for python 2.6
+if "check_output" not in dir(subprocess):
+ def f(*popenargs, **kwargs):
+ """Check for output."""
+ if 'stdout' in kwargs:
+ raise ValueError(
+ 'stdout argument not allowed, it will be overridden.'
+ )
+ process = subprocess.Popen(stdout=subprocess.PIPE,
+ *popenargs,
+ **kwargs)
+ output, unused_err = process.communicate()
+ retcode = process.poll()
+ if retcode:
+ cmd = kwargs.get("args")
+ if cmd is None:
+ cmd = popenargs[0]
+ raise subprocess.CalledProcessError(retcode, cmd)
+ return output
+ subprocess.check_output = f
+
class InfluxDbInstance(object):
- """ A class to launch of fresh influxdb server instance
+ """Define an instance of InfluxDB.
+
+    A class to launch a fresh influxdb server instance
    in a temporary directory, using a config file template.
"""
- def __init__(self,
- conf_template,
- udp_enabled=False):
-
+ def __init__(self, conf_template, udp_enabled=False):
+ """Initialize an instance of InfluxDbInstance."""
if os.environ.get("INFLUXDB_PYTHON_SKIP_SERVER_TESTS", None) == 'True':
raise unittest.SkipTest(
"Skipping server test (INFLUXDB_PYTHON_SKIP_SERVER_TESTS)"
@@ -43,7 +68,6 @@ def __init__(self,
raise e
def _start_server(self, conf_template, udp_enabled):
-
# create a temporary dir to store all needed files
# for the influxdb server instance :
self.temp_dir_base = tempfile.mkdtemp()
@@ -56,7 +80,7 @@ def _start_server(self, conf_template, udp_enabled):
# find a couple free ports :
free_ports = get_free_ports(4)
ports = {}
- for service in 'http', 'admin', 'meta', 'udp':
+ for service in 'http', 'global', 'meta', 'udp':
ports[service + '_port'] = free_ports.pop()
if not udp_enabled:
ports['udp_port'] = -1
@@ -89,7 +113,7 @@ def _start_server(self, conf_template, udp_enabled):
"%s > Started influxdb bin in %r with ports %s and %s.." % (
datetime.datetime.now(),
self.temp_dir_base,
- self.admin_port,
+ self.global_port,
self.http_port
)
)
@@ -101,8 +125,8 @@ def _start_server(self, conf_template, udp_enabled):
# or you run a 286 @ 1Mhz ?
try:
while time.time() < timeout:
- if (is_port_open(self.http_port)
- and is_port_open(self.admin_port)):
+ if (is_port_open(self.http_port) and
+ is_port_open(self.global_port)):
# it's hard to check if a UDP port is open..
if udp_enabled:
# so let's just sleep 0.5 sec in this case
@@ -126,6 +150,7 @@ def _start_server(self, conf_template, udp_enabled):
% data)
def find_influxd_path(self):
+ """Find the path for InfluxDB."""
influxdb_bin_path = os.environ.get(
'INFLUXDB_PYTHON_INFLUXD_PATH',
None
@@ -136,7 +161,7 @@ def find_influxd_path(self):
if not influxdb_bin_path:
try:
influxdb_bin_path = subprocess.check_output(
- ['which', 'influxdb']
+ ['which', 'influxd']
).strip()
except subprocess.CalledProcessError:
# fallback on :
@@ -151,6 +176,7 @@ def find_influxd_path(self):
return influxdb_bin_path
def get_logs_and_output(self):
+ """Query for logs and output."""
proc = self.proc
try:
with open(self.logs_file) as fh:
@@ -165,6 +191,7 @@ def get_logs_and_output(self):
}
def close(self, remove_tree=True):
+ """Close an instance of InfluxDB."""
self.proc.terminate()
self.proc.wait()
if remove_tree:
diff --git a/influxdb/tests/test_line_protocol.py b/influxdb/tests/test_line_protocol.py
index 168323fb..5b344990 100644
--- a/influxdb/tests/test_line_protocol.py
+++ b/influxdb/tests/test_line_protocol.py
@@ -1,16 +1,30 @@
# -*- coding: utf-8 -*-
+"""Define the line protocol test module."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
import unittest
+
+from datetime import datetime
+from decimal import Decimal
+
+from pytz import UTC, timezone
from influxdb import line_protocol
class TestLineProtocol(unittest.TestCase):
+ """Define the LineProtocol test object."""
def test_make_lines(self):
+ """Test make new lines in TestLineProtocol object."""
data = {
"tags": {
"empty_tag": "",
"none_tag": None,
+ "backslash_tag": "C:\\",
"integer_tag": 2,
"string_tag": "hello"
},
@@ -30,11 +44,42 @@ def test_make_lines(self):
self.assertEqual(
line_protocol.make_lines(data),
- 'test,integer_tag=2,string_tag=hello '
+ 'test,backslash_tag=C:\\\\,integer_tag=2,string_tag=hello '
'bool_val=True,float_val=1.1,int_val=1i,string_val="hello!"\n'
)
+ def test_timezone(self):
+ """Test timezone in TestLineProtocol object."""
+ dt = datetime(2009, 11, 10, 23, 0, 0, 123456)
+ utc = UTC.localize(dt)
+ berlin = timezone('Europe/Berlin').localize(dt)
+ eastern = berlin.astimezone(timezone('US/Eastern'))
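+        # the naive dt is interpreted as UTC, so it matches utc; berlin and
+        # eastern name the same instant (one hour before the utc point)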
+ data = {
+ "points": [
+ {"measurement": "A", "fields": {"val": 1},
+ "time": 0},
+ {"measurement": "A", "fields": {"val": 1},
+ "time": "2009-11-10T23:00:00.123456Z"},
+ {"measurement": "A", "fields": {"val": 1}, "time": dt},
+ {"measurement": "A", "fields": {"val": 1}, "time": utc},
+ {"measurement": "A", "fields": {"val": 1}, "time": berlin},
+ {"measurement": "A", "fields": {"val": 1}, "time": eastern},
+ ]
+ }
+ self.assertEqual(
+ line_protocol.make_lines(data),
+ '\n'.join([
+ 'A val=1i 0',
+ 'A val=1i 1257894000123456000',
+ 'A val=1i 1257894000123456000',
+ 'A val=1i 1257894000123456000',
+ 'A val=1i 1257890400123456000',
+ 'A val=1i 1257890400123456000',
+ ]) + '\n'
+ )
+
def test_string_val_newline(self):
+ """Test string value with newline in TestLineProtocol object."""
data = {
"points": [
{
@@ -50,3 +95,111 @@ def test_string_val_newline(self):
line_protocol.make_lines(data),
'm1 multi_line="line1\\nline1\\nline3"\n'
)
+
+ def test_make_lines_unicode(self):
+ """Test make unicode lines in TestLineProtocol object."""
+ data = {
+ "tags": {
+ "unicode_tag": "\'Привет!\'" # Hello! in Russian
+ },
+ "points": [
+ {
+ "measurement": "test",
+ "fields": {
+ "unicode_val": "Привет!", # Hello! in Russian
+ }
+ }
+ ]
+ }
+
+ self.assertEqual(
+ line_protocol.make_lines(data),
+ 'test,unicode_tag=\'Привет!\' unicode_val="Привет!"\n'
+ )
+
+ def test_make_lines_empty_field_string(self):
+ """Test make lines with an empty string field."""
+ data = {
+ "points": [
+ {
+ "measurement": "test",
+ "fields": {
+ "string": "",
+ }
+ }
+ ]
+ }
+
+ self.assertEqual(
+ line_protocol.make_lines(data),
+ 'test string=""\n'
+ )
+
+ def test_tag_value_newline(self):
+ """Test make lines with tag value contains newline."""
+ data = {
+ "tags": {
+ "t1": "line1\nline2"
+ },
+ "points": [
+ {
+ "measurement": "test",
+ "fields": {
+ "val": "hello"
+ }
+ }
+ ]
+ }
+
+ self.assertEqual(
+ line_protocol.make_lines(data),
+ 'test,t1=line1\\nline2 val="hello"\n'
+ )
+
+ def test_quote_ident(self):
+ """Test quote indentation in TestLineProtocol object."""
+ self.assertEqual(
+ line_protocol.quote_ident(r"""\foo ' bar " Örf"""),
+ r'''"\\foo ' bar \" Örf"'''
+ )
+
+ def test_quote_literal(self):
+ """Test quote literal in TestLineProtocol object."""
+ self.assertEqual(
+ line_protocol.quote_literal(r"""\foo ' bar " Örf"""),
+ r"""'\\foo \' bar " Örf'"""
+ )
+
+ def test_float_with_long_decimal_fraction(self):
+ """Ensure precision is preserved when casting floats into strings."""
+ data = {
+ "points": [
+ {
+ "measurement": "test",
+ "fields": {
+ "float_val": 1.0000000000000009,
+ }
+ }
+ ]
+ }
+ self.assertEqual(
+ line_protocol.make_lines(data),
+ 'test float_val=1.0000000000000009\n'
+ )
+
+ def test_float_with_long_decimal_fraction_as_type_decimal(self):
+ """Ensure precision is preserved when casting Decimal into strings."""
+ data = {
+ "points": [
+ {
+ "measurement": "test",
+ "fields": {
+ "float_val": Decimal(0.8289445733333332),
+ }
+ }
+ ]
+ }
+ self.assertEqual(
+ line_protocol.make_lines(data),
+ 'test float_val=0.8289445733333332\n'
+ )
diff --git a/mypy.ini b/mypy.ini
new file mode 100644
index 00000000..308aa62d
--- /dev/null
+++ b/mypy.ini
@@ -0,0 +1,8 @@
+[mypy]
+ignore_missing_imports = True
+warn_unused_ignores = True
+warn_unused_configs = True
+warn_redundant_casts = True
+warn_no_return = True
+no_implicit_optional = True
+strict_equality = True
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 00000000..1b68d94e
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,3 @@
+[build-system]
+requires = ["setuptools>=42", "wheel"]
+build-backend = "setuptools.build_meta"
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
index 43c09bd1..a3df3154 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,4 +1,5 @@
-python-dateutil>=2.0.0
-pytz
-requests>=1.0.3
-six>=1.9.0
+python-dateutil>=2.6.0
+pytz>=2016.10
+requests>=2.17.0
+six>=1.10.0
+msgpack>=0.5.0
diff --git a/setup.cfg b/setup.cfg
index 5e409001..217d437b 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,2 +1,5 @@
+[bdist_rpm]
+requires=python-dateutil
+
[wheel]
universal = 1
diff --git a/setup.py b/setup.py
index fe09ce36..8ac7d1a7 100755
--- a/setup.py
+++ b/setup.py
@@ -1,5 +1,6 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
+"""Define the setup options."""
try:
import distribute_setup
@@ -22,6 +23,11 @@
with open('requirements.txt', 'r') as f:
requires = [x.strip() for x in f if x.strip()]
+# Debugging: Print the requires values
+print("install_requires values:")
+for req in requires:
+ print(f"- {req}")
+
with open('test-requirements.txt', 'r') as f:
test_requires = [x.strip() for x in f if x.strip()]
@@ -41,7 +47,7 @@
tests_require=test_requires,
install_requires=requires,
extras_require={'test': test_requires},
- classifiers=(
+ classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
@@ -49,8 +55,10 @@
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
- 'Programming Language :: Python :: 3.3',
+ 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
- ),
+ ],
)
diff --git a/test-requirements.txt b/test-requirements.txt
index cbc6add3..9b31f5f1 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -1,4 +1,4 @@
nose
nose-cov
mock
-requests-mock
\ No newline at end of file
+requests-mock
diff --git a/tox.ini b/tox.ini
index 74f733d0..a1005abb 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,13 +1,21 @@
[tox]
-envlist = py34, py27, pypy, flake8
+envlist = py27, py35, py36, py37, pypy, pypy3, flake8, pep257, coverage, docs, mypy
[testenv]
passenv = INFLUXDB_PYTHON_INFLUXD_PATH
setenv = INFLUXDB_PYTHON_SKIP_SERVER_TESTS=False
deps = -r{toxinidir}/requirements.txt
-r{toxinidir}/test-requirements.txt
- py27,py32,py33,py34: pandas
+ py27: pandas==0.21.1
+ py27: numpy==1.13.3
+ py35: pandas==0.22.0
+ py35: numpy==1.14.6
+ py36: pandas==0.23.4
+ py36: numpy==1.15.4
+ py37: pandas>=0.24.2
+ py37: numpy>=1.16.2
# Only install pandas with non-pypy interpreters
+# Testing all combinations would be too expensive
commands = nosetests -v --with-doctest {posargs}
[testenv:flake8]
@@ -16,21 +24,36 @@ deps =
pep8-naming
commands = flake8 influxdb
+[testenv:pep257]
+deps = pydocstyle
+commands = pydocstyle --count -ve examples influxdb
+
[testenv:coverage]
deps = -r{toxinidir}/requirements.txt
-r{toxinidir}/test-requirements.txt
- pandas
+ pandas==0.24.2
coverage
+ numpy
commands = nosetests -v --with-coverage --cover-html --cover-package=influxdb
[testenv:docs]
deps = -r{toxinidir}/requirements.txt
- pandas
- Sphinx==1.2.3
+ pandas>=0.24.2
+ numpy>=1.16.2
+ Sphinx>=1.8.5
sphinx_rtd_theme
commands = sphinx-build -b html docs/source docs/build
+[testenv:mypy]
+deps = -r{toxinidir}/test-requirements.txt
+ mypy==0.720
+commands = mypy --config-file mypy.ini -p influxdb
+
[flake8]
-ignore = N802,F821
+ignore = W503,W504,W605,N802,F821,E402
+# W503: Line break occurred before a binary operator
+# W504: Line break occurred after a binary operator
+# W605: invalid escape sequence
# N802: nosetests's setUp function
# F821: False positive in influxdb/dataframe_client.py
+# E402: module level import not at top of file