Hello community,

here is the log from the commit of package python-influxdb for openSUSE:Factory 
checked in at 2019-09-13 14:59:59
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-influxdb (Old)
 and      /work/SRC/openSUSE:Factory/.python-influxdb.new.7948 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-influxdb"

Fri Sep 13 14:59:59 2019 rev:5 rq:730200 version:5.2.3

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-influxdb/python-influxdb.changes  2019-05-16 22:07:55.770397697 +0200
+++ /work/SRC/openSUSE:Factory/.python-influxdb.new.7948/python-influxdb.changes  2019-09-13 15:00:02.101281600 +0200
@@ -1,0 +2,13 @@
+Wed Sep 11 14:50:13 UTC 2019 - Tomáš Chvátal <[email protected]>
+
+- Update to 5.2.3:
+  * Add consistency param to InfluxDBClient.write_points (#643 thx @RonRothman)
+  * Add UDP example (#648 thx @shantanoo-desai)
+  * Add consistency parameter to write_points (#664 thx @RonRothman)
+  * The query() function now accepts a bind_params argument for parameter binding (#678 thx @clslgrnc)
+  * Add get_list_continuous_queries, drop_continuous_query, and create_continuous_query management methods for continuous queries (#681 thx @lukaszdudek-silvair)
+  * Mutual TLS authentication (#702 thx @LloydW93)
+- Drop merged patches:
+  * python-influxdb-d5d1249.patch
+
+-------------------------------------------------------------------
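For reference, here is what the headline 5.2.3 additions look like in use. This is a minimal sketch, not taken from the changelog itself; the host, port, database name and point values are assumptions:

    from influxdb import InfluxDBClient

    # assumed connection details; adjust to your own setup
    client = InfluxDBClient(host='localhost', port=8086, database='example')

    # new in 5.2.3: per-write consistency ('any', 'one', 'quorum' or 'all')
    client.write_points(
        [{"measurement": "cpu_load_short", "fields": {"value": 0.64}}],
        consistency='any',
    )

    # new in 5.2.3: bind_params instead of interpolating values into the query
    result = client.query(
        'SELECT "value" FROM "cpu_load_short" WHERE "host" = $host',
        bind_params={'host': 'server01'},
    )
    print(list(result.get_points()))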

Old:
----
  python-influxdb-d5d1249.patch
  v5.2.2.tar.gz

New:
----
  v5.2.3.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-influxdb.spec ++++++
--- /var/tmp/diff_new_pack.LYxuvz/_old  2019-09-13 15:00:02.821281630 +0200
+++ /var/tmp/diff_new_pack.LYxuvz/_new  2019-09-13 15:00:02.825281631 +0200
@@ -18,15 +18,13 @@
 
 %{?!python_module:%define python_module() python-%{**} python3-%{**}}
 Name:           python-influxdb
-Version:        5.2.2
+Version:        5.2.3
 Release:        0
 Summary:        InfluxDB client
 License:        MIT
 Group:          Development/Languages/Python
-Url:            https://github.com/influxdb/influxdb-python
+URL:            https://github.com/influxdb/influxdb-python
 Source:         https://github.com/influxdata/influxdb-python/archive/v%{version}.tar.gz
-# recent changes in master to fix tests
-Patch0:         python-influxdb-d5d1249.patch
 # fix module 'distutils' has no attribute 'spawn'
 Patch1:         python-influxdb-fix-testsuite.patch
 BuildRequires:  %{python_module python-dateutil >= 2.0.0}
@@ -36,22 +34,21 @@
 BuildRequires:  %{python_module six >= 1.9.0}
 BuildRequires:  fdupes
 BuildRequires:  python-rpm-macros
+Requires:       python-python-dateutil >= 2.6.0
+Requires:       python-pytz
+Requires:       python-requests >= 1.17.0
+Requires:       python-six >= 1.10.0
+BuildArch:      noarch
 # SECTION test requirements
-%if 0%{?suse_version} >= 1500
-BuildRequires:  hostname
-%endif
 BuildRequires:  %{python_module mock}
 BuildRequires:  %{python_module nose}
 BuildRequires:  %{python_module pandas}
 BuildRequires:  %{python_module requests-mock}
 BuildRequires:  influxdb
+%if 0%{?suse_version} >= 1500
+BuildRequires:  hostname
+%endif
 # /SECTION
-Requires:       python-python-dateutil >= 2.6.0
-Requires:       python-pytz
-Requires:       python-requests >= 1.17.0
-Requires:       python-six >= 1.10.0
-BuildArch:      noarch
-
 %python_subpackages
 
 %description
@@ -59,14 +56,13 @@
 
 %prep
 %setup -q -n influxdb-python-%{version}
-%patch0 -p1
 %patch1 -p1
 
 %build
 %python_build
 
 %check
-%python_expand nosetests-%{$python_bin_suffix}
+%python_expand nosetests-%{$python_bin_suffix} -v
 
 %install
 %python_install

++++++ v5.2.2.tar.gz -> v5.2.3.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/influxdb-python-5.2.2/.travis.yml new/influxdb-python-5.2.3/.travis.yml
--- old/influxdb-python-5.2.2/.travis.yml       2019-03-14 15:19:55.000000000 +0100
+++ new/influxdb-python-5.2.3/.travis.yml       2019-08-26 19:25:51.000000000 +0200
@@ -8,10 +8,12 @@
   - "pypy3"
 
 env:
-  - INFLUXDB_VER=1.2.4
-  - INFLUXDB_VER=1.3.9
-  - INFLUXDB_VER=1.4.2
-  - INFLUXDB_VER=1.5.4
+  - INFLUXDB_VER=1.2.4  # 2017-05-08
+  - INFLUXDB_VER=1.3.9  # 2018-01-19
+  - INFLUXDB_VER=1.4.3  # 2018-01-30
+  - INFLUXDB_VER=1.5.4  # 2018-06-22
+  - INFLUXDB_VER=1.6.4  # 2018-10-24
+  - INFLUXDB_VER=1.7.4  # 2019-02-14
 
 addons:
   apt:
@@ -20,7 +22,31 @@
 
 matrix:
   include:
-    - python: 2.7
+    - python: 3.7
+      dist: xenial
+      sudo: true
+      env: INFLUXDB_VER=1.2.4
+    - python: 3.7
+      dist: xenial
+      sudo: true
+      env: INFLUXDB_VER=1.3.9
+    - python: 3.7
+      dist: xenial
+      sudo: true
+      env: INFLUXDB_VER=1.4.3
+    - python: 3.7
+      dist: xenial
+      sudo: true
+      env: INFLUXDB_VER=1.5.4
+    - python: 3.7
+      dist: xenial
+      sudo: true
+      env: INFLUXDB_VER=1.6.4
+    - python: 3.7
+      dist: xenial
+      sudo: true
+      env: INFLUXDB_VER=1.7.4
+    - python: 3.6
       env: TOX_ENV=pep257
     - python: 3.6
       env: TOX_ENV=docs
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/influxdb-python-5.2.2/CHANGELOG.md new/influxdb-python-5.2.3/CHANGELOG.md
--- old/influxdb-python-5.2.2/CHANGELOG.md      2019-03-14 15:19:55.000000000 +0100
+++ new/influxdb-python-5.2.3/CHANGELOG.md      2019-08-26 19:25:51.000000000 +0200
@@ -12,6 +12,28 @@
 
 ### Removed
 
+## [v5.2.3] - 2019-08-19
+
+### Added
+- Add consistency param to InfluxDBClient.write_points (#643 thx @RonRothman)
+- Add UDP example (#648 thx @shantanoo-desai)
+- Add consistency paramter to `write_points` (#664 tx @RonRothman)
+- The query() function now accepts a bind_params argument for parameter binding (#678 thx @clslgrnc)
+- Add `get_list_continuous_queries`, `drop_continuous_query`, and `create_continuous_query` management methods for
+  continuous queries (#681 thx @lukaszdudek-silvair)
+- Mutual TLS authentication (#702 thx @LloydW93)
+
+### Changed
+- Update test suite to add support for Python 3.7 and InfluxDB v1.6.4 and 1.7.4 (#692 thx @clslgrnc)
+- Update supported versions of influxdb + python (#693 thx @clslgrnc)
+- Fix for the line protocol issue with leading comma (#694 thx @d3banjan)
+- Update classifiers tuple to list in setup.py (#697 thx @Hanaasagi)
+- Update documentation for empty `delete_series` confusion (#699 thx @xginn8)
+- Fix newline character issue in tag value (#716 thx @syhan)
+- Update tests/tutorials_pandas.py to reference `line` protocol, bug in `json` (#737 thx @Aeium)
+
+### Removed
+
 ## [v5.2.2] - 2019-03-14
 ### Added
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/influxdb-python-5.2.2/CODEOWNERS new/influxdb-python-5.2.3/CODEOWNERS
--- old/influxdb-python-5.2.2/CODEOWNERS        1970-01-01 01:00:00.000000000 +0100
+++ new/influxdb-python-5.2.3/CODEOWNERS        2019-08-26 19:25:51.000000000 +0200
@@ -0,0 +1 @@
+* @aviau @xginn8 @sebito91
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/influxdb-python-5.2.2/README.rst new/influxdb-python-5.2.3/README.rst
--- old/influxdb-python-5.2.2/README.rst        2019-03-14 15:19:55.000000000 +0100
+++ new/influxdb-python-5.2.3/README.rst        2019-08-26 19:25:51.000000000 +0200
@@ -39,7 +39,7 @@
 InfluxDB pre v1.1.0 users
 -------------------------
 
-This module is tested with InfluxDB versions: v1.2.4, v1.3.9, v1.4.2, and v1.5.4.
+This module is tested with InfluxDB versions: v1.2.4, v1.3.9, v1.4.3, v1.5.4, v1.6.4, and 1.7.4.
 
 Those users still on InfluxDB v0.8.x users may still use the legacy client by importing ``from influxdb.influxdb08 import InfluxDBClient``.
 
@@ -59,7 +59,7 @@
 Dependencies
 ------------
 
-The influxdb-python distribution is supported and tested on Python 2.7, 3.5, 3.6, PyPy and PyPy3.
+The influxdb-python distribution is supported and tested on Python 2.7, 3.5, 3.6, 3.7, PyPy and PyPy3.
 
 **Note:** Python <3.5 are currently untested. See ``.travis.yml``.
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/influxdb-python-5.2.2/docs/source/examples.rst new/influxdb-python-5.2.3/docs/source/examples.rst
--- old/influxdb-python-5.2.2/docs/source/examples.rst  2019-03-14 15:19:55.000000000 +0100
+++ new/influxdb-python-5.2.3/docs/source/examples.rst  2019-08-26 19:25:51.000000000 +0200
@@ -25,3 +25,9 @@
 
 .. literalinclude:: ../../examples/tutorial_serieshelper.py
    :language: python
+
+Tutorials - UDP
+===============
+
+.. literalinclude:: ../../examples/tutorial_udp.py
+   :language: python
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/influxdb-python-5.2.2/examples/tutorial.py new/influxdb-python-5.2.3/examples/tutorial.py
--- old/influxdb-python-5.2.2/examples/tutorial.py      2019-03-14 15:19:55.000000000 +0100
+++ new/influxdb-python-5.2.3/examples/tutorial.py      2019-08-26 19:25:51.000000000 +0200
@@ -13,7 +13,9 @@
     dbname = 'example'
     dbuser = 'smly'
     dbuser_password = 'my_secret_password'
-    query = 'select value from cpu_load_short;'
+    query = 'select Float_value from cpu_load_short;'
+    query_where = 'select Int_value from cpu_load_short where host=$host;'
+    bind_params = {'host': 'server01'}
     json_body = [
         {
             "measurement": "cpu_load_short",
@@ -50,6 +52,11 @@
 
     print("Result: {0}".format(result))
 
+    print("Querying data: " + query_where)
+    result = client.query(query_where, bind_params=bind_params)
+
+    print("Result: {0}".format(result))
+
     print("Switch user: " + user)
     client.switch_user(user, password)
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/influxdb-python-5.2.2/examples/tutorial_pandas.py new/influxdb-python-5.2.3/examples/tutorial_pandas.py
--- old/influxdb-python-5.2.2/examples/tutorial_pandas.py       2019-03-14 15:19:55.000000000 +0100
+++ new/influxdb-python-5.2.3/examples/tutorial_pandas.py       2019-08-26 19:25:51.000000000 +0200
@@ -12,7 +12,7 @@
     user = 'root'
     password = 'root'
     dbname = 'demo'
-    protocol = 'json'
+    protocol = 'line'
 
     client = DataFrameClient(host, port, user, password, dbname)
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/influxdb-python-5.2.2/examples/tutorial_udp.py new/influxdb-python-5.2.3/examples/tutorial_udp.py
--- old/influxdb-python-5.2.2/examples/tutorial_udp.py  1970-01-01 01:00:00.000000000 +0100
+++ new/influxdb-python-5.2.3/examples/tutorial_udp.py  2019-08-26 19:25:51.000000000 +0200
@@ -0,0 +1,66 @@
+# -*- coding: utf-8 -*-
+"""Example for sending batch information to InfluxDB via UDP."""
+
+"""
+INFO: In order to use UDP, one should enable the UDP service from the
+`influxdb.conf` under section
+    [[udp]]
+        enabled = true
+        bind-address = ":8089" # port number for sending data via UDP
+        database = "udp1" # name of database to be stored
+    [[udp]]
+        enabled = true
+        bind-address = ":8090"
+        database = "udp2"
+"""
+
+
+import argparse
+
+from influxdb import InfluxDBClient
+
+
+def main(uport):
+    """Instantiate connection to the InfluxDB."""
+    # NOTE: structure of the UDP packet is different than that of information
+    #       sent via HTTP
+    json_body = {
+        "tags": {
+            "host": "server01",
+            "region": "us-west"
+        },
+        "time": "2009-11-10T23:00:00Z",
+        "points": [{
+            "measurement": "cpu_load_short",
+            "fields": {
+                "value": 0.64
+            }
+        },
+            {
+                "measurement": "cpu_load_short",
+                "fields": {
+                    "value": 0.67
+                }
+        }]
+    }
+
+    # make `use_udp` True and  add `udp_port` number from `influxdb.conf` file
+    # no need to mention the database name since it is already configured
+    client = InfluxDBClient(use_udp=True, udp_port=uport)
+
+    # Instead of `write_points` use `send_packet`
+    client.send_packet(json_body)
+
+
+def parse_args():
+    """Parse the args."""
+    parser = argparse.ArgumentParser(
+        description='example code to play with InfluxDB along with UDP Port')
+    parser.add_argument('--uport', type=int, required=True,
+                        help=' UDP port of InfluxDB')
+    return parser.parse_args()
+
+
+if __name__ == '__main__':
+    args = parse_args()
+    main(uport=args.uport)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/influxdb-python-5.2.2/influxdb/__init__.py new/influxdb-python-5.2.3/influxdb/__init__.py
--- old/influxdb-python-5.2.2/influxdb/__init__.py      2019-03-14 15:19:55.000000000 +0100
+++ new/influxdb-python-5.2.3/influxdb/__init__.py      2019-08-26 19:25:51.000000000 +0200
@@ -18,4 +18,4 @@
 ]
 
 
-__version__ = '5.2.2'
+__version__ = '5.2.3'
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/influxdb-python-5.2.2/influxdb/_dataframe_client.py new/influxdb-python-5.2.3/influxdb/_dataframe_client.py
--- old/influxdb-python-5.2.2/influxdb/_dataframe_client.py     2019-03-14 15:19:55.000000000 +0100
+++ new/influxdb-python-5.2.3/influxdb/_dataframe_client.py     2019-08-26 19:25:51.000000000 +0200
@@ -142,6 +142,7 @@
     def query(self,
               query,
               params=None,
+              bind_params=None,
               epoch=None,
               expected_response_code=200,
               database=None,
@@ -153,8 +154,18 @@
         """
         Query data into a DataFrame.
 
+        .. danger::
+            In order to avoid injection vulnerabilities (similar to `SQL
+            injection <https://www.owasp.org/index.php/SQL_Injection>`_
+            vulnerabilities), do not directly include untrusted data into the
+            ``query`` parameter, use ``bind_params`` instead.
+
         :param query: the actual query string
         :param params: additional parameters for the request, defaults to {}
+        :param bind_params: bind parameters for the query:
+            any variable in the query written as ``'$var_name'`` will be
+            replaced with ``bind_params['var_name']``. Only works in the
+            ``WHERE`` clause and takes precedence over ``params['params']``
         :param epoch: response timestamps to be in epoch format either 'h',
             'm', 's', 'ms', 'u', or 'ns',defaults to `None` which is
             RFC3339 UTC format with nanosecond precision
@@ -172,6 +183,7 @@
         :rtype: :class:`~.ResultSet`
         """
         query_args = dict(params=params,
+                          bind_params=bind_params,
                           epoch=epoch,
                           expected_response_code=expected_response_code,
                           raise_errors=raise_errors,
@@ -351,7 +363,7 @@
             tag_df = self._stringify_dataframe(
                 tag_df, numeric_precision, datatype='tag')
 
-            # join preprendded tags, leaving None values out
+            # join prepended tags, leaving None values out
             tags = tag_df.apply(
                 lambda s: [',' + s.name + '=' + v if v else '' for v in s])
             tags = tags.sum(axis=1)
@@ -380,6 +392,8 @@
             field_df.columns[1:]]
         field_df = field_df.where(~mask_null, '')  # drop Null entries
         fields = field_df.sum(axis=1)
+        # take out leading , where first column has a Null value
+        fields = fields.str.lstrip(",")
         del field_df
 
         # Generate line protocol string
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/influxdb-python-5.2.2/influxdb/client.py new/influxdb-python-5.2.3/influxdb/client.py
--- old/influxdb-python-5.2.2/influxdb/client.py        2019-03-14 15:19:55.000000000 +0100
+++ new/influxdb-python-5.2.3/influxdb/client.py        2019-08-26 19:25:51.000000000 +0200
@@ -61,6 +61,13 @@
     :type proxies: dict
     :param path: path of InfluxDB on the server to connect, defaults to ''
     :type path: str
+    :param cert: Path to client certificate information to use for mutual TLS
+        authentication. You can specify a local cert to use
+        as a single file containing the private key and the certificate, or as
+        a tuple of both files’ paths, defaults to None
+    :type cert: str
+
+    :raises ValueError: if cert is provided but ssl is disabled (set to False)
     """
 
     def __init__(self,
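A short sketch of the mutual TLS option documented above; the host name and certificate paths are placeholders, and ssl=True is required or the constructor raises ValueError:

    from influxdb import InfluxDBClient

    client = InfluxDBClient(
        host='influxdb.example.com',  # placeholder host
        port=8086,
        ssl=True,
        verify_ssl=True,
        cert=('/path/to/client.crt', '/path/to/client.key'),  # or one combined PEM file
    )
    client.ping()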
@@ -78,6 +85,7 @@
                  proxies=None,
                  pool_size=10,
                  path='',
+                 cert=None,
                  ):
         """Construct a new InfluxDBClient object."""
         self.__host = host
@@ -120,6 +128,14 @@
         else:
             self._proxies = proxies
 
+        if cert:
+            if not ssl:
+                raise ValueError(
+                    "Client certificate provided but ssl is disabled."
+                )
+            else:
+                self._session.cert = cert
+
         self.__baseurl = "{0}://{1}:{2}{3}".format(
             self._scheme,
             self._host,
@@ -345,6 +361,7 @@
     def query(self,
               query,
               params=None,
+              bind_params=None,
               epoch=None,
               expected_response_code=200,
               database=None,
@@ -354,6 +371,12 @@
               method="GET"):
         """Send a query to InfluxDB.
 
+        .. danger::
+            In order to avoid injection vulnerabilities (similar to `SQL
+            injection <https://www.owasp.org/index.php/SQL_Injection>`_
+            vulnerabilities), do not directly include untrusted data into the
+            ``query`` parameter, use ``bind_params`` instead.
+
         :param query: the actual query string
         :type query: str
 
@@ -361,6 +384,12 @@
             defaults to {}
         :type params: dict
 
+        :param bind_params: bind parameters for the query:
+            any variable in the query written as ``'$var_name'`` will be
+            replaced with ``bind_params['var_name']``. Only works in the
+            ``WHERE`` clause and takes precedence over ``params['params']``
+        :type bind_params: dict
+
         :param epoch: response timestamps to be in epoch format either 'h',
             'm', 's', 'ms', 'u', or 'ns',defaults to `None` which is
             RFC3339 UTC format with nanosecond precision
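Illustratively, and per the docstring above, the two calls below send the same request: bind_params is JSON-encoded into params['params'] on your behalf and takes precedence over values placed there by hand. The database and measurement names are assumptions:

    import json
    from influxdb import InfluxDBClient

    client = InfluxDBClient(database='example')
    q = 'SELECT "value" FROM "cpu_load_short" WHERE "host" = $host'

    # preferred: let the client encode the bound values
    r1 = client.query(q, bind_params={'host': 'server01'})

    # hand-rolled equivalent using the raw request parameter
    r2 = client.query(q, params={'params': json.dumps({'host': 'server01'})})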
@@ -394,6 +423,11 @@
         if params is None:
             params = {}
 
+        if bind_params is not None:
+            params_dict = json.loads(params.get('params', '{}'))
+            params_dict.update(bind_params)
+            params['params'] = json.dumps(params_dict)
+
         params['q'] = query
         params['db'] = database or self._database
 
@@ -440,7 +474,8 @@
                      retention_policy=None,
                      tags=None,
                      batch_size=None,
-                     protocol='json'
+                     protocol='json',
+                     consistency=None
                      ):
         """Write to multiple time series names.
 
@@ -468,6 +503,9 @@
         :type batch_size: int
         :param protocol: Protocol for writing data. Either 'line' or 'json'.
         :type protocol: str
+        :param consistency: Consistency for the points.
+            One of {'any','one','quorum','all'}.
+        :type consistency: str
         :returns: True, if the operation is successful
         :rtype: bool
 
@@ -480,14 +518,16 @@
                                    time_precision=time_precision,
                                    database=database,
                                    retention_policy=retention_policy,
-                                   tags=tags, protocol=protocol)
+                                   tags=tags, protocol=protocol,
+                                   consistency=consistency)
             return True
 
         return self._write_points(points=points,
                                   time_precision=time_precision,
                                   database=database,
                                   retention_policy=retention_policy,
-                                  tags=tags, protocol=protocol)
+                                  tags=tags, protocol=protocol,
+                                  consistency=consistency)
 
     def ping(self):
         """Check connectivity to InfluxDB.
@@ -513,12 +553,16 @@
                       database,
                       retention_policy,
                       tags,
-                      protocol='json'):
+                      protocol='json',
+                      consistency=None):
         if time_precision not in ['n', 'u', 'ms', 's', 'm', 'h', None]:
             raise ValueError(
                 "Invalid time precision is given. "
                 "(use 'n', 'u', 'ms', 's', 'm' or 'h')")
 
+        if consistency not in ['any', 'one', 'quorum', 'all', None]:
+            raise ValueError('Invalid consistency: {}'.format(consistency))
+
         if protocol == 'json':
             data = {
                 'points': points
@@ -533,6 +577,9 @@
             'db': database or self._database
         }
 
+        if consistency is not None:
+            params['consistency'] = consistency
+
         if time_precision is not None:
             params['precision'] = time_precision
 
@@ -809,7 +856,9 @@
     def delete_series(self, database=None, measurement=None, tags=None):
         """Delete series from a database.
 
-        Series can be filtered by measurement and tags.
+        Series must be filtered by either measurement and tags.
+        This method cannot be used to delete all series, use
+        `drop_database` instead.
 
         :param database: the database from which the series should be
             deleted, defaults to client's current database
@@ -908,6 +957,98 @@
         text = "SHOW GRANTS FOR {0}".format(quote_ident(username))
         return list(self.query(text).get_points())
 
+    def get_list_continuous_queries(self):
+        """Get the list of continuous queries in InfluxDB.
+
+        :return: all CQs in InfluxDB
+        :rtype: list of dictionaries
+
+        :Example:
+
+        ::
+
+            >> cqs = client.get_list_cqs()
+            >> cqs
+            [
+                {
+                    u'db1': []
+                },
+                {
+                    u'db2': [
+                        {
+                            u'name': u'vampire',
+                            u'query': u'CREATE CONTINUOUS QUERY vampire ON '
+                                       'mydb BEGIN SELECT count(dracula) INTO '
+                                       'mydb.autogen.all_of_them FROM '
+                                       'mydb.autogen.one GROUP BY time(5m) END'
+                        }
+                    ]
+                }
+            ]
+        """
+        query_string = "SHOW CONTINUOUS QUERIES"
+        return [{sk[0]: list(p)} for sk, p in self.query(query_string).items()]
+
+    def create_continuous_query(self, name, select, database=None,
+                                resample_opts=None):
+        r"""Create a continuous query for a database.
+
+        :param name: the name of continuous query to create
+        :type name: str
+        :param select: select statement for the continuous query
+        :type select: str
+        :param database: the database for which the continuous query is
+            created. Defaults to current client's database
+        :type database: str
+        :param resample_opts: resample options
+        :type resample_opts: str
+
+        :Example:
+
+        ::
+
+            >> select_clause = 'SELECT mean("value") INTO "cpu_mean" ' \
+            ...                 'FROM "cpu" GROUP BY time(1m)'
+            >> client.create_continuous_query(
+            ...     'cpu_mean', select_clause, 'db_name', 'EVERY 10s FOR 2m'
+            ... )
+            >> client.get_list_continuous_queries()
+            [
+                {
+                    'db_name': [
+                        {
+                            'name': 'cpu_mean',
+                            'query': 'CREATE CONTINUOUS QUERY "cpu_mean" '
+                                    'ON "db_name" '
+                                    'RESAMPLE EVERY 10s FOR 2m '
+                                    'BEGIN SELECT mean("value") '
+                                    'INTO "cpu_mean" FROM "cpu" '
+                                    'GROUP BY time(1m) END'
+                        }
+                    ]
+                }
+            ]
+        """
+        query_string = (
+            "CREATE CONTINUOUS QUERY {0} ON {1}{2} BEGIN {3} END"
+        ).format(quote_ident(name), quote_ident(database or self._database),
+                 ' RESAMPLE ' + resample_opts if resample_opts else '', select)
+        self.query(query_string)
+
+    def drop_continuous_query(self, name, database=None):
+        """Drop an existing continuous query for a database.
+
+        :param name: the name of continuous query to drop
+        :type name: str
+        :param database: the database for which the continuous query is
+            dropped. Defaults to current client's database
+        :type database: str
+        """
+        query_string = (
+            "DROP CONTINUOUS QUERY {0} ON {1}"
+        ).format(quote_ident(name), quote_ident(database or self._database))
+        self.query(query_string)
+
     def send_packet(self, packet, protocol='json', time_precision=None):
         """Send an UDP packet.
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/influxdb-python-5.2.2/influxdb/line_protocol.py new/influxdb-python-5.2.3/influxdb/line_protocol.py
--- old/influxdb-python-5.2.2/influxdb/line_protocol.py 2019-03-14 15:19:55.000000000 +0100
+++ new/influxdb-python-5.2.3/influxdb/line_protocol.py 2019-08-26 19:25:51.000000000 +0200
@@ -54,6 +54,8 @@
         ",", "\\,"
     ).replace(
         "=", "\\="
+    ).replace(
+        "\n", "\\n"
     )
 
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/influxdb-python-5.2.2/influxdb/tests/client_test.py new/influxdb-python-5.2.3/influxdb/tests/client_test.py
--- old/influxdb-python-5.2.2/influxdb/tests/client_test.py     2019-03-14 15:19:55.000000000 +0100
+++ new/influxdb-python-5.2.3/influxdb/tests/client_test.py     2019-08-26 19:25:51.000000000 +0200
@@ -149,6 +149,14 @@
                                       **{'ssl': False})
         self.assertEqual('http://my.host.fr:1886', cli._baseurl)
 
+    def test_cert(self):
+        """Test mutual TLS authentication for TestInfluxDBClient object."""
+        cli = InfluxDBClient(ssl=True, cert='/etc/pki/tls/private/dummy.crt')
+        self.assertEqual(cli._session.cert, '/etc/pki/tls/private/dummy.crt')
+
+        with self.assertRaises(ValueError):
+            cli = InfluxDBClient(cert='/etc/pki/tls/private/dummy.crt')
+
     def test_switch_database(self):
         """Test switch database in TestInfluxDBClient object."""
         cli = InfluxDBClient('host', 8086, 'username', 'password', 'database')
@@ -337,6 +345,23 @@
                 m.last_request.body,
             )
 
+    def test_write_points_with_consistency(self):
+        """Test write points with consistency for TestInfluxDBClient object."""
+        with requests_mock.Mocker() as m:
+            m.register_uri(
+                requests_mock.POST,
+                'http://localhost:8086/write',
+                status_code=204
+            )
+
+            cli = InfluxDBClient(database='db')
+
+            cli.write_points(self.dummy_points, consistency='any')
+            self.assertEqual(
+                m.last_request.qs,
+                {'db': ['db'], 'consistency': ['any']}
+            )
+
     def test_write_points_with_precision_udp(self):
         """Test write points with precision for TestInfluxDBClient object."""
         s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
@@ -409,6 +434,15 @@
                 time_precision='g'
             )
 
+    def test_write_points_bad_consistency(self):
+        """Test write points w/bad consistency value."""
+        cli = InfluxDBClient()
+        with self.assertRaises(ValueError):
+            cli.write_points(
+                self.dummy_points,
+                consistency='boo'
+            )
+
     @raises(Exception)
     def test_write_points_with_precision_fails(self):
         """Test write points w/precision fail for TestInfluxDBClient object."""
@@ -1027,6 +1061,114 @@
         with _mocked_session(cli, 'get', 401):
             cli.get_list_privileges('test')
 
+    def test_get_list_continuous_queries(self):
+        """Test getting a list of continuous queries."""
+        data = {
+            "results": [
+                {
+                    "statement_id": 0,
+                    "series": [
+                        {
+                            "name": "testdb01",
+                            "columns": ["name", "query"],
+                            "values": [["testname01", "testquery01"],
+                                       ["testname02", "testquery02"]]
+                        },
+                        {
+                            "name": "testdb02",
+                            "columns": ["name", "query"],
+                            "values": [["testname03", "testquery03"]]
+                        },
+                        {
+                            "name": "testdb03",
+                            "columns": ["name", "query"]
+                        }
+                    ]
+                }
+            ]
+        }
+
+        with _mocked_session(self.cli, 'get', 200, json.dumps(data)):
+            self.assertListEqual(
+                self.cli.get_list_continuous_queries(),
+                [
+                    {
+                        'testdb01': [
+                            {'name': 'testname01', 'query': 'testquery01'},
+                            {'name': 'testname02', 'query': 'testquery02'}
+                        ]
+                    },
+                    {
+                        'testdb02': [
+                            {'name': 'testname03', 'query': 'testquery03'}
+                        ]
+                    },
+                    {
+                        'testdb03': []
+                    }
+                ]
+            )
+
+    @raises(Exception)
+    def test_get_list_continuous_queries_fails(self):
+        """Test failing to get a list of continuous queries."""
+        with _mocked_session(self.cli, 'get', 400):
+            self.cli.get_list_continuous_queries()
+
+    def test_create_continuous_query(self):
+        """Test continuous query creation."""
+        data = {"results": [{}]}
+        with requests_mock.Mocker() as m:
+            m.register_uri(
+                requests_mock.GET,
+                "http://localhost:8086/query",
+                text=json.dumps(data)
+            )
+            query = 'SELECT count("value") INTO "6_months"."events" FROM ' \
+                    '"events" GROUP BY time(10m)'
+            self.cli.create_continuous_query('cq_name', query, 'db_name')
+            self.assertEqual(
+                m.last_request.qs['q'][0],
+                'create continuous query "cq_name" on "db_name" begin select '
+                'count("value") into "6_months"."events" from "events" group '
+                'by time(10m) end'
+            )
+            self.cli.create_continuous_query('cq_name', query, 'db_name',
+                                             'EVERY 10s FOR 2m')
+            self.assertEqual(
+                m.last_request.qs['q'][0],
+                'create continuous query "cq_name" on "db_name" resample '
+                'every 10s for 2m begin select count("value") into '
+                '"6_months"."events" from "events" group by time(10m) end'
+            )
+
+    @raises(Exception)
+    def test_create_continuous_query_fails(self):
+        """Test failing to create a continuous query."""
+        with _mocked_session(self.cli, 'get', 400):
+            self.cli.create_continuous_query('cq_name', 'select', 'db_name')
+
+    def test_drop_continuous_query(self):
+        """Test dropping a continuous query."""
+        data = {"results": [{}]}
+        with requests_mock.Mocker() as m:
+            m.register_uri(
+                requests_mock.GET,
+                "http://localhost:8086/query",
+                text=json.dumps(data)
+            )
+            self.cli.drop_continuous_query('cq_name', 'db_name')
+            self.assertEqual(
+                m.last_request.qs['q'][0],
+                'drop continuous query "cq_name" on "db_name"'
+            )
+
+    @raises(Exception)
+    def test_drop_continuous_query_fails(self):
+        """Test failing to drop a continuous query."""
+        with _mocked_session(self.cli, 'get', 400):
+            self.cli.drop_continuous_query('cq_name', 'db_name')
+
     def test_invalid_port_fails(self):
         """Test invalid port fail for TestInfluxDBClient object."""
         with self.assertRaises(ValueError):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/influxdb-python-5.2.2/influxdb/tests/dataframe_client_test.py new/influxdb-python-5.2.3/influxdb/tests/dataframe_client_test.py
--- old/influxdb-python-5.2.2/influxdb/tests/dataframe_client_test.py   2019-03-14 15:19:55.000000000 +0100
+++ new/influxdb-python-5.2.3/influxdb/tests/dataframe_client_test.py   2019-08-26 19:25:51.000000000 +0200
@@ -22,6 +22,7 @@
     import pandas as pd
     from pandas.util.testing import assert_frame_equal
     from influxdb import DataFrameClient
+    import numpy
 
 
 @skip_if_pypy
@@ -388,6 +389,71 @@
 
             self.assertEqual(m.last_request.body, expected)
 
+    def test_write_points_from_dataframe_with_leading_none_column(self):
+        """write_points detect erroneous leading comma for null first field."""
+        dataframe = pd.DataFrame(
+            dict(
+                first=[1, None, None, 8, 9],
+                second=[2, None, None, None, 10],
+                third=[3, 4.1, None, None, 11],
+                first_tag=["one", None, None, "eight", None],
+                second_tag=["two", None, None, None, None],
+                third_tag=["three", "four", None, None, None],
+                comment=[
+                    "All columns filled",
+                    "First two of three empty",
+                    "All empty",
+                    "Last two of three empty",
+                    "Empty tags with values",
+                ]
+            ),
+            index=pd.date_range(
+                start=pd.to_datetime('2018-01-01'),
+                freq='1D',
+                periods=5,
+            )
+        )
+        expected = (
+            b'foo,first_tag=one,second_tag=two,third_tag=three'
+            b' comment="All columns filled",first=1.0,second=2.0,third=3.0'
+            b' 1514764800000000000\n'
+            b'foo,third_tag=four'
+            b' comment="First two of three empty",third=4.1'
+            b' 1514851200000000000\n'
+            b'foo comment="All empty" 1514937600000000000\n'
+            b'foo,first_tag=eight'
+            b' comment="Last two of three empty",first=8.0'
+            b' 1515024000000000000\n'
+            b'foo'
+            b' comment="Empty tags with values",first=9.0,second=10.0'
+            b',third=11.0'
+            b' 1515110400000000000\n'
+        )
+
+        with requests_mock.Mocker() as m:
+            m.register_uri(requests_mock.POST,
+                           "http://localhost:8086/write",
+                           status_code=204)
+
+            cli = DataFrameClient(database='db')
+
+            colnames = [
+                "first_tag",
+                "second_tag",
+                "third_tag",
+                "comment",
+                "first",
+                "second",
+                "third"
+            ]
+            cli.write_points(dataframe.loc[:, colnames], 'foo',
+                             tag_columns=[
+                                 "first_tag",
+                                 "second_tag",
+                                 "third_tag"])
+
+            self.assertEqual(m.last_request.body, expected)
+
     def test_write_points_from_dataframe_with_numeric_precision(self):
         """Test write points from df with numeric precision."""
         now = pd.Timestamp('1970-01-01 00:00+00:00')
@@ -396,10 +462,16 @@
                                        ["2", 2, 2.2222222222222]],
                                  index=[now, now + timedelta(hours=1)])
 
-        expected_default_precision = (
-            b'foo,hello=there 0=\"1\",1=1i,2=1.11111111111 0\n'
-            b'foo,hello=there 0=\"2\",1=2i,2=2.22222222222 3600000000000\n'
-        )
+        if numpy.lib.NumpyVersion(numpy.__version__) <= '1.13.3':
+            expected_default_precision = (
+                b'foo,hello=there 0=\"1\",1=1i,2=1.11111111111 0\n'
+                b'foo,hello=there 0=\"2\",1=2i,2=2.22222222222 3600000000000\n'
+            )
+        else:
+            expected_default_precision = (
+                b'foo,hello=there 0=\"1\",1=1i,2=1.1111111111111 0\n'
+                b'foo,hello=there 0=\"2\",1=2i,2=2.2222222222222 3600000000000\n'  # noqa E501 line too long
+            )
 
         expected_specified_precision = (
             b'foo,hello=there 0=\"1\",1=1i,2=1.1111 0\n'
@@ -419,6 +491,9 @@
             cli = DataFrameClient(database='db')
             cli.write_points(dataframe, "foo", {"hello": "there"})
 
+            print(expected_default_precision)
+            print(m.last_request.body)
+
             self.assertEqual(m.last_request.body, expected_default_precision)
 
             cli = DataFrameClient(database='db')
@@ -884,10 +959,11 @@
         expected = [{'cpu_load_short': pd1}, {'cpu_load_short': pd2}]
 
         cli = DataFrameClient('host', 8086, 'username', 'password', 'db')
-        iql = "SELECT value FROM cpu_load_short WHERE region='us-west';"\
-            "SELECT count(value) FROM cpu_load_short WHERE region='us-west'"
+        iql = "SELECT value FROM cpu_load_short WHERE region=$region;"\
+            "SELECT count(value) FROM cpu_load_short WHERE region=$region"
+        bind_params = {'region': 'us-west'}
         with _mocked_session(cli, 'GET', 200, data):
-            result = cli.query(iql)
+            result = cli.query(iql, bind_params=bind_params)
             for r, e in zip(result, expected):
                 for k in e:
                     assert_frame_equal(e[k], r[k])
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/influxdb-python-5.2.2/influxdb/tests/server_tests/client_test_with_server.py new/influxdb-python-5.2.3/influxdb/tests/server_tests/client_test_with_server.py
--- old/influxdb-python-5.2.2/influxdb/tests/server_tests/client_test_with_server.py   2019-03-14 15:19:55.000000000 +0100
+++ new/influxdb-python-5.2.3/influxdb/tests/server_tests/client_test_with_server.py   2019-08-26 19:25:51.000000000 +0200
@@ -440,7 +440,9 @@
                               batch_size=2)
         time.sleep(5)
         net_in = self.cli.query("SELECT value FROM network "
-                                "WHERE direction='in'").raw
+                                "WHERE direction=$dir",
+                                bind_params={'dir': 'in'}
+                                ).raw
         net_out = self.cli.query("SELECT value FROM network "
                                  "WHERE direction='out'").raw
         cpu = self.cli.query("SELECT value FROM cpu_usage").raw
@@ -720,6 +722,36 @@
             rsp
         )
 
+    def test_create_continuous_query(self):
+        """Test continuous query creation."""
+        self.cli.create_retention_policy('some_rp', '1d', 1)
+        query = 'select count("value") into "some_rp"."events" from ' \
+                '"events" group by time(10m)'
+        self.cli.create_continuous_query('test_cq', query, 'db')
+        cqs = self.cli.get_list_continuous_queries()
+        expected_cqs = [
+            {
+                'db': [
+                    {
+                        'name': 'test_cq',
+                        'query': 'CREATE CONTINUOUS QUERY test_cq ON db '
+                                 'BEGIN SELECT count(value) INTO '
+                                 'db.some_rp.events FROM db.autogen.events '
+                                 'GROUP BY time(10m) END'
+                    }
+                ]
+            }
+        ]
+        self.assertEqual(cqs, expected_cqs)
+
+    def test_drop_continuous_query(self):
+        """Test continuous query drop."""
+        self.test_create_continuous_query()
+        self.cli.drop_continuous_query('test_cq', 'db')
+        cqs = self.cli.get_list_continuous_queries()
+        expected_cqs = [{'db': []}]
+        self.assertEqual(cqs, expected_cqs)
+
     def test_issue_143(self):
         """Test for PR#143 from repo."""
         pt = partial(point, 'a_series_name', timestamp='2015-03-30T16:16:37Z')
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/influxdb-python-5.2.2/influxdb/tests/test_line_protocol.py new/influxdb-python-5.2.3/influxdb/tests/test_line_protocol.py
--- old/influxdb-python-5.2.2/influxdb/tests/test_line_protocol.py      2019-03-14 15:19:55.000000000 +0100
+++ new/influxdb-python-5.2.3/influxdb/tests/test_line_protocol.py      2019-08-26 19:25:51.000000000 +0200
@@ -115,6 +115,27 @@
             'test,unicode_tag=\'Привет!\' unicode_val="Привет!"\n'
         )
 
+    def test_tag_value_newline(self):
+        """Test make lines with tag value contains newline."""
+        data = {
+            "tags": {
+                "t1": "line1\nline2"
+            },
+            "points": [
+                {
+                    "measurement": "test",
+                    "fields": {
+                        "val": "hello"
+                    }
+                }
+            ]
+        }
+
+        self.assertEqual(
+            line_protocol.make_lines(data),
+            'test,t1=line1\\nline2 val="hello"\n'
+        )
+
     def test_quote_ident(self):
         """Test quote indentation in TestLineProtocol object."""
         self.assertEqual(
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/influxdb-python-5.2.2/setup.py new/influxdb-python-5.2.3/setup.py
--- old/influxdb-python-5.2.2/setup.py  2019-03-14 15:19:55.000000000 +0100
+++ new/influxdb-python-5.2.3/setup.py  2019-08-26 19:25:51.000000000 +0200
@@ -42,7 +42,7 @@
     tests_require=test_requires,
     install_requires=requires,
     extras_require={'test': test_requires},
-    classifiers=(
+    classifiers=[
         'Development Status :: 3 - Alpha',
         'Intended Audience :: Developers',
         'License :: OSI Approved :: MIT License',
@@ -55,5 +55,5 @@
         'Programming Language :: Python :: 3.6',
         'Topic :: Software Development :: Libraries',
         'Topic :: Software Development :: Libraries :: Python Modules',
-    ),
+    ],
 )
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/influxdb-python-5.2.2/tox.ini new/influxdb-python-5.2.3/tox.ini
--- old/influxdb-python-5.2.2/tox.ini   2019-03-14 15:19:55.000000000 +0100
+++ new/influxdb-python-5.2.3/tox.ini   2019-08-26 19:25:51.000000000 +0200
@@ -1,21 +1,28 @@
 [tox]
-envlist = py27, py35, py36, pypy, pypy3, flake8, pep257, coverage, docs
+envlist = py27, py35, py36, py37, pypy, pypy3, flake8, pep257, coverage, docs
 
 [testenv]
 passenv = INFLUXDB_PYTHON_INFLUXD_PATH
 setenv = INFLUXDB_PYTHON_SKIP_SERVER_TESTS=False
 deps = -r{toxinidir}/requirements.txt
        -r{toxinidir}/test-requirements.txt
-       py27,py34,py35,py36: pandas==0.20.1
-       py27,py34,py35,py36: numpy==1.13.3
+       py27: pandas==0.21.1
+       py27: numpy==1.13.3
+       py35: pandas==0.22.0
+       py35: numpy==1.14.6
+       py36: pandas==0.23.4
+       py36: numpy==1.15.4
+       py37: pandas==0.24.2
+       py37: numpy==1.16.2
 # Only install pandas with non-pypy interpreters
+# Testing all combinations would be too expensive
 commands = nosetests -v --with-doctest {posargs}
 
 [testenv:flake8]
 deps =
   flake8
   pep8-naming
-commands = flake8 --ignore=W503,W504,W605,N802,F821 influxdb
+commands = flake8 influxdb
 
 [testenv:pep257]
 deps = pydocstyle
@@ -26,19 +33,22 @@
        -r{toxinidir}/test-requirements.txt
        pandas
        coverage
-       numpy==1.13.3
+       numpy
 commands = nosetests -v --with-coverage --cover-html --cover-package=influxdb
 
 [testenv:docs]
 deps = -r{toxinidir}/requirements.txt
-       pandas==0.20.1
-       numpy==1.13.3
-       Sphinx==1.5.5
+       pandas==0.24.2
+       numpy==1.16.2
+       Sphinx==1.8.5
        sphinx_rtd_theme
 commands = sphinx-build -b html docs/source docs/build
 
 [flake8]
-ignore = N802,F821,E402
-# E402: module level import not at top of file
+ignore = W503,W504,W605,N802,F821,E402
+# W503: Line break occurred before a binary operator
+# W504: Line break occurred after a binary operator
+# W605: invalid escape sequence
 # N802: nosetests's setUp function
 # F821: False positive in intluxdb/dataframe_client.py
+# E402: module level import not at top of file

