Hello community,

here is the log from the commit of package python-pifpaf for openSUSE:Factory 
checked in at 2017-03-24 02:08:43
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-pifpaf (Old)
 and      /work/SRC/openSUSE:Factory/.python-pifpaf.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-pifpaf"

Fri Mar 24 02:08:43 2017 rev:2 rq:456807 version:0.24.1

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-pifpaf/python-pifpaf.changes      
2016-09-01 14:03:19.000000000 +0200
+++ /work/SRC/openSUSE:Factory/.python-pifpaf.new/python-pifpaf.changes 
2017-03-24 02:08:44.425704174 +0100
@@ -1,0 +2,50 @@
+Mon Feb 13 08:57:19 UTC 2017 - [email protected]
+
+update to version 0.24.1
+  * doc: rabbitmq cluster is supported
+  * tests: allow to find zookeeper bins
+  * Make travis-ci happy again
+  * ceph: increase rados object name limit for ext4 fs
+  * gnocchi: start statsd daemon
+  * gnocchi: Set statd old vars
+  * tests: Fix a python3 bug
+  * rabbit: put password for all hosts in the url
+  * Set storage engine to mmapv1 when wiredtiger is not present
+  * correctly decode application stdout
+  * Revert "gnocchi: set auth_mode = noauth"
+  * Add MongoDB to README
+  * tests: Don't run them in //
+  * gnocchi: allow to run with an existing storage URL
+  * aodh: auth with basic against Gnocchi
+  * Revert "mysql: fix timezone"
+  * travis: fix mysql install
+  * travis: remove tarball from cache
+  * ceph: last jewel point release have lowered object max len
+  * Clean variables on exit
+  * gnocchi: fix statds waiting line for gnocchi <3.0
+  * Revert "Partial revert for debug output"
+  * mysql: fix timezone
+  * gnocchi: set auth_mode = noauth
+  * tests: fix env variable parsing
+  * rabbit: use process group
+  * Set a proper PS1 when invoked via eval
+  * Gnocchi >= 3.1 will not ship any config by default
+  * rabbitmq: fix waiting line
+  * gnocchi: Add support to s3 backend
+  * ceph: use shm and try to not really write data
+  * Update influxdb.py
+  * gnocchi: Reduce the metricd delays
+  * gnocchi: allow to run with an existing indexer URL
+  * tests: Capture logging and print it on failure
+  * ceph: don't use /dev/shm
+  * Bump hacking to 0.12
+  * Implements the Kafka driver
+  * Fix mysql gate
+  * Fix a typo in README: gobal -> global
+  * Add support for Python 3.6
+  * cli: fully segment all environ vars
+  * aodh: fix authentication disabling with newer Aodh
+  * tests: fix s3rver tests
+  * gnocchi: stop setting old statsd conf var
+
+-------------------------------------------------------------------

Old:
----
  pifpaf-0.12.0.tar.gz

New:
----
  pifpaf-0.24.1.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-pifpaf.spec ++++++
--- /var/tmp/diff_new_pack.5kiEMb/_old  2017-03-24 02:08:44.901636799 +0100
+++ /var/tmp/diff_new_pack.5kiEMb/_new  2017-03-24 02:08:44.905636233 +0100
@@ -1,7 +1,7 @@
 #
 # spec file for package python-pifpaf
 #
-# Copyright (c) 2016 SUSE LINUX GmbH, Nuernberg, Germany.
+# Copyright (c) 2017 SUSE LINUX GmbH, Nuernberg, Germany.
 #
 # All modifications and additions to the file contributed by third parties
 # remain the property of their copyright owners, unless otherwise agreed
@@ -17,7 +17,7 @@
 
 
 Name:           python-pifpaf
-Version:        0.12.0
+Version:        0.24.1
 Release:        0
 Summary:        Suite of tools and fixtures to manage daemons for testing
 License:        Apache-2.0

++++++ pifpaf-0.12.0.tar.gz -> pifpaf-0.24.1.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pifpaf-0.12.0/.testr.conf 
new/pifpaf-0.24.1/.testr.conf
--- old/pifpaf-0.12.0/.testr.conf       2016-03-21 14:39:34.000000000 +0100
+++ new/pifpaf-0.24.1/.testr.conf       2016-11-09 14:41:24.000000000 +0100
@@ -2,3 +2,4 @@
 test_command=${PYTHON:-python} -m subunit.run discover -t . pifpaf/tests 
$LISTOPT $IDOPTION
 test_id_option=--load-list $IDFILE
 test_list_option=--list
+group_regex=pifpaf\.tests\.test_drivers
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pifpaf-0.12.0/.travis.yml 
new/pifpaf-0.24.1/.travis.yml
--- old/pifpaf-0.12.0/.travis.yml       2016-08-09 14:29:37.000000000 +0200
+++ new/pifpaf-0.24.1/.travis.yml       2017-02-03 12:11:38.000000000 +0100
@@ -1,34 +1,29 @@
 language: python
-dist: trusty
+dist: xenial
 sudo: required
-cache: 
-  - apt
+cache:
   - pip
 python:
     - 2.7
     - 3.4
     - 3.5
-addons:
-  apt:
-    packages:
-    - mongodb-server
-    - mysql-server
-    - rabbitmq-server
-    - redis-server
-    - zookeeper
-    - mongodb
-    - couchdb
-    - couchdb-bin
-    - npm
+    - 3.6
 before_install:
-  - curl 'https://git.ceph.com/?p=ceph.git;a=blob_plain;f=keys/release.asc' | 
sudo apt-key add -
-  - echo deb http://download.ceph.com/debian-hammer/ $(lsb_release -sc) main | 
sudo tee /etc/apt/sources.list.d/ceph.list
-  - sudo apt-get update -yq
-  - sudo apt-get install -yq ceph
-  - sudo gem install fakes3
+  # Always redownload tarball
+  - find ~/.cache/pip -name '*.dev*' -delete
+  - sudo apt-get -qq update
+  - sudo apt-get purge -y mysql-server-5.6 mysql-server-core-5.6 
mysql-client-core-5.6 mysql-client-5.6
+  - sudo rm -rf /var/lib/mysql
+  - sudo apt-get install -y mongodb-server mysql-server-5.5 rabbitmq-server 
redis-server zookeeper mongodb couchdb couchdb-bin npm ceph librados-dev 
python-dev gcc
+  # - sudo gem install fakes3  # NOTE(sileht): fakes3 looks not installed 
correctly
   - sudo npm install s3rver -g
   - wget https://dl.influxdata.com/influxdb/releases/influxdb_0.13.0_amd64.deb
   - sudo dpkg -i influxdb_0.13.0_amd64.deb
+  # zkEnv.sh can't be overridden with the deb version of zookeeper; this 
works around that
+  - sudo chmod 777 /var/log/zookeeper
+  - wget http://apache.crihan.fr/dist/kafka/0.10.1.0/kafka_2.11-0.10.1.0.tgz 
-O /opt/kafka.tar.gz
+  - tar -xzf /opt/kafka.tar.gz -C /opt
+  - ln -s /opt/kafka_2.11-0.10.1.0 /opt/kafka
 install:
     # The install requirements in travis virtualenv that will be cached
   - pip install tox-travis .[test]
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pifpaf-0.12.0/AUTHORS new/pifpaf-0.24.1/AUTHORS
--- old/pifpaf-0.12.0/AUTHORS   2016-08-09 14:30:25.000000000 +0200
+++ new/pifpaf-0.24.1/AUTHORS   2017-02-03 13:52:27.000000000 +0100
@@ -1,6 +1,8 @@
+Elancheran Subramanian <[email protected]>
 Felix Yan <[email protected]>
 Joshua Harlow <[email protected]>
 Julien Danjou <[email protected]>
 Mehdi ABAAKOUK <[email protected]>
 Mehdi Abaakouk <[email protected]>
 Rémy HUBSCHER <[email protected]>
+Sam Morrison <[email protected]>
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pifpaf-0.12.0/ChangeLog new/pifpaf-0.24.1/ChangeLog
--- old/pifpaf-0.12.0/ChangeLog 2016-08-09 14:30:25.000000000 +0200
+++ new/pifpaf-0.24.1/ChangeLog 2017-02-03 13:52:27.000000000 +0100
@@ -1,6 +1,111 @@
 CHANGES
 =======
 
+0.24.1
+------
+
+* gnocchi: Set statd old vars
+
+0.24.0
+------
+
+* cli: fully segment all environ vars
+* gnocchi: Add support to s3 backend
+* tests: Fix a python3 bug
+* travis: remove tarball from cache
+* gnocchi: Reduce the metricd delays
+* tests: fix env variable parsing
+* Set a proper PS1 when invoked via eval
+* Clean variables on exit
+* gnocchi: stop setting old statsd conf var
+
+0.23.0
+------
+
+* aodh: auth with basic against Gnocchi
+* Revert "gnocchi: set auth_mode = noauth"
+
+0.22.0
+------
+
+* gnocchi: set auth_mode = noauth
+
+0.21.0
+------
+
+* Add support for Python 3.6
+* aodh: fix authentication disabling with newer Aodh
+
+0.20.0
+------
+
+* Revert "mysql: fix timezone"
+
+0.19.0
+------
+
+* mysql: fix timezone
+
+0.18.1
+------
+
+* Gnocchi >= 3.1 will not ship any config by default
+
+0.18.0
+------
+
+* rabbit: use process group
+* rabbit: put password for all hosts in the url
+* Fix mysql gate
+* travis: fix mysql install
+
+0.17.0
+------
+
+* Implements the Kafka driver
+
+0.16.0
+------
+
+* correctly decode application stdout
+
+0.15.1
+------
+
+* doc: rabbitmq cluster is supported
+* gnocchi: fix statds waiting line for gnocchi <3.0
+
+0.15.0
+------
+
+* tests: allow to find zookeeper bins
+* tests: fix s3rver tests
+* tests: Don't run them in //
+* ceph: don't use /dev/shm
+* tests: Capture logging and print it on failure
+* rabbitmq: fix waiting line
+* ceph: last jewel point release have lowered object max len
+* Revert "Partial revert for debug output"
+* Make travis-ci happy again
+
+0.14.0
+------
+
+* Bump hacking to 0.12
+* Update influxdb.py
+* gnocchi: start statsd daemon
+* ceph: use shm and try to not really write data
+
+0.13.0
+------
+
+* gnocchi: allow to run with an existing storage URL
+* gnocchi: allow to run with an existing indexer URL
+* ceph: increase rados object name limit for ext4 fs
+* Fix a typo in README: gobal -> global
+* Add MongoDB to README
+* Set storage engine to mmapv1 when wiredtiger is not present
+
 0.12.0
 ------
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pifpaf-0.12.0/PKG-INFO new/pifpaf-0.24.1/PKG-INFO
--- old/pifpaf-0.12.0/PKG-INFO  2016-08-09 14:30:30.000000000 +0200
+++ new/pifpaf-0.24.1/PKG-INFO  2017-02-03 13:52:29.000000000 +0100
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: pifpaf
-Version: 0.12.0
+Version: 0.24.1
 Summary: Suite of tools and fixtures to manage daemons for testing
 Home-page: https://github.com/jd/pifpaf
 Author: Julien Danjou
@@ -41,12 +41,13 @@
         * `Gnocchi`_
         * `Aodh`_
         * `Ceph`_
-        * `RabbitMQ`_
+        * `RabbitMQ`_ (with clustering)
         * `FakeS3`_
         * `Consul`_
         * `Keystone`_
         * `CouchDB`_
         * `S3rver`_
+        * `MongoDB`_
         
         .. _Consul: https://www.consul.io/
         .. _PostgreSQL: http://postgresql.org
@@ -65,6 +66,7 @@
         .. _Keystone: https://launchpad.net/keystone
         .. _CouchDB: http://couchdb.apache.org/
         .. _S3rver: https://www.npmjs.com/package/s3rver
+        .. _MongoDB: https://www.mongodb.com
         
         Usage
         =====
@@ -145,7 +147,7 @@
         
         The `PIFPAF_URLS` environment variable will contain the list of all 
URLs
         detected and set-up by Pifpaf. You can override this variable name 
with the
-        `--gobal-urls-variable` option.
+        `--global-urls-variable` option.
         
         How it works under the hood
         ===========================
@@ -171,4 +173,5 @@
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.4
 Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
 Classifier: Topic :: Software Development :: Testing
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pifpaf-0.12.0/README.rst new/pifpaf-0.24.1/README.rst
--- old/pifpaf-0.12.0/README.rst        2016-08-09 14:29:37.000000000 +0200
+++ new/pifpaf-0.24.1/README.rst        2016-11-10 10:26:12.000000000 +0100
@@ -33,12 +33,13 @@
 * `Gnocchi`_
 * `Aodh`_
 * `Ceph`_
-* `RabbitMQ`_
+* `RabbitMQ`_ (with clustering)
 * `FakeS3`_
 * `Consul`_
 * `Keystone`_
 * `CouchDB`_
 * `S3rver`_
+* `MongoDB`_
 
 .. _Consul: https://www.consul.io/
 .. _PostgreSQL: http://postgresql.org
@@ -57,6 +58,7 @@
 .. _Keystone: https://launchpad.net/keystone
 .. _CouchDB: http://couchdb.apache.org/
 .. _S3rver: https://www.npmjs.com/package/s3rver
+.. _MongoDB: https://www.mongodb.com
 
 Usage
 =====
@@ -137,7 +139,7 @@
 
 The `PIFPAF_URLS` environment variable will contain the list of all URLs
 detected and set-up by Pifpaf. You can override this variable name with the
-`--gobal-urls-variable` option.
+`--global-urls-variable` option.
 
 How it works under the hood
 ===========================
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pifpaf-0.12.0/pifpaf/__main__.py 
new/pifpaf-0.24.1/pifpaf/__main__.py
--- old/pifpaf-0.12.0/pifpaf/__main__.py        2016-08-09 14:04:59.000000000 
+0200
+++ new/pifpaf-0.24.1/pifpaf/__main__.py        2017-02-03 12:11:38.000000000 
+0100
@@ -148,24 +148,38 @@
                     signal.signal(signal.SIGPIPE, signal.SIG_IGN)
                     signal.pause()
                 else:
+                    url = driver.env['%s_URL' % driver.env_prefix]
+                    driver.env.update({
+                        "PIFPAF_PID": pid,
+                        self.app.options.env_prefix + "_PID": pid,
+                        self.app.options.env_prefix + "_DAEMON": daemon,
+                        (self.app.options.env_prefix + "_"
+                         + daemon.upper() + "_URL"): url,
+                        self.app.options.global_urls_variable:
+                        self.expand_urls_var(url),
+                        "%s_OLD_PS1" % self.app.options.env_prefix:
+                        os.getenv("PS1", ""),
+                        "PS1":
+                        "(pifpaf/" + daemon + ") " + os.getenv("PS1", ""),
+                    })
                     for k, v in six.iteritems(driver.env):
                         print("export %s=\"%s\";" % (k, v))
-                    print("export PIFPAF_PID=%d;" % pid)
-                    print("export %s_DAEMON=\"%s\";"
-                          % (self.app.options.env_prefix, daemon))
-                    url = driver.env['%s_URL' % driver.env_prefix]
-                    print("export %s_%s_URL=\"%s\";"
-                          % (self.app.options.env_prefix,
-                             daemon.upper(),
-                             url))
-                    print("export %s=\"%s\";"
-                          % (self.app.options.global_urls_variable,
-                             self.expand_urls_var(url)))
-                    print("pifpaf_stop () "
-                          "{ if test -z \"$PIFPAF_PID\"; then "
-                          "echo 'No PID found in $PIFPAF_PID'; return -1; fi; "
-                          "if kill $PIFPAF_PID; then "
-                          "unset PIFPAF_PID; unset -f pifpaf_stop; fi; }")
+                    print("%(prefix_lower)s_stop () { "
+                          "if test -z \"$%(prefix)s_PID\"; then "
+                          "echo 'No PID found in $%(prefix)s_PID'; return -1; "
+                          "fi; "
+                          "if kill $%(prefix)s_PID; then "
+                          "_PS1=$%(prefix)s_OLD_PS1; "
+                          "unset %(vars)s; "
+                          "PS1=$_PS1; unset _PS1; "
+                          "unset -f %(prefix_lower)s_stop; "
+                          "unalias pifpaf_stop 2>/dev/null || true; "
+                          "fi; } ; "
+                          "alias pifpaf_stop=%(prefix_lower)s_stop ; "
+                          % {"prefix": self.app.options.env_prefix,
+                             "prefix_lower":
+                             self.app.options.env_prefix.lower(),
+                             "vars": " ".join(driver.env)})
         run = take_action
 
     RunDaemon.__doc__ = "run %s" % daemon
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pifpaf-0.12.0/pifpaf/drivers/__init__.py 
new/pifpaf-0.24.1/pifpaf/drivers/__init__.py
--- old/pifpaf-0.12.0/pifpaf/drivers/__init__.py        2016-08-03 
09:11:30.000000000 +0200
+++ new/pifpaf-0.24.1/pifpaf/drivers/__init__.py        2017-01-13 
16:19:04.000000000 +0100
@@ -11,15 +11,26 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from distutils import spawn
 import logging
 import os
 import re
+import select
 import signal
 import subprocess
 import sys
 import threading
 
 import fixtures
+import six
+
+if six.PY3:
+    fsdecode = os.fsdecode
+else:
+    def fsdecode(s):
+        if isinstance(s, unicode):
+            return s
+        return s.decode(sys.getfilesystemencoding())
 
 
 LOG = logging.getLogger(__name__)
@@ -54,6 +65,14 @@
         self._kill(pid)
 
     @staticmethod
+    def find_executable(filename, extra_paths):
+        paths = extra_paths + os.getenv('PATH', os.defpath).split(os.pathsep)
+        for path in paths:
+            loc = spawn.find_executable(filename, path)
+            if loc is not None:
+                return loc
+
+    @staticmethod
     def find_config_file(filename):
         # NOTE(sileht): order matter, we first check into virtualenv
         # then global user installation, next system installation,
@@ -77,16 +96,21 @@
 
     @staticmethod
     def _log_output(appname, pid, data):
+        data = fsdecode(data)
         LOG.debug("%s[%d] output: %s", appname, pid, data.rstrip())
 
     def _exec(self, command, stdout=False, ignore_failure=False,
-              stdin=None, wait_for_line=None, path=[], env=None):
+              stdin=None, wait_for_line=None, path=[], env=None,
+              forbidden_line_after_start=None,
+              allow_debug=True, session=False):
         LOG.debug("executing: %s" % command)
 
         complete_env = {}
         app = command[0]
 
-        if stdout or wait_for_line:
+        debug = allow_debug and LOG.isEnabledFor(logging.DEBUG)
+
+        if stdout or wait_for_line or debug:
             stdout_fd = subprocess.PIPE
         else:
             # TODO(jd) Need to close at some point
@@ -112,7 +136,9 @@
                 stdin=stdin_fd,
                 stdout=stdout_fd,
                 stderr=subprocess.STDOUT,
-                env=complete_env or None)
+                env=complete_env or None,
+                preexec_fn=os.setsid if session else None
+            )
         except OSError as e:
             raise RuntimeError(
                 "Unable to run command `%s': %s" % (" ".join(command), e))
@@ -134,16 +160,39 @@
                             "Program did not print: `%s'\nOutput: %s"
                             % (wait_for_line, b"".join(lines)))
                     break
-                if wait_for_line and re.search(wait_for_line, line.decode()):
+                decoded_line = fsdecode(line)
+
+                if wait_for_line and re.search(wait_for_line,
+                                               decoded_line):
                     break
+            stdout_str = b"".join(lines)
+        else:
+            stdout_str = None
+
+        if (stdout or wait_for_line) and forbidden_line_after_start:
+            timeout, forbidden_output = forbidden_line_after_start
+            r, w, x = select.select([c.stdout.fileno()], [], [], timeout)
+            if r:
+                line = c.stdout.readline()
+                self._log_output(app, c.pid, line)
+                lines.append(line)
+                if c.poll() is not None:
+                    # Read the rest if the process is dead, this help debugging
+                    while line:
+                        line = c.stdout.readline()
+                        self._log_output(app, c.pid, line)
+                        lines.append(line)
+                if line and re.search(forbidden_output, fsdecode(line)):
+                    raise RuntimeError(
+                        "Program print a forbidden line: `%s'\nOutput: %s"
+                        % (forbidden_output, b"".join(lines)))
+
+        if stdout or wait_for_line or debug:
             # Continue to read
             t = threading.Thread(target=self._read_in_bg,
                                  args=(app, c.pid, c.stdout,))
             t.setDaemon(True)
             t.start()
-            stdout_str = b"".join(lines)
-        else:
-            stdout_str = None
 
         if not wait_for_line:
             status = c.wait()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pifpaf-0.12.0/pifpaf/drivers/aodh.py 
new/pifpaf-0.24.1/pifpaf/drivers/aodh.py
--- old/pifpaf-0.12.0/pifpaf/drivers/aodh.py    2016-07-19 11:51:43.000000000 
+0200
+++ new/pifpaf-0.24.1/pifpaf/drivers/aodh.py    2017-01-21 10:50:30.000000000 
+0100
@@ -12,8 +12,6 @@
 # limitations under the License.
 
 import os
-import shutil
-import uuid
 
 from pifpaf import drivers
 from pifpaf.drivers import gnocchi
@@ -70,17 +68,6 @@
     def _setUp(self):
         super(AodhDriver, self)._setUp()
 
-        with open(self.find_config_file("aodh/api_paste.ini"), "r") as src:
-            with open(os.path.join(self.tempdir, "api_paste.ini"), "w") as dst:
-                for line in src.readlines():
-                    if line.startswith("pipeline = "):
-                        dst.write("pipeline = request_id api-server")
-                    else:
-                        dst.write(line)
-
-        shutil.copy(self.find_config_file("aodh/policy.json"),
-                    self.tempdir)
-
         pg = self.useFixture(
             postgresql.PostgreSQLDriver(port=self.database_port))
 
@@ -94,18 +81,15 @@
 
         conffile = os.path.join(self.tempdir, "aodh.conf")
 
-        user = str(uuid.uuid4())
-        project = str(uuid.uuid4())
-
         with open(conffile, "w") as f:
             f.write("""[database]
 connection = %s
+[api]
+auth_mode=
 [service_credentials]
-auth_type = gnocchi-noauth
-user_id = %s
-project_id = %s
-roles = admin
-endpoint = %s""" % (pg.url, user, project, g.http_url))
+auth_type = gnocchi-basic
+user = admin
+endpoint = %s""" % (pg.url, g.http_url))
 
         self._exec(["aodh-dbsync", "--config-file=%s" % conffile])
 
@@ -120,8 +104,7 @@
         self.addCleanup(self._kill, c.pid)
 
         self.putenv("AODH_PORT", str(self.port))
-        self.putenv("AODH_GNOCCHI_USER_ID", user)
-        self.putenv("AODH_GNOCCHI_PROJECT_ID", project)
+        self.putenv("AODH_GNOCCHI_USER", "admin")
         self.putenv("URL", "aodh://localhost:%d" % self.port)
         url = "http://localhost:%d" % self.port
         self.putenv("AODH_HTTP_URL", url)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pifpaf-0.12.0/pifpaf/drivers/ceph.py 
new/pifpaf-0.24.1/pifpaf/drivers/ceph.py
--- old/pifpaf-0.12.0/pifpaf/drivers/ceph.py    2016-07-04 23:22:12.000000000 
+0200
+++ new/pifpaf-0.24.1/pifpaf/drivers/ceph.py    2016-11-09 14:41:24.000000000 
+0100
@@ -61,6 +61,12 @@
         os.makedirs(mondir)
         os.makedirs(osddir)
 
+        # FIXME(sileht): check availible space on /dev/shm
+        # if os.path.exists("/dev/shm") and os.access('/dev/shm', os.W_OK):
+        #     journal_path = "/dev/shm/$cluster-$id-journal"
+        # else:
+        journal_path = "%s/osd/$cluster-$id/journal" % self.tempdir
+
         with open(conffile, "w") as f:
             f.write("""[global]
 fsid = %(fsid)s
@@ -81,7 +87,7 @@
 admin socket = %(tempdir)s/$cluster-$name.asok
 mon data = %(tempdir)s/mon/$cluster-$id
 osd data = %(tempdir)s/osd/$cluster-$id
-osd journal = %(tempdir)s/osd/$cluster-$id/journal
+osd journal = %(journal_path)s
 log file = %(tempdir)s/$cluster-$name.log
 mon cluster log file = %(tempdir)s/$cluster.log
 
@@ -89,7 +95,11 @@
 filestore xattr use omap = True
 
 # workaround for ext4 and last Jewel version
-osd max object name len = 64
+osd max object name len = 256
+osd max object namespace len = 64
+osd op threads = 10
+filestore max sync interval = 10001
+filestore min sync interval = 10000
 
 # Don't fail until it's really full
 mon_osd_nearfull_ratio = 1
@@ -108,7 +118,7 @@
 [mon.a]
 host = localhost
 mon addr = 127.0.0.1:%(port)d
-""" % dict(fsid=fsid, tempdir=self.tempdir, port=self.port))
+""" % dict(fsid=fsid, tempdir=self.tempdir, port=self.port, 
journal_path=journal_path))  # noqa
 
         ceph_opts = ["ceph", "-c", conffile]
         mon_opts = ["ceph-mon", "-c", conffile, "--id", "a", "-d"]
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pifpaf-0.12.0/pifpaf/drivers/gnocchi.py 
new/pifpaf-0.24.1/pifpaf/drivers/gnocchi.py
--- old/pifpaf-0.12.0/pifpaf/drivers/gnocchi.py 2016-08-03 09:11:30.000000000 
+0200
+++ new/pifpaf-0.24.1/pifpaf/drivers/gnocchi.py 2017-02-03 13:52:03.000000000 
+0100
@@ -13,6 +13,9 @@
 
 import os
 import shutil
+import uuid
+
+import six.moves.urllib.parse as urlparse
 
 from pifpaf import drivers
 from pifpaf.drivers import postgresql
@@ -24,11 +27,17 @@
     DEFAULT_PORT_INDEXER = 9541
 
     def __init__(self, port=DEFAULT_PORT, indexer_port=DEFAULT_PORT_INDEXER,
+                 statsd_port=None,
                  create_legacy_resource_types=False,
+                 indexer_url=None,
+                 storage_url=None,
                  **kwargs):
         super(GnocchiDriver, self).__init__(**kwargs)
         self.port = port
         self.indexer_port = indexer_port
+        self.indexer_url = indexer_url
+        self.storage_url = storage_url
+        self.statsd_port = statsd_port
         self.create_legacy_resource_types = create_legacy_resource_types
 
     @classmethod
@@ -36,7 +45,10 @@
         parser.add_argument("--port",
                             type=int,
                             default=cls.DEFAULT_PORT,
-                            help="port to use for Gnocchi")
+                            help="port to use for Gnocchi HTTP API")
+        parser.add_argument("--statsd-port",
+                            type=int,
+                            help="port to use for gnocchi-statsd")
         parser.add_argument("--indexer-port",
                             type=int,
                             default=cls.DEFAULT_PORT_INDEXER,
@@ -45,27 +57,85 @@
                             action='store_true',
                             default=False,
                             help="create legacy Ceilometer resource types")
+        parser.add_argument("--indexer-url", help="indexer URL to use")
+        parser.add_argument("--storage-url", help="storage URL to use")
         return parser
 
     def _setUp(self):
         super(GnocchiDriver, self)._setUp()
 
-        shutil.copy(self.find_config_file("gnocchi/api-paste.ini"),
-                    self.tempdir)
-        shutil.copy(self.find_config_file("gnocchi/policy.json"),
-                    self.tempdir)
+        try:
+            shutil.copy(self.find_config_file("gnocchi/api-paste.ini"),
+                        self.tempdir)
+        except RuntimeError:
+            pass
+        try:
+            shutil.copy(self.find_config_file("gnocchi/policy.json"),
+                        self.tempdir)
+        except RuntimeError:
+            pass
+
+        if self.indexer_url is None:
+            pg = self.useFixture(
+                postgresql.PostgreSQLDriver(port=self.indexer_port))
+            self.indexer_url = pg.url
 
-        pg = self.useFixture(
-            postgresql.PostgreSQLDriver(port=self.indexer_port))
+        if self.storage_url is None:
+            self.storage_url = "file://%s" % self.tempdir
 
         conffile = os.path.join(self.tempdir, "gnocchi.conf")
 
+        storage_parsed = urlparse.urlparse(self.storage_url)
+        storage_driver = storage_parsed.scheme
+
+        if storage_driver == "s3":
+            storage_config = {
+                "s3_access_key_id": (storage_parsed.username
+                                     or "gnocchi"),
+                "s3_secret_access_key": (storage_parsed.password
+                                         or "whatever"),
+                "s3_endpoint_url": "http://%s:%s/%s" % (
+                    storage_parsed.hostname,
+                    storage_parsed.port,
+                    storage_parsed.path,
+                )
+            }
+        elif storage_driver == "ceph":
+            storage_config = {
+                "ceph_conffile": storage_parsed.path,
+            }
+        elif storage_driver == "file":
+            storage_config = {
+                "file_basepath": (storage_parsed.path
+                                  or self.tempdir),
+            }
+        else:
+            raise RuntimeError("Storage driver %s is not supported" %
+                               storage_driver)
+
+        storage_config_string = "\n".join(
+            "%s = %s" % (k, v)
+            for k, v in storage_config.items()
+        )
+        statsd_resource_id = str(uuid.uuid4())
+
         with open(conffile, "w") as f:
             f.write("""[storage]
-file_basepath = %s
-driver = file
+driver = %s
+%s
+[metricd]
+metric_processing_delay = 1
+metric_cleanup_delay = 1
+[statsd]
+resource_id = %s
+creator = admin
+user_id = admin
+project_id = admin
 [indexer]
-url = %s""" % (self.tempdir, pg.url))
+url = %s""" % (storage_driver,
+               storage_config_string,
+               statsd_resource_id,
+               self.indexer_url))
 
         gnocchi_upgrade = ["gnocchi-upgrade", "--config-file=%s" % conffile]
         if self.create_legacy_resource_types:
@@ -76,6 +146,11 @@
                           wait_for_line="metrics wait to be processed")
         self.addCleanup(self._kill, c.pid)
 
+        c, _ = self._exec(["gnocchi-statsd", "--config-file=%s" % conffile],
+                          wait_for_line=("(Resource .* already exists"
+                                         "|Created resource )"))
+        self.addCleanup(self._kill, c.pid)
+
         c, _ = self._exec(
             ["gnocchi-api", "--port", str(self.port),
              "--", "--config-file=%s" % conffile],
@@ -89,5 +164,6 @@
         self.putenv("GNOCCHI_HTTP_URL", self.http_url)
         self.putenv("GNOCCHI_ENDPOINT", self.http_url, True)
         self.putenv("OS_AUTH_TYPE", "gnocchi-noauth", True)
+        self.putenv("GNOCCHI_STATSD_RESOURCE_ID", statsd_resource_id, True)
         self.putenv("GNOCCHI_USER_ID", "admin", True)
         self.putenv("GNOCCHI_PROJECT_ID", "admin", True)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pifpaf-0.12.0/pifpaf/drivers/influxdb.py 
new/pifpaf-0.24.1/pifpaf/drivers/influxdb.py
--- old/pifpaf-0.12.0/pifpaf/drivers/influxdb.py        2016-07-04 
23:22:16.000000000 +0200
+++ new/pifpaf-0.24.1/pifpaf/drivers/influxdb.py        2016-11-07 
23:25:33.000000000 +0100
@@ -51,6 +51,7 @@
             cfg.write("""[meta]
    dir = "%(tempdir)s/meta"
    bind-address = ":51233"
+   http-bind-address = ":51232"
 [admin]
   enabled = false
 [data]
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pifpaf-0.12.0/pifpaf/drivers/kafka.py 
new/pifpaf-0.24.1/pifpaf/drivers/kafka.py
--- old/pifpaf-0.12.0/pifpaf/drivers/kafka.py   1970-01-01 01:00:00.000000000 
+0100
+++ new/pifpaf-0.24.1/pifpaf/drivers/kafka.py   2016-12-06 16:20:04.000000000 
+0100
@@ -0,0 +1,97 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+from pifpaf import drivers
+
+
+class KafkaDriver(drivers.Driver):
+    DEFAULT_KAFKA_PORT = 9092
+    DEFAULT_ZOOKEEPER_PORT = 2181
+    DEFAULT_PATH = ["/opt/kafka/bin",
+                    "/usr/local/opt/kafka/bin"]
+
+    def __init__(self, port=DEFAULT_KAFKA_PORT,
+                 zookeeper_port=DEFAULT_ZOOKEEPER_PORT,
+                 **kwargs):
+        super(KafkaDriver, self).__init__(**kwargs)
+        self.port = port
+        self.zookeeper_port = zookeeper_port
+
+    def _setUp(self):
+        super(KafkaDriver, self)._setUp()
+
+        suffix = ".sh"
+        if self.find_executable("zookeeper-server-start", self.DEFAULT_PATH):
+            suffix = ""
+
+        # This is used explicitly by kafka AND implicitly by zookeeper
+        logdir = os.path.join(self.tempdir, "log")
+        os.makedirs(logdir)
+
+        zookeeper_conf = os.path.join(self.tempdir, "zookeeper.properties")
+        kafka_conf = os.path.join(self.tempdir, "kafka.properties")
+
+        with open(zookeeper_conf, "w") as f:
+            f.write("""
+dataDir=%s
+clientPort=%s
+maxClientCnxns=0
+""" % (self.tempdir, self.zookeeper_port))
+
+        with open(kafka_conf, "w") as f:
+            f.write("""
+port=%d
+broker.id=0
+host.name=127.0.0.1
+advertised.host.name=127.0.0.1
+listeners=PLAINTEXT://localhost:%d
+num.network.threads=3
+num.io.threads=8
+socket.send.buffer.bytes=102400
+socket.receive.buffer.bytes=102400
+socket.request.max.bytes=104857600
+log.dirs=%s
+num.partitions=1
+num.recovery.threads.per.data.dir=1
+log.retention.hours=168
+log.segment.bytes=1073741824
+log.retention.check.interval.ms=300000
+zookeeper.connect=localhost:%d
+zookeeper.connection.timeout.ms=6000
+""" % (self.port, self.port, logdir, self.zookeeper_port))
+
+        # NOTE(sileht): The wait_for_line is the best we can do
+        # but errors can occur after the last line we see when it works...
+        env = {"LOG_DIR": logdir}
+        self._exec(['zookeeper-server-start%s' % suffix, zookeeper_conf],
+                   wait_for_line='binding to port .*:%s' % self.zookeeper_port,
+                   path=self.DEFAULT_PATH, env=env,
+                   forbidden_line_after_start=(2, "Unexpected exception"))
+        # We ignore failure because stop script kill all zookeeper pids
+        # (even the system one)
+        self.addCleanup(self._exec, ['zookeeper-server-stop%s' % suffix],
+                        path=self.DEFAULT_PATH, env=env, ignore_failure=True)
+
+        self._exec(['kafka-server-start%s' % suffix, kafka_conf],
+                   wait_for_line='Kafka Server 0.*started',
+                   path=self.DEFAULT_PATH, env=env,
+                   forbidden_line_after_start=(2,
+                                               "kafka.common.KafkaException"))
+        self.addCleanup(self._exec, ['kafka-server-stop%s' % suffix],
+                        path=self.DEFAULT_PATH, env=env, ignore_failure=True)
+
+        self.putenv("KAFKA_PORT", str(self.port))
+        self.putenv("KAFKA_PROTOCOL", "PLAINTEXT")
+        self.putenv("KAFKA_URL", "PLAINTEXT://localhost:%s" % self.port)
+        self.putenv("URL", "kafka://localhost:%s" % self.port)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pifpaf-0.12.0/pifpaf/drivers/mongodb.py 
new/pifpaf-0.24.1/pifpaf/drivers/mongodb.py
--- old/pifpaf-0.12.0/pifpaf/drivers/mongodb.py 2016-07-04 23:22:12.000000000 
+0200
+++ new/pifpaf-0.24.1/pifpaf/drivers/mongodb.py 2016-09-12 14:25:41.000000000 
+0200
@@ -33,6 +33,16 @@
     def _setUp(self):
         super(MongoDBDriver, self)._setUp()
 
+        c, output = self._exec(["mongod", "--help"], stdout=True)
+
+        # We need to specify the storage engine if --storageEngine is present \
+        # but WiredTiger isn't.
+        if b"WiredTiger options:" not in output and \
+           b"--storageEngine" in output:
+            storage_engine = ["--storageEngine", "mmapv1"]
+        else:
+            storage_engine = []
+
         c, _ = self._exec(
             ["mongod",
              "--nojournal",
@@ -43,7 +53,7 @@
              "--port", str(self.port),
              "--dbpath", self.tempdir,
              "--bind_ip", "localhost",
-             "--config", "/dev/null"],
+             "--config", "/dev/null"] + storage_engine,
             wait_for_line="waiting for connections on port %d" % self.port)
 
         self.addCleanup(self._kill, c.pid)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pifpaf-0.12.0/pifpaf/drivers/postgresql.py 
new/pifpaf-0.24.1/pifpaf/drivers/postgresql.py
--- old/pifpaf-0.12.0/pifpaf/drivers/postgresql.py      2016-05-20 
16:32:59.000000000 +0200
+++ new/pifpaf-0.24.1/pifpaf/drivers/postgresql.py      2016-11-09 
14:41:24.000000000 +0100
@@ -51,7 +51,7 @@
         self._exec([self.pgctl, "-w", "-o",
                     "-k %s -p %d -h \"%s\""
                     % (self.tempdir, self.port, self.host),
-                    "start"])
+                    "start"], allow_debug=False)
         self.addCleanup(self._exec, [self.pgctl, "-w", "stop"])
         self.url = "postgresql://localhost/postgres?host=%s&port=%d" % (
             self.tempdir, self.port)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pifpaf-0.12.0/pifpaf/drivers/rabbitmq.py 
new/pifpaf-0.24.1/pifpaf/drivers/rabbitmq.py
--- old/pifpaf-0.12.0/pifpaf/drivers/rabbitmq.py        2016-07-04 
23:22:12.000000000 +0200
+++ new/pifpaf-0.24.1/pifpaf/drivers/rabbitmq.py        2016-12-17 
16:21:36.000000000 +0100
@@ -77,7 +77,8 @@
             complete_env.update(self.env)
             c, _ = self._exec(["rabbitmq-server"], env=complete_env,
                               path=self._path,
-                              wait_for_line="Starting broker\.\.\. completed")
+                              wait_for_line=("completed with .* plugins"),
+                              session=True)
             self.addCleanup(self.kill_node, nodename, ignore_not_exists=True)
             self._process[nodename] = c
         return port
@@ -96,7 +97,7 @@
 
         c = self._process.pop(nodename)
         try:
-            self._kill(c.pid, signal=signal)
+            os.killpg(c.pid, signal)
             os.waitpid(c.pid, 0)
         except OSError:
             pass
@@ -147,11 +148,15 @@
             self.putenv("RABBITMQ_NODENAME1", n1)
             self.putenv("RABBITMQ_NODENAME2", n2)
             self.putenv("RABBITMQ_NODENAME3", n3)
-            self.putenv(
-                "URL",
-                "rabbit://%s:%s@localhost:%d,localhost:%d,localhost:%d//" % (
-                    self.username, self.password,
-                    self.port, self.port + 1, self.port + 2))
+            self.putenv("URL", "rabbit://"
+                        "%(username)s:%(password)s@localhost:%(port1)d,"
+                        "%(username)s:%(password)s@localhost:%(port2)d,"
+                        "%(username)s:%(password)s@localhost:%(port3)d//" % {
+                            'username': self.username,
+                            'password': self.password,
+                            'port1': self.port,
+                            'port2': self.port + 1,
+                            'port3': self.port + 2})
         else:
             self.putenv("RABBITMQ_NODENAME", n1)
             self.putenv("URL", "rabbit://%s:%s@localhost:%d//" % (
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pifpaf-0.12.0/pifpaf/drivers/zookeeper.py 
new/pifpaf-0.24.1/pifpaf/drivers/zookeeper.py
--- old/pifpaf-0.12.0/pifpaf/drivers/zookeeper.py       2016-07-04 
23:22:12.000000000 +0200
+++ new/pifpaf-0.24.1/pifpaf/drivers/zookeeper.py       2016-11-09 
14:41:24.000000000 +0100
@@ -20,6 +20,9 @@
 
     DEFAULT_PORT = 2181
 
+    PATH = ["/usr/share/zookeeper/bin",
+            "/usr/local/opt/zookeeper/libexec/bin"]
+
     def __init__(self, port=DEFAULT_PORT,
                  **kwargs):
         super(ZooKeeperDriver, self).__init__(**kwargs)
@@ -48,17 +51,14 @@
         self.putenv("ZOOCFG", cfgfile, True)
         self.putenv("ZOO_LOG_DIR", logdir, True)
 
-        path = ["/usr/share/zookeeper/bin",
-                "/usr/local/opt/zookeeper/libexec/bin"]
-
         c, _ = self._exec(
             ["zkServer.sh", "start", cfgfile],
             wait_for_line="STARTED",
-            path=path)
+            path=self.PATH)
 
         self.addCleanup(self._exec,
                         ["zkServer.sh", "stop", cfgfile],
-                        path=path)
+                        path=self.PATH)
 
         self.putenv("ZOOKEEPER_PORT", str(self.port))
         self.putenv("URL", "zookeeper://localhost:%d" % self.port)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pifpaf-0.12.0/pifpaf/tests/test_cli.py 
new/pifpaf-0.24.1/pifpaf/tests/test_cli.py
--- old/pifpaf-0.12.0/pifpaf/tests/test_cli.py  2016-07-04 23:23:19.000000000 
+0200
+++ new/pifpaf-0.24.1/pifpaf/tests/test_cli.py  2017-02-03 12:11:38.000000000 
+0100
@@ -27,21 +27,28 @@
         self.assertEqual(0, os.system(
             "pifpaf run memcached --port 11216 echo >/dev/null 2>&1"))
 
+    @staticmethod
+    def _read_stdout_and_kill(stdout):
+        env = {}
+        for line in stdout.split(b'\n'):
+            k, _, v = line.partition(b"=")
+            env[k] = v
+        os.kill(int(env[b"export PIFPAF_PID"].strip(b"\" \n;")),
+                signal.SIGTERM)
+        return env
+
     @testtools.skipUnless(spawn.find_executable("memcached"),
                           "memcached not found")
     def test_eval(self):
         c = subprocess.Popen(["pifpaf", "run", "memcached", "--port", "11219"],
                              stdout=subprocess.PIPE)
+        (stdout, stderr) = c.communicate()
         self.assertEqual(0, c.wait())
-        env = {}
-        for line in c.stdout.readlines():
-            k, _, v = line.partition(b"=")
-            env[k] = v
-        os.kill(int(env[b"export PIFPAF_PID"].strip()[:-1]), signal.SIGTERM)
+        env = self._read_stdout_and_kill(stdout)
 
-        self.assertEqual(b"\"memcached://localhost:11219\";\n",
+        self.assertEqual(b"\"memcached://localhost:11219\";",
                          env[b"export PIFPAF_URL"])
-        self.assertEqual(b"\"memcached://localhost:11219\";\n",
+        self.assertEqual(b"\"memcached://localhost:11219\";",
                          env[b"export PIFPAF_MEMCACHED_URL"])
 
     @testtools.skipUnless(spawn.find_executable("memcached"),
@@ -54,16 +61,14 @@
                              stdout=subprocess.PIPE)
         (stdout, stderr) = c.communicate()
         self.assertEqual(0, c.wait())
-        env = {}
-        for line in stdout.split(b"\n"):
-            k, _, v = line.partition(b"=")
-            env[k] = v
-        os.kill(int(env[b"export PIFPAF_PID"].strip()[:-1]), signal.SIGTERM)
+        env = self._read_stdout_and_kill(stdout)
 
         self.assertEqual(b"\"memcached://localhost:11215\";",
                          env[b"export FOOBAR_URL"])
         self.assertEqual(b"\"memcached://localhost:11215\";",
                          env[b"export FOOBAR_MEMCACHED_URL"])
+        self.assertEqual(env[b"export PIFPAF_PID"],
+                         env[b"export FOOBAR_PID"])
 
     @testtools.skipUnless(spawn.find_executable("memcached"),
                           "memcached not found")
@@ -78,11 +83,7 @@
                              stdout=subprocess.PIPE)
         (stdout, stderr) = c.communicate()
         self.assertEqual(0, c.wait())
-        env = {}
-        for line in stdout.split(b"\n"):
-            k, _, v = line.partition(b"=")
-            env[k] = v
-        os.kill(int(env[b"export PIFPAF_PID"].strip()[:-1]), signal.SIGTERM)
+        env = self._read_stdout_and_kill(stdout)
 
         self.assertEqual(b"\"memcached://localhost:11218\";",
                          env[b"export PIFPAF_URL"])
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pifpaf-0.12.0/pifpaf/tests/test_drivers.py 
new/pifpaf-0.24.1/pifpaf/tests/test_drivers.py
--- old/pifpaf-0.12.0/pifpaf/tests/test_drivers.py      2016-08-09 
14:29:37.000000000 +0200
+++ new/pifpaf-0.24.1/pifpaf/tests/test_drivers.py      2017-02-03 
12:11:38.000000000 +0100
@@ -31,6 +31,7 @@
 from pifpaf.drivers import fakes3
 from pifpaf.drivers import gnocchi
 from pifpaf.drivers import influxdb
+from pifpaf.drivers import kafka
 from pifpaf.drivers import keystone
 from pifpaf.drivers import memcached
 from pifpaf.drivers import mongodb
@@ -38,6 +39,7 @@
 from pifpaf.drivers import postgresql
 from pifpaf.drivers import rabbitmq
 from pifpaf.drivers import redis
+from pifpaf.drivers import s3rver
 from pifpaf.drivers import zookeeper
 
 
@@ -48,15 +50,20 @@
     "/opt/influxdb",
     "/usr/share/elasticsearch/bin",
     "/usr/local/sbin",
+    "/opt/kafka/bin",
 ))
 
 
 class TestDrivers(testtools.TestCase):
     def setUp(self):
         super(TestDrivers, self).setUp()
-        if os.getenv('PIFPAF_DEBUG'):
-            logging.basicConfig(format="%(levelname)8s [%(name)s] %(message)s",
-                                level=logging.DEBUG)
+        self.logger = self.useFixture(
+            fixtures.FakeLogger(
+                format="%(levelname)8s [%(name)s] %(message)s",
+                level=logging.DEBUG,
+                nuke_handlers=True,
+            )
+        )
 
     def _run(self, cmd):
         self.assertEqual(0, os.system(cmd + " >/dev/null 2>&1"))
@@ -145,7 +152,7 @@
                           "s3rver not found")
     def test_s3rver(self):
         port = 4569
-        self.useFixture(fakes3.FakeS3Driver(port=port))
+        self.useFixture(s3rver.S3rverDriver(port=port))
         self.assertEqual("s3://localhost:%d" % port,
                          os.getenv("PIFPAF_URL"))
         self.assertEqual("http://localhost:%d" % port,
@@ -209,8 +216,9 @@
         self.assertEqual("6380", os.getenv("PIFPAF_REDIS_SENTINEL_PORT"))
         self._run("redis-cli -p %d sentinel master pifpaf" % f.sentinel_port)
 
-    @testtools.skipUnless(spawn.find_executable("zkServer"),
-                          "ZooKeeper not found")
+    @testtools.skipUnless(spawn.find_executable(
+        "zkServer.sh", path=":".join(zookeeper.ZooKeeperDriver.PATH)),
+        "ZooKeeper not found")
     def test_zookeeper(self):
         port = 2182
         f = self.useFixture(zookeeper.ZooKeeperDriver(port=port))
@@ -234,6 +242,63 @@
 
     @testtools.skipUnless(spawn.find_executable("gnocchi-api"),
                           "Gnocchi not found")
+    def test_gnocchi_with_existing_indexer(self):
+        port = gnocchi.GnocchiDriver.DEFAULT_PORT + 10
+        pg = self.useFixture(postgresql.PostgreSQLDriver(port=9833))
+        self.useFixture(gnocchi.GnocchiDriver(
+            indexer_url=pg.url, port=port))
+        self.assertEqual("gnocchi://localhost:%d" % port,
+                         os.getenv("PIFPAF_URL"))
+        r = requests.get("http://localhost:%d/" % port)
+        self.assertEqual(200, r.status_code)
+
+    @testtools.skipUnless(spawn.find_executable("gnocchi-api"),
+                          "Gnocchi not found")
+    @testtools.skipUnless(spawn.find_executable("s3rver"),
+                          "s3rver not found")
+    def test_gnocchi_with_existing_s3rver(self):
+        s3 = self.useFixture(s3rver.S3rverDriver(port=4569))
+        port = gnocchi.GnocchiDriver.DEFAULT_PORT + 12
+        self.useFixture(gnocchi.GnocchiDriver(
+            storage_url="s3://gnocchi:pass@localhost:%d" % s3.port,
+            port=port))
+        self.assertEqual("gnocchi://localhost:%d" % port,
+                         os.getenv("PIFPAF_URL"))
+        r = requests.get("http://localhost:%d/" % port)
+        self.assertEqual(200, r.status_code)
+
+    @testtools.skipUnless(spawn.find_executable("gnocchi-api"),
+                          "Gnocchi not found")
+    @testtools.skipUnless(spawn.find_executable("ceph-mon"),
+                          "Ceph Monitor not found")
+    @testtools.skipUnless(spawn.find_executable("ceph-osd"),
+                          "Ceph OSD not found")
+    @testtools.skipUnless(spawn.find_executable("ceph"),
+                          "Ceph client not found")
+    def test_gnocchi_with_existing_ceph(self):
+        port = gnocchi.GnocchiDriver.DEFAULT_PORT + 10
+        tempdir = self.useFixture(fixtures.TempDir()).path
+
+        ceph_driver = ceph.CephDriver()
+        try:
+            ceph_driver._ensure_xattr_support(tempdir)
+        except RuntimeError as e:
+            self.skipTest(str(e))
+        self.useFixture(ceph_driver)
+
+        ceph_driver._exec(["rados", "-c", os.getenv("CEPH_CONF"), "mkpool",
+                           "gnocchi"]),
+
+        self.useFixture(gnocchi.GnocchiDriver(
+            storage_url="ceph://%s" % os.getenv("CEPH_CONF"),
+            port=port))
+        self.assertEqual("gnocchi://localhost:%d" % port,
+                         os.getenv("PIFPAF_URL"))
+        r = requests.get("http://localhost:%d/" % port)
+        self.assertEqual(200, r.status_code)
+
+    @testtools.skipUnless(spawn.find_executable("gnocchi-api"),
+                          "Gnocchi not found")
     def test_gnocchi_legacy(self):
         port = gnocchi.GnocchiDriver.DEFAULT_PORT + 10
         self.useFixture(gnocchi.GnocchiDriver(
@@ -321,8 +386,14 @@
     def test_rabbitmq_cluster(self):
         a = self.useFixture(rabbitmq.RabbitMQDriver(cluster=True, port=12345))
         self.assertEqual(
-            "rabbit://%s:%s@localhost:%d,localhost:%d,localhost:%d//" % (
-                a.username, a.password, a.port, a.port + 1, a.port + 2),
+            "rabbit://%(user)s:%(pass)s@localhost:%(port1)d,"
+            "%(user)s:%(pass)s@localhost:%(port2)d,"
+            "%(user)s:%(pass)s@localhost:%(port3)d//" % {
+                "user": a.username,
+                "pass": a.password,
+                "port1": a.port,
+                "port2": a.port + 1,
+                "port3": a.port + 2},
             os.getenv("PIFPAF_URL"))
         self.assertEqual(a.nodename + "-1@localhost",
                          os.getenv("PIFPAF_RABBITMQ_NODENAME"))
@@ -337,6 +408,7 @@
         a.kill_node(a.nodename + "-2@localhost")
         a.stop_node(a.nodename + "-3@localhost")
         a.start_node(a.nodename + "-3@localhost")
+        a.start_node(a.nodename + "-2@localhost")
 
     @testtools.skipUnless(spawn.find_executable("couchdb"),
                           "CouchDB not found")
@@ -347,3 +419,16 @@
                          os.getenv("PIFPAF_URL"))
         r = requests.get("http://localhost:%d/" % port)
         self.assertEqual(r.json()["couchdb"], "Welcome")
+
+    @testtools.skipUnless(spawn.find_executable("kafka-server-start.sh"),
+                          "Kafka not found")
+    def test_kafka(self):
+        a = self.useFixture(kafka.KafkaDriver(port=54321,
+                                              zookeeper_port=12345))
+        self.assertEqual("kafka://localhost:54321",
+                         os.getenv("PIFPAF_URL"))
+        self.assertEqual(12345, a.zookeeper_port)
+        self.assertEqual("54321", os.getenv("PIFPAF_KAFKA_PORT"))
+        self.assertEqual("PLAINTEXT", os.getenv("PIFPAF_KAFKA_PROTOCOL"))
+        self.assertEqual("PLAINTEXT://localhost:54321",
+                         os.getenv("PIFPAF_KAFKA_URL"))
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pifpaf-0.12.0/pifpaf.egg-info/PKG-INFO 
new/pifpaf-0.24.1/pifpaf.egg-info/PKG-INFO
--- old/pifpaf-0.12.0/pifpaf.egg-info/PKG-INFO  2016-08-09 14:30:25.000000000 
+0200
+++ new/pifpaf-0.24.1/pifpaf.egg-info/PKG-INFO  2017-02-03 13:52:27.000000000 
+0100
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: pifpaf
-Version: 0.12.0
+Version: 0.24.1
 Summary: Suite of tools and fixtures to manage daemons for testing
 Home-page: https://github.com/jd/pifpaf
 Author: Julien Danjou
@@ -41,12 +41,13 @@
         * `Gnocchi`_
         * `Aodh`_
         * `Ceph`_
-        * `RabbitMQ`_
+        * `RabbitMQ`_ (with clustering)
         * `FakeS3`_
         * `Consul`_
         * `Keystone`_
         * `CouchDB`_
         * `S3rver`_
+        * `MongoDB`_
         
         .. _Consul: https://www.consul.io/
         .. _PostgreSQL: http://postgresql.org
@@ -65,6 +66,7 @@
         .. _Keystone: https://launchpad.net/keystone
         .. _CouchDB: http://couchdb.apache.org/
         .. _S3rver: https://www.npmjs.com/package/s3rver
+        .. _MongoDB: https://www.mongodb.com
         
         Usage
         =====
@@ -145,7 +147,7 @@
         
         The `PIFPAF_URLS` environment variable will contain the list of all 
URLs
         detected and set-up by Pifpaf. You can override this variable name 
with the
-        `--gobal-urls-variable` option.
+        `--global-urls-variable` option.
         
         How it works under the hood
         ===========================
@@ -171,4 +173,5 @@
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.4
 Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
 Classifier: Topic :: Software Development :: Testing
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pifpaf-0.12.0/pifpaf.egg-info/SOURCES.txt 
new/pifpaf-0.24.1/pifpaf.egg-info/SOURCES.txt
--- old/pifpaf-0.12.0/pifpaf.egg-info/SOURCES.txt       2016-08-09 
14:30:30.000000000 +0200
+++ new/pifpaf-0.24.1/pifpaf.egg-info/SOURCES.txt       2017-02-03 
13:52:29.000000000 +0100
@@ -28,6 +28,7 @@
 pifpaf/drivers/fakes3.py
 pifpaf/drivers/gnocchi.py
 pifpaf/drivers/influxdb.py
+pifpaf/drivers/kafka.py
 pifpaf/drivers/keystone.py
 pifpaf/drivers/memcached.py
 pifpaf/drivers/mongodb.py
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pifpaf-0.12.0/pifpaf.egg-info/entry_points.txt 
new/pifpaf-0.24.1/pifpaf.egg-info/entry_points.txt
--- old/pifpaf-0.12.0/pifpaf.egg-info/entry_points.txt  2016-08-09 
14:30:25.000000000 +0200
+++ new/pifpaf-0.24.1/pifpaf.egg-info/entry_points.txt  2017-02-03 
13:52:27.000000000 +0100
@@ -11,6 +11,7 @@
 fakes3 = pifpaf.drivers.fakes3:FakeS3Driver
 gnocchi = pifpaf.drivers.gnocchi:GnocchiDriver
 influxdb = pifpaf.drivers.influxdb:InfluxDBDriver
+kafka = pifpaf.drivers.kafka:KafkaDriver
 keystone = pifpaf.drivers.keystone:KeystoneDriver
 memcached = pifpaf.drivers.memcached:MemcachedDriver
 mongodb = pifpaf.drivers.mongodb:MongoDBDriver
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pifpaf-0.12.0/pifpaf.egg-info/pbr.json 
new/pifpaf-0.24.1/pifpaf.egg-info/pbr.json
--- old/pifpaf-0.12.0/pifpaf.egg-info/pbr.json  2016-08-09 14:30:25.000000000 
+0200
+++ new/pifpaf-0.24.1/pifpaf.egg-info/pbr.json  2017-02-03 13:52:27.000000000 
+0100
@@ -1 +1 @@
-{"is_release": true, "git_version": "643816c"}
\ No newline at end of file
+{"is_release": true, "git_version": "7bc6449"}
\ No newline at end of file
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pifpaf-0.12.0/setup.cfg new/pifpaf-0.24.1/setup.cfg
--- old/pifpaf-0.12.0/setup.cfg 2016-08-09 14:30:30.000000000 +0200
+++ new/pifpaf-0.24.1/setup.cfg 2017-02-03 13:52:29.000000000 +0100
@@ -15,6 +15,7 @@
        Programming Language :: Python :: 3
        Programming Language :: Python :: 3.4
        Programming Language :: Python :: 3.5
+       Programming Language :: Python :: 3.6
        Topic :: Software Development :: Testing
 
 [extras]
@@ -36,6 +37,7 @@
        ceph = pifpaf.drivers.ceph:CephDriver
        gnocchi = pifpaf.drivers.gnocchi:GnocchiDriver
        keystone = pifpaf.drivers.keystone:KeystoneDriver
+       kafka = pifpaf.drivers.kafka:KafkaDriver
        influxdb = pifpaf.drivers.influxdb:InfluxDBDriver
        memcached = pifpaf.drivers.memcached:MemcachedDriver
        mongodb = pifpaf.drivers.mongodb:MongoDBDriver
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pifpaf-0.12.0/tox.ini new/pifpaf-0.24.1/tox.ini
--- old/pifpaf-0.12.0/tox.ini   2016-07-19 11:51:43.000000000 +0200
+++ new/pifpaf-0.24.1/tox.ini   2017-02-03 12:11:38.000000000 +0100
@@ -1,19 +1,18 @@
 [tox]
-envlist = py27,py34,py35,pep8,pypy
+envlist = py27,py34,py35,py36,pep8,pypy
 
 [testenv]
 usedevelop = True
 sitepackages = False
 deps = .[test]
-       
http://tarballs.openstack.org/gnocchi/gnocchi-master.tar.gz#egg=gnocchi[postgresql,file]
+       
http://tarballs.openstack.org/gnocchi/gnocchi-master.tar.gz#egg=gnocchi[postgresql,file,ceph,ceph_recommended_lib,s3]
        
http://tarballs.openstack.org/aodh/aodh-master.tar.gz#egg=aodh[postgresql]
        http://tarballs.openstack.org/keystone/keystone-master.tar.gz
-passenv = PIFPAF_DEBUG
 commands =
     {toxinidir}/tools/pretty_tox.sh '{posargs}'
 
 [testenv:pep8]
-deps = hacking>=0.10,<0.11
+deps = hacking>=0.12,<0.13
 commands = flake8
 
 [flake8]


Reply via email to