This commit adds a functional test suite that utilizes lxc
containers to guarantee a non-destructive test environment.

The tests can be invoked individually, as a group of related
tests, or from automake via the standard 'make check'
command.

No functional tests are included as part of this commit; only the test
framework itself is added.  (The invocations and output below are
illustrative examples.)

Example test invocations:

    Run a single test (first cd to tests/ftests):
        ./001-cgget-basic_cgget.py
        or
        ./ftests.py -N 15      # Run test #015

    Run a suite of tests (first cd to tests/ftests):
        ./ftests.py -s cgget   # Run all cgget tests

    Run all the tests by hand
        ./ftests.py
        # This may be advantageous over running make check
        # because it will try to re-use the same lxc
        # container for all of the tests.  This should
        # provide a significant performance increase

    Run the tests from automake
        make check
        # Then examine the *.trs and *.log files for
        # specifics regarding each test result

Example output from a test run:

Test Results:
        Run Date:                     Jun 03 13:41:35
        Passed:                               1  test
        Skipped:                              0 tests
        Failed:                               0 tests
-----------------------------------------------------------------
Timing Results:
        Test                               Time (sec)
        ---------------------------------------------------------
        setup                                    6.95
        001-cgget-basic_cgget.py                 0.07
        teardown                                 0.00
        ---------------------------------------------------------
        Total Run Time                           7.02

Signed-off-by: Tom Hromatka <tom.hroma...@oracle.com>
---
 tests/ftests/config.py |  55 +++++++++
 tests/ftests/consts.py |  48 ++++++++
 tests/ftests/ftests.py | 300 +++++++++++++++++++++++++++++++++++++++++++++++++
 3 files changed, 403 insertions(+)
 create mode 100644 tests/ftests/config.py
 create mode 100644 tests/ftests/consts.py
 create mode 100755 tests/ftests/ftests.py

diff --git a/tests/ftests/config.py b/tests/ftests/config.py
new file mode 100644
index 0000000..5a8b225
--- /dev/null
+++ b/tests/ftests/config.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python
+#
+# Config class for the libcgroup functional tests
+#
+# Copyright (c) 2019 Oracle and/or its affiliates.  All rights reserved.
+# Author: Tom Hromatka <tom.hroma...@oracle.com>
+#
+
+#
+# This library is free software; you can redistribute it and/or modify it
+# under the terms of version 2.1 of the GNU Lesser General Public License as
+# published by the Free Software Foundation.
+#
+# This library is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License
+# for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this library; if not, see <http://www.gnu.org/licenses>.
+#
+
+import consts
+from container import Container
+import os
+
class Config(object):
    """Runtime configuration for a libcgroup functional test run.

    Holds the lxc container the tests run in, the relevant directory
    paths, and the test-selection settings parsed from the command line.
    """

    def __init__(self, container=None):
        # container: a Container instance to run the tests in, or None
        # to build one with the default settings
        if container:
            self.container = container
        else:
            # Use the default container settings
            self.container = Container(consts.DEFAULT_CONTAINER_NAME)

        # tests/ftests directory (where this file lives) and its parent
        self.ftest_dir = os.path.dirname(os.path.abspath(__file__))
        self.libcg_dir = os.path.dirname(self.ftest_dir)

        # run every suite and every test unless narrowed on the cmdline
        self.test_suite = consts.TESTS_RUN_ALL_SUITES
        self.test_num = consts.TESTS_RUN_ALL
        self.verbose = False

    def __str__(self):
        out_str = "Configuration"
        # bug fix: self.container is an object, not an integer, so the
        # original "%d" raised a TypeError.  Use "%s" instead.
        out_str += "\n\tcontainer = %s" % self.container

        return out_str
+
+
class ConfigError(Exception):
    """Exception raised for errors in the test configuration."""

    def __init__(self, message):
        super(ConfigError, self).__init__(message)
        # bug fix: Python 3 has no BaseException.message attribute, so
        # __str__() raised an AttributeError.  Save the message
        # explicitly.
        self.message = message

    def __str__(self):
        out_str = "ConfigError:\n\tmessage = %s" % self.message
        return out_str
diff --git a/tests/ftests/consts.py b/tests/ftests/consts.py
new file mode 100644
index 0000000..f6a58ee
--- /dev/null
+++ b/tests/ftests/consts.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python
+#
+# Constants for the libcgroup functional tests
+#
+# Copyright (c) 2019 Oracle and/or its affiliates.  All rights reserved.
+# Author: Tom Hromatka <tom.hroma...@oracle.com>
+#
+
+#
+# This library is free software; you can redistribute it and/or modify it
+# under the terms of version 2.1 of the GNU Lesser General Public License as
+# published by the Free Software Foundation.
+#
+# This library is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License
+# for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this library; if not, see <http://www.gnu.org/licenses>.
+#
+
+import os
+
# default lxc container settings
DEFAULT_CONTAINER_NAME = 'test_libcg'
DEFAULT_CONTAINER_DISTRO = 'oracle'
DEFAULT_CONTAINER_RELEASE = '7'
DEFAULT_CONTAINER_ARCH = 'amd64'
# seconds to wait for the container to stop before giving up
DEFAULT_CONTAINER_STOP_TIMEOUT = 5
# lxc config file shipped alongside this module
DEFAULT_CONTAINER_CFG_PATH=os.path.join(
    os.path.dirname(os.path.abspath(__file__)),
    'default.conf')
# NOTE(review): presumably a scratch config written when the defaults
# are overridden -- confirm against container.py
TEMP_CONTAINER_CFG_FILE='tmp.conf'

DEFAULT_LOG_FILE = 'libcgroup-ftests.log'

# log levels -- a higher value logs more verbosely
LOG_CRITICAL = 1
LOG_WARNING = 5
LOG_DEBUG = 8
DEFAULT_LOG_LEVEL = 5

# directory created in the container's rootfs where the libcgroup
# build is exposed (ftests setup() adds <mount>/src/.libs to ld.so)
LIBCG_MOUNT_POINT = 'libcg'

# sentinels meaning "run every test" / "run every suite"
TESTS_RUN_ALL = -1
TESTS_RUN_ALL_SUITES = "allsuites"
# result codes each test's main() is expected to return
TEST_PASSED = "passed"
TEST_FAILED = "failed"
TEST_SKIPPED = "skipped"
diff --git a/tests/ftests/ftests.py b/tests/ftests/ftests.py
new file mode 100755
index 0000000..32e0363
--- /dev/null
+++ b/tests/ftests/ftests.py
@@ -0,0 +1,300 @@
+#!/usr/bin/env python
+#
+# Main entry point for the libcgroup functional tests
+#
+# Copyright (c) 2019 Oracle and/or its affiliates.  All rights reserved.
+# Author: Tom Hromatka <tom.hroma...@oracle.com>
+#
+
+#
+# This library is free software; you can redistribute it and/or modify it
+# under the terms of version 2.1 of the GNU Lesser General Public License as
+# published by the Free Software Foundation.
+#
+# This library is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License
+# for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this library; if not, see <http://www.gnu.org/licenses>.
+#
+
+import argparse
+from cgroup import Cgroup
+from config import Config
+import consts
+import container
+import datetime
+import log
+from log import Log
+import os
+from run import Run
+import sys
+import time
+
# wall-clock durations (seconds) recorded by setup()/teardown() when
# called with record_time=True; reported in the verbose timing summary
setup_time = 0.0
teardown_time = 0.0
+
def parse_args():
    """Parse the ftests command line and build the run configuration.

    Returns:
        A Config populated from the command-line options.

    Raises:
        ValueError: if unprivileged containers (-u) are requested, as
            they are not currently supported
    """
    parser = argparse.ArgumentParser("Libcgroup Functional Tests")
    parser.add_argument('-n', '--name',
                        help='name of the container',
                        required=False, type=str, default=None)
    parser.add_argument('-f', '--config',
                        help='initial configuration file',
                        required=False, type=str, default=None)
    parser.add_argument('-d', '--distro',
                        help='linux distribution to use as a template',
                        required=False, type=str, default=None)
    parser.add_argument('-r', '--release',
                        help='distribution release, e.g.\'trusty\'',
                        required=False, type=str, default=None)
    parser.add_argument('-a', '--arch',
                        help='processor architecture',
                        required=False, type=str, default=None)
    parser.add_argument('-t', '--timeout',
                        help='wait timeout (sec) before stopping the container',
                        required=False, type=int, default=None)

    parser.add_argument('-l', '--loglevel',
                        help='log level',
                        required=False, type=int, default=None)
    parser.add_argument('-L', '--logfile',
                        help='log file',
                        required=False, type=str, default=None)

    parser.add_argument('-N', '--num',
                        help='Test number to run.  If unspecified, all tests are run',
                        required=False, default=consts.TESTS_RUN_ALL, type=int)
    parser.add_argument('-s', '--suite',
                        help='Test suite to run, e.g. cpuset', required=False,
                        default=consts.TESTS_RUN_ALL_SUITES, type=str)
    parser.add_argument('-u', '--unpriv',
                        help='Run the tests in an unprivileged container',
                        required=False, action="store_true")
    parser.add_argument('-v', '--verbose',
                        help='Print all information about this test run',
                        # bug fix: this previously used default=True with
                        # action="store_false", so passing -v *disabled*
                        # the verbose output its help text promises
                        default=False, required=False, action="store_true")

    args = parser.parse_args()

    config = Config()

    if args.name:
        # bug fix: was "config.name = args.name", an attribute nothing
        # reads -- the name belongs to the container.
        # NOTE(review): assumes Container exposes a writable .name;
        # confirm against container.py
        config.container.name = args.name
    if args.config:
        config.container.cfg_path = args.config
    if args.distro:
        config.container.distro = args.distro
    if args.release:
        config.container.release = args.release
    if args.arch:
        config.container.arch = args.arch
    if args.timeout:
        config.container.stop_timeout = args.timeout
    if args.loglevel:
        log.log_level = args.loglevel
    if args.logfile:
        log.log_file = args.logfile
    # compare against the sentinels explicitly rather than relying on
    # truthiness (the old "if args.num:" only worked because the
    # TESTS_RUN_ALL sentinel, -1, happens to be truthy)
    if args.num != consts.TESTS_RUN_ALL:
        config.test_num = args.num
    if args.suite != consts.TESTS_RUN_ALL_SUITES:
        config.test_suite = args.suite
    if args.unpriv:
        # the unreachable privileged=False assignment that followed this
        # raise was removed as dead code
        raise ValueError('Unprivileged containers are not currently supported')
    config.verbose = args.verbose

    return config
+
def setup(config, do_teardown=True, record_time=False):
    """Create and boot the test container described by config.

    Args:
        config: the Config for this test run
        do_teardown: when True, tear down any container left over from
            a previous run before creating a new one
        record_time: when True, store the elapsed time in the module
            global setup_time for the timing report
    """
    global setup_time
    begin = time.time()

    if do_teardown:
        # belt and suspenders:  a previous run may not have cleaned up
        # after itself, so make a best-effort attempt to remove any
        # stale container first
        try:
            teardown(config)
        except Exception as e:
            # log but otherwise ignore any failure here
            Log.log_debug(e)

    config.container.create()

    # create the /libcg mount point inside the container's rootfs
    rootfs_info = config.container.rootfs()
    rootfs_path = rootfs_info.split('=')[1].strip()
    mount_dir = os.path.join(rootfs_path, consts.LIBCG_MOUNT_POINT)
    Run.run(['sudo', 'mkdir', mount_dir])

    config.container.start()

    # teach the container's dynamic linker where the freshly built
    # libcgroup libraries live
    libs_dir = os.path.join('/', consts.LIBCG_MOUNT_POINT, 'src/.libs')
    config.container.run(
        ['bash', '-c',
         'echo %s >> /etc/ld.so.conf.d/libcgroup.conf' % libs_dir])
    config.container.run('ldconfig')

    if record_time:
        setup_time = time.time() - begin
+
def _print_count(label, count):
    # Print one "Passed:/Skipped:/Failed:" line of the results summary.
    # A count of exactly one is rendered as '1  test' (two spaces) to
    # match the historical column alignment.
    if count == 1:
        test_str = "1  test"
    else:
        test_str = "%d tests" % count
    print('\t%s%s' % ('{0: <30}'.format(label),
                      '{0: >15}'.format(test_str)))


def _print_timing(name, seconds):
    # Print one row of the verbose timing table.
    time_str = "%2.2f" % seconds
    print('\t%s%s' % ('{0: <30}'.format(name),
                      '{0: >15}'.format(time_str)))


def run_tests(config):
    """Discover and run the selected functional tests.

    Tests are python files under config.ftest_dir whose names look like
    NNN-suite-description.py.  Selection is narrowed by
    config.test_suite and config.test_num.

    Returns:
        [passed_cnt, failed_cnt, skipped_cnt]

    Raises:
        ValueError: if a test returns an unknown result code
    """
    passed_tests = []
    failed_tests = []
    skipped_tests = []

    for root, dirs, filenames in os.walk(config.ftest_dir):
        for filename in filenames:
            if os.path.splitext(filename)[-1] != ".py":
                # ignore non-python files
                continue

            filenum = filename.split('-')[0]

            try:
                filenum_int = int(filenum)
            except ValueError:
                # this file must not be a test.  Skip it
                Log.log_debug('Skipping %s.  It doesn\'t start with an int' %
                              filename)
                continue

            try:
                filesuite = filename.split('-')[1]
            except IndexError:
                Log.log_error('Skipping %s.  It doesn\'t conform to the '
                              'filename format' % filename)
                continue

            if config.test_suite != consts.TESTS_RUN_ALL_SUITES and \
               config.test_suite != filesuite:
                continue
            if config.test_num != consts.TESTS_RUN_ALL and \
               config.test_num != filenum_int:
                continue

            test = __import__(os.path.splitext(filename)[0])

            failure_cause = None
            start_time = time.time()
            try:
                [ret, failure_cause] = test.main(config)
            except Exception as e:
                # catch all exceptions.  you never know when there's
                # a crummy test
                failure_cause = e
                Log.log_debug(e)
                ret = consts.TEST_FAILED

                # if the test raised, it may not have cleaned up after
                # itself.  re-create the container
                teardown(config)
                setup(config, do_teardown=False)
            finally:
                run_time = time.time() - start_time
                if ret == consts.TEST_PASSED:
                    passed_tests.append([filename, run_time])
                elif ret == consts.TEST_FAILED:
                    # bug fix: remember each test's own failure cause.
                    # Previously the report printed the *last* cause for
                    # every failed test.
                    failed_tests.append([filename, run_time, failure_cause])
                elif ret == consts.TEST_SKIPPED:
                    skipped_tests.append([filename, run_time])
                else:
                    # bug fix: was "ValueException", which doesn't exist
                    raise ValueError('Unexpected ret: %s' % ret)

    passed_cnt = len(passed_tests)
    failed_cnt = len(failed_tests)
    skipped_cnt = len(skipped_tests)

    print("-----------------------------------------------------------------")
    print("Test Results:")
    date_str = datetime.datetime.now().strftime('%b %d %H:%M:%S')
    print('\t%s%s' % ('{0: <30}'.format("Run Date:"),
                      '{0: >15}'.format(date_str)))
    _print_count("Passed:", passed_cnt)
    _print_count("Skipped:", skipped_cnt)
    _print_count("Failed:", failed_cnt)
    for test in failed_tests:
        # report each failed test with its own recorded cause
        print("\t\tTest:\t\t\t\t%s - %s" % (test[0], str(test[2])))
    print("-----------------------------------------------------------------")

    global setup_time
    global teardown_time
    if config.verbose:
        print("Timing Results:")
        print('\t%s%s' % ('{0: <30}'.format("Test"),
                          '{0: >15}'.format("Time (sec)")))
        print("\t---------------------------------------------------------")
        _print_timing('setup', setup_time)
        for test in passed_tests:
            _print_timing(test[0], test[1])
        for test in failed_tests:
            _print_timing(test[0], test[1])
        _print_timing('teardown', teardown_time)

        total_run_time = setup_time + teardown_time
        for test in passed_tests:
            total_run_time += test[1]
        for test in failed_tests:
            total_run_time += test[1]
        total_str = "%5.2f" % total_run_time
        print("\t---------------------------------------------------------")
        print('\t%s%s' % ('{0: <30}'.format("Total Run Time"),
                          '{0: >15}'.format(total_str)))

    return [passed_cnt, failed_cnt, skipped_cnt]
+
def teardown(config, record_time=False):
    """Stop and destroy the test container.

    Args:
        config: the Config for this test run
        record_time: when True, store the elapsed time in the module
            global teardown_time for the timing report

    All container errors are logged and swallowed so that teardown is
    always best effort.
    """
    global teardown_time
    begin = time.time()

    for operation in (config.container.stop, config.container.destroy):
        try:
            operation()
        except Exception as e:
            # log but ignore all exceptions
            Log.log_debug(e)

    if record_time:
        teardown_time = time.time() - begin
+
def main(config):
    """Run the full ftests flow and map the outcome to an exit code.

    Returns:
        0 when at least one test passed and none failed, 77 (the
        automake "skipped" code) when every test was skipped, the
        failed-test count when any test failed, and 99 (automake
        "hard error") when nothing ran at all.
    """
    AUTOMAKE_SKIPPED = 77
    AUTOMAKE_HARD_ERROR = 99
    AUTOMAKE_PASSED = 0

    try:
        setup(config, record_time=True)
        [passed_cnt, failed_cnt, skipped_cnt] = run_tests(config)
    finally:
        # always tear the container down, even if setup or the test
        # run blew up
        teardown(config, record_time=True)

    if failed_cnt > 0:
        return failed_cnt
    elif skipped_cnt > 0:
        return AUTOMAKE_SKIPPED
    elif passed_cnt > 0:
        return AUTOMAKE_PASSED
    else:
        return AUTOMAKE_HARD_ERROR
+
if __name__ == '__main__':
    # script entry point:  parse the command line, run the tests, and
    # propagate the automake-style result as the process exit code
    config = parse_args()
    sys.exit(main(config))
-- 
1.8.3.1



_______________________________________________
Libcg-devel mailing list
Libcg-devel@lists.sourceforge.net
https://lists.sourceforge.net/lists/listinfo/libcg-devel

Reply via email to