xfstests was originally developed to test SGI's XFS filesystem.

Changes from v2:
* Updated instructions and documented some caveats regarding device-mapper
  based block devices

Changes from v1:
* Fix NAME entry in control file

Signed-off-by: Cleber Rosa <[email protected]>
---
 client/tests/xfstests/README      |   41 +++++++++++++++++
 client/tests/xfstests/control     |   62 +++++++++++++++++++++++++
 client/tests/xfstests/xfstests.py |   89 +++++++++++++++++++++++++++++++++++++
 3 files changed, 192 insertions(+), 0 deletions(-)
 create mode 100644 client/tests/xfstests/README
 create mode 100644 client/tests/xfstests/control
 create mode 100644 client/tests/xfstests/xfstests.py

diff --git a/client/tests/xfstests/README b/client/tests/xfstests/README
new file mode 100644
index 0000000..a57062d
--- /dev/null
+++ b/client/tests/xfstests/README
@@ -0,0 +1,41 @@
+xfstests in autotest
+====================
+
+This is a simple wrapper for running xfstests inside autotest. The steps to
+get started are simple:
+
+1) Edit the configuration variables on the control file.
+
+1.1) The variables 'TEST_DEV' and 'TEST_DIR' are mandatory and should be set to
+     a block device path and mount point path, respectively, that will be used
+     *exclusively* for xfstests. The device must already contain the
+     filesystem of your choice.
+
+     DO NOT USE A BLOCK DEVICE WITH IMPORTANT DATA!!!
+
+1.2) Set the range of tests you want to run by setting the TEST_RANGE variable.
+     Note that Python's range() excludes the upper bound, so to cover tests
+     0-255 you need range(0, 256).
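+
+     As an illustration, the relevant control file lines could look like this
+     (the device path and mount point below are examples only; adjust them to
+     your system):
+
+         os.environ['TEST_DEV'] = '/dev/sdb1'
+         os.environ['TEST_DIR'] = '/mnt/xfstests'
+         TEST_RANGE = ['%03i' % t for t in range(0, 256)]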
+
+2) Run the tests (assuming autotest is installed in /usr/local/autotest):
+
+   # cd /usr/local/autotest/client/tests/xfstests
+   # ../../bin/autotest control
+
+3) Check the HTML report at
+   /usr/local/autotest/client/results/default/job_report.html
+
+General notes
+=============
+
+* As autotest includes a setup phase for client tests, this phase is
+  encapsulated in a dummy test, xfstests number 000.
+
+* XFS utilities, system libraries and header files are checked early, before
+  trying to build xfstests. Make sure you resolve those dependencies.
+
+* Some tests are not relevant to filesystems other than XFS, so they will
+  return as TEST_NA.
+
+* Be extra careful when using TEST_DEV with device-mapper based block devices.
+  For instance, xfstests may not be able to figure out that /dev/<vg>/<lv> is
+  actually a link to /dev/mapper/vg-lv. Tests will then fail to check that the
+  device is mounted.
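+  A possible workaround is to point TEST_DEV at the /dev/mapper path directly
+  in the control file (the volume group and logical volume names below are
+  examples only):
+
+      os.environ['TEST_DEV'] = '/dev/mapper/myvg-testlv'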
diff --git a/client/tests/xfstests/control b/client/tests/xfstests/control
new file mode 100644
index 0000000..5112c3c
--- /dev/null
+++ b/client/tests/xfstests/control
@@ -0,0 +1,62 @@
+TIME="SHORT"
+AUTHOR = "Cleber Rosa <[email protected]>"
+DOC = """
+xfstests is a filesystem QA suite, originally developed to test SGI's XFS
+filesystem.
+"""
+NAME = 'xfs filesystem test suite'
+TEST_CLASS = 'kernel'
+TEST_CATEGORY = 'Functional'
+TEST_TYPE = 'client'
+
+#
+# Job configuration, instead of editing xfstests config files, set them
+# right here as environment variables
+#
+
+# TEST_DEV: "device containing TEST PARTITION"
+os.environ['TEST_DEV'] = '/dev/null'
+
+# TEST_DIR: "mount point of TEST PARTITION"
+os.environ['TEST_DIR'] = '/mnt/null'
+
+# SCRATCH_DEV "device containing SCRATCH PARTITION"
+# os.environ['SCRATCH_DEV'] = ''
+
+# SCRATCH_MNT "mount point for SCRATCH PARTITION"
+# os.environ['SCRATCH_MNT'] = ''
+
+# TAPE_DEV "tape device for testing xfsdump"
+# os.environ['TAPE_DEV'] = ''
+
+# RMT_TAPE_DEV "remote tape device for testing xfsdump"
+# os.environ['RMT_TAPE_DEV'] = ''
+
+# RMT_IRIXTAPE_DEV "remote IRIX tape device for testing xfsdump"
+# os.environ['RMT_IRIXTAPE_DEV'] = ''
+
+# SCRATCH_LOGDEV "device for scratch-fs external log"
+# os.environ['SCRATCH_LOGDEV'] = ''
+
+# SCRATCH_RTDEV "device for scratch-fs realtime data"
+# os.environ['SCRATCH_RTDEV'] = ''
+
+# TEST_LOGDEV "device for test-fs external log"
+# os.environ['TEST_LOGDEV'] = ''
+
+# TEST_RTDEV "device for test-fs realtime data"
+# os.environ['TEST_RTDEV'] = ''
+
+# Whether UDF tests are disabled
+# os.environ['DISABLE_UDF_TEST'] = '1'
+
+#
+# Adapt to the list of tests you want to run
+#
+TEST_RANGE = ['%03i' % t for t in range(0, 256)]
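+
+# As an illustration only, a hand-picked subset could be run instead (the
+# test numbers below are arbitrary examples):
+#
+# TEST_RANGE = ['%03i' % t for t in [1, 5, 8]]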
+
+#
+# Finally, run the tests
+#
+for test in TEST_RANGE:
+    result = job.run_test_detail('xfstests', test_number=test, tag=test)
diff --git a/client/tests/xfstests/xfstests.py b/client/tests/xfstests/xfstests.py
new file mode 100644
index 0000000..1dea294
--- /dev/null
+++ b/client/tests/xfstests/xfstests.py
@@ -0,0 +1,89 @@
+import os, re, glob, logging
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.bin import test, utils, os_dep
+
+class xfstests(test.test):
+
+    version = 1
+
+    PASSED_RE = re.compile(r'Passed all \d+ tests')
+    FAILED_RE = re.compile(r'Failed \d+ of \d+ tests')
+    NA_RE = re.compile(r'Passed all 0 tests')
+    NA_DETAIL_RE = re.compile(r'(\d{3})\s*(\[not run\])\s*(.*)')
+
+
+    def _get_available_tests(self):
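+        # Each test ships a reference output file named NNN.out; a test is
+        # considered available when both the NNN.out file and the matching
+        # NNN test script exist in the current (source) directory.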
+        tests = glob.glob('???.out')
+        tests_list = [t[:-4] for t in tests if os.path.exists(t[:-4])]
+        tests_list.sort()
+        return tests_list
+
+
+    def _run_sub_test(self, test):
+        os.chdir(self.srcdir)
+        output = utils.system_output('./check %s' % test,
+                                     ignore_status=True,
+                                     retain_output=True)
+        lines = output.split('\n')
+        result_line = lines[-1]
+
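+        # The last line of the check script output is expected to summarize
+        # the run: "Passed all 0 tests" means the test did not actually run,
+        # and the "[not run]" line two lines above carries the reason;
+        # "Failed N of M tests" means failure; "Passed all N tests" (N > 0)
+        # means success.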
+        if self.NA_RE.match(result_line):
+            detail_line = lines[-3]
+            match = self.NA_DETAIL_RE.match(detail_line)
+            if match is not None:
+                error_msg = match.groups()[2]
+            else:
+                error_msg = 'Test dependency failed, test not run'
+            raise error.TestNAError(error_msg)
+
+        elif self.FAILED_RE.match(result_line):
+            raise error.TestError('Test error, check debug logs for complete '
+                                  'test output')
+
+        elif self.PASSED_RE.match(result_line):
+            return
+
+        else:
+            raise error.TestError('Could not assert test success or failure, '
+                                  'assuming failure. Please check debug logs')
+
+
+    def setup(self, tarball = 'xfstests.tar.bz2'):
+        #
+        # Anticipate failures due to missing devel tools, libraries, headers
+        # and xfs commands
+        #
+        os_dep.command('autoconf')
+        os_dep.command('autoheader')
+        os_dep.command('libtool')
+        os_dep.library('libuuid.so.1')
+        os_dep.header('xfs/xfs.h')
+        os_dep.header('attr/xattr.h')
+        os_dep.header('sys/acl.h')
+        os_dep.command('mkfs.xfs')
+        os_dep.command('xfs_db')
+        os_dep.command('xfs_bmap')
+        os_dep.command('xfsdump')
+
+        self.job.require_gcc()
+
+        tarball = utils.unmap_url(self.bindir, tarball, self.tmpdir)
+        utils.extract_tarball_to_dir(tarball, self.srcdir)
+        os.chdir(self.srcdir)
+        utils.make()
+
+        logging.debug("Available tests in srcdir: %s" %
+                      ", ".join(self._get_available_tests()))
+
+
+    def run_once(self, test_number):
+        os.chdir(self.srcdir)
+        if test_number == '000':
+            logging.debug('Dummy test to setup xfstests')
+            return
+
+        if test_number not in self._get_available_tests():
+            raise error.TestError('test file %s not found' % test_number)
+
+        logging.debug("Running test: %s" % test_number)
+        self._run_sub_test(test_number)
-- 
1.7.4.4
