Hello community,

here is the log from the commit of package perl-DBD-CSV for openSUSE:Factory 
checked in at 2014-09-22 09:23:35
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/perl-DBD-CSV (Old)
 and      /work/SRC/openSUSE:Factory/.perl-DBD-CSV.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "perl-DBD-CSV"

Changes:
--------
--- /work/SRC/openSUSE:Factory/perl-DBD-CSV/perl-DBD-CSV.changes        
2013-11-26 19:25:09.000000000 +0100
+++ /work/SRC/openSUSE:Factory/.perl-DBD-CSV.new/perl-DBD-CSV.changes   
2014-09-22 09:23:36.000000000 +0200
@@ -1,0 +2,20 @@
+Fri Sep 19 12:15:51 UTC 2014 - [email protected]
+
+- updated to 0.44
+     * Table names case sensitiveness not tested on case-problematic FS's
+     * Fix defaults in doc (annocpan)
+     * Fix typo in SYNOPSIS (RT#97313)
+     * Workaround eof bug in Text::CSV_XS-1.10
+ 
+ 0.43   - 2014-06-30, H.Merijn Brand
+     * Updated copyright to 2014
+     * Unquote schema's in test for cygwin
+     * Extra guards in Makefile.PL for unmet requirements
+ 
+ 0.42   - 2013-08-14, H.Merijn Brand
+     * Optionally skip tests using File::Spec->tempdir () RT#87684
+     * And document the use of $TMPDIR in README
+     * Make the SYNOPSIS better reflect real-world usage
+     * Detect DBI::Test and use it if available
+
+-------------------------------------------------------------------

Old:
----
  DBD-CSV-0.41.tgz

New:
----
  DBD-CSV-0.44.tgz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ perl-DBD-CSV.spec ++++++
--- /var/tmp/diff_new_pack.r8qkpp/_old  2014-09-22 09:23:37.000000000 +0200
+++ /var/tmp/diff_new_pack.r8qkpp/_new  2014-09-22 09:23:37.000000000 +0200
@@ -1,7 +1,7 @@
 #
 # spec file for package perl-DBD-CSV
 #
-# Copyright (c) 2013 SUSE LINUX Products GmbH, Nuernberg, Germany.
+# Copyright (c) 2014 SUSE LINUX Products GmbH, Nuernberg, Germany.
 #
 # All modifications and additions to the file contributed by third parties
 # remain the property of their copyright owners, unless otherwise agreed
@@ -17,7 +17,7 @@
 
 
 Name:           perl-DBD-CSV
-Version:        0.41
+Version:        0.44
 Release:        0
 %define cpan_name DBD-CSV
 Summary:        DBI driver for CSV files
@@ -34,14 +34,14 @@
 BuildRequires:  perl(SQL::Statement) >= 1.405
 BuildRequires:  perl(Test::More) >= 0.9
 BuildRequires:  perl(Text::CSV_XS) >= 1.01
-#BuildRequires: perl(DBD::CSV)
-#BuildRequires: perl(version)
 Requires:       perl(DBD::File) >= 0.42
 Requires:       perl(DBI) >= 1.628
 Requires:       perl(SQL::Statement) >= 1.405
 Requires:       perl(Test::More) >= 0.9
 Requires:       perl(Text::CSV_XS) >= 1.01
-Recommends:     perl(Test::More) >= 0.98
+Recommends:     perl(DBI) >= 1.631
+Recommends:     perl(Test::More) >= 1.001003
+Recommends:     perl(Text::CSV_XS) >= 1.10
 %{perl_requires}
 
 %description

++++++ DBD-CSV-0.41.tgz -> DBD-CSV-0.44.tgz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.41/ChangeLog new/DBD-CSV-0.44/ChangeLog
--- old/DBD-CSV-0.41/ChangeLog  2013-07-29 09:34:07.000000000 +0200
+++ new/DBD-CSV-0.44/ChangeLog  2014-08-04 21:27:01.000000000 +0200
@@ -1,3 +1,20 @@
+0.44   - 2014-08-04, H.Merijn Brand
+    * Table names case sensitiveness not tested on case-problematic FS's
+    * Fix defaults in doc (annocpan)
+    * Fix typo in SYNOPSIS (RT#97313)
+    * Workaround eof bug in Text::CSV_XS-1.10
+
+0.43   - 2014-06-30, H.Merijn Brand
+    * Updated copyright to 2014
+    * Unquote schema's in test for cygwin
+    * Extra guards in Makefile.PL for unmet requirements
+
+0.42   - 2013-08-14, H.Merijn Brand
+    * Optionally skip tests using File::Spec->tempdir () RT#87684
+    * And document the use of $TMPDIR in README
+    * Make the SYNOPSIS better reflect real-world usage
+    * Detect DBI::Test and use it if available
+
 0.41   - 2013-07-29, H.Merijn Brand
     * Use File::Spec->tmpdir () for universal existing folder
       Note that huge $TMP folders may cause the test to run slow
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.41/MANIFEST new/DBD-CSV-0.44/MANIFEST
--- old/DBD-CSV-0.41/MANIFEST   2013-07-29 15:15:28.000000000 +0200
+++ new/DBD-CSV-0.44/MANIFEST   2014-08-04 21:44:20.000000000 +0200
@@ -5,6 +5,14 @@
 README
 lib/Bundle/DBD/CSV.pm
 lib/DBD/CSV.pm
+lib/DBD/CSV/TypeInfo.pm
+lib/DBD/CSV/GetInfo.pm
+lib/DBI/Test/Case/DBD/CSV/t10_base.pm
+lib/DBI/Test/Case/DBD/CSV/t11_dsnlist.pm
+lib/DBI/Test/Case/DBD/CSV/t20_createdrop.pm
+lib/DBI/Test/Case/DBD/CSV/t85_error.pm
+lib/DBI/Test/DBD/CSV/Conf.pm
+lib/DBI/Test/DBD/CSV/List.pm
 t/00_meta.t
 t/00_pod_cov.t
 t/00_pod.t
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.41/MANIFEST.SKIP 
new/DBD-CSV-0.44/MANIFEST.SKIP
--- old/DBD-CSV-0.41/MANIFEST.SKIP      2013-06-11 18:37:29.000000000 +0200
+++ new/DBD-CSV-0.44/MANIFEST.SKIP      2013-08-13 13:06:06.000000000 +0200
@@ -1,5 +1,5 @@
 \.aspell\.local.pws
-\.dbi-svn
+\.dbi-git
 \bCVS\b
 ~$
 \.tgz$
@@ -17,4 +17,9 @@
 ^xx
 META.yml
 valgrind.log
+tests.skip
 xt/
+t/basic/
+t/DBI/
+t/DBD/
+t/SQL/
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.41/META.json new/DBD-CSV-0.44/META.json
--- old/DBD-CSV-0.41/META.json  2013-07-29 15:15:28.000000000 +0200
+++ new/DBD-CSV-0.44/META.json  2014-08-04 21:44:20.000000000 +0200
@@ -1,72 +1,76 @@
 {
-   "author" : [
-      "Jochen Wiedmann",
-      "Jeff Zucker",
-      "H.Merijn Brand <[email protected]>",
-      "Jens Rehsack <[email protected]>"
+   "version" : "0.44",
+   "generated_by" : "Author",
+   "resources" : {
+      "license" : [
+         "http://dev.perl.org/licenses/"
       ],
-   "dynamic_config" : 1,
-   "provides" : {
-      "DBD::CSV" : {
-         "version" : "0.41",
-         "file" : "lib/DBD/CSV.pm"
+      "repository" : {
+         "type" : "git",
+         "web" : "https://github.com/perl5-dbi/DBD-CSV.git",
+         "url" : "https://github.com/perl5-dbi/DBD-CSV.git"
+      }
+   },
+   "prereqs" : {
+      "build" : {
+         "requires" : {
+            "Config" : "0"
+         }
+      },
+      "test" : {
+         "requires" : {
+            "Test::Harness" : "0",
+            "charnames" : "0",
+            "Cwd" : "0",
+            "Encode" : "0",
+            "Test::More" : "0.90"
+         },
+         "recommends" : {
+            "Test::More" : "1.001003"
          }
       },
-   "x_installdirs" : "site",
-   "prereqs" : {
       "runtime" : {
          "recommends" : {
-            "Test::More" : "0.98",
-            "perl" : "5.016003"
-            },
+            "Text::CSV_XS" : "1.10",
+            "perl" : "5.020000",
+            "DBI" : "1.631"
+         },
          "requires" : {
+            "SQL::Statement" : "1.405",
+            "DBI" : "1.628",
             "Text::CSV_XS" : "1.01",
-            "perl" : "5.008001",
             "DBD::File" : "0.42",
-            "SQL::Statement" : "1.405",
-            "DBI" : "1.628"
-            }
-         },
+            "perl" : "5.008001"
+         }
+      },
       "configure" : {
          "requires" : {
             "ExtUtils::MakeMaker" : "0"
-            }
-         },
-      "build" : {
-         "requires" : {
-            "Config" : "0"
-            }
-         },
-      "test" : {
-         "requires" : {
-            "Encode" : "0",
-            "Test::Harness" : "0",
-            "charnames" : "0",
-            "Test::More" : "0.90",
-            "Cwd" : "0"
-            }
          }
-      },
-   "generated_by" : "Author",
+      }
+   },
+   "provides" : {
+      "DBD::CSV" : {
+         "version" : "0.44",
+         "file" : "lib/DBD/CSV.pm"
+      }
+   },
+   "meta-spec" : {
+      "url" : "http://search.cpan.org/perldoc?CPAN::Meta::Spec",
+      "version" : "2"
+   },
    "license" : [
       "perl_5"
-      ],
-   "resources" : {
-      "license" : [
-         "http://dev.perl.org/licenses/"
-         ],
-      "repository" : {
-         "url" : "https://github.com/perl5-dbi/DBD-CSV.git",
-         "web" : "https://github.com/perl5-dbi/DBD-CSV.git",
-         "type" : "git"
-         }
-      },
-   "version" : "0.41",
+   ],
+   "author" : [
+      "Jochen Wiedmann",
+      "Jeff Zucker",
+      "H.Merijn Brand <[email protected]>",
+      "Jens Rehsack <[email protected]>"
+   ],
    "abstract" : "DBI driver for CSV files",
-   "meta-spec" : {
-      "version" : "2",
-      "url" : "http://search.cpan.org/perldoc?CPAN::Meta::Spec"
-      },
    "name" : "DBD-CSV",
+   "x_installdirs" : "site",
+   "dynamic_config" : 1,
    "release_status" : "stable"
-   }
+}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.41/META.yml new/DBD-CSV-0.44/META.yml
--- old/DBD-CSV-0.41/META.yml   2013-07-29 15:15:28.000000000 +0200
+++ new/DBD-CSV-0.44/META.yml   2014-08-04 21:44:20.000000000 +0200
@@ -10,7 +10,7 @@
 configure_requires: 
   ExtUtils::MakeMaker: 0
 dynamic_config: 1
-generated_by: Author, CPAN::Meta::Converter version 2.131560
+generated_by: Author, CPAN::Meta::Converter version 2.142060
 license: perl
 meta-spec: 
   url: http://module-build.sourceforge.net/META-spec-v1.4.html
@@ -19,10 +19,12 @@
 provides: 
   DBD::CSV: 
     file: lib/DBD/CSV.pm
-    version: '0.41'
+    version: '0.44'
 recommends: 
-  Test::More: '0.98'
-  perl: '5.016003'
+  DBI: '1.631'
+  Test::More: '1.001003'
+  Text::CSV_XS: '1.10'
+  perl: '5.020000'
 requires: 
   Cwd: 0
   DBD::File: '0.42'
@@ -37,5 +39,5 @@
 resources: 
   license: http://dev.perl.org/licenses/
   repository: https://github.com/perl5-dbi/DBD-CSV.git
-version: '0.41'
+version: '0.44'
 x_installdirs: site
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.41/Makefile.PL new/DBD-CSV-0.44/Makefile.PL
--- old/DBD-CSV-0.41/Makefile.PL        2013-06-27 09:31:20.000000000 +0200
+++ new/DBD-CSV-0.44/Makefile.PL        2014-06-30 12:39:45.000000000 +0200
@@ -1,20 +1,41 @@
 # -*- perl -*-
 
-# Copyright (c) 2009-2013 H.Merijn Brand
+# Copyright (c) 2009-2014 H.Merijn Brand
 
 require 5.008001;
 
 use strict;
 
 use ExtUtils::MakeMaker;
+use File::Spec;
 
+eval { require DBI;      };
+if ($@) {
+    print <<"MSG";
+
+DBD::CSV requires DBI and it cannot be loaded:
+$@
+
+MSG
+    exit 1;
+    }
+if ($DBI::VERSION < 1.628) {
+    print <<"MSG";
+
+Trying to use DBD::CSV with DBI-$DBI::VERSION is heading for failure.
+DBD::CSV is relying on DBD::File, bundled in the DBI release and does
+require features not present in this version of DBI.
+
+MSG
+    exit 0;
+    }
 eval { require DBD::CSV; };
 if (!$@ && $DBD::CSV::VERSION < 0.1010) {
     print <<'MSG';
 
-WARNING! You seem to have installed a recent version of the DBD::CSV module.
+WARNING!  You seem to have installed a recent version of the DBD::CSV module.
 Note that the API (in particular attribute names) has changed, to conform to
-the DBI specifications. For example $dbh->{directory} has been renamed to
+the DBI specifications.  For example  $dbh->{directory}  has been renamed to
 $dbh->{f_dir} and $dbh->{eol}, $dbh->{quote_char}, ... are gone in favour of
 $dbh->{tables}{$table}{csv}, which is used for storing meta information. You
 might need to modify existing sources before doing a "make install". See the
@@ -22,15 +43,26 @@
 
 MSG
     sleep 5;
-    };
+    }
 
 use vars qw( $DBI_INC_DIR );
 
+{   my $tmp_dir = File::Spec->tmpdir ();
+    my $default = $ENV{AUTOMATED_TESTING} ? "n" : "y";
+    if (prompt ("Enable the use of $tmp_dir for tests?", $default) =~ m/[Yy]/) 
{
+       unlink "tests.skip";
+       }
+    else {
+       open my $fh, ">", "tests.skip";
+       print $fh "tmpdir\n";
+       close $fh;
+       }
+    }
 my %wm = (
     NAME         => "DBD::CSV",
     DISTNAME     => "DBD-CSV",
     ABSTRACT     => "DBI driver for CSV and similar structured files",
-    AUTHOR       => "H.Merijn Brand <h.merijn\@xs4all.nl>",
+    AUTHOR       => "H.Merijn Brand <h.m.brand\@xs4all.nl>",
     VERSION_FROM => "lib/DBD/CSV.pm",
     PREREQ_PM    => {
        "DBI"            => 1.628,
@@ -54,6 +86,26 @@
     );
 $ExtUtils::MakeMaker::VERSION > 6.30 and $wm{LICENSE} = "perl";
 
+# Windows is case-insensitive! Do not remove lib.pl and tmp.csv
+$File::Path::VERSION > 2.06 and File::Path::remove_tree (glob 
"t/[bA-KM-SU-Z]*");
+
+eval "use DBI::Test::Conf ();";
+if ($@) {
+    warn "******\n",
+         "******\tDBI::Test is not installed.\n",
+         "******\tIt will be required in one of the upcoming releases.\n",
+         "******\n";
+    }
+else {
+    use lib "lib";
+
+    local $" = " ";
+    $wm{PREREQ_PM}{"DBI::Test"} = "0.001";
+    my @nt = DBI::Test::Conf->setup (CONTAINED_DBDS => [qw( CSV )]);
+    $wm{test} = { TESTS => join " " => (sort glob "t/*.t"), @nt };
+    $wm{clean}{FILES} .= " @nt";
+    }
+
 my $rv = WriteMakefile (%wm);
 
 1;
@@ -65,7 +117,7 @@
     my $min_vsn = ($] >= 5.010 && -d "xt" && ($ENV{AUTOMATED_TESTING} || 0) != 
1)
        ? join "\n" =>
            'test ::',
-           '   -@env DBI_SQL_NANO=1 TEST_FILES="t/[012367]*.t t/4[038]*.t 
t/5[05]*.t t/85*.t" make -e test_dynamic',
+           '   -@env DBI_SQL_NANO=1 make -e test_dynamic 
TEST_FILES=t/[1-9]*.t',
            '',
            'test ::',
            '   -@env TEST_FILES="xt/*.t" make -e test_dynamic',
@@ -76,7 +128,7 @@
        '       cover -test',
        '',
        'spellcheck:',
-       '       pod-spell-check --aspell',
+       '       pod-spell-check --aspell --ispell',
        '',
        'checkmeta:     spellcheck',
        '       perl sandbox/genMETA.pl -c',
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.41/README new/DBD-CSV-0.44/README
--- old/DBD-CSV-0.41/README     2013-05-22 16:28:10.000000000 +0200
+++ new/DBD-CSV-0.44/README     2014-01-01 11:42:19.000000000 +0100
@@ -8,13 +8,8 @@
     and implements access to so-called CSV files (Comma separated
     values).
 
-Warning
-    THIS IS ALPHA SOFTWARE. It is *only* 'Alpha' because the
-    interface (API) is not finalized. The Alpha status does not
-    reflect code quality or stability.
-
 Copying
-    Copyright (C) 2009-2013 by H.Merijn Brand
+    Copyright (C) 2009-2014 by H.Merijn Brand
     Copyright (C) 2004-2009 by Jeff Zucker
     Copyright (C) 1998-2004 by Jochen Wiedmann
  
@@ -48,8 +43,8 @@
       cpan DBD::CSV
 
     Or standard build/installation:
-      gzip -cd DBD-CSV-0.39.tar.gz | tar xf -
-      cd DBD-CSV-0.39
+      gzip -cd DBD-CSV-0.43.tar.gz | tar xf -
+      cd DBD-CSV-0.43
       perl Makefile.PL
       make test
       make install
@@ -57,6 +52,19 @@
     (this is for Unix users, Windows users would prefer PowerArchiver,
     WinZip or something similar).
 
+    The test suite contains extensive tests for all features provided
+    by DBD::CSV. Some of them include the use of what is set to be the
+    default temporary directory on the system. Even though the tests
+    do not use the folder to read or modify data, using the folder will
+    imply the scanning of that folder to see if files would qualify for
+    use in DBD::CSV. When the folder contains many files, the scanning
+    will seriously slow down the testing duration. The configure phase
+    therefore asks the user if using the folder is allowed. The default
+    answer is yes unless $AUTOMATED_TESTING is set.
+    As File::Spec->tmpdir () honors the environment, you can enable
+    these tests using another folder by setting $TMPDIR or whatever
+    controls tmpdir () or your OS.
+
 Author:
     This module is currently maintained by
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.41/lib/Bundle/DBD/CSV.pm 
new/DBD-CSV-0.44/lib/Bundle/DBD/CSV.pm
--- old/DBD-CSV-0.41/lib/Bundle/DBD/CSV.pm      2013-07-23 09:05:35.000000000 
+0200
+++ new/DBD-CSV-0.44/lib/Bundle/DBD/CSV.pm      2014-08-04 21:28:26.000000000 
+0200
@@ -5,7 +5,7 @@
 use strict;
 use warnings;
 
-our $VERSION = "1.09";
+our $VERSION = "1.12";
 
 1;
 
@@ -21,15 +21,15 @@
 
 =head1 CONTENTS
 
-DBI 1.628
+DBI 1.631
 
-Text::CSV_XS 1.01
+Text::CSV_XS 1.10
 
 SQL::Statement 1.405
 
 DBD::File 0.42
 
-DBD::CSV 0.41
+DBD::CSV 0.44
 
 =head1 DESCRIPTION
 
@@ -44,7 +44,7 @@
 
 =head1 COPYRIGHT AND LICENSE
 
-Copyright (C) 2009-2013 by H.Merijn Brand
+Copyright (C) 2009-2014 by H.Merijn Brand
 Copyright (C) 2004-2009 by Jeff Zucker
 Copyright (C) 1998-2004 by Jochen Wiedmann
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.41/lib/DBD/CSV/GetInfo.pm 
new/DBD-CSV-0.44/lib/DBD/CSV/GetInfo.pm
--- old/DBD-CSV-0.41/lib/DBD/CSV/GetInfo.pm     1970-01-01 01:00:00.000000000 
+0100
+++ new/DBD-CSV-0.44/lib/DBD/CSV/GetInfo.pm     2013-07-29 17:41:21.000000000 
+0200
@@ -0,0 +1,257 @@
+#!/usr/bin/perl
+
+# The %info hash was automatically generated by
+# DBI::DBD::Metadata::write_getinfo_pm v2.014214.
+
+package DBD::CSV::GetInfo;
+
+use strict;
+use DBD::CSV;
+
+# Beware: not officially documented interfaces...
+# use DBI::Const::GetInfoType qw(%GetInfoType);
+# use DBI::Const::GetInfoReturn qw(%GetInfoReturnTypes %GetInfoReturnValues);
+
+my $sql_driver  = "CSV";               # DBD::CSV uses two-part version string
+my $sql_ver_fmt = "%02d.%02d.0000";    # ODBC version string: ##.##.#####
+my $sql_driver_ver = sprintf $sql_ver_fmt, split /\./ => $DBD::CSV::VERSION;
+
+sub sql_data_source_name
+{
+    my $dbh = shift;
+    return "dbi:$sql_driver:" . $dbh->{Name};
+    } # sql_data_source_name
+
+sub sql_user_name
+{
+    my $dbh = shift;
+    # CURRENT_USER is a non-standard attribute, probably undef
+    # Username is a standard DBI attribute
+    return $dbh->{CURRENT_USER} || $dbh->{Username};
+    } # sql_user_name
+
+our %info = (
+#    20 => undef,                      # SQL_ACCESSIBLE_PROCEDURES
+#    19 => undef,                      # SQL_ACCESSIBLE_TABLES
+#     0 => undef,                      # SQL_ACTIVE_CONNECTIONS
+#   116 => undef,                      # SQL_ACTIVE_ENVIRONMENTS
+#     1 => undef,                      # SQL_ACTIVE_STATEMENTS
+#   169 => undef,                      # SQL_AGGREGATE_FUNCTIONS
+#   117 => undef,                      # SQL_ALTER_DOMAIN
+#    86 => undef,                      # SQL_ALTER_TABLE
+# 10021 => undef,                      # SQL_ASYNC_MODE
+#   120 => undef,                      # SQL_BATCH_ROW_COUNT
+#   121 => undef,                      # SQL_BATCH_SUPPORT
+#    82 => undef,                      # SQL_BOOKMARK_PERSISTENCE
+#   114 => undef,                      # SQL_CATALOG_LOCATION
+# 10003 => undef,                      # SQL_CATALOG_NAME
+#    41 => undef,                      # SQL_CATALOG_NAME_SEPARATOR
+#    42 => undef,                      # SQL_CATALOG_TERM
+#    92 => undef,                      # SQL_CATALOG_USAGE
+# 10004 => undef,                      # SQL_COLLATING_SEQUENCE
+# 10004 => undef,                      # SQL_COLLATION_SEQ
+#    87 => undef,                      # SQL_COLUMN_ALIAS
+#    22 => undef,                      # SQL_CONCAT_NULL_BEHAVIOR
+#    53 => undef,                      # SQL_CONVERT_BIGINT
+#    54 => undef,                      # SQL_CONVERT_BINARY
+#    55 => undef,                      # SQL_CONVERT_BIT
+#    56 => undef,                      # SQL_CONVERT_CHAR
+#    57 => undef,                      # SQL_CONVERT_DATE
+#    58 => undef,                      # SQL_CONVERT_DECIMAL
+#    59 => undef,                      # SQL_CONVERT_DOUBLE
+#    60 => undef,                      # SQL_CONVERT_FLOAT
+#    48 => undef,                      # SQL_CONVERT_FUNCTIONS
+#   173 => undef,                      # SQL_CONVERT_GUID
+#    61 => undef,                      # SQL_CONVERT_INTEGER
+#   123 => undef,                      # SQL_CONVERT_INTERVAL_DAY_TIME
+#   124 => undef,                      # SQL_CONVERT_INTERVAL_YEAR_MONTH
+#    71 => undef,                      # SQL_CONVERT_LONGVARBINARY
+#    62 => undef,                      # SQL_CONVERT_LONGVARCHAR
+#    63 => undef,                      # SQL_CONVERT_NUMERIC
+#    64 => undef,                      # SQL_CONVERT_REAL
+#    65 => undef,                      # SQL_CONVERT_SMALLINT
+#    66 => undef,                      # SQL_CONVERT_TIME
+#    67 => undef,                      # SQL_CONVERT_TIMESTAMP
+#    68 => undef,                      # SQL_CONVERT_TINYINT
+#    69 => undef,                      # SQL_CONVERT_VARBINARY
+#    70 => undef,                      # SQL_CONVERT_VARCHAR
+#   122 => undef,                      # SQL_CONVERT_WCHAR
+#   125 => undef,                      # SQL_CONVERT_WLONGVARCHAR
+#   126 => undef,                      # SQL_CONVERT_WVARCHAR
+#    74 => undef,                      # SQL_CORRELATION_NAME
+#   127 => undef,                      # SQL_CREATE_ASSERTION
+#   128 => undef,                      # SQL_CREATE_CHARACTER_SET
+#   129 => undef,                      # SQL_CREATE_COLLATION
+#   130 => undef,                      # SQL_CREATE_DOMAIN
+#   131 => undef,                      # SQL_CREATE_SCHEMA
+#   132 => undef,                      # SQL_CREATE_TABLE
+#   133 => undef,                      # SQL_CREATE_TRANSLATION
+#   134 => undef,                      # SQL_CREATE_VIEW
+#    23 => undef,                      # SQL_CURSOR_COMMIT_BEHAVIOR
+#    24 => undef,                      # SQL_CURSOR_ROLLBACK_BEHAVIOR
+# 10001 => undef,                      # SQL_CURSOR_SENSITIVITY
+#    16 => undef,                      # SQL_DATABASE_NAME
+      2 => \&sql_data_source_name,     # SQL_DATA_SOURCE_NAME
+#    25 => undef,                      # SQL_DATA_SOURCE_READ_ONLY
+#   119 => undef,                      # SQL_DATETIME_LITERALS
+#    17 => undef,                      # SQL_DBMS_NAME
+#    18 => undef,                      # SQL_DBMS_VER
+#    18 => undef,                      # SQL_DBMS_VERSION
+#   170 => undef,                      # SQL_DDL_INDEX
+#    26 => undef,                      # SQL_DEFAULT_TRANSACTION_ISOLATION
+#    26 => undef,                      # SQL_DEFAULT_TXN_ISOLATION
+# 10002 => undef,                      # SQL_DESCRIBE_PARAMETER
+#   171 => undef,                      # SQL_DM_VER
+#     3 => undef,                      # SQL_DRIVER_HDBC
+#   135 => undef,                      # SQL_DRIVER_HDESC
+#     4 => undef,                      # SQL_DRIVER_HENV
+#    76 => undef,                      # SQL_DRIVER_HLIB
+#     5 => undef,                      # SQL_DRIVER_HSTMT
+      6 => $INC{"DBD/CSV.pm"},         # SQL_DRIVER_NAME
+#    77 => undef,                      # SQL_DRIVER_ODBC_VER
+      7 => $sql_driver_ver,            # SQL_DRIVER_VER
+#   136 => undef,                      # SQL_DROP_ASSERTION
+#   137 => undef,                      # SQL_DROP_CHARACTER_SET
+#   138 => undef,                      # SQL_DROP_COLLATION
+#   139 => undef,                      # SQL_DROP_DOMAIN
+#   140 => undef,                      # SQL_DROP_SCHEMA
+#   141 => undef,                      # SQL_DROP_TABLE
+#   142 => undef,                      # SQL_DROP_TRANSLATION
+#   143 => undef,                      # SQL_DROP_VIEW
+#   144 => undef,                      # SQL_DYNAMIC_CURSOR_ATTRIBUTES1
+#   145 => undef,                      # SQL_DYNAMIC_CURSOR_ATTRIBUTES2
+#    27 => undef,                      # SQL_EXPRESSIONS_IN_ORDERBY
+#     8 => undef,                      # SQL_FETCH_DIRECTION
+#    84 => undef,                      # SQL_FILE_USAGE
+#   146 => undef,                      # SQL_FORWARD_ONLY_CURSOR_ATTRIBUTES1
+#   147 => undef,                      # SQL_FORWARD_ONLY_CURSOR_ATTRIBUTES2
+#    81 => undef,                      # SQL_GETDATA_EXTENSIONS
+#    88 => undef,                      # SQL_GROUP_BY
+#    28 => undef,                      # SQL_IDENTIFIER_CASE
+#    29 => undef,                      # SQL_IDENTIFIER_QUOTE_CHAR
+#   148 => undef,                      # SQL_INDEX_KEYWORDS
+#   149 => undef,                      # SQL_INFO_SCHEMA_VIEWS
+#   172 => undef,                      # SQL_INSERT_STATEMENT
+#    73 => undef,                      # SQL_INTEGRITY
+#   150 => undef,                      # SQL_KEYSET_CURSOR_ATTRIBUTES1
+#   151 => undef,                      # SQL_KEYSET_CURSOR_ATTRIBUTES2
+#    89 => undef,                      # SQL_KEYWORDS
+#   113 => undef,                      # SQL_LIKE_ESCAPE_CLAUSE
+#    78 => undef,                      # SQL_LOCK_TYPES
+#    34 => undef,                      # SQL_MAXIMUM_CATALOG_NAME_LENGTH
+#    97 => undef,                      # SQL_MAXIMUM_COLUMNS_IN_GROUP_BY
+#    98 => undef,                      # SQL_MAXIMUM_COLUMNS_IN_INDEX
+#    99 => undef,                      # SQL_MAXIMUM_COLUMNS_IN_ORDER_BY
+#   100 => undef,                      # SQL_MAXIMUM_COLUMNS_IN_SELECT
+#   101 => undef,                      # SQL_MAXIMUM_COLUMNS_IN_TABLE
+#    30 => undef,                      # SQL_MAXIMUM_COLUMN_NAME_LENGTH
+#     1 => undef,                      # SQL_MAXIMUM_CONCURRENT_ACTIVITIES
+#    31 => undef,                      # SQL_MAXIMUM_CURSOR_NAME_LENGTH
+#     0 => undef,                      # SQL_MAXIMUM_DRIVER_CONNECTIONS
+# 10005 => undef,                      # SQL_MAXIMUM_IDENTIFIER_LENGTH
+#   102 => undef,                      # SQL_MAXIMUM_INDEX_SIZE
+#   104 => undef,                      # SQL_MAXIMUM_ROW_SIZE
+#    32 => undef,                      # SQL_MAXIMUM_SCHEMA_NAME_LENGTH
+#   105 => undef,                      # SQL_MAXIMUM_STATEMENT_LENGTH
+# 20000 => undef,                      # SQL_MAXIMUM_STMT_OCTETS
+# 20001 => undef,                      # SQL_MAXIMUM_STMT_OCTETS_DATA
+# 20002 => undef,                      # SQL_MAXIMUM_STMT_OCTETS_SCHEMA
+#   106 => undef,                      # SQL_MAXIMUM_TABLES_IN_SELECT
+#    35 => undef,                      # SQL_MAXIMUM_TABLE_NAME_LENGTH
+#   107 => undef,                      # SQL_MAXIMUM_USER_NAME_LENGTH
+# 10022 => undef,                      # SQL_MAX_ASYNC_CONCURRENT_STATEMENTS
+#   112 => undef,                      # SQL_MAX_BINARY_LITERAL_LEN
+#    34 => undef,                      # SQL_MAX_CATALOG_NAME_LEN
+#   108 => undef,                      # SQL_MAX_CHAR_LITERAL_LEN
+#    97 => undef,                      # SQL_MAX_COLUMNS_IN_GROUP_BY
+#    98 => undef,                      # SQL_MAX_COLUMNS_IN_INDEX
+#    99 => undef,                      # SQL_MAX_COLUMNS_IN_ORDER_BY
+#   100 => undef,                      # SQL_MAX_COLUMNS_IN_SELECT
+#   101 => undef,                      # SQL_MAX_COLUMNS_IN_TABLE
+#    30 => undef,                      # SQL_MAX_COLUMN_NAME_LEN
+#     1 => undef,                      # SQL_MAX_CONCURRENT_ACTIVITIES
+#    31 => undef,                      # SQL_MAX_CURSOR_NAME_LEN
+#     0 => undef,                      # SQL_MAX_DRIVER_CONNECTIONS
+# 10005 => undef,                      # SQL_MAX_IDENTIFIER_LEN
+#   102 => undef,                      # SQL_MAX_INDEX_SIZE
+#    32 => undef,                      # SQL_MAX_OWNER_NAME_LEN
+#    33 => undef,                      # SQL_MAX_PROCEDURE_NAME_LEN
+#    34 => undef,                      # SQL_MAX_QUALIFIER_NAME_LEN
+#   104 => undef,                      # SQL_MAX_ROW_SIZE
+#   103 => undef,                      # SQL_MAX_ROW_SIZE_INCLUDES_LONG
+#    32 => undef,                      # SQL_MAX_SCHEMA_NAME_LEN
+#   105 => undef,                      # SQL_MAX_STATEMENT_LEN
+#   106 => undef,                      # SQL_MAX_TABLES_IN_SELECT
+#    35 => undef,                      # SQL_MAX_TABLE_NAME_LEN
+#   107 => undef,                      # SQL_MAX_USER_NAME_LEN
+#    37 => undef,                      # SQL_MULTIPLE_ACTIVE_TXN
+#    36 => undef,                      # SQL_MULT_RESULT_SETS
+#   111 => undef,                      # SQL_NEED_LONG_DATA_LEN
+#    75 => undef,                      # SQL_NON_NULLABLE_COLUMNS
+#    85 => undef,                      # SQL_NULL_COLLATION
+#    49 => undef,                      # SQL_NUMERIC_FUNCTIONS
+#     9 => undef,                      # SQL_ODBC_API_CONFORMANCE
+#   152 => undef,                      # SQL_ODBC_INTERFACE_CONFORMANCE
+#    12 => undef,                      # SQL_ODBC_SAG_CLI_CONFORMANCE
+#    15 => undef,                      # SQL_ODBC_SQL_CONFORMANCE
+#    73 => undef,                      # SQL_ODBC_SQL_OPT_IEF
+#    10 => undef,                      # SQL_ODBC_VER
+#   115 => undef,                      # SQL_OJ_CAPABILITIES
+#    90 => undef,                      # SQL_ORDER_BY_COLUMNS_IN_SELECT
+#    38 => undef,                      # SQL_OUTER_JOINS
+#   115 => undef,                      # SQL_OUTER_JOIN_CAPABILITIES
+#    39 => undef,                      # SQL_OWNER_TERM
+#    91 => undef,                      # SQL_OWNER_USAGE
+#   153 => undef,                      # SQL_PARAM_ARRAY_ROW_COUNTS
+#   154 => undef,                      # SQL_PARAM_ARRAY_SELECTS
+#    80 => undef,                      # SQL_POSITIONED_STATEMENTS
+#    79 => undef,                      # SQL_POS_OPERATIONS
+#    21 => undef,                      # SQL_PROCEDURES
+#    40 => undef,                      # SQL_PROCEDURE_TERM
+#   114 => undef,                      # SQL_QUALIFIER_LOCATION
+#    41 => undef,                      # SQL_QUALIFIER_NAME_SEPARATOR
+#    42 => undef,                      # SQL_QUALIFIER_TERM
+#    92 => undef,                      # SQL_QUALIFIER_USAGE
+#    93 => undef,                      # SQL_QUOTED_IDENTIFIER_CASE
+#    11 => undef,                      # SQL_ROW_UPDATES
+#    39 => undef,                      # SQL_SCHEMA_TERM
+#    91 => undef,                      # SQL_SCHEMA_USAGE
+#    43 => undef,                      # SQL_SCROLL_CONCURRENCY
+#    44 => undef,                      # SQL_SCROLL_OPTIONS
+#    14 => undef,                      # SQL_SEARCH_PATTERN_ESCAPE
+#    13 => undef,                      # SQL_SERVER_NAME
+#    94 => undef,                      # SQL_SPECIAL_CHARACTERS
+#   155 => undef,                      # SQL_SQL92_DATETIME_FUNCTIONS
+#   156 => undef,                      # SQL_SQL92_FOREIGN_KEY_DELETE_RULE
+#   157 => undef,                      # SQL_SQL92_FOREIGN_KEY_UPDATE_RULE
+#   158 => undef,                      # SQL_SQL92_GRANT
+#   159 => undef,                      # SQL_SQL92_NUMERIC_VALUE_FUNCTIONS
+#   160 => undef,                      # SQL_SQL92_PREDICATES
+#   161 => undef,                      # SQL_SQL92_RELATIONAL_JOIN_OPERATORS
+#   162 => undef,                      # SQL_SQL92_REVOKE
+#   163 => undef,                      # SQL_SQL92_ROW_VALUE_CONSTRUCTOR
+#   164 => undef,                      # SQL_SQL92_STRING_FUNCTIONS
+#   165 => undef,                      # SQL_SQL92_VALUE_EXPRESSIONS
+#   118 => undef,                      # SQL_SQL_CONFORMANCE
+#   166 => undef,                      # SQL_STANDARD_CLI_CONFORMANCE
+#   167 => undef,                      # SQL_STATIC_CURSOR_ATTRIBUTES1
+#   168 => undef,                      # SQL_STATIC_CURSOR_ATTRIBUTES2
+#    83 => undef,                      # SQL_STATIC_SENSITIVITY
+#    50 => undef,                      # SQL_STRING_FUNCTIONS
+#    95 => undef,                      # SQL_SUBQUERIES
+#    51 => undef,                      # SQL_SYSTEM_FUNCTIONS
+#    45 => undef,                      # SQL_TABLE_TERM
+#   109 => undef,                      # SQL_TIMEDATE_ADD_INTERVALS
+#   110 => undef,                      # SQL_TIMEDATE_DIFF_INTERVALS
+#    52 => undef,                      # SQL_TIMEDATE_FUNCTIONS
+#    46 => undef,                      # SQL_TRANSACTION_CAPABLE
+#    72 => undef,                      # SQL_TRANSACTION_ISOLATION_OPTION
+#    46 => undef,                      # SQL_TXN_CAPABLE
+#    72 => undef,                      # SQL_TXN_ISOLATION_OPTION
+#    96 => undef,                      # SQL_UNION
+#    96 => undef,                      # SQL_UNION_STATEMENT
+     47 => \&sql_user_name,            # SQL_USER_NAME
+# 10000 => undef,                      # SQL_XOPEN_CLI_YEAR
+    );
+
+1;
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.41/lib/DBD/CSV/TypeInfo.pm 
new/DBD-CSV-0.44/lib/DBD/CSV/TypeInfo.pm
--- old/DBD-CSV-0.41/lib/DBD/CSV/TypeInfo.pm    1970-01-01 01:00:00.000000000 
+0100
+++ new/DBD-CSV-0.44/lib/DBD/CSV/TypeInfo.pm    2013-07-29 17:41:42.000000000 
+0200
@@ -0,0 +1,64 @@
+#!/usr/bin/perl
+
+# Don't forget to add version and intellectual property control information.
+
+# The %type_info_all hash was automatically generated by
+# DBI::DBD::Metadata::write_typeinfo_pm v2.014214.
+
+package DBD::CSV::TypeInfo;
+
+{
+    require Exporter;
+    require DynaLoader;
+    @ISA    = qw(Exporter DynaLoader);
+    @EXPORT = qw(type_info_all);
+    use DBI qw(:sql_types);
+
+    $type_info_all = [
+       {   TYPE_NAME          => 0,
+           DATA_TYPE          => 1,
+           COLUMN_SIZE        => 2,
+           LITERAL_PREFIX     => 3,
+           LITERAL_SUFFIX     => 4,
+           CREATE_PARAMS      => 5,
+           NULLABLE           => 6,
+           CASE_SENSITIVE     => 7,
+           SEARCHABLE         => 8,
+           UNSIGNED_ATTRIBUTE => 9,
+           FIXED_PREC_SCALE   => 10,
+           AUTO_UNIQUE_VALUE  => 11,
+           LOCAL_TYPE_NAME    => 12,
+           MINIMUM_SCALE      => 13,
+           MAXIMUM_SCALE      => 14,
+           SQL_DATA_TYPE      => 15,
+           SQL_DATETIME_SUB   => 16,
+           NUM_PREC_RADIX     => 17,
+           INTERVAL_PRECISION => 18,
+           },
+       [   "VARCHAR", SQL_VARCHAR, undef, "'", "'", undef, 0, 1, 1, 0, undef,
+           undef, undef, 1, 999999, undef, undef, undef, undef,
+           ],
+       [   "CHAR", DBIstcf_DISCARD_STRING, undef, "'", "'", undef, 0, 1, 1, 0,
+           undef, undef, undef, 1, 999999, undef, undef, undef, undef,
+           ],
+       [   "INTEGER", SQL_INTEGER, undef, "", "", undef, 0, 0, 1, 0, undef,
+           undef, undef, 0, 0, undef, undef, undef, undef,
+           ],
+       [   "REAL", SQL_REAL, undef, "",    "",    undef,
+           0,      0,        1,     0,     undef, undef,
+           undef,  0,        0,     undef, undef, undef,
+           undef,
+           ],
+       [   "BLOB", SQL_LONGVARBINARY, undef, "'", "'", undef, 0, 1, 1, 0,
+           undef, undef, undef, 1, 999999, undef, undef, undef, undef,
+           ],
+       [   "BLOB", SQL_LONGVARBINARY, undef, "'", "'", undef, 0, 1, 1, 0,
+           undef, undef, undef, 1, 999999, undef, undef, undef, undef,
+           ],
+       [   "TEXT", SQL_LONGVARCHAR, undef, "'", "'", undef, 0, 1, 1, 0, undef,
+           undef, undef, 1, 999999, undef, undef, undef, undef,
+           ],
+       ];
+
+    1;
+    }
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.41/lib/DBD/CSV.pm 
new/DBD-CSV-0.44/lib/DBD/CSV.pm
--- old/DBD-CSV-0.41/lib/DBD/CSV.pm     2013-07-23 09:05:40.000000000 +0200
+++ new/DBD-CSV-0.44/lib/DBD/CSV.pm     2014-08-04 18:10:42.000000000 +0200
@@ -23,7 +23,7 @@
 
 @ISA =   qw( DBD::File );
 
-$VERSION  = "0.41";
+$VERSION  = "0.44";
 $ATTRIBUTION = "DBD::CSV $DBD::CSV::VERSION by H.Merijn Brand";
 
 $err      = 0;         # holds error code   for DBI::err
@@ -138,6 +138,22 @@
     return sprintf "%s using %s", $dbh->{csv_version}, $dtype;
     } # get_csv_versions 
 
+sub get_info
+{
+    my ($dbh, $info_type) = @_;
+    require  DBD::CSV::GetInfo;
+    my $v = $DBD::CSV::GetInfo::info{int ($info_type)};
+    ref $v eq "CODE" and $v = $v->($dbh);
+    return $v;
+    } # get_info
+
+sub type_info_all
+{
+    my $dbh = shift;
+    require   DBD::CSV::TypeInfo;
+    return [@$DBD::CSV::TypeInfo::type_info_all];
+    } # type_info_all
+
 # --- STATEMENT 
----------------------------------------------------------------
 
 package DBD::CSV::st;
@@ -320,6 +336,8 @@
        $csv->eof and return;
 
        my @diag = _csv_diag ($csv);
+       $diag[0] == 2012 and return; # Also EOF (broken in Text::CSV_XS-1.10)
+
        my $file = $tbl->{f_fqfn};
        croak "Error $diag[0] while reading file $file: $diag[1] \@ line 
$diag[3] pos $diag[2]";
        }
@@ -359,23 +377,24 @@
 
     use DBI;
     # See "Creating database handle" below
-    $dbh = DBI->connect ("dbi:CSV:") or
-       die "Cannot connect: $DBI::errstr";
+    $dbh = DBI->connect ("dbi:CSV:", undef, undef, {
+        f_ext      => ".csv/r",
+        RaiseError => 1,
+        }) or die "Cannot connect: $DBI::errstr";
 
     # Simple statements
-    $dbh->do ("CREATE TABLE a (id INTEGER, name CHAR (10))") or
-       die "Cannot prepare: " . $dbh->errstr ();
+    $dbh->do ("CREATE TABLE foo (id INTEGER, name CHAR (10))");
 
     # Selecting
-    $dbh->{RaiseError} = 1;
     my $sth = $dbh->prepare ("select * from foo");
     $sth->execute;
-    while (my @row = $sth->fetchrow_array) {
-       print "id: $row[0], name: $row[1]\n";
+    $sth->bind_columns (\my ($id, $name));
+    while ($sth->fetch) {
+       print "id: $id, name: $name\n";
        }
 
     # Updates
-    my $sth = $dbh->prepare ("UPDATE a SET name = ? WHERE id = ?");
+    my $sth = $dbh->prepare ("UPDATE foo SET name = ? WHERE id = ?");
     $sth->execute ("DBI rocks!", 1);
     $sth->finish;
 
@@ -879,12 +898,12 @@
 I<csv_class> (usually Text::CSV_CS) object. You may want to set these
 attributes if you have unusual CSV files like F</etc/passwd> or MS Excel
 generated CSV files with a semicolon as separator. Defaults are
-"\015\012", ';', '"' and '"', respectively.
+C<\015\012>, C<,>, C<"> and C<">, respectively.
 
 The I<csv_eol> attribute defines the end-of-line pattern, which is better
 known as a record separator pattern since it separates records.  The default
-is windows-style end-of-lines "\015\012" for output (writing) and unset for
-input (reading), so if on unix you may want to set this to newline ("\n")
+is windows-style end-of-lines C<\015\012> for output (writing) and unset for
+input (reading), so if on unix you may want to set this to newline (C<\n>)
 like this:
 
   $dbh->{csv_eol} = "\n";
@@ -1177,7 +1196,7 @@
 
 =head1 COPYRIGHT AND LICENSE
 
-Copyright (C) 2009-2013 by H.Merijn Brand
+Copyright (C) 2009-2014 by H.Merijn Brand
 Copyright (C) 2004-2009 by Jeff Zucker
 Copyright (C) 1998-2004 by Jochen Wiedmann
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.41/lib/DBI/Test/Case/DBD/CSV/t10_base.pm 
new/DBD-CSV-0.44/lib/DBI/Test/Case/DBD/CSV/t10_base.pm
--- old/DBD-CSV-0.41/lib/DBI/Test/Case/DBD/CSV/t10_base.pm      1970-01-01 
01:00:00.000000000 +0100
+++ new/DBD-CSV-0.44/lib/DBI/Test/Case/DBD/CSV/t10_base.pm      2013-08-14 
13:42:26.000000000 +0200
@@ -0,0 +1,56 @@
+package DBI::Test::Case::DBD::CSV::t10_base;
+
+use strict;
+use warnings;
+
+use parent qw( DBI::Test::DBD::CSV::Case);
+
+use Test::More;
+use DBI::Test;
+use DBI;
+
+sub supported_variant
+{
+    my ($self,    $test_case, $cfg_pfx, $test_confs,
+       $dsn_pfx, $dsn_cred,  $options) = @_;
+
+    $self->is_test_for_mocked ($test_confs) and return;
+
+    return $self->SUPER::supported_variant ($test_case, $cfg_pfx, $test_confs,
+       $dsn_pfx, $dsn_cred, $options);
+    } # supported_variant
+
+sub run_test
+{
+    my ($self, $dbc) = @_;
+    my @DB_CREDS = @$dbc;
+    $DB_CREDS[3]->{PrintError} = 0;
+    $DB_CREDS[3]->{RaiseError} = 0;
+    if ($ENV{DBI_PUREPERL}) {
+       eval "use Text::CSV;";
+       $@ or $DB_CREDS[3]->{csv_class}  = "Text::CSV"
+       }
+
+    defined $ENV{DBI_SQL_NANO} or
+       eval "use SQL::Statement;";
+
+    ok (my $switch = DBI->internal, "DBI->internal");
+    is (ref $switch, "DBI::dr", "Driver class");
+
+    # This is a special case. install_driver should not normally be used.
+    ok (my $drh = DBI->install_driver ("CSV"), "Install driver");
+
+    is (ref $drh, "DBI::dr", "Driver class installed");
+
+    ok ($drh->{Version}, "Driver version $drh->{Version}");
+
+    my $dbh = connect_ok (@DB_CREDS, "Connect with dbi:CSV:");
+
+    my $csv_version_info = $dbh->csv_versions ();
+    ok ($csv_version_info, "csv_versions");
+    diag ($csv_version_info);
+
+    done_testing ();
+    }
+
+1;
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/DBD-CSV-0.41/lib/DBI/Test/Case/DBD/CSV/t11_dsnlist.pm 
new/DBD-CSV-0.44/lib/DBI/Test/Case/DBD/CSV/t11_dsnlist.pm
--- old/DBD-CSV-0.41/lib/DBI/Test/Case/DBD/CSV/t11_dsnlist.pm   1970-01-01 
01:00:00.000000000 +0100
+++ new/DBD-CSV-0.44/lib/DBI/Test/Case/DBD/CSV/t11_dsnlist.pm   2013-08-14 
14:24:37.000000000 +0200
@@ -0,0 +1,64 @@
+package DBI::Test::Case::DBD::CSV::t11_dsnlist;
+
+use strict;
+use warnings;
+
+use parent qw( DBI::Test::DBD::CSV::Case);
+
+use Test::More;
+use DBI::Test;
+use DBI;
+
+sub supported_variant
+{
+    my ($self,    $test_case, $cfg_pfx, $test_confs,
+       $dsn_pfx, $dsn_cred,  $options) = @_;
+
+    $self->is_test_for_mocked ($test_confs) and return;
+
+    return $self->SUPER::supported_variant ($test_case, $cfg_pfx, $test_confs,
+       $dsn_pfx, $dsn_cred, $options);
+    } # supported_variant
+
+use vars q{$AUTOLOAD};
+sub AUTOLOAD
+{
+    (my $sub = $AUTOLOAD) =~ s/.*:/DBI::Test::DBD::CSV::Case::/;
+    {  no strict "refs";
+       $sub->(@_);
+       }
+    } # AUTOLOAD
+
+sub run_test
+{
+    my ($self, $dbc) = @_;
+    my @DB_CREDS = @$dbc;
+    $DB_CREDS[3]->{PrintError} = 0;
+    $DB_CREDS[3]->{RaiseError} = 0;
+    if ($ENV{DBI_PUREPERL}) {
+       eval "use Text::CSV;";
+       $@ or $DB_CREDS[3]->{csv_class}  = "Text::CSV"
+       }
+
+    defined $ENV{DBI_SQL_NANO} or
+       eval "use SQL::Statement;";
+
+    my $dbh = connect_ok (@DB_CREDS,           "Connect with dbi:CSV:");
+
+    ok ($dbh->ping,                            "ping");
+
+    # This returns at least ".", "lib", and "t"
+    ok (my @dsn = DBI->data_sources ("CSV"),   "data_sources");
+    ok (@dsn >= 2,                             "more than one");
+    ok ($dbh->disconnect,                      "disconnect");
+
+    # Try different DSN's
+    foreach my $d (qw( . example lib t )) {
+       ok (my $dns = Connect ("dbi:CSV:f_dir=$d"), "use $d as f_dir");
+       ok ($dbh->disconnect,                   "disconnect");
+       }
+
+    done_testing ();
+    } # run_test
+
+1;
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/DBD-CSV-0.41/lib/DBI/Test/Case/DBD/CSV/t20_createdrop.pm 
new/DBD-CSV-0.44/lib/DBI/Test/Case/DBD/CSV/t20_createdrop.pm
--- old/DBD-CSV-0.41/lib/DBI/Test/Case/DBD/CSV/t20_createdrop.pm        
1970-01-01 01:00:00.000000000 +0100
+++ new/DBD-CSV-0.44/lib/DBI/Test/Case/DBD/CSV/t20_createdrop.pm        
2013-09-17 20:38:13.000000000 +0200
@@ -0,0 +1,68 @@
+package DBI::Test::Case::DBD::CSV::t20_createdrop;
+
+use strict;
+use warnings;
+
+use parent qw( DBI::Test::DBD::CSV::Case );
+
+use Test::More;
+use DBI::Test;
+use DBI;
+
+sub supported_variant
+{
+    my ($self,    $test_case, $cfg_pfx, $test_confs,
+       $dsn_pfx, $dsn_cred,  $options) = @_;
+
+    $self->is_test_for_mocked ($test_confs) and return;
+
+    return $self->SUPER::supported_variant ($test_case, $cfg_pfx, $test_confs,
+       $dsn_pfx, $dsn_cred, $options);
+    } # supported_variant
+
+my @tbl_def = (
+    [ "id",   "INTEGER",  4, 0 ],
+    [ "name", "CHAR",    64, 0 ],
+    );
+
+use vars q{$AUTOLOAD};
+sub AUTOLOAD
+{
+    (my $sub = $AUTOLOAD) =~ s/.*:/DBI::Test::DBD::CSV::Case::/;
+    {  no strict "refs";
+       $sub->(@_);
+       }
+    } # AUTOLOAD
+
+sub run_test
+{
+    my ($self, $dbc) = @_;
+    my @DB_CREDS = @$dbc;
+    $DB_CREDS[3]->{PrintError} = 0;
+    $DB_CREDS[3]->{RaiseError} = 0;
+    $DB_CREDS[3]->{f_dir} = DbDir ();
+    if ($ENV{DBI_PUREPERL}) {
+       eval "use Text::CSV;";
+       $@ or $DB_CREDS[3]->{csv_class}  = "Text::CSV"
+       }
+
+    defined $ENV{DBI_SQL_NANO} or
+       eval "use SQL::Statement;";
+
+    my $dbh = connect_ok (@DB_CREDS,   "Connect with dbi:CSV:");
+
+    ok (my $tbl = FindNewTable ($dbh), "find new test table");
+
+    like (my $def = TableDefinition ($tbl, @tbl_def),
+           qr{^create table $tbl}i,    "table definition");
+    do_ok ($dbh, $def,                 "create table");
+    my $tbl_file = DbFile ($tbl);
+    ok (-s $tbl_file,                  "file exists");
+    do_ok ($dbh, "drop table $tbl",    "drop table");
+    ok ($dbh->disconnect,              "disconnect");
+    ok (!-f $tbl_file,                 "file removed");
+
+    done_testing ();
+    } # run_test
+
+1;
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.41/lib/DBI/Test/Case/DBD/CSV/t85_error.pm 
new/DBD-CSV-0.44/lib/DBI/Test/Case/DBD/CSV/t85_error.pm
--- old/DBD-CSV-0.41/lib/DBI/Test/Case/DBD/CSV/t85_error.pm     1970-01-01 
01:00:00.000000000 +0100
+++ new/DBD-CSV-0.44/lib/DBI/Test/Case/DBD/CSV/t85_error.pm     2013-09-17 
21:12:17.000000000 +0200
@@ -0,0 +1,83 @@
+package DBI::Test::Case::DBD::CSV::t85_error;
+
+use strict;
+use warnings;
+
+use parent qw( DBI::Test::DBD::CSV::Case );
+
+use Test::More;
+use DBI::Test;
+use DBI;
+
+sub supported_variant
+{
+    my ($self,    $test_case, $cfg_pfx, $test_confs,
+       $dsn_pfx, $dsn_cred,  $options) = @_;
+
+    $self->is_test_for_mocked ($test_confs) and return;
+
+    return $self->SUPER::supported_variant ($test_case, $cfg_pfx, $test_confs,
+       $dsn_pfx, $dsn_cred, $options);
+    } # supported_variant
+
+my @tbl_def = (
+    [ "id",   "INTEGER",  4, 0 ],
+    [ "name", "CHAR",    64, 0 ],
+    );
+
+use vars q{$AUTOLOAD};
+sub AUTOLOAD
+{
+    (my $sub = $AUTOLOAD) =~ s/.*:/DBI::Test::DBD::CSV::Case::/;
+    {  no strict "refs";
+       $sub->(@_);
+       }
+    } # AUTOLOAD
+
+sub run_test
+{
+    my ($self, $dbc) = @_;
+    my @DB_CREDS = @$dbc;
+    $DB_CREDS[3]->{PrintError} = 0;
+    $DB_CREDS[3]->{RaiseError} = 0;
+    $DB_CREDS[3]->{f_dir} = DbDir ();
+    if ($ENV{DBI_PUREPERL}) {
+       eval "use Text::CSV;";
+       $@ or $DB_CREDS[3]->{csv_class}  = "Text::CSV"
+       }
+
+    defined $ENV{DBI_SQL_NANO} or
+       eval "use SQL::Statement;";
+
+    my $dbh = connect_ok (@DB_CREDS,   "Connect with dbi:CSV:");
+
+    ok (my $tbl = FindNewTable ($dbh), "find new test table");
+
+    like (my $def = TableDefinition ($tbl, @tbl_def),
+           qr{^create table $tbl}i,    "table definition");
+    do_ok ($dbh, $def,                 "create table");
+    my $tbl_file = DbFile ($tbl);
+    ok (-s $tbl_file,                  "file exists");
+    ok ($dbh->disconnect,              "disconnect");
+    undef $dbh;
+
+    ok (-f $tbl_file,                  "file still there");
+    open my $fh, ">>", $tbl_file;
+    print $fh qq{1, "p0wnd",",""",0\n};        # Very bad content
+    close $fh;
+
+    ok ($dbh = connect_ok (@DB_CREDS,  "Connect with dbi:CSV:"));
+    {   local $dbh->{PrintError} = 0;
+       local $dbh->{RaiseError} = 0;
+       my $sth = prepare_ok ($dbh, "select * from $tbl", "prepare");
+       is ($sth->execute, undef,       "execute should fail");
+       # It is safe to regex on this text, as it is NOT locale dependent
+       like ($dbh->errstr, qr{\w+ \@ line [0-9?]+ pos [0-9?]+}, "error 
message");
+       };
+    do_ok ($dbh, "drop table $tbl",    "drop");
+    ok ($dbh->disconnect,              "disconnect");
+
+    done_testing ();
+    } # run_test
+
+1;
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.41/lib/DBI/Test/DBD/CSV/Conf.pm 
new/DBD-CSV-0.44/lib/DBI/Test/DBD/CSV/Conf.pm
--- old/DBD-CSV-0.41/lib/DBI/Test/DBD/CSV/Conf.pm       1970-01-01 
01:00:00.000000000 +0100
+++ new/DBD-CSV-0.44/lib/DBI/Test/DBD/CSV/Conf.pm       2013-08-13 
13:03:51.000000000 +0200
@@ -0,0 +1,9 @@
+#!/usr/bin/perl
+
+package DBI::Test::DBD::CSV::Conf;
+
+use strict;
+use warnings;
+use parent qw( DBI::Test::Conf );
+
+1;
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.41/lib/DBI/Test/DBD/CSV/List.pm 
new/DBD-CSV-0.44/lib/DBI/Test/DBD/CSV/List.pm
--- old/DBD-CSV-0.41/lib/DBI/Test/DBD/CSV/List.pm       1970-01-01 
01:00:00.000000000 +0100
+++ new/DBD-CSV-0.44/lib/DBI/Test/DBD/CSV/List.pm       2013-08-12 
14:13:07.000000000 +0200
@@ -0,0 +1,16 @@
+#!/usr/bin/perl
+
+package DBI::Test::DBD::CSV::List;
+
+use strict;
+use warnings;
+use parent "DBI::Test::List";
+
+sub test_cases
+{
+    my @pm = glob "lib/DBI/Test/Case/DBD/CSV/*.pm";
+    s{lib/DBI/Test/Case/DBD/CSV/(\S+)\.pm}{DBD::CSV::$1} for @pm;
+    return @pm;
+    } # test_cases
+
+1;
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.41/t/41_nulls.t 
new/DBD-CSV-0.44/t/41_nulls.t
--- old/DBD-CSV-0.41/t/41_nulls.t       2010-08-06 13:42:10.000000000 +0200
+++ new/DBD-CSV-0.44/t/41_nulls.t       2013-07-29 16:54:15.000000000 +0200
@@ -9,6 +9,7 @@
 BEGIN { use_ok ("DBI") }
 do "t/lib.pl";
 
+my $nano = $ENV{DBI_SQL_NANO};
 my @tbl_def = (
     [ "id",   "INTEGER",  4, &COL_NULLABLE     ],
     [ "name", "CHAR",    64, &COL_NULLABLE     ],
@@ -25,11 +26,15 @@
 
 ok ($dbh->do ("insert into $tbl values (NULL, 'NULL-id', ' ')"), "insert");
 
+my $row;
+
 ok (my $sth = $dbh->prepare ("select * from $tbl where id is NULL"), 
"prepare");
 ok ($sth->execute,                             "execute");
-ok (my $row = $sth->fetch,                     "fetch");
-
-is_deeply ($row, [ "", "NULL-id", " " ],       "default content");
+TODO: {
+    local $TODO = $nano ? "SQL::Nano does not yet support this syntax" : undef;
+    ok ($row = $sth->fetch,                    "fetch");
+    is_deeply ($row, [ "", "NULL-id", " " ],   "default content");
+    }
 ok ($sth->finish,                              "finish");
 undef $sth;
 
@@ -37,9 +42,11 @@
 
 ok ($sth = $dbh->prepare ("select * from $tbl where id is NULL"), "prepare");
 ok ($sth->execute,                             "execute");
-ok ($row = $sth->fetch,                                "fetch");
-is_deeply ($row, [ undef, "NULL-id", " " ],    "NULL content");
-
+TODO: {
+    local $TODO = $nano ? "SQL::Nano does not yet support this syntax" : undef;
+    ok ($row = $sth->fetch,                            "fetch");
+    is_deeply ($row, [ undef, "NULL-id", " " ],        "NULL content");
+    }
 ok ($sth->finish,                              "finish");
 undef $sth;
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.41/t/42_bindparam.t 
new/DBD-CSV-0.44/t/42_bindparam.t
--- old/DBD-CSV-0.41/t/42_bindparam.t   2010-08-06 13:44:09.000000000 +0200
+++ new/DBD-CSV-0.44/t/42_bindparam.t   2013-07-29 17:00:48.000000000 +0200
@@ -7,6 +7,13 @@
 use Test::More;
 
 BEGIN { use_ok ("DBI") }
+
+if ($ENV{DBI_SQL_NANO}) {
+    diag ("These tests are not yet supported for SQL::Nano");
+    done_testing (1);
+    exit 0;
+    }
+
 do "t/lib.pl";
 
 defined &SQL_VARCHAR or *SQL_VARCHAR = sub { 12 };
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.41/t/44_listfields.t 
new/DBD-CSV-0.44/t/44_listfields.t
--- old/DBD-CSV-0.41/t/44_listfields.t  2010-08-06 13:42:18.000000000 +0200
+++ new/DBD-CSV-0.44/t/44_listfields.t  2013-07-30 12:32:17.000000000 +0200
@@ -9,9 +9,11 @@
 BEGIN { use_ok ("DBI") }
 do "t/lib.pl";
 
+defined &SQL_CHAR    or *SQL_CHAR    = sub {  1 };
 defined &SQL_VARCHAR or *SQL_VARCHAR = sub { 12 };
 defined &SQL_INTEGER or *SQL_INTEGER = sub {  4 };
 
+my $nano = $ENV{DBI_SQL_NANO};
 my @tbl_def = (
     [ "id",   "INTEGER",  4, &COL_KEY          ],
     [ "name", "CHAR",    64, &COL_NULLABLE     ],
@@ -34,16 +36,17 @@
 is ($sth->{NAME_uc}[1], uc $tbl_def[1][0],     "NAME_uc");
 is_deeply ($sth->{NAME_lc_hash},
     { map { ( lc $tbl_def[$_][0] => $_ ) } 0 .. $#tbl_def }, "NAME_lc_hash");
-# TODO tests
-#s ($sth->{TYPE}[0], &SQL_INTEGER,             "TYPE 1");
-#s ($sth->{TYPE}[1], &SQL_VARCHAR,             "TYPE 2");
-is ($sth->{PRECISION}[0],      0,              "PRECISION 1");
-is ($sth->{PRECISION}[1],      64,             "PRECISION 2");
-is ($sth->{NULLABLE}[0],       0,              "NULLABLE 1");
-is ($sth->{NULLABLE}[1],       1,              "NULLABLE 2");
+if ($DBD::File::VERSION gt "0.42") {
+    is ($sth->{TYPE}[0], $nano ? &SQL_VARCHAR : &SQL_INTEGER,  "TYPE 1");
+    is ($sth->{TYPE}[1], $nano ? &SQL_VARCHAR : &SQL_CHAR,     "TYPE 2");
+    is ($sth->{PRECISION}[0],  0,              "PRECISION 1");
+    is ($sth->{PRECISION}[1],  $nano ? 0 : 64, "PRECISION 2");
+    is ($sth->{NULLABLE}[0],   $nano ? 1 : 0,  "NULLABLE 1");
+    is ($sth->{NULLABLE}[1],   1,              "NULLABLE 2");
+    }
 
 ok ($sth->finish,                              "finish");
-#s ($sth->{NUM_OF_FIELDS}, 0,                  "NUM_OF_FIELDS");
+#s ($sth->{NUM_OF_FIELDS},     0,              "NUM_OF_FIELDS");
 undef $sth;
 
 ok ($dbh->do ("drop table $tbl"),              "drop table");
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.41/t/51_commit.t 
new/DBD-CSV-0.44/t/51_commit.t
--- old/DBD-CSV-0.41/t/51_commit.t      2010-08-06 13:42:26.000000000 +0200
+++ new/DBD-CSV-0.44/t/51_commit.t      2013-07-29 16:58:27.000000000 +0200
@@ -9,6 +9,7 @@
 BEGIN { use_ok ("DBI") }
 do "t/lib.pl";
 
+my $nano = $ENV{DBI_SQL_NANO};
 my @tbl_def = (
     [ "id",   "INTEGER",  4, 0 ],
     [ "name", "CHAR",    64, 0 ],
@@ -18,6 +19,11 @@
 {
     my ($dbh, $tbl) = @_;
 
+    if ($nano) {
+       diag ("SQL::Nano does not support count (*)");
+       return 0;
+       }
+
     local $dbh->{PrintError} = 1;
     my $sth = $dbh->prepare ("SELECT count (*) FROM $tbl") or return;
     $sth->execute or return;
@@ -41,12 +47,12 @@
 
 # Check whether AutoCommit mode works.
 ok ($dbh->do ("insert into $tbl values (1, 'Jochen')"), "insert 1");
-is (RowCount ($dbh, $tbl), 1,                  "1 row");
+is (RowCount ($dbh, $tbl), $nano ? 0 : 1,      "1 row");
 
 ok ($dbh->disconnect,                          "disconnect");
 
 ok ($dbh = Connect (),                         "connect");
-is (RowCount ($dbh, $tbl), 1,                  "still 1 row");
+is (RowCount ($dbh, $tbl), $nano ? 0 : 1,      "still 1 row");
 
 # Check whether commit issues a warning in AutoCommit mode
 ok ($dbh->do ("insert into $tbl values (2, 'Tim')"), "insert 2");
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.41/t/55_dir_search.t 
new/DBD-CSV-0.44/t/55_dir_search.t
--- old/DBD-CSV-0.41/t/55_dir_search.t  2013-07-26 13:46:15.000000000 +0200
+++ new/DBD-CSV-0.44/t/55_dir_search.t  2013-08-11 10:59:42.000000000 +0200
@@ -8,12 +8,15 @@
 BEGIN { use_ok ("DBI") }
 require "t/lib.pl";
 
-my $tmpdir = File::Spec->tmpdir ();
 my $tstdir = DbDir ();
+my @extdir = ("t", File::Spec->tmpdir ());
+if (open my $fh, "<", "tests.skip") {
+    grep m/\b tmpdir \b/x => <$fh> and pop @extdir;
+    }
 my $dbh = DBI->connect ("dbi:CSV:", undef, undef, {
     f_schema         => undef,
     f_dir            => DbDir (),
-    f_dir_search     => [ "t", $tmpdir ],
+    f_dir_search     => \@extdir,
     f_ext            => ".csv/r",
     f_lock           => 2,
     f_encoding       => "utf8",
@@ -36,7 +39,7 @@
 $dbh->do ("create table foo (c_foo integer, foo char (1))");
 $dbh->do ("insert into foo values ($_, $_)") for 1, 2, 3;
 
-my @test_dirs = ($tstdir, "t", $tmpdir);
+my @test_dirs = ($tstdir, @extdir);
 is ($dir{$_}, 1, "DSN for $_") for @test_dirs;
 
 my %tbl = map { $_ => 1 } $dbh->tables (undef, undef, undef, undef);
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.41/t/61_meta.t new/DBD-CSV-0.44/t/61_meta.t
--- old/DBD-CSV-0.41/t/61_meta.t        2013-07-26 17:11:29.000000000 +0200
+++ new/DBD-CSV-0.44/t/61_meta.t        2014-05-12 08:05:36.000000000 +0200
@@ -2,7 +2,6 @@
 
 use strict;
 use warnings;
-use version;
 
 use Test::More;
 use DBI qw(:sql_types);
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.41/t/72_csv-schema.t 
new/DBD-CSV-0.44/t/72_csv-schema.t
--- old/DBD-CSV-0.41/t/72_csv-schema.t  2013-07-25 18:21:55.000000000 +0200
+++ new/DBD-CSV-0.44/t/72_csv-schema.t  2014-03-31 08:09:42.000000000 +0200
@@ -23,14 +23,13 @@
     qr{^create table $tbl}i,                   "table definition");
 ok ($dbh->do ($def),                           "create table");
 
+my @tbl = $dbh->tables ();
 if (my $usr = eval { getpwuid $< }) {
-    $usr = qq{"$usr"};
-    is_deeply ([ $dbh->tables () ],
-              [ qq{$usr.$tbl}   ],             "tables");
+    s/^(['"`])(.+)\1\./$2./ for @tbl;
+    is_deeply (\@tbl, [ qq{$usr.$tbl} ],       "tables");
     }
 else {
-    is_deeply ([ $dbh->tables () ],
-              [ qq{$tbl}        ],             "tables");
+    is_deeply (\@tbl, [ qq{$tbl}      ],       "tables");
     }
 
 ok ($dbh->disconnect,                          "disconnect");
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.41/t/73_csv-case.t 
new/DBD-CSV-0.44/t/73_csv-case.t
--- old/DBD-CSV-0.41/t/73_csv-case.t    2013-07-25 18:21:42.000000000 +0200
+++ new/DBD-CSV-0.44/t/73_csv-case.t    2014-08-04 21:42:29.000000000 +0200
@@ -36,10 +36,7 @@
 ok ($dbh = Connect (),                         "connect");
 ok ($dbh->{ignore_missing_table} = 1,          "ignore missing tables");
 
-# I have not yet found an *easy* way to test the case sensitivity of
-# the target FS, which might not prove at all that things will work
-# on all folders, as case in-sensitive and case-sensitive FS's might
-# co-exist.
+my $case_ok = isSaneCase ();
 for (qw( foo foO fOo fOO Foo FoO FOo FOO )) {
     ok (my $sth = $dbh->prepare (qq{select * from "$_"}), "prepare \"$_\"");
 
@@ -48,7 +45,7 @@
        }
     else {
        TODO: {
-           local $TODO = "Filesystem has to be case-aware";
+           local $TODO = "Filesystem has to be case-aware" unless $case_ok;
            local $sth->{PrintError} = 0;
            ok (!$sth->execute,                 "table name '$_' should not 
match 'foo'");
            }
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.41/t/80_rt.t new/DBD-CSV-0.44/t/80_rt.t
--- old/DBD-CSV-0.41/t/80_rt.t  2013-07-25 18:25:21.000000000 +0200
+++ new/DBD-CSV-0.44/t/80_rt.t  2013-08-13 13:20:36.000000000 +0200
@@ -5,6 +5,13 @@
 
 use Test::More;
 use DBI qw(:sql_types);
+
+if ($ENV{DBI_SQL_NANO}) {
+    ok ($ENV{DBI_SQL_NANO}, "These tests are not suited for SQL::Nano");
+    done_testing ();
+    exit 0;
+    }
+
 do "t/lib.pl";
 
 my ($rt, %input, %desc);
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.41/t/lib.pl new/DBD-CSV-0.44/t/lib.pl
--- old/DBD-CSV-0.41/t/lib.pl   2013-07-25 18:57:00.000000000 +0200
+++ new/DBD-CSV-0.44/t/lib.pl   2014-08-04 21:41:59.000000000 +0200
@@ -154,6 +154,22 @@
        } # FindNewTable
     }
 
+sub isSaneCase
+{
+    my @f = glob "??????.???";
+    foreach my $try (qw( FrUbLl BlURgH wOngOs )) {
+       my $fn = "$try.csv";
+       grep m{^$fn$}i => @f and next;
+       open my $fh, ">", $fn or return 1;
+       close $fh;
+       my $sane = (-f $fn && ! -f lc $fn && ! -f uc $fn);
+       unlink $fn;
+       return $sane;
+       }
+    # Assume insane
+    return 0;
+    } # isSaneCase
+
 sub ServerError
 {
     die "# Cannot connect: $DBI::errstr\n";

-- 
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to