Hello community,

Here is the log from the commit of package perl-DBD-CSV for openSUSE:Factory, checked in at 2018-07-28 12:43:08.
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/perl-DBD-CSV (Old)
 and      /work/SRC/openSUSE:Factory/.perl-DBD-CSV.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "perl-DBD-CSV"

Sat Jul 28 12:43:08 2018 rev:33 rq:625592 version:0.53

Changes:
--------
--- /work/SRC/openSUSE:Factory/perl-DBD-CSV/perl-DBD-CSV.changes       2017-10-09 19:48:13.011595231 +0200
+++ /work/SRC/openSUSE:Factory/.perl-DBD-CSV.new/perl-DBD-CSV.changes  2018-07-28 12:43:16.972758089 +0200
@@ -1,0 +2,17 @@
+Wed Jul 25 09:08:20 UTC 2018 - co...@suse.com
+
+- updated to 0.53
+0.53   - 2018-05-20, H.Merijn Brand
+    * No folder scanning during automated tests
+    * Fix col_names set to empty [] incorrectly skipping first row
+      (issue#6) (Ronald Schmidt)
+    * Small doc fix (Ronald Schmidt)
+    * Tested on FreeBSD
+
+0.52   - 2018-04-05, H.Merijn Brand
+    * Dot-in-inc (cpantesters) - I missed two
+
+0.51   - 2018-03-24, H.Merijn Brand
+    * Dot-in-inc (cpantesters)
+
+-------------------------------------------------------------------
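
The col_names fix in 0.53 (issue#6) is easiest to see with a small sketch; the
directory, file and table names below are made up for illustration and are not
part of the package:

    use DBI;

    # data/info.csv is assumed to contain data rows only, no header line
    my $dbh = DBI->connect ("dbi:CSV:", undef, undef, {
        f_dir      => "data",
        csv_tables => {
            info => {
                f_file    => "info.csv",
                col_names => [],   # explicitly empty: no header to skip
                },
            },
        RaiseError => 1,
        }) or die $DBI::errstr;

    # with 0.53, every data row is returned; older versions silently
    # dropped the first row when col_names was set to an empty list
    my $rows = $dbh->selectall_arrayref ("select * from info");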

Old:
----
  DBD-CSV-0.49.tgz

New:
----
  DBD-CSV-0.53.tgz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ perl-DBD-CSV.spec ++++++
--- /var/tmp/diff_new_pack.nZVKF4/_old  2018-07-28 12:43:17.568759236 +0200
+++ /var/tmp/diff_new_pack.nZVKF4/_new  2018-07-28 12:43:17.572759243 +0200
@@ -1,7 +1,7 @@
 #
 # spec file for package perl-DBD-CSV
 #
-# Copyright (c) 2017 SUSE LINUX GmbH, Nuernberg, Germany.
+# Copyright (c) 2018 SUSE LINUX GmbH, Nuernberg, Germany.
 #
 # All modifications and additions to the file contributed by third parties
 # remain the property of their copyright owners, unless otherwise agreed
@@ -17,11 +17,11 @@
 
 
 Name:           perl-DBD-CSV
-Version:        0.49
+Version:        0.53
 Release:        0
 %define cpan_name DBD-CSV
 Summary:        DBI driver for CSV files
-License:        Artistic-1.0 or GPL-1.0+
+License:        Artistic-1.0 OR GPL-1.0-or-later
 Group:          Development/Libraries/Perl
 Url:            http://search.cpan.org/dist/DBD-CSV/
 Source0:        https://cpan.metacpan.org/authors/id/H/HM/HMBRAND/%{cpan_name}-%{version}.tgz
@@ -41,10 +41,10 @@
 Requires:       perl(Test::More) >= 0.9
 Requires:       perl(Text::CSV_XS) >= 1.01
 Recommends:     perl(DBD::File) >= 0.44
-Recommends:     perl(DBI) >= 1.636
-Recommends:     perl(SQL::Statement) >= 1.410
-Recommends:     perl(Test::More) >= 1.302015
-Recommends:     perl(Text::CSV_XS) >= 1.23
+Recommends:     perl(DBI) >= 1.641
+Recommends:     perl(SQL::Statement) >= 1.412
+Recommends:     perl(Test::More) >= 1.302136
+Recommends:     perl(Text::CSV_XS) >= 1.35
 %{perl_requires}
 
 %description

++++++ DBD-CSV-0.49.tgz -> DBD-CSV-0.53.tgz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.49/ChangeLog new/DBD-CSV-0.53/ChangeLog
--- old/DBD-CSV-0.49/ChangeLog  2016-05-12 11:27:47.000000000 +0200
+++ new/DBD-CSV-0.53/ChangeLog  2018-05-20 17:45:35.000000000 +0200
@@ -1,3 +1,26 @@
+0.53   - 2018-05-20, H.Merijn Brand
+    * No folder scanning during automated tests
+    * Fix col_names set to empty [] incorrectly skipping first row
+      (issue#6) (Ronald Schmidt)
+    * Small doc fix (Ronald Schmidt)
+    * Tested on FreeBSD
+
+0.52   - 2018-04-05, H.Merijn Brand
+    * Dot-in-inc (cpantesters) - I missed two
+
+0.51   - 2018-03-24, H.Merijn Brand
+    * Dot-in-inc (cpantesters)
+
+0.50   - 2018-03-21, H.Merijn Brand
+    * Explain more about header folding
+    * It's 2017
+    * BOM handling
+    * Some documentation enhancements
+    * Ignore DBI_DSN if it is not CSV
+    * It's 2018
+    * Test with perl-5.26, DBI-1.641, SQL::Statement-1.412, and
+      Text::CSV_XS-1.35
+
 0.49   - 2016-05-12, H.Merijn Brand
     * Simplified test-table-name generation
     * Prefer quote_empty over quote_always for size (Text::CSV_XS => 1.18)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.49/MANIFEST new/DBD-CSV-0.53/MANIFEST
--- old/DBD-CSV-0.49/MANIFEST   2016-05-12 11:58:56.000000000 +0200
+++ new/DBD-CSV-0.53/MANIFEST   2018-05-20 17:50:37.000000000 +0200
@@ -1,6 +1,5 @@
 ChangeLog
 MANIFEST
-MANIFEST.SKIP
 Makefile.PL
 CONTRIBUTING.md
 README
@@ -8,9 +7,6 @@
 lib/DBD/CSV.pm
 lib/DBD/CSV/TypeInfo.pm
 lib/DBD/CSV/GetInfo.pm
-t/00_meta.t
-t/00_pod_cov.t
-t/00_pod.t
 t/10_base.t
 t/11_dsnlist.t
 t/20_createdrop.t
@@ -33,6 +29,7 @@
 t/72_csv-schema.t
 t/73_csv-case.t
 t/80_rt.t
+t/82_free_unref_scalar.t
 t/85_error.t
 t/lib.pl
 t/tmp.csv
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.49/MANIFEST.SKIP 
new/DBD-CSV-0.53/MANIFEST.SKIP
--- old/DBD-CSV-0.49/MANIFEST.SKIP      2015-05-15 15:49:33.000000000 +0200
+++ new/DBD-CSV-0.53/MANIFEST.SKIP      1970-01-01 01:00:00.000000000 +0100
@@ -1,27 +0,0 @@
-\.aspell\.local.pws
-\.dbi-git
-\bCVS\b
-~$
-\.tgz$
-\.tar\.gz$
-\.git
-\.travis.yml
-blib/
-cover_db/
-lib/DBI
-genMETA.pl
-Makefile
-MANIFEST.SKIP
-pm_to_blib
-sandbox/
-tmp/
-.releaserc
-^xx
-META.yml
-valgrind.log
-tests.skip
-xt/
-t/basic/
-t/DBI/
-t/DBD/
-t/SQL/
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.49/META.json new/DBD-CSV-0.53/META.json
--- old/DBD-CSV-0.49/META.json  2016-05-12 11:58:56.000000000 +0200
+++ new/DBD-CSV-0.53/META.json  2018-05-20 17:50:37.000000000 +0200
@@ -1,78 +1,78 @@
 {
-   "meta-spec" : {
-      "version" : "2",
-      "url" : "http://search.cpan.org/perldoc?CPAN::Meta::Spec";
-   },
-   "generated_by" : "Author",
-   "license" : [
-      "perl_5"
-   ],
-   "author" : [
-      "Jochen Wiedmann",
-      "Jeff Zucker",
-      "H.Merijn Brand <h.m.br...@xs4all.nl>",
-      "Jens Rehsack <rehs...@cpan.org>"
-   ],
-   "dynamic_config" : 1,
    "release_status" : "stable",
-   "resources" : {
-      "repository" : {
-         "web" : "https://github.com/perl5-dbi/DBD-CSV.git";,
-         "url" : "https://github.com/perl5-dbi/DBD-CSV.git";,
-         "type" : "git"
+   "provides" : {
+      "DBD::CSV" : {
+         "version" : "0.53",
+         "file" : "lib/DBD/CSV.pm"
+         }
       },
-      "license" : [
-         "http://dev.perl.org/licenses/";
-      ]
-   },
-   "x_installdirs" : "site",
-   "version" : "0.49",
    "name" : "DBD-CSV",
    "prereqs" : {
+      "test" : {
+         "requires" : {
+            "Encode" : "0",
+            "Cwd" : "0",
+            "Test::Harness" : "0",
+            "charnames" : "0",
+            "Test::More" : "0.90"
+            },
+         "recommends" : {
+            "Test::More" : "1.302136"
+            }
+         },
       "configure" : {
          "requires" : {
             "DBI" : "1.628",
             "ExtUtils::MakeMaker" : "0"
-         }
-      },
-      "test" : {
-         "recommends" : {
-            "Test::More" : "1.302015"
+            }
          },
-         "requires" : {
-            "Test::Harness" : "0",
-            "Cwd" : "0",
-            "Test::More" : "0.90",
-            "Encode" : "0",
-            "charnames" : "0"
-         }
-      },
-      "build" : {
-         "requires" : {
-            "Config" : "0"
-         }
-      },
       "runtime" : {
+         "recommends" : {
+            "DBI" : "1.641",
+            "DBD::File" : "0.44",
+            "Text::CSV_XS" : "1.35",
+            "SQL::Statement" : "1.412"
+            },
          "requires" : {
-            "SQL::Statement" : "1.405",
             "perl" : "5.008001",
-            "DBD::File" : "0.42",
             "DBI" : "1.628",
+            "SQL::Statement" : "1.405",
+            "DBD::File" : "0.42",
             "Text::CSV_XS" : "1.01"
+            }
          },
-         "recommends" : {
-            "SQL::Statement" : "1.410",
-            "Text::CSV_XS" : "1.23",
-            "DBI" : "1.636",
-            "DBD::File" : "0.44"
+      "build" : {
+         "requires" : {
+            "Config" : "0"
+            }
          }
-      }
-   },
-   "provides" : {
-      "DBD::CSV" : {
-         "version" : "0.49",
-         "file" : "lib/DBD/CSV.pm"
-      }
-   },
-   "abstract" : "DBI driver for CSV files"
-}
+      },
+   "version" : "0.53",
+   "meta-spec" : {
+      "version" : 2,
+      "url" : "http://search.cpan.org/perldoc?CPAN::Meta::Spec";
+      },
+   "generated_by" : "Author",
+   "resources" : {
+      "repository" : {
+         "web" : "https://github.com/perl5-dbi/DBD-CSV.git";,
+         "url" : "https://github.com/perl5-dbi/DBD-CSV.git";,
+         "type" : "git"
+         },
+      "license" : [
+         "http://dev.perl.org/licenses/";
+         ]
+      },
+   "author" : [
+      "Jochen Wiedmann",
+      "Jeff Zucker",
+      "H.Merijn Brand <h.m.br...@xs4all.nl>",
+      "Jens Rehsack <rehs...@cpan.org>"
+      ],
+   "dynamic_config" : 1,
+   "x_installdirs" : "site",
+   "abstract" : "DBI driver for CSV files",
+   "license" : [
+      "perl_5"
+      ]
+   }
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.49/META.yml new/DBD-CSV-0.53/META.yml
--- old/DBD-CSV-0.49/META.yml   2016-05-12 11:58:56.000000000 +0200
+++ new/DBD-CSV-0.53/META.yml   2018-05-20 17:50:37.000000000 +0200
@@ -11,7 +11,7 @@
   DBI: '1.628'
   ExtUtils::MakeMaker: 0
 dynamic_config: 1
-generated_by: Author, CPAN::Meta::Converter version 2.150005
+generated_by: Author, CPAN::Meta::Converter version 2.150010
 license: perl
 meta-spec: 
   url: http://module-build.sourceforge.net/META-spec-v1.4.html
@@ -20,13 +20,13 @@
 provides: 
   DBD::CSV: 
     file: lib/DBD/CSV.pm
-    version: '0.49'
+    version: '0.53'
 recommends: 
   DBD::File: '0.44'
-  DBI: '1.636'
-  SQL::Statement: '1.410'
-  Test::More: '1.302015'
-  Text::CSV_XS: '1.23'
+  DBI: '1.641'
+  SQL::Statement: '1.412'
+  Test::More: '1.302136'
+  Text::CSV_XS: '1.35'
 requires: 
   Cwd: 0
   DBD::File: '0.42'
@@ -41,5 +41,5 @@
 resources: 
   license: http://dev.perl.org/licenses/
   repository: https://github.com/perl5-dbi/DBD-CSV.git
-version: '0.49'
+version: '0.53'
 x_installdirs: site
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.49/Makefile.PL new/DBD-CSV-0.53/Makefile.PL
--- old/DBD-CSV-0.49/Makefile.PL        2016-01-02 15:43:00.000000000 +0100
+++ new/DBD-CSV-0.53/Makefile.PL        2018-04-05 08:37:58.000000000 +0200
@@ -1,6 +1,6 @@
 # -*- perl -*-
 
-# Copyright (c) 2009-2016 H.Merijn Brand
+# Copyright (c) 2009-2018 H.Merijn Brand
 
 require 5.008001;
 
@@ -114,12 +114,26 @@
 
 my $rv = WriteMakefile (%wm);
 
+# perlcriticrc uses Config::Tiny, which does not support nesting
+if (-f ".perlcriticrc" && -s "$ENV{HOME}/.perlcriticrc") {
+    open my $fh, ">", ".perlcriticrc";
+    print $fh do {
+       local (@ARGV, $/) = ("$ENV{HOME}/.perlcriticrc"); <> };
+    print $fh join "\n" => "",
+       "[-Modules::ProhibitMultiplePackages]",
+       "[-Subroutines::ProhibitBuiltinHomonyms]", # connect
+       "[-ValuesAndExpressions::RestrictLongStrings]", # error message
+       "[-Variables::ProhibitPackageVars]", # very deliberate for compatability
+       "[-Variables::ProhibitReusedNames]", # @ISA
+       "";
+    close $fh;
+    }
+
 1;
 
 package MY;
 
-sub postamble
-{
+sub postamble {
     my $min_vsn = ($] >= 5.010 && -d "xt" && ($ENV{AUTOMATED_TESTING} || 0) != 1)
        ? join "\n" =>
            'test ::',
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.49/README new/DBD-CSV-0.53/README
--- old/DBD-CSV-0.49/README     2016-01-02 15:43:04.000000000 +0100
+++ new/DBD-CSV-0.53/README     2018-01-01 14:15:34.000000000 +0100
@@ -9,7 +9,7 @@
     values).
 
 Copying
-    Copyright (C) 2009-2016 by H.Merijn Brand
+    Copyright (C) 2009-2018 by H.Merijn Brand
     Copyright (C) 2004-2009 by Jeff Zucker
     Copyright (C) 1998-2004 by Jochen Wiedmann
  
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.49/lib/Bundle/DBD/CSV.pm 
new/DBD-CSV-0.53/lib/Bundle/DBD/CSV.pm
--- old/DBD-CSV-0.49/lib/Bundle/DBD/CSV.pm      2016-05-10 16:32:42.000000000 +0200
+++ new/DBD-CSV-0.53/lib/Bundle/DBD/CSV.pm      2018-04-05 08:45:33.000000000 +0200
@@ -5,7 +5,7 @@
 use strict;
 use warnings;
 
-our $VERSION = "1.13";
+our $VERSION = "1.14";
 
 1;
 
@@ -21,15 +21,15 @@
 
 =head1 CONTENTS
 
-DBI 1.636
+DBI 1.641
 
-Text::CSV_XS 1.23
+Text::CSV_XS 1.35
 
-SQL::Statement 1.410
+SQL::Statement 1.412
 
 DBD::File 0.44
 
-DBD::CSV 0.49
+DBD::CSV 0.53
 
 =head1 DESCRIPTION
 
@@ -44,7 +44,7 @@
 
 =head1 COPYRIGHT AND LICENSE
 
-Copyright (C) 2009-2016 by H.Merijn Brand
+Copyright (C) 2009-2018 by H.Merijn Brand
 Copyright (C) 2004-2009 by Jeff Zucker
 Copyright (C) 1998-2004 by Jochen Wiedmann
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.49/lib/DBD/CSV/GetInfo.pm 
new/DBD-CSV-0.53/lib/DBD/CSV/GetInfo.pm
--- old/DBD-CSV-0.49/lib/DBD/CSV/GetInfo.pm     2013-07-29 17:41:21.000000000 +0200
+++ new/DBD-CSV-0.53/lib/DBD/CSV/GetInfo.pm     2016-08-26 12:55:41.000000000 +0200
@@ -16,14 +16,12 @@
 my $sql_ver_fmt = "%02d.%02d.0000";    # ODBC version string: ##.##.#####
 my $sql_driver_ver = sprintf $sql_ver_fmt, split /\./ => $DBD::CSV::VERSION;
 
-sub sql_data_source_name
-{
+sub sql_data_source_name {
     my $dbh = shift;
     return "dbi:$sql_driver:" . $dbh->{Name};
     } # sql_data_source_name
 
-sub sql_user_name
-{
+sub sql_user_name {
     my $dbh = shift;
     # CURRENT_USER is a non-standard attribute, probably undef
     # Username is a standard DBI attribute
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.49/lib/DBD/CSV.pm 
new/DBD-CSV-0.53/lib/DBD/CSV.pm
--- old/DBD-CSV-0.49/lib/DBD/CSV.pm     2016-03-18 14:55:24.000000000 +0100
+++ new/DBD-CSV-0.53/lib/DBD/CSV.pm     2018-04-30 12:13:21.000000000 +0200
@@ -23,7 +23,7 @@
 
 @ISA =   qw( DBD::File );
 
-$VERSION     = "0.49";
+$VERSION     = "0.53";
 $ATTRIBUTION = "DBD::CSV $DBD::CSV::VERSION by H.Merijn Brand";
 
 $err      = 0;         # holds error code   for DBI::err
@@ -31,8 +31,7 @@
 $sqlstate = "";         # holds error state  for DBI::state
 $drh      = undef;     # holds driver handle once initialized
 
-sub CLONE              # empty method: prevent warnings when threads are cloned
-{
+sub CLONE {            # empty method: prevent warnings when threads are cloned
     } # CLONE
 
 # --- DRIVER -------------------------------------------------------------------
@@ -67,8 +66,7 @@
 our $imp_data_size     = 0;
 our $data_sources_attr = undef;
 
-sub connect
-{
+sub connect {
     my ($drh, $dbname, $user, $auth, $attr) = @_;
     my $dbh = $drh->DBD::File::dr::connect ($dbname, $user, $auth, $attr);
     $dbh and $dbh->{Active} = 1;
@@ -84,8 +82,7 @@
 our $imp_data_size = 0;
 our @ISA = qw( DBD::File::db );
 
-sub set_versions
-{
+sub set_versions {
     my $this = shift;
     $this->{csv_version} = $DBD::CSV::VERSION;
     return $this->SUPER::set_versions ();
@@ -93,8 +90,7 @@
 
 my %csv_xs_attr;
 
-sub init_valid_attributes
-{
+sub init_valid_attributes {
     my $dbh = shift;
 
     # Straight from Text::CSV_XS.pm
@@ -131,7 +127,7 @@
 
        class tables in csv_in out csv_out skip_first_row
 
-       null sep quote escape
+       null sep quote escape bom
        )};
 
     $dbh->{csv_readonly_attrs} = { };
@@ -141,8 +137,7 @@
     return $dbh->SUPER::init_valid_attributes ();
     } # init_valid_attributes
 
-sub get_csv_versions
-{
+sub get_csv_versions {
     my ($dbh, $table) = @_;
     $table ||= "";
     my $class = $dbh->{ImplementorClass};
@@ -159,8 +154,7 @@
     return sprintf "%s using %s", $dbh->{csv_version}, $dtype;
     } # get_csv_versions
 
-sub get_info
-{
+sub get_info {
     my ($dbh, $info_type) = @_;
     require  DBD::CSV::GetInfo;
     my $v = $DBD::CSV::GetInfo::info{int ($info_type)};
@@ -168,9 +162,8 @@
     return $v;
     } # get_info
 
-sub type_info_all
-{
-    my $dbh = shift;
+sub type_info_all {
+#   my $dbh = shift;
     require   DBD::CSV::TypeInfo;
     return [@$DBD::CSV::TypeInfo::type_info_all];
     } # type_info_all
@@ -182,34 +175,40 @@
 use strict;
 
 our $imp_data_size = 0;
-our @ISA = qw(DBD::File::st);
+our @ISA = qw( DBD::File::st );
 
 package DBD::CSV::Statement;
 
 use strict;
 use Carp;
 
-our @ISA = qw(DBD::File::Statement);
+our @ISA = qw( DBD::File::Statement );
 
 package DBD::CSV::Table;
 
 use strict;
 use Carp;
 
-our @ISA = qw(DBD::File::Table);
+our @ISA = qw( DBD::File::Table );
+
+#sub DESTROY {
+#    my $self = shift or return;
+#
+#    $self->{meta} and delete $self->{meta}{csv_in};
+#    } # DESTROY
 
-sub bootstrap_table_meta
-{
+sub bootstrap_table_meta {
     my ($self, $dbh, $meta, $table) = @_;
     $meta->{csv_class} ||= $dbh->{csv_class} || "Text::CSV_XS";
     $meta->{csv_eol}   ||= $dbh->{csv_eol}   || "\r\n";
     exists $meta->{csv_skip_first_row} or
        $meta->{csv_skip_first_row} = $dbh->{csv_skip_first_row};
+    exists $meta->{csv_bom} or
+       $meta->{csv_bom} = exists $dbh->{bom} ? $dbh->{bom} : $dbh->{csv_bom};
     $self->SUPER::bootstrap_table_meta ($dbh, $meta, $table);
     } # bootstrap_table_meta
 
-sub init_table_meta
-{
+sub init_table_meta {
     my ($self, $dbh, $meta, $table) = @_;
 
     $self->SUPER::init_table_meta ($dbh, $table, $meta);
@@ -252,8 +251,7 @@
 
 __PACKAGE__->register_compat_map (\%compat_map);
 
-sub table_meta_attr_changed
-{
+sub table_meta_attr_changed {
     my ($class, $meta, $attr, $value) = @_;
 
     (my $csv_attr = $attr) =~ s/^csv_//;
@@ -296,6 +294,12 @@
                    : exists $meta->{col_names} ? 0 : 1;
            defined $meta->{skip_rows} or
                $meta->{skip_rows} = $skipRows;
+           if ($meta->{csv_bom}) {
+               my @hdr = $attrs->{csv_csv_in}->header ($meta->{fh}) or
+                   croak "Failed using the header row: ".$attrs->{csv_csv_in}->error_diag;
+               $meta->{col_names} ||= \@hdr;
+               $skipRows and $skipRows = 0;
+               }
            if ($skipRows--) {
                $array = $attrs->{csv_csv_in}->getline ($meta->{fh}) or
                    croak "Missing first row due to 
".$attrs->{csv_csv_in}->error_diag;
@@ -331,8 +335,7 @@
     *open_file = \&open_data;
 use warnings;
 
-sub _csv_diag
-{
+sub _csv_diag {
     my @diag = $_[0]->error_diag;
     for (2, 3) {
        defined $diag[$_] or $diag[$_] = "?";
@@ -340,20 +343,18 @@
     return @diag;
     } # _csv_diag
 
-sub fetch_row
-{
+sub fetch_row {
     my ($self, $data) = @_;
 
-    exists $self->{cached_row} and
-       return $self->{row} = delete $self->{cached_row};
-
     my $tbl = $self->{meta};
 
+    exists $tbl->{cached_row} and
+       return $self->{row} = delete $tbl->{cached_row};
+
     my $csv = $self->{csv_csv_in} or
        return do { $data->set_err ($DBI::stderr, "Fetch from undefined 
handle"); undef };
 
-    my $fields;
-    eval { $fields = $csv->getline ($tbl->{fh}) };
+    my $fields = eval { $csv->getline ($tbl->{fh}) };
     unless ($fields) {
        $csv->eof and return;
 
@@ -368,8 +369,7 @@
     $self->{row} = (@$fields ? $fields : undef);
     } # fetch_row
 
-sub push_row
-{
+sub push_row {
     my ($self, $data, $fields) = @_;
     my $tbl = $self->{meta};
     my $csv = $self->{csv_csv_out};
@@ -378,7 +378,8 @@
     unless ($csv->print ($fh, $fields)) {
        my @diag = _csv_diag ($csv);
        my $file = $tbl->{f_fqfn};
-       return do { $data->set_err ($DBI::stderr, "Error $diag[0] while writing file $file: $diag[1] \@ line $diag[3] pos $diag[2]"); undef };
+       return do { $data->set_err ($DBI::stderr,
+           "Error $diag[0] while writing file $file: $diag[1] \@ line $diag[3] pos $diag[2]"); undef };
        }
     1;
     } # push_row
@@ -488,7 +489,7 @@
 Installing this module (and the prerequisites from above) is quite simple.
 The simplest way is to install the bundle:
 
-    $ cpan Bundle::CSV
+    $ cpan Bundle::DBD::CSV
 
 Alternatively, you can name them all
 
@@ -559,27 +560,36 @@
 
     # specify most possible flags via driver flags
     $dbh = DBI->connect ("dbi:CSV:", undef, undef, {
-       f_schema         => undef,
-       f_dir            => "data",
-       f_dir_search     => [],
-       f_ext            => ".csv/r",
-       f_lock           => 2,
-       f_encoding       => "utf8",
-
-       csv_eol          => "\r\n",
-       csv_sep_char     => ",",
-       csv_quote_char   => '"',
-       csv_escape_char  => '"',
-       csv_class        => "Text::CSV_XS",
-       csv_null         => 1,
-       csv_tables       => {
-           info => { f_file => "info.csv" }
-           },
-
-       RaiseError       => 1,
-       PrintError       => 1,
-       FetchHashKeyName => "NAME_lc",
-       }) or die $DBI::errstr;
+        f_schema         => undef,
+        f_dir            => "data",
+        f_dir_search     => [],
+        f_ext            => ".csv/r",
+        f_lock           => 2,
+        f_encoding       => "utf8",
+
+        csv_eol          => "\r\n",
+        csv_sep_char     => ",",
+        csv_quote_char   => '"',
+        csv_escape_char  => '"',
+        csv_class        => "Text::CSV_XS",
+        csv_null         => 1,
+        csv_bom          => 0,
+        csv_tables       => {
+            syspwd => {
+                sep_char    => ":",
+                quote_char  => undef,
+                escape_char => undef,
+                file        => "/etc/passwd",
+                col_names   => [qw( login password
+                                    uid gid realname
+                                    directory shell )],
+               },
+            },
+
+        RaiseError       => 1,
+        PrintError       => 1,
+        FetchHashKeyName => "NAME_lc",
+        }) or die $DBI::errstr;
 
 but you may set these attributes in the DSN as well, separated by semicolons.
 Pay attention to the semi-colon for C<csv_sep_char> (as seen in many CSV
@@ -597,6 +607,13 @@
 whereas specifying entries in the attribute hash is easier to read and to
 maintain.
 
+The default value for C<csv_binary> is C<1> (True).
+
+The default value for C<csv_auto_diag> is <1>. Note that this might cause
+trouble on perl versions older than 5.8.9, so up to and including perl
+version 5.8.8 it might be required to use C<;csv_auto_diag=0> inside the
+C<DSN> or C<csv_auto_diag => 0> inside the attributes.
+
 =head2 Creating and dropping tables
 
 You can create and drop tables with commands like the following:
@@ -969,6 +986,16 @@
 
   $dbh->{csv_null} = 1;
 
+=item csv_bom
+X<csv_bom>
+
+With this option set, the CSV parser will try to detect BOM (Byte Order Mark)
+in the header line. This requires L<Text::CSV_XS> version 1.22 or higher.
+
+  $dbh = DBI->connect ("dbi:CSV:", "", "", { csv_bom => 1 });
+
+  $dbh->{csv_bom} = 1;
+
 =item csv_tables
 X<csv_tables>
 
@@ -976,6 +1003,22 @@
 table it contains an element with the table name as key and another
 hash ref with the following attributes:
 
+=over 4
+
+=item o
+
+All valid attributes to the CSV parsing module. Any of the can optionally
+be prefixed with C<csv_>.
+
+=item o
+
+All attributes valid to DBD::File
+
+=back
+
+If you pass it C<f_file> or its alias C<file>, C<f_ext> has no effect, but
+C<f_dir> and C<f_encoding> still have.
+
 =item csv_*
 X<csv_*>
 
@@ -1066,12 +1109,32 @@
 that any part in the toolchain will work if field names have those characters,
 and the chances are high that the SQL statements will fail.
 
+Currently, the sanitizing of headers is as simple as
+
+  s/\W/_/g;
+
+Note that headers (column names) might be folded in other parts of the code
+stack, specifically SQL::Statement, whose docs mention:
+
+ Wildcards are expanded to lower cased identifiers. This might
+ confuse some people, but it was easier to implement.
+
+That means that in
+
+ my $sth = $dbh->prepare ("select * from foo");
+ $sth->execute;
+ while (my $row = $sth->fetchrow_hashref) {
+     say for keys %$row;
+     }
+
+all keys will show as all lower case, regardless of the original header.
+
 =back
 
 It's strongly recommended to check the attributes supported by
 L<DBD::File/Metadata>.
 
-Example: Suppose you want to use /etc/passwd as a CSV file. :-)
+Example: Suppose you want to use F</etc/passwd> as a CSV file. :-)
 There simplest way is:
 
     use DBI;
@@ -1230,7 +1293,7 @@
 
 =head1 COPYRIGHT AND LICENSE
 
-Copyright (C) 2009-2016 by H.Merijn Brand
+Copyright (C) 2009-2018 by H.Merijn Brand
 Copyright (C) 2004-2009 by Jeff Zucker
 Copyright (C) 1998-2004 by Jochen Wiedmann
 
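
Tying the csv_bom additions above together: a minimal sketch, assuming a file
utf8.csv that starts with a UTF-8 BOM followed by a header line (the file name
is invented here); BOM handling needs Text::CSV_XS 1.22 or newer:

    use DBI;

    my $dbh = DBI->connect ("dbi:CSV:", undef, undef, {
        f_dir      => ".",
        f_ext      => ".csv/r",
        csv_bom    => 1,        # let Text::CSV_XS detect and strip the BOM
        RaiseError => 1,
        }) or die $DBI::errstr;

    # column names are taken from the (BOM-stripped) header line
    my $sth = $dbh->prepare ("select * from utf8");
    $sth->execute;
    while (my $row = $sth->fetchrow_arrayref) {
        print join (", ", @$row), "\n";
        }
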
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.49/t/00_meta.t new/DBD-CSV-0.53/t/00_meta.t
--- old/DBD-CSV-0.49/t/00_meta.t        2010-08-06 13:35:00.000000000 +0200
+++ new/DBD-CSV-0.53/t/00_meta.t        1970-01-01 01:00:00.000000000 +0100
@@ -1,33 +0,0 @@
-#!/usr/bin/perl
-
-# Test that our META.yml file matches the specification
-
-use strict;
-use warnings;
-
-my @MODULES = ( "Test::CPAN::Meta 0.12" );
-
-my $has_meta = -f "META.yml";
-
-# Don't run tests during end-user installs
-use Test::More;
-$ENV{AUTOMATED_TESTING} || $ENV{RELEASE_TESTING} || !$has_meta or
-    plan skip_all => "Author tests not required for installation";
-
-# Load the testing modules
-foreach my $MODULE (@MODULES) {
-    eval "use $MODULE";
-    $@ or next;
-    $ENV{RELEASE_TESTING}
-       ? die "Failed to load required release-testing module $MODULE"
-       : plan skip_all => "$MODULE not available for testing";
-    }
-
-!$has_meta && -x "sandbox/genMETA.pl" and
-    qx{ perl sandbox/genMETA.pl -v > META.yml };
-
-meta_yaml_ok ();
-
-$has_meta or unlink "META.yml";
-
-1;
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.49/t/00_pod.t new/DBD-CSV-0.53/t/00_pod.t
--- old/DBD-CSV-0.49/t/00_pod.t 2010-08-06 13:35:13.000000000 +0200
+++ new/DBD-CSV-0.53/t/00_pod.t 1970-01-01 01:00:00.000000000 +0100
@@ -1,12 +0,0 @@
-#!/usr/bin/perl
-
-# Test that the documentation syntax is correct
-
-use strict;
-use warnings;
-
-use Test::More;
-
-eval "use Test::Pod 1.00";
-plan skip_all => "Test::Pod 1.00 required for testing POD" if $@;
-all_pod_files_ok ();
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.49/t/00_pod_cov.t 
new/DBD-CSV-0.53/t/00_pod_cov.t
--- old/DBD-CSV-0.49/t/00_pod_cov.t     2010-08-06 13:35:05.000000000 +0200
+++ new/DBD-CSV-0.53/t/00_pod_cov.t     1970-01-01 01:00:00.000000000 +0100
@@ -1,12 +0,0 @@
-#!/usr/bin/perl
-
-# Test that all methods are documented
-
-use strict;
-use warnings;
-
-use Test::More;
-
-eval "use Test::Pod::Coverage tests => 1";
-plan skip_all => "Test::Pod::Coverage required for testing POD Coverage" if $@;
-pod_coverage_ok ("DBD::CSV", "DBD::CSV is covered");
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.49/t/10_base.t new/DBD-CSV-0.53/t/10_base.t
--- old/DBD-CSV-0.49/t/10_base.t        2013-05-30 14:31:11.000000000 +0200
+++ new/DBD-CSV-0.53/t/10_base.t        2018-03-24 11:19:42.000000000 +0100
@@ -13,7 +13,7 @@
 
 ok ($SQL::Statement::VERSION, "SQL::Statement::Version 
$SQL::Statement::VERSION");
 
-do "t/lib.pl";
+do "./t/lib.pl";
 
 my $nano = $ENV{DBI_SQL_NANO};
 defined $nano or $nano = "not set";
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.49/t/11_dsnlist.t 
new/DBD-CSV-0.53/t/11_dsnlist.t
--- old/DBD-CSV-0.49/t/11_dsnlist.t     2010-08-06 13:41:58.000000000 +0200
+++ new/DBD-CSV-0.53/t/11_dsnlist.t     2018-03-24 11:19:42.000000000 +0100
@@ -8,7 +8,7 @@
 
 # Include lib.pl
 BEGIN { use_ok ("DBI") }
-do "t/lib.pl";
+do "./t/lib.pl";
 
 ok (1,                                         "Driver is CSV\n");
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.49/t/20_createdrop.t 
new/DBD-CSV-0.53/t/20_createdrop.t
--- old/DBD-CSV-0.49/t/20_createdrop.t  2013-07-25 18:22:18.000000000 +0200
+++ new/DBD-CSV-0.53/t/20_createdrop.t  2018-03-24 11:19:42.000000000 +0100
@@ -7,7 +7,7 @@
 use Test::More;
 
 BEGIN { use_ok ("DBI") }
-do "t/lib.pl";
+do "./t/lib.pl";
 
 my @tbl_def = (
     [ "id",   "INTEGER",  4, 0 ],
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.49/t/30_insertfetch.t 
new/DBD-CSV-0.53/t/30_insertfetch.t
--- old/DBD-CSV-0.49/t/30_insertfetch.t 2013-05-30 11:26:35.000000000 +0200
+++ new/DBD-CSV-0.53/t/30_insertfetch.t 2018-03-24 11:19:42.000000000 +0100
@@ -7,7 +7,7 @@
 use Test::More;
 
 BEGIN { use_ok ("DBI") }
-do "t/lib.pl";
+do "./t/lib.pl";
 
 my @tbl_def = (
     [ "id",   "INTEGER",  4, 0 ],
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.49/t/31_delete.t 
new/DBD-CSV-0.53/t/31_delete.t
--- old/DBD-CSV-0.49/t/31_delete.t      2011-08-09 09:35:41.000000000 +0200
+++ new/DBD-CSV-0.53/t/31_delete.t      2018-03-24 11:19:42.000000000 +0100
@@ -7,7 +7,7 @@
 use Test::More;
 
 BEGIN { use_ok ("DBI"); }
-do "t/lib.pl";
+do "./t/lib.pl";
 
 my @tbl_def = (
     [ "id",   "INTEGER",  4, &COL_NULLABLE ],
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.49/t/32_update.t 
new/DBD-CSV-0.53/t/32_update.t
--- old/DBD-CSV-0.49/t/32_update.t      2013-05-30 11:38:02.000000000 +0200
+++ new/DBD-CSV-0.53/t/32_update.t      2018-03-24 11:19:42.000000000 +0100
@@ -7,7 +7,7 @@
 use Test::More;
 
 BEGIN { use_ok ("DBI"); }
-do "t/lib.pl";
+do "./t/lib.pl";
 
 my @tbl_def = (
     [ "id",   "INTEGER",  4, &COL_NULLABLE ],
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.49/t/40_numrows.t 
new/DBD-CSV-0.53/t/40_numrows.t
--- old/DBD-CSV-0.49/t/40_numrows.t     2010-08-06 13:42:08.000000000 +0200
+++ new/DBD-CSV-0.53/t/40_numrows.t     2018-04-05 08:37:25.000000000 +0200
@@ -7,10 +7,9 @@
 use Test::More;
 
 BEGIN { use_ok ("DBI") }
-require "t/lib.pl";
+require "./t/lib.pl";
 
-sub TrueRows
-{
+sub TrueRows {
     my $sth = shift;
     my $count = 0;
     $count++ while $sth->fetch;
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.49/t/41_nulls.t 
new/DBD-CSV-0.53/t/41_nulls.t
--- old/DBD-CSV-0.49/t/41_nulls.t       2013-07-29 16:54:15.000000000 +0200
+++ new/DBD-CSV-0.53/t/41_nulls.t       2018-03-24 11:19:42.000000000 +0100
@@ -7,7 +7,7 @@
 use Test::More;
 
 BEGIN { use_ok ("DBI") }
-do "t/lib.pl";
+do "./t/lib.pl";
 
 my $nano = $ENV{DBI_SQL_NANO};
 my @tbl_def = (
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.49/t/42_bindparam.t 
new/DBD-CSV-0.53/t/42_bindparam.t
--- old/DBD-CSV-0.49/t/42_bindparam.t   2016-01-25 22:57:22.000000000 +0100
+++ new/DBD-CSV-0.53/t/42_bindparam.t   2018-03-24 11:19:42.000000000 +0100
@@ -14,7 +14,7 @@
     exit 0;
     }
 
-do "t/lib.pl";
+do "./t/lib.pl";
 
 my @tbl_def = (
     [ "id",   "INTEGER",  4, 0                 ],
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.49/t/43_blobs.t 
new/DBD-CSV-0.53/t/43_blobs.t
--- old/DBD-CSV-0.49/t/43_blobs.t       2013-06-11 18:34:43.000000000 +0200
+++ new/DBD-CSV-0.53/t/43_blobs.t       2018-03-24 11:19:42.000000000 +0100
@@ -7,7 +7,7 @@
 use Test::More;
 
 BEGIN { use_ok ("DBI") }
-do "t/lib.pl";
+do "./t/lib.pl";
 
 my $size = 128;
 my @tbl_def = (
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.49/t/44_listfields.t 
new/DBD-CSV-0.53/t/44_listfields.t
--- old/DBD-CSV-0.49/t/44_listfields.t  2014-11-16 12:44:27.000000000 +0100
+++ new/DBD-CSV-0.53/t/44_listfields.t  2018-03-24 11:19:42.000000000 +0100
@@ -7,7 +7,7 @@
 use Test::More;
 
 BEGIN { use_ok ("DBI") }
-do "t/lib.pl";
+do "./t/lib.pl";
 
 my $nano = $ENV{DBI_SQL_NANO};
 my @tbl_def = (
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.49/t/48_utf8.t new/DBD-CSV-0.53/t/48_utf8.t
--- old/DBD-CSV-0.49/t/48_utf8.t        2013-07-25 18:27:03.000000000 +0200
+++ new/DBD-CSV-0.53/t/48_utf8.t        2018-03-24 11:19:42.000000000 +0100
@@ -13,7 +13,7 @@
 use Test::More;
 
 BEGIN { use_ok ("DBI") }
-do "t/lib.pl";
+do "./t/lib.pl";
 
 ok (my $dbh = Connect ({ f_ext => ".csv/r", f_schema => undef }), "connect");
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.49/t/50_chopblanks.t 
new/DBD-CSV-0.53/t/50_chopblanks.t
--- old/DBD-CSV-0.49/t/50_chopblanks.t  2012-11-13 08:48:57.000000000 +0100
+++ new/DBD-CSV-0.53/t/50_chopblanks.t  2018-03-24 11:19:42.000000000 +0100
@@ -7,7 +7,7 @@
 use Test::More;
 
 BEGIN { use_ok ("DBI") }
-do "t/lib.pl";
+do "./t/lib.pl";
 
 my @tbl_def = (
     [ "id",   "INTEGER",  4, &COL_NULLABLE ],
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.49/t/51_commit.t 
new/DBD-CSV-0.53/t/51_commit.t
--- old/DBD-CSV-0.49/t/51_commit.t      2016-01-25 22:57:22.000000000 +0100
+++ new/DBD-CSV-0.53/t/51_commit.t      2018-04-05 08:37:27.000000000 +0200
@@ -7,7 +7,7 @@
 use Test::More;
 
 BEGIN { use_ok ("DBI") }
-do "t/lib.pl";
+do "./t/lib.pl";
 
 my $nano = $ENV{DBI_SQL_NANO};
 my @tbl_def = (
@@ -15,8 +15,7 @@
     [ "name", "CHAR",    64, 0 ],
     );
 
-sub RowCount
-{
+sub RowCount {
     my ($dbh, $tbl) = @_;
 
     if ($nano) {
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.49/t/55_dir_search.t 
new/DBD-CSV-0.53/t/55_dir_search.t
--- old/DBD-CSV-0.49/t/55_dir_search.t  2015-01-07 08:22:35.000000000 +0100
+++ new/DBD-CSV-0.53/t/55_dir_search.t  2018-04-10 08:03:17.000000000 +0200
@@ -5,8 +5,10 @@
 
 use Test::More;
 
-BEGIN { use_ok ("DBI") }
-require "t/lib.pl";
+$ENV{AUTOMATED_TESTING} and plan skip_all => "No folder scanning during automated tests";
+
+use_ok ("DBI");
+require "./t/lib.pl";
 
 my $tstdir = DbDir ();
 my @extdir = ("t", File::Spec->tmpdir ());
@@ -24,7 +26,7 @@
     RaiseError       => 1,
     PrintError       => 1,
     FetchHashKeyName => "NAME_lc",
-    }) or die "$DBI::errstr\n";
+    }) or die $DBI::errstr || $DBI::errstr || "", "\n";
 
 my @dsn = $dbh->data_sources;
 my %dir = map {
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.49/t/60_misc.t new/DBD-CSV-0.53/t/60_misc.t
--- old/DBD-CSV-0.49/t/60_misc.t        2013-06-19 07:46:43.000000000 +0200
+++ new/DBD-CSV-0.53/t/60_misc.t        2018-03-24 11:19:42.000000000 +0100
@@ -7,7 +7,7 @@
 use Test::More;
 
 BEGIN { use_ok ("DBI"); }
-do "t/lib.pl";
+do "./t/lib.pl";
 
 my @tbl_def = (
     [ "id",   "INTEGER",  4, &COL_NULLABLE ],
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.49/t/61_meta.t new/DBD-CSV-0.53/t/61_meta.t
--- old/DBD-CSV-0.49/t/61_meta.t        2014-11-15 16:06:27.000000000 +0100
+++ new/DBD-CSV-0.53/t/61_meta.t        2018-03-24 11:19:42.000000000 +0100
@@ -5,7 +5,7 @@
 
 use Test::More;
 use DBI qw(:sql_types);
-do "t/lib.pl";
+do "./t/lib.pl";
 
 my $cnt = join "" => <DATA>;
 my $tbl;
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.49/t/70_csv.t new/DBD-CSV-0.53/t/70_csv.t
--- old/DBD-CSV-0.49/t/70_csv.t 2013-07-25 18:49:58.000000000 +0200
+++ new/DBD-CSV-0.53/t/70_csv.t 2018-03-24 11:19:42.000000000 +0100
@@ -5,7 +5,7 @@
 use Test::More;
 
 BEGIN { use_ok ("DBI"); }
-do "t/lib.pl";
+do "./t/lib.pl";
 
 my @tbl_def = (
     [ "id",   "INTEGER",  4, &COL_KEY          ],
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.49/t/71_csv-ext.t 
new/DBD-CSV-0.53/t/71_csv-ext.t
--- old/DBD-CSV-0.49/t/71_csv-ext.t     2013-07-25 18:22:34.000000000 +0200
+++ new/DBD-CSV-0.53/t/71_csv-ext.t     2018-04-05 08:37:36.000000000 +0200
@@ -5,7 +5,7 @@
 use Test::More;
 
 BEGIN { use_ok ("DBI"); }
-do "t/lib.pl";
+do "./t/lib.pl";
 
 my @tbl_def = (
     [ "id",   "INTEGER",  4, 0 ],
@@ -18,8 +18,7 @@
 sub DbFile;
 
 my $usr = eval { getpwuid $< } || $ENV{USERNAME} || "";
-sub Tables
-{
+sub Tables {
     my @tbl = $dbh->tables ();
     if ($usr) {
        s/^['"]*$usr["']*\.//i for @tbl;
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.49/t/72_csv-schema.t 
new/DBD-CSV-0.53/t/72_csv-schema.t
--- old/DBD-CSV-0.49/t/72_csv-schema.t  2014-03-31 08:09:42.000000000 +0200
+++ new/DBD-CSV-0.53/t/72_csv-schema.t  2018-03-24 11:19:42.000000000 +0100
@@ -5,7 +5,7 @@
 use Test::More;
 
 BEGIN { use_ok ("DBI"); }
-do "t/lib.pl";
+do "./t/lib.pl";
 
 my @tbl_def = (
     [ "id",   "INTEGER",  4, 0 ],
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.49/t/73_csv-case.t 
new/DBD-CSV-0.53/t/73_csv-case.t
--- old/DBD-CSV-0.49/t/73_csv-case.t    2014-08-04 21:42:29.000000000 +0200
+++ new/DBD-CSV-0.53/t/73_csv-case.t    2018-03-24 11:19:42.000000000 +0100
@@ -5,7 +5,7 @@
 use Test::More;
 
 BEGIN { use_ok ("DBI"); }
-do "t/lib.pl";
+do "./t/lib.pl";
 
 sub DbFile;
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.49/t/80_rt.t new/DBD-CSV-0.53/t/80_rt.t
--- old/DBD-CSV-0.49/t/80_rt.t  2016-01-25 22:57:22.000000000 +0100
+++ new/DBD-CSV-0.53/t/80_rt.t  2018-04-30 12:11:36.000000000 +0200
@@ -12,7 +12,7 @@
     exit 0;
     }
 
-do "t/lib.pl";
+do "./t/lib.pl";
 
 my ($rt, %input, %desc);
 while (<DATA>) {
@@ -27,8 +27,7 @@
     push @{$input{$rt}}, $_;
     }
 
-sub rt_file
-{
+sub rt_file {
     return File::Spec->catfile (DbDir (), "rt$_[0]");
     } # rt_file
 
@@ -296,6 +295,8 @@
     eval {
        ok ($sth->execute, "execute");
        ok (!$@, "no error");
+       is (scalar @{$sth->fetchall_arrayref}, 2, # not part of 80078
+           "empty col_names treat skip_first_row as false");
        };
 
     ok ($dbh->do ("drop table $tbl"),          "drop");
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.49/t/82_free_unref_scalar.t 
new/DBD-CSV-0.53/t/82_free_unref_scalar.t
--- old/DBD-CSV-0.49/t/82_free_unref_scalar.t   1970-01-01 01:00:00.000000000 +0100
+++ new/DBD-CSV-0.53/t/82_free_unref_scalar.t   2018-05-20 11:16:26.000000000 +0200
@@ -0,0 +1,97 @@
+#!/usr/bin/perl
+
+use strict;
+use warnings;
+
+# perl5.27.2 -DD -Mblib t/02_free_unref_scalar.t > & alloc-free.log
+#            ^^^
+# -DD  Cleaning up
+
+#use Devel::Peek;
+#use Data::Peek;
+use Test::More;
+#use Test::NoWarnings;
+
+$] < 5.026 and plan skip_all => "This is a perl5 CORE issue fixed in perl-5.26";
+
+use_ok "DBI";
+require "./t/lib.pl";
+
+$SIG{__WARN__} = sub {
+    $_[0] =~ m/^Attempt to free unreferenced scalar: SV (0x[0-9a-f]+)(, \<\w+\> line \d+)?.* during global destruction\.$/ and
+       fail ("there was an attempt to free unreferenced scalar");
+    diag "@_";
+    };
+
+sub DBD::CSV::Table::DESTROY {
+    my $self = shift;
+
+    delete $self->{meta}{csv_in};
+    } # DBD::CSV::Table::DESTROY
+
+sub test_with_options {
+    my (%opts) = @_;
+    my $dbh = DBI->connect ("dbi:CSV:", undef, undef, {
+       f_schema         => undef,
+       f_dir            => 't',
+       f_dir_search     => [],
+       f_ext            => ".csv/r",
+       f_lock           => 2,
+       f_encoding       => "utf8",
+
+       %opts,
+
+       RaiseError       => 1,
+       PrintError       => 1,
+       FetchHashKeyName => "NAME_lc",
+       }) or die "$DBI::errstr\n" || $DBI::errstr;
+
+    my %tbl = map { $_ => 1 } $dbh->tables (undef, undef, undef, undef);
+
+    is ($tbl{$_}, 1, "Table $_ found") for qw( tmp );
+
+    my %data = (
+       tmp => {                # t/tmp.csv
+           1 => "ape",
+           2 => (grep (m/^csv_callbacks$/ => keys %opts) ? "new world monkey" : "monkey"),
+           3 => "gorilla",
+           },
+       );
+
+    foreach my $tbl (sort keys %data) {
+       my $sth = $dbh->prepare ("select * from $tbl");
+       $sth->execute;
+       while (my $row = $sth->fetch) {
+           is ($row->[1], $data{$tbl}{$row->[0]}, "$tbl ($row->[0], ...)");
+           }
+       $sth->finish ();
+       }
+
+    $dbh->disconnect;
+    }
+
+sub new_world_monkeys {
+    my ($csv, $data) = @_;
+
+    $data->[1] =~ s/^monkey$/new world monkey/;
+
+    return;
+    }
+
+my $callbacks = {
+    csv_callbacks => {
+       after_parse => \&new_world_monkeys,
+       },
+    };
+
+test_with_options (
+    csv_tables => { tmp => { f_file => "tmp.csv"} },
+    %$callbacks,
+    );
+
+test_with_options (
+    csv_auto_diag => 0,
+    %$callbacks,
+    ) for (1 .. 100);
+
+done_testing ();
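
The new test above passes a csv_callbacks attribute through to Text::CSV_XS;
outside the test harness the same idea might look like this sketch (directory,
extension and field index are illustrative assumptions):

    use DBI;

    my $dbh = DBI->connect ("dbi:CSV:", undef, undef, {
        f_dir         => "t",
        f_ext         => ".csv/r",
        csv_callbacks => {
            # Text::CSV_XS calls after_parse with the CSV object and an
            # array ref of the fields of each successfully parsed row
            after_parse => sub {
                my ($csv, $row) = @_;
                defined $row->[1] and $row->[1] =~ s/^monkey$/new world monkey/;
                return;
                },
            },
        RaiseError    => 1,
        }) or die $DBI::errstr;
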
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.49/t/85_error.t 
new/DBD-CSV-0.53/t/85_error.t
--- old/DBD-CSV-0.49/t/85_error.t       2016-01-25 22:57:22.000000000 +0100
+++ new/DBD-CSV-0.53/t/85_error.t       2018-03-24 11:19:42.000000000 +0100
@@ -5,7 +5,7 @@
 use Test::More;
 
 BEGIN { use_ok ("DBI") }
-do "t/lib.pl";
+do "./t/lib.pl";
 
 my @tbl_def = (
     [ "id",   "INTEGER",  4, 0 ],
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/DBD-CSV-0.49/t/lib.pl new/DBD-CSV-0.53/t/lib.pl
--- old/DBD-CSV-0.49/t/lib.pl   2016-01-25 22:57:22.000000000 +0100
+++ new/DBD-CSV-0.53/t/lib.pl   2018-04-05 08:37:23.000000000 +0200
@@ -12,7 +12,8 @@
 my $testname  = "output$$";
 my $base_dir  = File::Spec->rel2abs (File::Spec->curdir ());
 my $test_dir  = File::Spec->rel2abs ($testname);
-my $test_dsn  = $ENV{DBI_DSN}  || "DBI:CSV:f_dir=$testname";
+my $test_dsn  = $ENV{DBI_DSN}  || "";
+   $test_dsn  =~ m/csv/i or $test_dsn = "dbi:CSV:f_dir=$testname";
 my $test_user = $ENV{DBI_USER} || "";
 my $test_pass = $ENV{DBI_PASS} || "";
 
@@ -36,8 +37,7 @@
        }
     }
 
-sub AnsiTypeToDb
-{
+sub AnsiTypeToDb {
     my ($type, $size) = @_;
     my $uctype = uc $type;
 
@@ -64,8 +64,7 @@
 #   COL_NULLABLE - true, if this column may contain NULL's
 #   COL_KEY      - true, if this column is part of the table's primary key
 
-sub TableDefinition
-{
+sub TableDefinition {
     my ($tablename, @cols) = @_;
 
     my @keys = ();
@@ -86,8 +85,7 @@
     } # TableDefinition
 
 # This function generates a list of tables associated to a given DSN.
-sub ListTables
-{
+sub ListTables {
     my $dbh = shift or return;
 
     my @tables = $dbh->func ("list_tables");
@@ -96,8 +94,7 @@
     @tables;
     } # ListTables
 
-sub DbCleanup
-{
+sub DbCleanup {
     chdir $base_dir;
     -d $testname or return;
     chdir $testname or BAIL_OUT ("Cleanup failed");
@@ -137,8 +134,7 @@
        } # FindNewTable
     }
 
-sub isSaneCase
-{
+sub isSaneCase {
     my @f = glob "??????.???";
     foreach my $try (qw( FrUbLl BlURgH wOngOs )) {
        my $fn = "$try.csv";
@@ -153,13 +149,11 @@
     return 0;
     } # isSaneCase
 
-sub ServerError
-{
+sub ServerError {
     die "# Cannot connect: $DBI::errstr\n";
     } # ServerError
 
-sub Connect
-{
+sub Connect {
     my $attr = @_ && ref $_[-1] eq "HASH" ? pop @_ : {};
     my ($dsn, $usr, $pass) = @_;
     $dsn  ||= $test_dsn;
@@ -169,14 +163,12 @@
     $dbh;
     } # Connect
 
-sub DbDir
-{
+sub DbDir {
     @_ and $test_dir = File::Spec->catdir ($base_dir, shift);
     $test_dir;
     } # DbDir
 
-sub DbFile
-{
+sub DbFile {
     my $file = shift or return;
     File::Spec->catdir ($test_dir, $file);
     } # DbFile

