Hi,

Attached please find two patchsets for review. I understand that a
release is literally knocking on the door, so if anything here looks
iffy, feel free to defer it until after the release. Any
suggestions/criticism welcome.

dbic_cascades.diff introduces two new relationship attributes. From the POD:
If you are using SQL::Translator to create SQL for you, you can use
these attributes to explicitly set the desired ON DELETE or ON UPDATE
constraint type

Nothing hairy in this patch, a slight refactor of the SQLT parser and tests.


dbic_hashrefinf.diff adds a test and fixes a longstanding bug with funny
select/as searches. The test includes a crude benchmark making sure that
I didn't introduce any latencies when reworking mk_hash. The benchmark
can (probably should) be deleted before committing.

The second part of the diff adds a global variable flag which causes
inflation of all values in the hash returned by the inflator (where this
is applicable, of course). This code has been used in production for more
than a year, but feel free to stab it :)

Cheers

Peter
Index: t/lib/DBICTest/Schema/CD.pm
===================================================================
--- t/lib/DBICTest/Schema/CD.pm	(revision 4759)
+++ t/lib/DBICTest/Schema/CD.pm	(working copy)
@@ -24,7 +24,11 @@
 __PACKAGE__->set_primary_key('cdid');
 __PACKAGE__->add_unique_constraint([ qw/artist title/ ]);
 
-__PACKAGE__->belongs_to( artist => 'DBICTest::Schema::Artist', undef, { is_deferrable => 1 } );
+__PACKAGE__->belongs_to( artist => 'DBICTest::Schema::Artist', undef, { 
+    is_deferrable => 1, 
+    on_delete => undef,
+    on_update => 'SET NULL',
+});
 
 __PACKAGE__->has_many( tracks => 'DBICTest::Schema::Track' );
 __PACKAGE__->has_many(
Index: t/lib/DBICTest/Schema/ArtistUndirectedMap.pm
===================================================================
--- t/lib/DBICTest/Schema/ArtistUndirectedMap.pm	(revision 4759)
+++ t/lib/DBICTest/Schema/ArtistUndirectedMap.pm	(working copy)
@@ -10,8 +10,8 @@
 );
 __PACKAGE__->set_primary_key(qw/id1 id2/);
 
-__PACKAGE__->belongs_to( 'artist1', 'DBICTest::Schema::Artist', 'id1' );
-__PACKAGE__->belongs_to( 'artist2', 'DBICTest::Schema::Artist', 'id2');
+__PACKAGE__->belongs_to( 'artist1', 'DBICTest::Schema::Artist', 'id1', { on_delete => 'RESTRICT', on_update => 'CASCADE'} );
+__PACKAGE__->belongs_to( 'artist2', 'DBICTest::Schema::Artist', 'id2', { on_delete => undef, on_update => 'CASCADE'} );
 __PACKAGE__->has_many(
   'mapped_artists', 'DBICTest::Schema::Artist',
   [ {'foreign.artistid' => 'self.id1'}, {'foreign.artistid' => 'self.id2'} ],
Index: t/86sqlt.t
===================================================================
--- t/86sqlt.t	(revision 4759)
+++ t/86sqlt.t	(working copy)
@@ -10,7 +10,7 @@
 
 my $schema = DBICTest->init_schema;
 
-plan tests => 130;
+plan tests => 131;
 
 my $translator = SQL::Translator->new( 
   parser_args => {
@@ -19,15 +19,24 @@
   producer_args => {},
 );
 
-$translator->parser('SQL::Translator::Parser::DBIx::Class');
-$translator->producer('SQLite');
+{
+    my $warn = '';
+    local $SIG{__WARN__} = sub { $warn = shift };
 
-my $output = $translator->translate();
+    my $relinfo = $schema->source('Artist')->relationship_info ('cds');
+    local $relinfo->{attrs}{on_delete} = 'restrict';
 
+    $translator->parser('SQL::Translator::Parser::DBIx::Class');
+    $translator->producer('SQLite');
 
-ok($output, "SQLT produced someoutput")
-  or diag($translator->error);
+    my $output = $translator->translate();
 
+    ok($output, "SQLT produced someoutput")
+      or diag($translator->error);
+
+    like ($warn, qr/^SQLT attribute .+? was supplied for relationship/, 'Warn about dubious on_delete/on_update attributes');
+}
+
 # Note that the constraints listed here are the only ones that are tested -- if
 # more exist in the Schema than are listed here and all listed constraints are
 # correct, the test will still pass. If you add a class with UNIQUE or FOREIGN
@@ -117,7 +126,7 @@
       'name' => 'cd_fk_artist', 'index_name' => 'cd_idx_artist',
       'selftable' => 'cd', 'foreigntable' => 'artist', 
       'selfcols'  => ['artist'], 'foreigncols' => ['artistid'],
-      on_delete => 'CASCADE', on_update => 'CASCADE', deferrable => 1,
+      on_delete => '', on_update => 'SET NULL', deferrable => 1,
     },
   ],
 
@@ -128,14 +137,14 @@
       'name' => 'artist_undirected_map_fk_id1', 'index_name' => 'artist_undirected_map_idx_id1',
       'selftable' => 'artist_undirected_map', 'foreigntable' => 'artist', 
       'selfcols'  => ['id1'], 'foreigncols' => ['artistid'],
-      on_delete => 'CASCADE', on_update => '', deferrable => 1,
+      on_delete => 'RESTRICT', on_update => 'CASCADE', deferrable => 1,
     },
     {
       'display' => 'artist_undirected_map->artist for id2',
       'name' => 'artist_undirected_map_fk_id2', 'index_name' => 'artist_undirected_map_idx_id2',
       'selftable' => 'artist_undirected_map', 'foreigntable' => 'artist', 
       'selfcols'  => ['id2'], 'foreigncols' => ['artistid'],
-      on_delete => 'CASCADE', on_update => '', deferrable => 1,
+      on_delete => '', on_update => 'CASCADE', deferrable => 1,
     },
   ],
 
@@ -203,7 +212,6 @@
       on_delete => '', on_update => '', deferrable => 1,
     },
   ],
-
 );
 
 my %unique_constraints = (
Index: lib/SQL/Translator/Parser/DBIx/Class.pm
===================================================================
--- lib/SQL/Translator/Parser/DBIx/Class.pm	(revision 4759)
+++ lib/SQL/Translator/Parser/DBIx/Class.pm	(working copy)
@@ -129,6 +129,9 @@
             my $othertable = $source->related_source($rel);
             my $rel_table = $othertable->name;
 
+            my $reverse_rels = $source->reverse_relationship_info($rel);
+            my ($otherrelname, $otherrelationship) = each %{$reverse_rels};
+
             # Force the order of @cond to match the order of ->add_columns
             my $idx;
             my %other_columns_idx = map {'foreign.'.$_ => ++$idx } $othertable->columns;            
@@ -138,44 +141,55 @@
             my @refkeys = map {/^\w+\.(\w+)$/} @cond;
             my @keys = map {$rel_info->{cond}->{$_} =~ /^\w+\.(\w+)$/} @cond;
 
+            # determine if this relationship is a self.fk => foreign.pk (i.e. belongs_to)
+            my $fk_constraint;
+
+            #first it can be specified explicitly
+            if ( exists $rel_info->{attrs}{is_foreign_key_constraint} ) {
+                $fk_constraint = $rel_info->{attrs}{is_foreign_key_constraint};
+            }
+            # it can not be multi
+            elsif ( $rel_info->{attrs}{accessor} eq 'multi' ) {
+                $fk_constraint = 0;
+            }
+            # if indeed single, check if all self.columns are our primary keys.
+            # this is supposed to indicate a has_one/might_have...
+            # where's the introspection!!?? :)
+            else {
+                $fk_constraint = not $source->compare_relationship_keys(\@keys, \@refkeys);
+            }
+
+            my $cascade;
+            for my $c (qw/delete update/) {
+                if (exists $rel_info->{attrs}{"on_$c"}) {
+                    if ($fk_constraint) {
+                        $cascade->{$c} = $rel_info->{attrs}{"on_$c"};
+                    }
+                    else {
+                        warn "SQLT attribute 'on_$c' was supplied for relationship '$moniker/$rel', which does not appear to be a foreign constraint. "
+                            . "If you are sure that SQLT must generate a constraint for this relationship, add 'is_foreign_key_constraint => 1' to the attributes.\n";
+                    }
+                }
+                elsif (defined $otherrelationship and $otherrelationship->{attrs}{$c eq 'update' ? 'cascade_copy' : 'cascade_delete'}) {
+                    $cascade->{$c} = 'CASCADE';
+                }
+            }
+
             if($rel_table)
             {
-                my $reverse_rels = $source->reverse_relationship_info($rel);
-                my ($otherrelname, $otherrelationship) = each %{$reverse_rels};
+                # Constraints are added only if applicable
+                next unless $fk_constraint;
 
-                my $on_delete = '';
-                my $on_update = '';
+                # Make sure we dont create the same foreign key constraint twice
+                my $key_test = join("\x00", @keys);
+                next if $created_FK_rels{$rel_table}->{$key_test};
 
-                if (defined $otherrelationship) {
-                    $on_delete = $otherrelationship->{'attrs'}->{cascade_delete} ? 'CASCADE' : '';
-                    $on_update = $otherrelationship->{'attrs'}->{cascade_copy} ? 'CASCADE' : '';
-                }
-
                 my $is_deferrable = $rel_info->{attrs}{is_deferrable};
                 
                 # global parser_args add_fk_index param can be overridden on the rel def
                 my $add_fk_index_rel = (exists $rel_info->{attrs}{add_fk_index}) ? $rel_info->{attrs}{add_fk_index} : $add_fk_index;
 
-                # Make sure we dont create the same foreign key constraint twice
-                my $key_test = join("\x00", @keys);
 
-                #Decide if this is a foreign key based on whether the self
-                #items are our primary columns.
-
-                # If the sets are different, then we assume it's a foreign key from
-                # us to another table.
-                # OR: If is_foreign_key_constraint attr is explicity set (or set to false) on the relation
-                next if ( exists $created_FK_rels{$rel_table}->{$key_test} );
-                if ( exists $rel_info->{attrs}{is_foreign_key_constraint}) {
-                  # not is this attr set to 0 but definitely if set to 1
-                  next unless ($rel_info->{attrs}{is_foreign_key_constraint});
-                } else {
-                  # not if might have
-                  # next if ($rel_info->{attrs}{accessor} eq 'single' && exists $rel_info->{attrs}{join_type} && uc($rel_info->{attrs}{join_type}) eq 'LEFT');
-                  # not sure about this one
-                  next if $source->compare_relationship_keys(\@keys, \@refkeys);
-                }
-
                 $created_FK_rels{$rel_table}->{$key_test} = 1;
                 if (scalar(@keys)) {
                   $table->add_constraint(
@@ -184,8 +198,8 @@
                                    fields           => \@keys,
                                    reference_fields => \@refkeys,
                                     reference_table  => $rel_table,
-                                    on_delete        => $on_delete,
-                                    on_update        => $on_update,
+                                    on_delete        => $cascade->{delete},
+                                    on_update        => $cascade->{update},
                                     (defined $is_deferrable ? ( deferrable => $is_deferrable ) : ()),
                   );
                     
Index: lib/DBIx/Class/Relationship/Base.pm
===================================================================
--- lib/DBIx/Class/Relationship/Base.pm	(revision 4759)
+++ lib/DBIx/Class/Relationship/Base.pm	(working copy)
@@ -109,6 +109,19 @@
 should, set this attribute to a true or false value to override the detection
 of when to create constraints.
 
+=item on_delete / on_update
+
+If you are using L<SQL::Translator> to create SQL for you, you can use these
+attributes to explicitly set the desired C<ON DELETE> or C<ON UPDATE> constraint 
+type. If not supplied the SQLT parser will attempt to infer the constraint type by 
+interrogating the attributes of the B<opposite> relationship. For any 'multi'
+relationship with C<< cascade_delete => 1 >>, the corresponding belongs_to 
+relationship will be created with an C<ON DELETE CASCADE> constraint. For any 
+relationship bearing C<< cascade_copy => 1 >> the resulting belongs_to constraint
+will be C<ON UPDATE CASCADE>. If you wish to disable this autodetection, and just
+use the RDBMS' default constraint type, pass C<< on_delete => undef >> or 
+C<< on_delete => '' >>, and the same for C<on_update> respectively.
+
 =item is_deferrable
 
 Tells L<SQL::Translator> that the foreign key constraint it creates should be
Index: t/68inflate_resultclass_hashrefinflator.t
===================================================================
--- t/68inflate_resultclass_hashrefinflator.t	(revision 4759)
+++ t/68inflate_resultclass_hashrefinflator.t	(working copy)
@@ -3,6 +3,9 @@
 
 use Test::More qw(no_plan);
 use lib qw(t/lib);
+use Benchmark qw/timethis cmpthese/;
+use Scalar::Util qw/blessed/;
+use DateTime;
 use DBICTest;
 use DBIx::Class::ResultClass::HashRefInflator;
 my $schema = DBICTest->init_schema();
@@ -79,9 +82,87 @@
 my @dbic        = $rs_dbic->all;
 my @hashrefinf  = $rs_hashrefinf->all;
 
-for my $index (0..scalar @hashrefinf) {
+for my $index (0 .. $#hashrefinf) {
     my $dbic_obj    = $dbic[$index];
     my $datahashref = $hashrefinf[$index];
 
     check_cols_of($dbic_obj, $datahashref);
 }
+
+# sometimes for ultra-mega-speed you want to fetch columns in esoteric ways
+# check the inflator over a non-fetching join 
+$rs_dbic = $schema->resultset ('Artist')->search ({ 'me.artistid' => 1}, {
+    prefetch => { cds => 'tracks' },
+    order_by => [qw/cds.cdid tracks.trackid/],
+});
+
+$rs_hashrefinf = $schema->resultset ('Artist')->search ({ 'me.artistid' => 1}, {
+    join     => { cds => 'tracks' },
+    select   => [qw/name   tracks.title      tracks.cd       /],
+    as       => [qw/name   cds.tracks.title  cds.tracks.cd   /],
+    order_by => [qw/cds.cdid tracks.trackid/],
+});
+$rs_hashrefinf->result_class('DBIx::Class::ResultClass::HashRefInflator');
+
+@dbic        = map { $_->tracks->all } ($rs_dbic->first->cds->all);
+@hashrefinf  = $rs_hashrefinf->all;
+
+is (scalar @dbic, scalar @hashrefinf, 'Equal number of tracks fetched');
+
+for my $index (0 .. $#hashrefinf) {
+    my $track       = $dbic[$index];
+    my $datahashref = $hashrefinf[$index];
+
+    is ($track->cd->artist->name, $datahashref->{name}, 'Brought back correct artist');
+    for my $col (keys %{$datahashref->{cds}{tracks}}) {
+        is ($track->get_column ($col), $datahashref->{cds}{tracks}{$col}, "Correct track '$col'");
+    }
+}
+
+# Test the data inflator
+
+$schema->class('CD')->inflate_column( 'year',
+    { inflate => sub { DateTime->new( year => shift ) },
+      deflate => sub { shift->year } }
+);
+
+my $cd_rs = $schema->resultset("CD")->search ({cdid => 3});
+$cd_rs->result_class('DBIx::Class::ResultClass::HashRefInflator');
+
+my $cd = $cd_rs->first;
+ok ( (not blessed $cd->{year}), "Plain string returned for year");
+is ( $cd->{year}, '1997', "We are looking at the right year");
+
+# try it again with inflation requested
+local $DBIx::Class::ResultClass::HashRefInflator::inflate_data = 1;
+my $cd2 = $cd_rs->first;
+isa_ok ($cd2->{year}, 'DateTime', "Inflated object");
+is ($cd2->{year}, DateTime->new ( year => 1997 ), "Correct year was inflated");
+
+
+# And the new inflator is not much slower either
+# (just for illustration purposes, should be thrown away together with old_mk_hash)
+{
+    my $test_sub = sub {
+        $rs_hashrefinf = $schema->resultset ('Artist')->search ({}, {
+            prefetch => { cds => 'tracks' },
+        });
+        $rs_hashrefinf->result_class('DBIx::Class::ResultClass::HashRefInflator');
+        my @stuff = $rs_hashrefinf->all;
+    };
+
+    my $new_hr = timethis (-2, $test_sub);
+
+    no warnings qw/redefine/;
+    # switch to the old inflator
+    local *DBIx::Class::ResultClass::HashRefInflator::mk_hash = \&DBIx::Class::ResultClass::HashRefInflator::old_mk_hash;
+    my $old_hr = timethis (-2, $test_sub);
+
+    warn join ("\n",
+        "\nInflator Benchmarks", 
+        (map { join ("\t", @$_) } 
+            (@{cmpthese ({New => $new_hr, Old => $old_hr })})
+        ),
+        "\n",
+    );
+}
Index: lib/DBIx/Class/ResultClass/HashRefInflator.pm
===================================================================
--- lib/DBIx/Class/ResultClass/HashRefInflator.pm	(revision 4759)
+++ lib/DBIx/Class/ResultClass/HashRefInflator.pm	(working copy)
@@ -3,6 +3,9 @@
 use strict;
 use warnings;
 
+our %inflator_cache;
+our $inflate_data;
+
 =head1 NAME
 
 DBIx::Class::ResultClass::HashRefInflator
@@ -36,6 +39,19 @@
 
 =back
 
+=head1 AUTOMATICALLY INFLATING COLUMN VALUES
+
+So you want to skip the DBIx::Class object creation part, but you still want 
+all your data to be inflated according to the rules you defined in your table
+classes. Setting the global variable 
+C<$DBIx::Class::ResultClass::HashRefInflator::inflate_data> to a true value
+will instruct L<mk_hash> to interrogate the processed columns and apply any
+inflation methods declared via L<DBIx::Class::InflateColumn/inflate_column>.
+
+For increased speed the inflation method lookups are cached in 
+C<%DBIx::Class::ResultClass::HashRefInflator::inflator_cache>. Make sure to 
+reset this hash if you modify column inflators at run time.
+
 =head1 METHODS
 
 =head2 inflate_result
@@ -47,7 +63,9 @@
 sub inflate_result {
     my ($self, $source, $me, $prefetch) = @_;
 
-    return mk_hash($me, $prefetch);
+    my $hashref = mk_hash($me, $prefetch);
+    inflate_hash ($source->schema, $source->result_class, $hashref) if $inflate_data;
+    return $hashref;
 }
 
 =head2 mk_hash
@@ -56,7 +74,7 @@
 
 =cut
 
-sub mk_hash {
+sub old_mk_hash {
     my ($me, $rest) = @_;
 
     # $me is the hashref of cols/data from the immediate resultsource
@@ -87,6 +105,70 @@
     };
 }
 
+sub mk_hash { 
+    if (ref $_[0] eq 'ARRAY') {     # multi relationship
+        return [ map { mk_hash (@$_) || () } (@_) ];
+    }
+    else {
+        my $hash = {
+            # the main hash could be an undef if we are processing a skipped-over join
+            $_[0] ? %{$_[0]} : (),
+
+            # the second arg is a hash of arrays for each prefetched relation
+            map
+                { $_ => mk_hash( @{$_[1]->{$_}} ) }
+                ( $_[1] ? (keys %{$_[1]}) : () )
+        };
+
+        # if there is at least one defined column consider the resultset real
+        # (and not an emtpy has_many rel containing one empty hashref)
+        for (values %$hash) {
+            return $hash if defined $_;
+        }
+
+        return undef;
+    }
+}
+
+=head2 inflate_hash
+
+This walks through a hashref produced by L<mk_hash> and inflates any data 
+for which there is a registered inflator in the C<column_info>
+
+=cut
+
+sub inflate_hash {
+    my ($schema, $rc, $data) = @_;
+
+    foreach my $column (keys %{$data}) {
+
+        if (ref $data->{$column} eq 'HASH') {
+            inflate_hash ($schema, $schema->source ($rc)->related_class ($column), $data->{$column});
+        } 
+        elsif (ref $data->{$column} eq 'ARRAY') {
+            foreach my $rel (@{$data->{$column}}) {
+                inflate_hash ($schema, $schema->source ($rc)->related_class ($column), $rel);
+            }
+        }
+        else {
+            # "null is null is null"
+            next if not defined $data->{$column};
+
+            # cache the inflator coderef
+            unless (exists $inflator_cache{$rc}{$column}) {
+                $inflator_cache{$rc}{$column} = exists $schema->source ($rc)->_relationships->{$column}
+                    ? undef     # currently no way to inflate a column sharing a name with a rel 
+                    : $rc->column_info($column)->{_inflate_info}{inflate}
+                ;
+            }
+
+            if ($inflator_cache{$rc}{$column}) {
+                $data->{$column} = $inflator_cache{$rc}{$column}->($data->{$column});
+            }
+        }
+    }
+}
+
 =head1 CAVEAT
 
 This will not work for relationships that have been prefetched. Consider the
_______________________________________________
List: http://lists.scsys.co.uk/cgi-bin/mailman/listinfo/dbix-class
IRC: irc.perl.org#dbix-class
SVN: http://dev.catalyst.perl.org/repos/bast/DBIx-Class/
Searchable Archive: http://www.grokbase.com/group/dbix-class@lists.scsys.co.uk

Reply via email to