[Bast-commits] r3297 - in branches/DBIx-Class/bulk_create: . lib/DBIx/Class lib/DBIx/Class/ResultClass lib/DBIx/Class/Storage t t/lib t/lib/DBICTest

matthewt at dev.catalyst.perl.org
Fri May 11 04:18:40 GMT 2007


Author: matthewt
Date: 2007-05-11 04:18:37 +0100 (Fri, 11 May 2007)
New Revision: 3297

Modified:
   branches/DBIx-Class/bulk_create/
   branches/DBIx-Class/bulk_create/Changes
   branches/DBIx-Class/bulk_create/Makefile.PL
   branches/DBIx-Class/bulk_create/lib/DBIx/Class/ResultClass/HashRefInflator.pm
   branches/DBIx-Class/bulk_create/lib/DBIx/Class/ResultSet.pm
   branches/DBIx-Class/bulk_create/lib/DBIx/Class/ResultSource.pm
   branches/DBIx-Class/bulk_create/lib/DBIx/Class/Storage/DBI.pm
   branches/DBIx-Class/bulk_create/t/19quotes_newstyle.t
   branches/DBIx-Class/bulk_create/t/76joins.t
   branches/DBIx-Class/bulk_create/t/83cache.t
   branches/DBIx-Class/bulk_create/t/89dbicadmin.t
   branches/DBIx-Class/bulk_create/t/90join_torture.t
   branches/DBIx-Class/bulk_create/t/94versioning.t
   branches/DBIx-Class/bulk_create/t/bindtype_columns.t
   branches/DBIx-Class/bulk_create/t/lib/DBICTest.pm
   branches/DBIx-Class/bulk_create/t/lib/DBICTest/Plain.pm
Log:
 r50790@cain (orig r3288):  blblack | 2007-05-10 23:46:23 +0000
 Got rid of effectively dead dbh_do code in the txn_{begin|commit|rollback} funcs
 Reworked the AutoCommit/transaction_depth handling so that transaction_depth is always 1 or higher with AutoCommit off
 Doc updates to recommend AutoCommit => 1 / txn_do (see the connect sketch after this log)
 Warn if the user doesn't explicitly set AutoCommit
 Added AutoCommit => 1 to some tests that were triggering the above warning

 r50793@cain (orig r3291):  claco | 2007-05-11 01:59:38 +0000
 Updated prereq version of CAG to 0.05001
 r50795@cain (orig r3293):  matthewt | 2007-05-11 02:52:12 +0000
  r35887@cain (orig r3046):  matthewt | 2007-01-20 23:37:02 +0000
  half-finished collapse code

 r50796@cain (orig r3294):  matthewt | 2007-05-11 02:53:24 +0000
  r50794@cain (orig r3292):  matthewt | 2007-05-11 02:51:46 +0000
  collapse result refactor hopefully complete

 r50797@cain (orig r3295):  matthewt | 2007-05-11 02:56:11 +0000
 post-merge fixups
 r50798@cain (orig r3296):  matthewt | 2007-05-11 03:03:40 +0000
 autocommit error gone for dbicadmin test
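
As an aside, here is a minimal sketch of the connect style the new warning steers callers toward; the schema class and DSN are placeholders, not code from this commit:

  my $schema = MyApp::Schema->connect(
    'dbi:SQLite:dbname=myapp.db',
    undef,
    undef,
    { AutoCommit => 1 },   # explicit, so the new warning stays quiet
  );

  # Leaving AutoCommit out of the attribute hash (or omitting the hash
  # entirely) now produces:
  #   You *really* should explicitly set AutoCommit (preferably to 1)
  #   in your db connect info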



Property changes on: branches/DBIx-Class/bulk_create
___________________________________________________________________
Name: svk:merge
   - 168d5346-440b-0410-b799-f706be625ff1:/DBIx-Class-current:2207
462d4d0c-b505-0410-bf8e-ce8f877b3390:/local/bast/DBIx-Class:3159
78d5d833-3a7e-474d-aed6-cfba645156b5:/local/DBIx-Class.current:29201
78d5d833-3a7e-474d-aed6-cfba645156b5:/local/DBIx-Class.oracle8:29250
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-C3:318
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-current:3284
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-joins:173
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-resultset:570
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/column_info_from_storage:2596
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/datetime:1716
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/find_compat:1855
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/find_unique_query_fixes:2142
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/inflate:1988
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/load_namespaces:2725
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/many_to_many:2025
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/param_bind:3015
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/re_refactor_bugfix:1944
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/reorganize_tests:1827
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset-new-refactor:1766
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset_2_electric_boogaloo:2175
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset_cleanup:2102
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/source-handle:2975
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/sqlt_tests_refactor:2043
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/storage_exceptions:2617
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/versioning:2930
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/trunk/DBIx-Class:3225
   + 168d5346-440b-0410-b799-f706be625ff1:/DBIx-Class-current:2207
462d4d0c-b505-0410-bf8e-ce8f877b3390:/local/bast/DBIx-Class:3159
78d5d833-3a7e-474d-aed6-cfba645156b5:/local/DBIx-Class.current:29201
78d5d833-3a7e-474d-aed6-cfba645156b5:/local/DBIx-Class.oracle8:29250
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-C3:318
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-current:3296
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-joins:173
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-resultset:570
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/collapse_result_rewrite:3292
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/column_info_from_storage:2596
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/datetime:1716
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/find_compat:1855
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/find_unique_query_fixes:2142
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/inflate:1988
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/load_namespaces:2725
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/many_to_many:2025
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/param_bind:3015
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/re_refactor_bugfix:1944
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/reorganize_tests:1827
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset-new-refactor:1766
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset_2_electric_boogaloo:2175
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset_cleanup:2102
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/source-handle:2975
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/sqlt_tests_refactor:2043
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/storage_exceptions:2617
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/versioning:2930
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/trunk/DBIx-Class:3225

Modified: branches/DBIx-Class/bulk_create/Changes
===================================================================
--- branches/DBIx-Class/bulk_create/Changes	2007-05-11 03:03:40 UTC (rev 3296)
+++ branches/DBIx-Class/bulk_create/Changes	2007-05-11 03:18:37 UTC (rev 3297)
@@ -9,7 +9,7 @@
           is_foreign_key_constraint to allow explicit control over whether or
           not a foreign constraint is needed
         - resultset_class/result_class now (again) auto loads the specified
-          class; requires Class::Accessor::Grouped 0.05000+
+          class; requires Class::Accessor::Grouped 0.05001+
 
 0.07006 2007-04-17 23:18:00
         - Lots of documentation updates

Modified: branches/DBIx-Class/bulk_create/Makefile.PL
===================================================================
--- branches/DBIx-Class/bulk_create/Makefile.PL	2007-05-11 03:03:40 UTC (rev 3296)
+++ branches/DBIx-Class/bulk_create/Makefile.PL	2007-05-11 03:18:37 UTC (rev 3297)
@@ -15,7 +15,7 @@
 requires 'DBI'                       => 1.40;
 requires 'Module::Find'              => 0;
 requires 'Class::Inspector'          => 0;
-requires 'Class::Accessor::Grouped'  => 0.05000;
+requires 'Class::Accessor::Grouped'  => 0.05001;
 requires 'JSON'                      => 1.00; 
 
 # Perl 5.8.0 doesn't have utf8::is_utf8()

Modified: branches/DBIx-Class/bulk_create/lib/DBIx/Class/ResultClass/HashRefInflator.pm
===================================================================
--- branches/DBIx-Class/bulk_create/lib/DBIx/Class/ResultClass/HashRefInflator.pm	2007-05-11 03:03:40 UTC (rev 3296)
+++ branches/DBIx-Class/bulk_create/lib/DBIx/Class/ResultClass/HashRefInflator.pm	2007-05-11 03:18:37 UTC (rev 3297)
@@ -63,6 +63,16 @@
     # to avoid empty has_many rels containing one empty hashref
     return if (not keys %$me);
 
+    my $def;
+
+    foreach (values %$me) {
+        if (defined $_) {
+            $def = 1;
+            last;
+        }
+    }
+    return unless $def;
+
     return { %$me,
         map {
           ( $_ =>

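A rough illustration of the case the added all-undef check appears to target; the CD/tracks names follow the test schema and are not part of this change:

  my $schema = DBICTest->init_schema();   # test helper from t/lib
  my $rs = $schema->resultset('CD')->search(
    {},
    { prefetch => 'tracks' },
  );
  $rs->result_class('DBIx::Class::ResultClass::HashRefInflator');

  for my $cd ($rs->all) {
    # $cd is a plain hashref; a CD with no tracks used to pick up one
    # all-undef "track" row from the left join, which the new check now
    # filters out, so the prefetched list comes back empty instead
    my @tracks = @{ $cd->{tracks} || [] };
  }
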
Modified: branches/DBIx-Class/bulk_create/lib/DBIx/Class/ResultSet.pm
===================================================================
--- branches/DBIx-Class/bulk_create/lib/DBIx/Class/ResultSet.pm	2007-05-11 03:03:40 UTC (rev 3296)
+++ branches/DBIx-Class/bulk_create/lib/DBIx/Class/ResultSet.pm	2007-05-11 03:18:37 UTC (rev 3297)
@@ -243,7 +243,7 @@
         : $having);
   }
 
-  my $rs = (ref $self)->new($self->_source_handle, $new_attrs);
+  my $rs = (ref $self)->new($self->result_source, $new_attrs);
   if ($rows) {
     $rs->set_cache($rows);
   }
@@ -747,85 +747,132 @@
 sub _construct_object {
   my ($self, @row) = @_;
   my $info = $self->_collapse_result($self->{_attrs}{as}, \@row);
-  my @new = $self->result_class->inflate_result($self->_source_handle, @$info);
+  my @new = $self->result_class->inflate_result($self->result_source, @$info);
   @new = $self->{_attrs}{record_filter}->(@new)
     if exists $self->{_attrs}{record_filter};
   return @new;
 }
 
 sub _collapse_result {
-  my ($self, $as, $row, $prefix) = @_;
+  my ($self, $as_proto, $row) = @_;
 
-  my %const;
   my @copy = @$row;
-  
-  foreach my $this_as (@$as) {
-    my $val = shift @copy;
-    if (defined $prefix) {
-      if ($this_as =~ m/^\Q${prefix}.\E(.+)$/) {
-        my $remain = $1;
-        $remain =~ /^(?:(.*)\.)?([^.]+)$/;
-        $const{$1||''}{$2} = $val;
+
+  # 'foo'         => [ undef, 'foo' ]
+  # 'foo.bar'     => [ 'foo', 'bar' ]
+  # 'foo.bar.baz' => [ 'foo.bar', 'baz' ]
+
+  my @construct_as = map { [ (/^(?:(.*)\.)?([^.]+)$/) ] } @$as_proto;
+
+  my %collapse = %{$self->{_attrs}{collapse}||{}};
+
+  my @pri_index;
+
+  # if we're doing collapsing (has_many prefetch) we need to grab records
+  # until the PK changes, so fill @pri_index. if not, we leave it empty so
+  # we know we don't have to bother.
+
+  # the reason for not using the collapse info directly is that if you
+  # had, say, two artists in a row with no cds, the collapse info for
+  # both would be NULL (undef) so you'd lose the second artist
+
+  # store just the index so we can check the array positions from the row
+  # without having to construct the full hash
+
+  if (keys %collapse) {
+    my %pri = map { ($_ => 1) } $self->result_source->primary_columns;
+    foreach my $i (0 .. $#construct_as) {
+      next if defined($construct_as[$i][0]); # only self table
+      if (delete $pri{$construct_as[$i][1]}) {
+        push(@pri_index, $i);
       }
-    } else {
-      $this_as =~ /^(?:(.*)\.)?([^.]+)$/;
-      $const{$1||''}{$2} = $val;
+      last unless keys %pri; # short circuit (Johnny Five Is Alive!)
     }
   }
 
-  my $alias = $self->{attrs}{alias};
-  my $info = [ {}, {} ];
-  foreach my $key (keys %const) {
-    if (length $key && $key ne $alias) {
-      my $target = $info;
-      my @parts = split(/\./, $key);
-      foreach my $p (@parts) {
-        $target = $target->[1]->{$p} ||= [];
-      }
-      $target->[0] = $const{$key};
-    } else {
-      $info->[0] = $const{$key};
-    }
-  }
+  # no need to do an if, it'll be empty if @pri_index is empty anyway
+
+  my %pri_vals = map { ($_ => $copy[$_]) } @pri_index;
+
+  my @const_rows;
+
+  do { # no need to check anything at the front, we always want the first row
+
+    my %const;
   
-  my @collapse;
-  if (defined $prefix) {
-    @collapse = map {
-        m/^\Q${prefix}.\E(.+)$/ ? ($1) : ()
-    } keys %{$self->{_attrs}{collapse}}
-  } else {
-    @collapse = keys %{$self->{_attrs}{collapse}};
-  };
-
-  if (@collapse) {
-    my ($c) = sort { length $a <=> length $b } @collapse;
-    my $target = $info;
-    foreach my $p (split(/\./, $c)) {
-      $target = $target->[1]->{$p} ||= [];
+    foreach my $this_as (@construct_as) {
+      $const{$this_as->[0]||''}{$this_as->[1]} = shift(@copy);
     }
-    my $c_prefix = (defined($prefix) ? "${prefix}.${c}" : $c);
-    my @co_key = @{$self->{_attrs}{collapse}{$c_prefix}};
-    my $tree = $self->_collapse_result($as, $row, $c_prefix);
-    my %co_check = map { ($_, $tree->[0]->{$_}); } @co_key;
-    my (@final, @raw);
 
-    while (
-      !(
+    push(@const_rows, \%const);
+
+  } until ( # no pri_index => no collapse => drop straight out
+      !@pri_index
+    or
+      do { # get another row, stash it, drop out if different PK
+
+        @copy = $self->cursor->next;
+        $self->{stashed_row} = \@copy;
+
+        # last thing in do block, counts as true if anything doesn't match
+
+        # check defined-xor first for NULL vs. NOT NULL; if one is defined,
+        # the other must be too, so fall back to a string comparison
+
         grep {
-          !defined($tree->[0]->{$_}) || $co_check{$_} ne $tree->[0]->{$_}
-        } @co_key
-        )
-    ) {
-      push(@final, $tree);
-      last unless (@raw = $self->cursor->next);
-      $row = $self->{stashed_row} = \@raw;
-      $tree = $self->_collapse_result($as, $row, $c_prefix);
+          (defined $pri_vals{$_} ^ defined $copy[$_])
+          || (defined $pri_vals{$_} && ($pri_vals{$_} ne $copy[$_]))
+        } @pri_index;
+      }
+  );
+
+  my $alias = $self->{attrs}{alias};
+  my $info = [];
+
+  my %collapse_pos;
+
+  my @const_keys;
+
+  use Data::Dumper;
+
+  foreach my $const (@const_rows) {
+    scalar @const_keys or do {
+      @const_keys = sort { length($a) <=> length($b) } keys %$const;
+    };
+    foreach my $key (@const_keys) {
+      if (length $key) {
+        my $target = $info;
+        my @parts = split(/\./, $key);
+        my $cur = '';
+        my $data = $const->{$key};
+        foreach my $p (@parts) {
+          $target = $target->[1]->{$p} ||= [];
+          $cur .= ".${p}";
+          if ($cur eq ".${key}" && (my @ckey = @{$collapse{$cur}||[]})) { 
+            # collapsing at this point and on final part
+            my $pos = $collapse_pos{$cur};
+            CK: foreach my $ck (@ckey) {
+              if (!defined $pos->{$ck} || $pos->{$ck} ne $data->{$ck}) {
+                $collapse_pos{$cur} = $data;
+                delete @collapse_pos{ # clear all positioning for sub-entries
+                  grep { m/^\Q${cur}.\E/ } keys %collapse_pos
+                };
+                push(@$target, []);
+                last CK;
+              }
+            }
+          }
+          if (exists $collapse{$cur}) {
+            $target = $target->[-1];
+          }
+        }
+        $target->[0] = $data;
+      } else {
+        $info->[0] = $const->{$key};
+      }
     }
-    @$target = (@final ? @final : [ {}, {} ]);
-      # single empty result to indicate an empty prefetched has_many
   }
 
-  #print "final info: " . Dumper($info);
   return $info;
 }
 

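A hedged sketch of the caller-side behaviour the rewritten _collapse_result is meant to preserve, using the Artist/CD names from the test schema (not code from this commit):

  my $schema = DBICTest->init_schema();   # test helper from t/lib
  my $artists = $schema->resultset('Artist')->search(
    {},
    { prefetch => 'cds', order_by => 'me.artistid' },
  );

  while (my $artist = $artists->next) {
    # each next() keeps reading joined rows from the cursor until the
    # artist primary key changes (the @pri_index bookkeeping above),
    # then returns one artist with its cds already collapsed
    my @cds = $artist->cds->all;   # served from the prefetch, no extra SQL
  }
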
Modified: branches/DBIx-Class/bulk_create/lib/DBIx/Class/ResultSource.pm
===================================================================
--- branches/DBIx-Class/bulk_create/lib/DBIx/Class/ResultSource.pm	2007-05-11 03:03:40 UTC (rev 3296)
+++ branches/DBIx-Class/bulk_create/lib/DBIx/Class/ResultSource.pm	2007-05-11 03:18:37 UTC (rev 3297)
@@ -175,7 +175,7 @@
   return $self;
 }
 
-*add_column = \&add_columns;
+sub add_column { shift->add_columns(@_); } # DO NOT CHANGE THIS TO GLOB
 
 =head2 has_column
 
@@ -286,7 +286,7 @@
   $self->_ordered_columns(\@remaining);
 }
 
-*remove_column = \&remove_columns;
+sub remove_column { shift->remove_columns(@_); } # DO NOT CHANGE THIS TO GLOB
 
 =head2 set_primary_key
 
@@ -874,9 +874,13 @@
       $self->throw_exception(
         "Can't prefetch has_many ${pre} (join cond too complex)")
         unless ref($rel_info->{cond}) eq 'HASH';
+      #my @col = map { (/^self\.(.+)$/ ? ("${as_prefix}.$1") : ()); }
+      #              values %{$rel_info->{cond}};
+      $collapse->{".${as_prefix}${pre}"} = [ $rel_source->primary_columns ];
+        # action at a distance. prepending the '.' allows simpler code
+        # in ResultSet->_collapse_result
       my @key = map { (/^foreign\.(.+)$/ ? ($1) : ()); }
                     keys %{$rel_info->{cond}};
-      $collapse->{"${as_prefix}${pre}"} = \@key;
       my @ord = (ref($rel_info->{attrs}{order_by}) eq 'ARRAY'
                    ? @{$rel_info->{attrs}{order_by}}
                    : (defined $rel_info->{attrs}{order_by}
@@ -1034,3 +1038,4 @@
 
 =cut
 
+1;

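For context, add_column stays a thin one-column wrapper around add_columns; a minimal, hypothetical result class showing both calls (class and column names are illustrative only):

  package MyApp::Schema::Artist;
  use base qw/DBIx::Class/;

  __PACKAGE__->load_components(qw/Core/);
  __PACKAGE__->table('artist');
  __PACKAGE__->add_columns(qw/artistid name/);   # several columns at once
  __PACKAGE__->add_column('rank');               # delegates to add_columns
  __PACKAGE__->set_primary_key('artistid');

  1;
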
Modified: branches/DBIx-Class/bulk_create/lib/DBIx/Class/Storage/DBI.pm
===================================================================
--- branches/DBIx-Class/bulk_create/lib/DBIx/Class/Storage/DBI.pm	2007-05-11 03:03:40 UTC (rev 3296)
+++ branches/DBIx-Class/bulk_create/lib/DBIx/Class/Storage/DBI.pm	2007-05-11 03:18:37 UTC (rev 3297)
@@ -331,7 +331,12 @@
 
 The arrayref can either contain the same set of arguments one would
 normally pass to L<DBI/connect>, or a lone code reference which returns
-a connected database handle.
+a connected database handle.  Please note that the L<DBI> docs
+recommend that you always explicitly set C<AutoCommit> to either
+C<0> or C<1>.   L<DBIx::Class> further recommends that it be set
+to C<1>, and that you perform transactions via our L</txn_do>
+method.  L<DBIx::Class> will emit a warning if you fail to explicitly
+set C<AutoCommit> one way or the other.  See below for more details.
 
 In either case, if the final argument in your connect_info happens
 to be a hashref, C<connect_info> will look there for several
@@ -390,6 +395,21 @@
 force this setting for you anyways.  Setting HandleError to anything
 other than simple exception object wrapper might cause problems too.
 
+Another Important Note:
+
+DBIC can do some wonderful magic with handling exceptions,
+disconnections, and transactions when you use C<AutoCommit => 1>
+combined with C<txn_do> for transaction support.
+
+If you set C<AutoCommit => 0> in your connect info, then you are always
+in an assumed transaction between commits, and you're telling us you'd
+like to manage that manually.  A lot of DBIC's magic protections
+go away.  We can't protect you from exceptions due to database
+disconnects because we don't know anything about how to restart your
+transactions.  You're on your own for handling all sorts of exceptional
+cases if you choose the C<AutoCommit => 0> path, just as you would
+be with raw DBI.
+
 Examples:
 
   # Simple SQLite connection
@@ -404,7 +424,7 @@
       'dbi:Pg:dbname=foo',
       'postgres',
       'my_pg_password',
-      { AutoCommit => 0 },
+      { AutoCommit => 1 },
       { quote_char => q{"}, name_sep => q{.} },
     ]
   );
@@ -415,7 +435,7 @@
       'dbi:Pg:dbname=foo',
       'postgres',
       'my_pg_password',
-      { AutoCommit => 0, quote_char => q{"}, name_sep => q{.} },
+      { AutoCommit => 1, quote_char => q{"}, name_sep => q{.} },
     ]
   );
 
@@ -462,6 +482,18 @@
     pop(@$info) if !keys %$last_info;
   }
 
+  # Now check the (possibly new) final argument for AutoCommit,
+  #  but not in the coderef case, obviously.
+  if(ref $info->[0] ne 'CODE') {
+      $last_info = $info->[3];
+
+      warn "You *really* should explicitly set AutoCommit "
+         . "(preferably to 1) in your db connect info"
+           if !$last_info
+              || ref $last_info ne 'HASH'
+              || !defined $last_info->{AutoCommit};
+  }
+
   $self->_connect_info($info);
 }
 
@@ -688,6 +720,10 @@
   my @info = @{$self->_connect_info || []};
   $self->_dbh($self->_connect(@info));
 
+  # Always set the transaction depth on connect, since
+  #  there is no transaction in progress by definition
+  $self->{transaction_depth} = $self->_dbh->{AutoCommit} ? 0 : 1;
+
   if(ref $self eq 'DBIx::Class::Storage::DBI') {
     my $driver = $self->_dbh->{Driver}->{Name};
     if ($self->load_optional_class("DBIx::Class::Storage::DBI::${driver}")) {
@@ -741,75 +777,61 @@
   $dbh;
 }
 
-sub _dbh_txn_begin {
-  my ($self, $dbh) = @_;
-  if ($dbh->{AutoCommit}) {
+
+sub txn_begin {
+  my $self = shift;
+  if($self->{transaction_depth}++ == 0) {
     $self->debugobj->txn_begin()
-      if ($self->debug);
-    $dbh->begin_work;
+      if $self->debug;
+    # this is ->dbh rather than ->_dbh because
+    #  we should reconnect on begin_work
+    #  for AutoCommit users
+    $self->dbh->begin_work;
   }
 }
 
-sub txn_begin {
+sub txn_commit {
   my $self = shift;
-  $self->dbh_do($self->can('_dbh_txn_begin'))
-    if $self->{transaction_depth}++ == 0;
-}
-
-sub _dbh_txn_commit {
-  my ($self, $dbh) = @_;
-  if ($self->{transaction_depth} == 0) {
-    unless ($dbh->{AutoCommit}) {
-      $self->debugobj->txn_commit()
-        if ($self->debug);
-      $dbh->commit;
-    }
+  if ($self->{transaction_depth} == 1) {
+    my $dbh = $self->_dbh;
+    $self->debugobj->txn_commit()
+      if ($self->debug);
+    $dbh->commit;
+    $self->{transaction_depth} = 0
+      if $dbh->{AutoCommit};
   }
-  else {
-    if (--$self->{transaction_depth} == 0) {
-      $self->debugobj->txn_commit()
-        if ($self->debug);
-      $dbh->commit;
-    }
+  elsif($self->{transaction_depth} > 1) {
+    $self->{transaction_depth}--
   }
 }
 
-sub txn_commit {
+sub txn_rollback {
   my $self = shift;
-  $self->dbh_do($self->can('_dbh_txn_commit'));
-}
-
-sub _dbh_txn_rollback {
-  my ($self, $dbh) = @_;
-  if ($self->{transaction_depth} == 0) {
-    unless ($dbh->{AutoCommit}) {
+  my $dbh = $self->_dbh;
+  my $autocommit;
+  eval {
+    $autocommit = $dbh->{AutoCommit};
+    if ($self->{transaction_depth} == 1) {
       $self->debugobj->txn_rollback()
         if ($self->debug);
       $dbh->rollback;
+      $self->{transaction_depth} = 0
+        if $autocommit;
     }
-  }
-  else {
-    if (--$self->{transaction_depth} == 0) {
-      $self->debugobj->txn_rollback()
-        if ($self->debug);
-      $dbh->rollback;
+    elsif($self->{transaction_depth} > 1) {
+      $self->{transaction_depth}--;
     }
     else {
       die DBIx::Class::Storage::NESTED_ROLLBACK_EXCEPTION->new;
     }
-  }
-}
-
-sub txn_rollback {
-  my $self = shift;
-
-  eval { $self->dbh_do($self->can('_dbh_txn_rollback')) };
+  };
   if ($@) {
     my $error = $@;
     my $exception_class = "DBIx::Class::Storage::NESTED_ROLLBACK_EXCEPTION";
     $error =~ /$exception_class/ and $self->throw_exception($error);
-    $self->{transaction_depth} = 0;          # ensure that a failed rollback
-    $self->throw_exception($error);          # resets the transaction depth
+    # ensure that a failed rollback resets the transaction depth
+    $self->{transaction_depth} = $autocommit ? 0 : 1;
+    $self->throw_exception($error);
   }
 }
 

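From the caller's side the reworked depth counting only shows up when txn_do calls nest; a sketch assuming the recommended AutoCommit => 1 setup (schema, class, and data names are placeholders):

  my $schema = MyApp::Schema->connect(
    'dbi:SQLite:dbname=myapp.db', undef, undef, { AutoCommit => 1 },
  );

  $schema->txn_do(sub {
    # depth 0 -> 1: the one real begin_work happens here
    my $artist = $schema->resultset('Artist')->create({ name => 'Someone' });

    $schema->txn_do(sub {
      # depth 1 -> 2: no second BEGIN, the outer transaction is reused
      $artist->create_related('cds', { title => 'Something', year => 2007 });
    });
    # the inner block only decrements the depth back to 1
  });
  # depth 1 -> 0: the single real COMMIT is issued; an exception anywhere
  # inside rolls the whole thing back once
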
Modified: branches/DBIx-Class/bulk_create/t/19quotes_newstyle.t
===================================================================
--- branches/DBIx-Class/bulk_create/t/19quotes_newstyle.t	2007-05-11 03:03:40 UTC (rev 3296)
+++ branches/DBIx-Class/bulk_create/t/19quotes_newstyle.t	2007-05-11 03:18:37 UTC (rev 3297)
@@ -22,7 +22,13 @@
 diag('Testing against ' . join(' ', map { $schema->storage->dbh->get_info($_) } qw/17 18/));
 
 my $dsn = $schema->storage->connect_info->[0];
-$schema->connection($dsn, { quote_char => '`', name_sep => '.' });
+$schema->connection(
+  $dsn,
+  undef,
+  undef,
+  { AutoCommit => 1 },
+  { quote_char => '`', name_sep => '.' },
+);
 
 my $sql = '';
 $schema->storage->debugcb(sub { $sql = $_[1] });
@@ -47,7 +53,12 @@
 eval { $rs->first };
 like($sql, qr/ORDER BY \Q${order}\E/, 'did not quote ORDER BY with scalarref');
 
-$schema->connection($dsn, { quote_char => [qw/[ ]/], name_sep => '.' });
+$schema->connection(
+  $dsn,
+  undef,
+  undef,
+  { AutoCommit => 1, quote_char => [qw/[ ]/], name_sep => '.' }
+);
 $schema->storage->debugcb(sub { $sql = $_[1] });
 $schema->storage->debug(1);
 
@@ -62,7 +73,12 @@
        order => '12'
 );
 
-$schema->connection($dsn, { quote_char => '`', name_sep => '.' });
+$schema->connection(
+  $dsn,
+  undef,
+  undef,
+  { AutoCommit => 1, quote_char => '`', name_sep => '.' }
+);
 
 is($schema->storage->sql_maker->update('group', \%data), 'UPDATE `group` SET `name` = ?, `order` = ?', 'quoted table names for UPDATE');
 

Modified: branches/DBIx-Class/bulk_create/t/76joins.t
===================================================================
--- branches/DBIx-Class/bulk_create/t/76joins.t	2007-05-11 03:03:40 UTC (rev 3296)
+++ branches/DBIx-Class/bulk_create/t/76joins.t	2007-05-11 03:18:37 UTC (rev 3297)
@@ -408,9 +408,5 @@
 my $prefetch_result = make_hash_struc($art_rs_pr);
 my $nonpre_result   = make_hash_struc($art_rs);
 
-TODO: {
-  local $TODO = 'fixing collapse in -current';
 is_deeply( $prefetch_result, $nonpre_result,
     'Compare 2 level prefetch result to non-prefetch result' );
-}
-

Modified: branches/DBIx-Class/bulk_create/t/83cache.t
===================================================================
--- branches/DBIx-Class/bulk_create/t/83cache.t	2007-05-11 03:03:40 UTC (rev 3296)
+++ branches/DBIx-Class/bulk_create/t/83cache.t	2007-05-11 03:18:37 UTC (rev 3297)
@@ -12,7 +12,7 @@
 
 eval "use DBD::SQLite";
 plan skip_all => 'needs DBD::SQLite for testing' if $@;
-plan tests => 22;
+plan tests => 23;
 
 my $rs = $schema->resultset("Artist")->search(
   { artistid => 1 }
@@ -158,8 +158,16 @@
   push @objs, $tag->id; #warn "tag: ", $tag->ID;
 }
 
-is_deeply( \@objs, [ 2, 5, 8 ], 'second cd has correct tags' );
+is_deeply( \@objs, [ 1 ], 'second cd has correct tags' );
 
+$tags = $cds->next->tags;
+@objs = ();
+while( my $tag = $tags->next ) {
+  push @objs, $tag->id; #warn "tag: ", $tag->ID;
+}
+
+is_deeply( \@objs, [ 2, 5, 8 ], 'third cd has correct tags' );
+
 is( $queries, 0, 'no additional SQL statements while checking nested data' );
 
 # start test for prefetch SELECT count

Modified: branches/DBIx-Class/bulk_create/t/89dbicadmin.t
===================================================================
--- branches/DBIx-Class/bulk_create/t/89dbicadmin.t	2007-05-11 03:03:40 UTC (rev 3296)
+++ branches/DBIx-Class/bulk_create/t/89dbicadmin.t	2007-05-11 03:18:37 UTC (rev 3297)
@@ -23,7 +23,7 @@
 # tests run on windows as well
 
 my $employees = $schema->resultset('Employee');
-my $cmd = qq|$^X script/dbicadmin --schema=DBICTest::Schema --class=Employee --tlibs --connect="['dbi:SQLite:dbname=t/var/DBIxClass.db','','']" --force --tlibs|;
+my $cmd = qq|$^X script/dbicadmin --schema=DBICTest::Schema --class=Employee --tlibs --connect="['dbi:SQLite:dbname=t/var/DBIxClass.db','','',{AutoCommit:1}]" --force --tlibs|;
 
 `$cmd --op=insert --set="{name:'Matt'}"`;
 ok( ($employees->count()==1), 'insert count' );

Modified: branches/DBIx-Class/bulk_create/t/90join_torture.t
===================================================================
--- branches/DBIx-Class/bulk_create/t/90join_torture.t	2007-05-11 03:03:40 UTC (rev 3296)
+++ branches/DBIx-Class/bulk_create/t/90join_torture.t	2007-05-11 03:18:37 UTC (rev 3297)
@@ -13,7 +13,7 @@
 is($rs1a_results[0]->title, 'Forkful of bees', "bare field conditions okay after search related");
 my $rs1 = $schema->resultset("Artist")->search({ 'tags.tag' => 'Blue' }, { join => {'cds' => 'tracks'}, prefetch => {'cds' => 'tags'} });
 my @artists = $rs1->all;
-cmp_ok(@artists, '==', 1, "Two artists returned");
+cmp_ok(@artists, '==', 2, "Two artists returned");
 
 my $rs2 = $rs1->search({ artistid => '1' }, { join => {'cds' => {'cd_to_producer' => 'producer'} } });
 

Modified: branches/DBIx-Class/bulk_create/t/94versioning.t
===================================================================
--- branches/DBIx-Class/bulk_create/t/94versioning.t	2007-05-11 03:03:40 UTC (rev 3296)
+++ branches/DBIx-Class/bulk_create/t/94versioning.t	2007-05-11 03:18:37 UTC (rev 3297)
@@ -21,7 +21,12 @@
 mkdir("t/var") unless -d "t/var";
 unlink('t/var/DBICVersion-Schema-1.0-SQLite.sql');
 
-my $schema_orig = DBICVersion::Schema->connect("dbi:SQLite:$db_file");
+my $schema_orig = DBICVersion::Schema->connect(
+  "dbi:SQLite:$db_file",
+  undef,
+  undef,
+  { AutoCommit => 1 },
+);
 # $schema->storage->ensure_connected();
 
 is($schema_orig->ddl_filename('SQLite', 't/var', '1.0'), File::Spec->catfile('t', 'var', 'DBICVersion-Schema-1.0-SQLite.sql'), 'Filename creation working');
@@ -35,7 +40,12 @@
 is($schema_orig->exists($tvrs), 1, 'Created schema from DDL file');
 
 eval "use DBICVersionNew";
-my $schema_new = DBICVersion::Schema->connect("dbi:SQLite:$db_file");
+my $schema_new = DBICVersion::Schema->connect(
+  "dbi:SQLite:$db_file",
+  undef,
+  undef,
+  { AutoCommit => 1 },
+);
 
 unlink('t/var/DBICVersion-Schema-2.0-SQLite.sql');
 unlink('t/var/DBICVersion-Schema-1.0-2.0-SQLite.sql');
@@ -43,7 +53,12 @@
 ok(-f 't/var/DBICVersion-Schema-1.0-2.0-SQLite.sql', 'Created DDL upgrade file');
 
 ## create new to pick up filedata for upgrade files we just made (on_connect)
-my $schema_upgrade = DBICVersion::Schema->connect("dbi:SQLite:$db_file");
+my $schema_upgrade = DBICVersion::Schema->connect(
+  "dbi:SQLite:$db_file",
+  undef,
+  undef,
+  { AutoCommit => 1 },
+);
 
 ## do this here or let Versioned.pm do it?
 $schema_upgrade->upgrade();

Modified: branches/DBIx-Class/bulk_create/t/bindtype_columns.t
===================================================================
--- branches/DBIx-Class/bulk_create/t/bindtype_columns.t	2007-05-11 03:03:40 UTC (rev 3296)
+++ branches/DBIx-Class/bulk_create/t/bindtype_columns.t	2007-05-11 03:18:37 UTC (rev 3297)
@@ -12,7 +12,7 @@
   
 plan tests => 3;
 
-my $schema = DBICTest::Schema->connection($dsn, $dbuser, $dbpass);
+my $schema = DBICTest::Schema->connection($dsn, $dbuser, $dbpass, { AutoCommit => 1 });
 
 my $dbh = $schema->storage->dbh;
 

Modified: branches/DBIx-Class/bulk_create/t/lib/DBICTest/Plain.pm
===================================================================
--- branches/DBIx-Class/bulk_create/t/lib/DBICTest/Plain.pm	2007-05-11 03:03:40 UTC (rev 3296)
+++ branches/DBIx-Class/bulk_create/t/lib/DBICTest/Plain.pm	2007-05-11 03:18:37 UTC (rev 3297)
@@ -15,7 +15,13 @@
 my $dsn = "dbi:SQLite:${db_file}";
 
 __PACKAGE__->load_classes("Test");
-my $schema = __PACKAGE__->compose_connection(__PACKAGE__, $dsn);
+my $schema = __PACKAGE__->compose_connection(
+  __PACKAGE__,
+  $dsn,
+  undef,
+  undef,
+  { AutoCommit => 1 }
+);
 
 my $dbh = DBI->connect($dsn);
 

Modified: branches/DBIx-Class/bulk_create/t/lib/DBICTest.pm
===================================================================
--- branches/DBIx-Class/bulk_create/t/lib/DBICTest.pm	2007-05-11 03:03:40 UTC (rev 3296)
+++ branches/DBIx-Class/bulk_create/t/lib/DBICTest.pm	2007-05-11 03:18:37 UTC (rev 3297)
@@ -60,7 +60,7 @@
                            : 'compose_namespace');
 
     my $schema = DBICTest::Schema->$compose_method('DBICTest')
-                                 ->connect($dsn, $dbuser, $dbpass);
+                     ->connect($dsn, $dbuser, $dbpass, { AutoCommit => 1 });
     $schema->storage->on_connect_do(['PRAGMA synchronous = OFF']);
     if ( !$args{no_deploy} ) {
         __PACKAGE__->deploy_schema( $schema );



