[Bast-commits] r4039 - in DBIx-Class/0.09/trunk: . lib/DBIx lib/DBIx/Class lib/DBIx/Class/InflateColumn lib/DBIx/Class/Manual lib/DBIx/Class/Relationship lib/DBIx/Class/ResultClass lib/DBIx/Class/Storage lib/DBIx/Class/Storage/DBI lib/DBIx/Class/Storage/DBI/ODBC lib/SQL/Translator/Parser/DBIx script t t/lib/DBICTest t/lib/DBICTest/Schema

ash at dev.catalyst.perl.org
Thu Feb 7 11:29:23 GMT 2008


Author: ash
Date: 2008-02-07 11:29:23 +0000 (Thu, 07 Feb 2008)
New Revision: 4039

Added:
   DBIx-Class/0.09/trunk/lib/DBIx/Class/StartupCheck.pm
   DBIx-Class/0.09/trunk/lib/DBIx/Class/Storage/DBI/ODBC/Microsoft_SQL_Server.pm
   DBIx-Class/0.09/trunk/t/746mssql.t
   DBIx-Class/0.09/trunk/t/lib/DBICTest/Schema/EventTZ.pm
   DBIx-Class/0.09/trunk/t/lib/DBICTest/Schema/ForceForeign.pm
Modified:
   DBIx-Class/0.09/trunk/
   DBIx-Class/0.09/trunk/Changes
   DBIx-Class/0.09/trunk/Makefile.PL
   DBIx-Class/0.09/trunk/lib/DBIx/Class.pm
   DBIx-Class/0.09/trunk/lib/DBIx/Class/InflateColumn.pm
   DBIx-Class/0.09/trunk/lib/DBIx/Class/InflateColumn/DateTime.pm
   DBIx-Class/0.09/trunk/lib/DBIx/Class/Manual/Cookbook.pod
   DBIx-Class/0.09/trunk/lib/DBIx/Class/Manual/FAQ.pod
   DBIx-Class/0.09/trunk/lib/DBIx/Class/Manual/Intro.pod
   DBIx-Class/0.09/trunk/lib/DBIx/Class/Manual/Joining.pod
   DBIx-Class/0.09/trunk/lib/DBIx/Class/Relationship/BelongsTo.pm
   DBIx-Class/0.09/trunk/lib/DBIx/Class/ResultClass/HashRefInflator.pm
   DBIx-Class/0.09/trunk/lib/DBIx/Class/ResultSet.pm
   DBIx-Class/0.09/trunk/lib/DBIx/Class/ResultSource.pm
   DBIx-Class/0.09/trunk/lib/DBIx/Class/ResultSourceHandle.pm
   DBIx-Class/0.09/trunk/lib/DBIx/Class/Row.pm
   DBIx-Class/0.09/trunk/lib/DBIx/Class/Schema.pm
   DBIx-Class/0.09/trunk/lib/DBIx/Class/Storage.pm
   DBIx-Class/0.09/trunk/lib/DBIx/Class/Storage/DBI/Replication.pm
   DBIx-Class/0.09/trunk/lib/DBIx/Class/Storage/Statistics.pm
   DBIx-Class/0.09/trunk/lib/SQL/Translator/Parser/DBIx/Class.pm
   DBIx-Class/0.09/trunk/script/dbicadmin
   DBIx-Class/0.09/trunk/t/68inflate.t
   DBIx-Class/0.09/trunk/t/73oracle_inflate.t
   DBIx-Class/0.09/trunk/t/746db2_400.t
   DBIx-Class/0.09/trunk/t/76joins.t
   DBIx-Class/0.09/trunk/t/84serialize.t
   DBIx-Class/0.09/trunk/t/86sqlt.t
   DBIx-Class/0.09/trunk/t/89dbicadmin.t
   DBIx-Class/0.09/trunk/t/89inflate_datetime.t
   DBIx-Class/0.09/trunk/t/90join_torture.t
   DBIx-Class/0.09/trunk/t/91merge_attr.t
   DBIx-Class/0.09/trunk/t/96multi_create.t
   DBIx-Class/0.09/trunk/t/lib/DBICTest/Schema.pm
   DBIx-Class/0.09/trunk/t/lib/DBICTest/Schema/Artist.pm
Log:
 r4961 at proteus (orig r3809):  ash | 2007-10-07 22:50:33 +0100
 Timezone support for InflateColumn::DateTime (sergio)
 r4977 at proteus (orig r3813):  ash | 2007-10-12 11:26:55 +0100
 Add an add_index method on ResultSource (and proxy classes)
 r4978 at proteus (orig r3814):  ash | 2007-10-12 11:32:41 +0100
 s/indices/indexes/ to keep Jess happy.
 r4982 at proteus (orig r3815):  ash | 2007-10-12 18:46:55 +0100
 Remove add_index and replace it with sqlt_deploy_hook
 r4983 at proteus (orig r3816):  ash | 2007-10-12 19:20:43 +0100
 Test sqlt_deploy_hook on the Schema level
 r4984 at proteus (orig r3817):  ash | 2007-10-13 13:33:33 +0100
 Add docs about sqlt_deploy_hook, and reorganise/reorder the Cookbook
 r5117 at proteus (orig r3821):  nigel | 2007-10-17 16:59:12 +0100
 Added reference to RH perl bug at appropriate points
 r5120 at proteus (orig r3824):  ash | 2007-10-20 12:01:07 +0100
 Added startup checks to warn loudly if we appear to be running on RedHat systems from perl-5.8.8-10 and up that have the bless/overload patch applied (badly) which causes 2x -> 100x performance penalty.  (Jon Schutz)
 r5166 at proteus (orig r3825):  castaway | 2007-10-21 12:46:56 +0100
 Documentation clarifications
 
 r5170 at proteus (orig r3829):  captainL | 2007-10-22 20:45:46 +0100
 fixed problem with duplicate related objects for Row::new/insert
 r5181 at proteus (orig r3840):  bert | 2007-10-25 11:04:42 +0100
  r10510 at beetle:  bert | 2007-10-25 11:22:07 +0200
  Merged with replication branch (fixed wrong object-function calls that prevented set_schema from working, rev 3823). Added myself to contributors.
 
 r5182 at proteus (orig r3841):  captainL | 2007-10-26 11:53:26 +0100
 the checks in reverse_relationship_info are less ambiguous
 r5183 at proteus (orig r3842):  ash | 2007-10-27 18:53:39 +0100
 Reorder Changes file. (New changes go at bottom of block please!)
 r5185 at proteus (orig r3843):  tomboh | 2007-10-29 17:19:01 +0000
 Improve Documentation.
 
 r5186 at proteus (orig r3844):  captainL | 2007-10-29 20:26:02 +0000
 fixed _merge_attr bug
 r5187 at proteus (orig r3845):  captainL | 2007-10-29 20:28:45 +0000
 spelt Zby's name properly
 r8126 at proteus (orig r3852):  castaway | 2007-11-03 02:17:11 +0000
 Improve inflatecolumn docs
 
 r9392 at proteus (orig r3853):  matthewt | 2007-11-06 14:53:48 +0000
 make belongs_to accept an [] join cond
 r9632 at proteus (orig r3872):  castaway | 2007-11-12 21:13:33 +0000
 Added cookbook recipe for using dual, thanks Richard
 
 r9764 at proteus (orig r3879):  ash | 2007-11-15 12:49:53 +0000
 Fix is_foreign_key_constraint - thanks Jon Schutz
 r9766 at proteus (orig r3880):  ash | 2007-11-15 13:02:15 +0000
 Adding missing file
 r9767 at proteus (orig r3881):  ash | 2007-11-15 13:52:58 +0000
 Fix t/82cascade_copy.t
 r9812 at proteus (orig r3883):  tomboh | 2007-11-15 15:05:12 +0000
 Remove an unneeded requirement.
 
 r9814 at proteus (orig r3885):  ash | 2007-11-16 14:30:51 +0000
 Version bump
 r9896 at proteus (orig r3886):  ash | 2007-11-19 18:11:53 +0000
 Fix END block
 r11311 at proteus (orig r3887):  wreis | 2007-11-21 13:57:35 +0000
 minor fixes for ResultSet docs
 r11312 at proteus (orig r3888):  ash | 2007-11-22 15:27:23 +0000
 Fix mistakes
 r11454 at proteus (orig r3889):  ash | 2007-11-24 21:24:53 +0000
 Sort tables for consistent output
 r11889 at proteus (orig r3897):  wreis | 2007-12-11 01:29:51 +0000
 minor doc fix
 r12746 at proteus (orig r3901):  ash | 2007-12-20 11:02:15 +0000
 Add proper thaw hooks so schema gets re-attached
 r13156 at proteus (orig r3908):  perigrin | 2008-01-02 20:52:13 +0000
 move dbicadmin to JSON::Any
 r13157 at proteus (orig r3909):  semifor | 2008-01-02 22:24:23 +0000
 Added Storage::DBI subclass for MSSQL auto PK over ODBC.
 
 r13158 at proteus (orig r3910):  nothingmuch | 2008-01-03 13:18:36 +0000
 failing test for inflate not being triggered with copy()
 r13159 at proteus (orig r3911):  nothingmuch | 2008-01-03 13:31:49 +0000
 test plan
 r13160 at proteus (orig r3912):  nothingmuch | 2008-01-03 13:35:20 +0000
 introduce set_inflated_columns
 r13161 at proteus (orig r3913):  nothingmuch | 2008-01-03 13:36:07 +0000
 pod coverage for Storage::DBI::ODBC::Microsoft_SQL_Server
 r13162 at proteus (orig r3914):  nothingmuch | 2008-01-03 13:53:00 +0000
 dbicadmin printed even when quiet
 r13163 at proteus (orig r3915):  nothingmuch | 2008-01-03 13:54:38 +0000
 make the dbicadmin test portable to JSON modules that do not support single quotes and bare strings, even on windaz
 r13164 at proteus (orig r3916):  nothingmuch | 2008-01-03 14:50:52 +0000
 changelog
 r13166 at proteus (orig r3918):  tomboh | 2008-01-07 15:23:15 +0000
 Fix class name typo
 
 r13173 at proteus (orig r3925):  nigel | 2008-01-11 12:55:17 +0000
 Copied documentation for id & discard_changes methods from Pk.pm
 into Row.pm as few people think to look in Pk.pm.  Put reference
 back to original source.
 
 r13312 at proteus (orig r3942):  schwern | 2008-01-16 10:25:36 +0000
 Declare dep on JSON::Any rather than JSON.
 
 No reason JSON::Any should prefer JSON.pm over its default ordering.
 r13313 at proteus (orig r3943):  schwern | 2008-01-16 10:26:26 +0000
 And fix the check in the dbicadmin test to look for JSON::Any.
 r13867 at proteus (orig r3954):  matthewt | 2008-01-18 13:03:08 +0000
 added strict and warnings to HashRefInflator, fixed inflation for empty has_many rels
 r13868 at proteus (orig r3955):  matthewt | 2008-01-20 12:28:55 +0000
 made search_rs smarter about when to preserve the cache to fix mm prefetch usage
 r13878 at proteus (orig r3965):  semifor | 2008-01-22 15:13:11 +0000
 Added build_datetime_parser method for MSSQL over ODBC.
 r13883 at proteus (orig r3970):  castaway | 2008-01-24 13:19:52 +0000
 Oops, fix joining manual to be correct
 
 r13884 at proteus (orig r3971):  castaway | 2008-01-24 23:22:49 +0000
 Version 0.08009
 
 r13885 at proteus (orig r3972):  castaway | 2008-01-24 23:36:59 +0000
 0.08009 released
 
 r13888 at proteus (orig r3975):  tomboh | 2008-01-25 17:20:38 +0000
 Fix a typo and a couple of links.
 
 r14018 at proteus (orig r4024):  oyse | 2008-02-05 08:42:32 +0000
 Added Øystein Torget to the list of contributors



Property changes on: DBIx-Class/0.09/trunk
___________________________________________________________________
Name: svk:merge
   - 168d5346-440b-0410-b799-f706be625ff1:/DBIx-Class-current:2207
462d4d0c-b505-0410-bf8e-ce8f877b3390:/local/bast/DBIx-Class:3159
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/on_disconnect_do:3694
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-C3:318
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-current:2222
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-joins:173
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-resultset:570
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/datetime:1716
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/find_compat:1855
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/find_unique_query_fixes:2142
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/inflate:1988
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/many_to_many:2025
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/re_refactor_bugfix:1944
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/reorganize_tests:1827
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset-new-refactor:1766
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset_2_electric_boogaloo:2175
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset_cleanup:2102
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/sqlt_tests_refactor:2043
   + 168d5346-440b-0410-b799-f706be625ff1:/DBIx-Class-current:2207
462d4d0c-b505-0410-bf8e-ce8f877b3390:/local/bast/DBIx-Class:3159
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/on_disconnect_do:3694
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/trunk:4024
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-C3:318
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-current:2222
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-joins:173
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-resultset:570
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/datetime:1716
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/find_compat:1855
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/find_unique_query_fixes:2142
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/inflate:1988
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/many_to_many:2025
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/re_refactor_bugfix:1944
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/reorganize_tests:1827
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset-new-refactor:1766
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset_2_electric_boogaloo:2175
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset_cleanup:2102
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/sqlt_tests_refactor:2043
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/trunk/DBIx-Class:3606
fe160bb6-dc1c-0410-9f2b-d64a711b54a5:/local/DBIC-trunk-0.08:10510

Modified: DBIx-Class/0.09/trunk/Changes
===================================================================
--- DBIx-Class/0.09/trunk/Changes	2008-02-05 23:31:30 UTC (rev 4038)
+++ DBIx-Class/0.09/trunk/Changes	2008-02-07 11:29:23 UTC (rev 4039)
@@ -1,11 +1,36 @@
 Revision history for DBIx::Class
 
+0.08009 2008-01-20 13:30
+        - Made search_rs smarter about when to preserve the cache to fix
+          mm prefetch usage
+        - Added Storage::DBI subclass for MSSQL over ODBC. 
+        - Added freeze, thaw and dclone methods to Schema so that thawed
+          objects will get re-attached to the schema.
+        - Moved dbicadmin to JSON::Any wrapped JSON.pm for a sane API
+        - introduced DBIx::Class::set_inflated_columns
+        - DBIx::Class::Row::copy uses set_inflated_columns
+
+0.08008 2007-11-16 14:30:00
+        - Fixed join merging bug (test from Zby)
         - When adding relationships, it will throw an exception if you get the
           foreign and self parts the wrong way round in the condition
         - ResultSetColumn::func() now returns all results if called in list
           context; this makes things like func('DISTINCT') work as expected
         - Many-to-many relationships now warn if the utility methods would 
           clash
+        - InflateColumn::DateTime now accepts an extra parameter of timezone
+          to set timezone on the DT object (thanks Sergio Salvi)
+        - Added sqlt_deploy_hook to result classes so that indexes can be 
+          added.
+        - Added startup checks to warn loudly if we appear to be running on 
+          RedHat systems from perl-5.8.8-10 and up that have the bless/overload
+          patch applied (badly) which causes 2x -> 100x performance penalty.
+          (Jon Schutz)
+        - ResultSource::reverse_relationship_info can distinguish between 
+          sources using the same table
+        - Row::insert will now not fall over if passed duplicate related objects
+        - Row::copy will not fall over if you have two relationships to the 
+          same source with a unique constraint on it
 
 0.08007 2007-09-04 19:36:00
         - patch for Oracle datetime inflation (abram at arin.net)

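The freeze/thaw/dclone entry above is easiest to see in a short sketch. The following is illustrative only and not part of the patch; the schema class, DSN and row are hypothetical, but freeze, thaw and dclone are the Schema methods the changelog refers to:

  use My::Schema;   # hypothetical schema class

  my $schema = My::Schema->connect('dbi:SQLite:dbname=example.db');
  my $artist = $schema->resultset('Artist')->find(1);

  # Serialize the row object; with the thaw hooks added in r3901 the
  # thawed copy comes back re-attached to $schema.
  my $frozen = $schema->freeze($artist);
  my $thawed = $schema->thaw($frozen);

  # dclone is a freeze/thaw round trip in one call.
  my $clone = $schema->dclone($artist);
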
Modified: DBIx-Class/0.09/trunk/Makefile.PL
===================================================================
--- DBIx-Class/0.09/trunk/Makefile.PL	2008-02-05 23:31:30 UTC (rev 4038)
+++ DBIx-Class/0.09/trunk/Makefile.PL	2008-02-07 11:29:23 UTC (rev 4039)
@@ -16,7 +16,7 @@
 requires 'Module::Find'              => 0;
 requires 'Class::Inspector'          => 0;
 requires 'Class::Accessor::Grouped'  => 0.05002;
-requires 'JSON'                      => 1.00; 
+requires 'JSON::Any'                 => 1.00; 
 requires 'Scope::Guard'              => 0.03;
 
 # Perl 5.8.0 doesn't have utf8::is_utf8()

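The dependency change above goes with the dbicadmin switch mentioned in the log: JSON::Any uses whichever supported JSON backend happens to be installed instead of insisting on JSON.pm. A minimal sketch of the wrapper API, with made-up data (not taken from the patch):

  use JSON::Any;

  my $j    = JSON::Any->new;   # picks an available backend
  my $json = $j->objToJson({ name => 'Fred', admin => 0 });
  my $data = $j->jsonToObj($json);
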
Modified: DBIx-Class/0.09/trunk/lib/DBIx/Class/InflateColumn/DateTime.pm
===================================================================
--- DBIx-Class/0.09/trunk/lib/DBIx/Class/InflateColumn/DateTime.pm	2008-02-05 23:31:30 UTC (rev 4038)
+++ DBIx-Class/0.09/trunk/lib/DBIx/Class/InflateColumn/DateTime.pm	2008-02-07 11:29:23 UTC (rev 4039)
@@ -24,6 +24,12 @@
   print "This event starts the month of ".
     $event->starts_when->month_name();
 
+If you want to set a specific timezone for that field, use:
+
+  __PACKAGE__->add_columns(
+    starts_when => { data_type => 'datetime', extra => { timezone => "America/Chicago" } }
+  );
+
 =head1 DESCRIPTION
 
 This module figures out the type of DateTime::Format::* class to 
@@ -55,6 +61,11 @@
   return unless defined($info->{data_type});
   my $type = lc($info->{data_type});
   $type = 'datetime' if ($type =~ /^timestamp/);
+  my $timezone;
+  if ( exists $info->{extra} and exists $info->{extra}{timezone} and defined $info->{extra}{timezone} ) {
+    $timezone = $info->{extra}{timezone};
+  }
+
   if ($type eq 'datetime' || $type eq 'date') {
     my ($parse, $format) = ("parse_${type}", "format_${type}");
     $self->inflate_column(
@@ -62,10 +73,13 @@
         {
           inflate => sub {
             my ($value, $obj) = @_;
-            $obj->_datetime_parser->$parse($value);
+            my $dt = $obj->_datetime_parser->$parse($value);
+            $dt->set_time_zone($timezone) if $timezone;
+            return $dt;
           },
           deflate => sub {
             my ($value, $obj) = @_;
+            $value->set_time_zone($timezone) if $timezone;
             $obj->_datetime_parser->$format($value);
           },
         }

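Putting the new timezone support together, a result class might declare the column like this. This is an illustrative sketch only; the Event class and column names are hypothetical, while the extra => { timezone => ... } key is the one handled by the hunk above:

  package My::Schema::Event;
  use base 'DBIx::Class';

  __PACKAGE__->load_components(qw/InflateColumn::DateTime Core/);
  __PACKAGE__->table('event');
  __PACKAGE__->add_columns(
    id          => { data_type => 'integer' },
    starts_when => {
      data_type => 'datetime',
      extra     => { timezone => 'America/Chicago' },
    },
  );
  __PACKAGE__->set_primary_key('id');

  # Inflated values now come back with the zone already applied:
  #   print $event->starts_when->time_zone->name;   # America/Chicago
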
Modified: DBIx-Class/0.09/trunk/lib/DBIx/Class/InflateColumn.pm
===================================================================
--- DBIx-Class/0.09/trunk/lib/DBIx/Class/InflateColumn.pm	2008-02-05 23:31:30 UTC (rev 4038)
+++ DBIx-Class/0.09/trunk/lib/DBIx/Class/InflateColumn.pm	2008-02-07 11:29:23 UTC (rev 4039)
@@ -25,13 +25,20 @@
 for the database.
 
 It can be used, for example, to automatically convert to and from
-L<DateTime> objects for your date and time fields.
+L<DateTime> objects for your date and time fields. There's a
+convenience component that does exactly that, though: see
+L<DBIx::Class::InflateColumn::DateTime>.
 
-It will accept arrayrefs, hashrefs and blessed references (objects),
-but not scalarrefs. Scalar references are passed through to the
-database to deal with, to allow such settings as C< \'year + 1'> and
-C< \'DEFAULT' > to work.
+It will handle all types of references except scalar references. It
+will not handle scalar values; these are ignored and thus passed
+through to L<SQL::Abstract>. This is to allow setting raw values to
+"just work". Scalar references are passed through to the database to
+deal with, to allow such settings as C< \'year + 1'> and C< \'DEFAULT' >
+to work.
 
+If you want to filter plain scalar values and replace them with
+something else, contribute a filtering component.
+
 =head1 METHODS
 
 =head2 inflate_column
@@ -57,8 +64,7 @@
 
 The coderefs you set for inflate and deflate are called with two parameters,
 the first is the value of the column to be inflated/deflated, the second is the
-row object itself. Thus you can call C<< ->result_source->schema->storage->dbh >> on
-it, to feed to L<DateTime::Format::DBI>.
+row object itself. Thus you can call C<< ->result_source->schema->storage->dbh >> in your inflate/deflate subs, to feed to L<DateTime::Format::DBI>.
 
 In this example, calls to an event's C<insert_time> accessor return a
 L<DateTime> object. This L<DateTime> object is later "deflated" when

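For completeness, the inflate/deflate coderef interface described in the hunk above looks roughly like this in practice. A sketch only: the column name and the use of DateTime::Format::MySQL are illustrative choices, but the two-argument calling convention (column value, then row object) is the one documented above:

  use DateTime::Format::MySQL;

  __PACKAGE__->inflate_column('insert_time', {
    inflate => sub {
      my ($raw_value, $row) = @_;
      # $row is the row object, so $row->result_source->schema->storage->dbh
      # is reachable here if a DBI-aware formatter is wanted instead.
      return DateTime::Format::MySQL->parse_datetime($raw_value);
    },
    deflate => sub {
      my ($dt, $row) = @_;
      return DateTime::Format::MySQL->format_datetime($dt);
    },
  });
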
Modified: DBIx-Class/0.09/trunk/lib/DBIx/Class/Manual/Cookbook.pod
===================================================================
--- DBIx-Class/0.09/trunk/lib/DBIx/Class/Manual/Cookbook.pod	2008-02-05 23:31:30 UTC (rev 4038)
+++ DBIx-Class/0.09/trunk/lib/DBIx/Class/Manual/Cookbook.pod	2008-02-07 11:29:23 UTC (rev 4039)
@@ -2,12 +2,10 @@
 
 DBIx::Class::Manual::Cookbook - Miscellaneous recipes
 
-=head1 RECIPES
+=head1 SEARCHING
 
-=head2 Searching
+=head2 Paged results
 
-=head3 Paged results
-
 When you expect a large number of results, you can ask L<DBIx::Class> for a
 paged resultset, which will fetch only a defined number of records at a time:
 
@@ -37,7 +35,7 @@
 
   return $rs->pager();
 
-=head3 Complex WHERE clauses
+=head2 Complex WHERE clauses
 
 Sometimes you need to formulate a query using specific operators:
 
@@ -70,7 +68,7 @@
 For more information on generating complex queries, see
 L<SQL::Abstract/WHERE CLAUSES>.
 
-=head3 Arbitrary SQL through a custom ResultSource
+=head2 Arbitrary SQL through a custom ResultSource
 
 Sometimes you have to run arbitrary SQL because your query is too complex
 (e.g. it contains Unions, Sub-Selects, Stored Procedures, etc.) or has to
@@ -120,7 +118,7 @@
   
 ... and you'll get back a perfect L<DBIx::Class::ResultSet>.
 
-=head3 Using specific columns
+=head2 Using specific columns
 
 When you only want specific columns from a table, you can use
 C<columns> to specify which ones you need. This is useful to avoid
@@ -140,7 +138,7 @@
 This is a shortcut for C<select> and C<as>, see below. C<columns>
 cannot be used together with C<select> and C<as>.
 
-=head3 Using database functions or stored procedures
+=head2 Using database functions or stored procedures
 
 The combination of C<select> and C<as> can be used to return the result of a
 database function or stored procedure as a column value. You use C<select> to
@@ -184,7 +182,7 @@
   # Or use DBIx::Class::AccessorGroup:
   __PACKAGE__->mk_group_accessors('column' => 'name_length');
 
-=head3 SELECT DISTINCT with multiple columns
+=head2 SELECT DISTINCT with multiple columns
 
   my $rs = $schema->resultset('Foo')->search(
     {},
@@ -198,7 +196,7 @@
 
   my $count = $rs->next->get_column('count');
 
-=head3 SELECT COUNT(DISTINCT colname)
+=head2 SELECT COUNT(DISTINCT colname)
 
   my $rs = $schema->resultset('Foo')->search(
     {},
@@ -210,7 +208,7 @@
     }
   );
 
-=head3 Grouping results
+=head2 Grouping results
 
 L<DBIx::Class> supports C<GROUP BY> as follows:
 
@@ -233,7 +231,7 @@
 are in any way unsure about the use of the attributes above (C< join
 >, C< select >, C< as > and C< group_by >).
 
-=head3 Predefined searches
+=head2 Predefined searches
 
 You can write your own L<DBIx::Class::ResultSet> class by inheriting from it
 and define often used searches as methods:
@@ -263,7 +261,7 @@
 
    my $ordered_cds = $schema->resultset('CD')->search_cds_ordered();
 
-=head3 Using SQL functions on the left hand side of a comparison
+=head2 Using SQL functions on the left hand side of a comparison
 
 Using SQL functions on the left hand side of a comparison is generally
 not a good idea since it requires a scan of the entire table.  However,
@@ -294,6 +292,8 @@
 
 =end hidden
 
+=head1 JOINS AND PREFETCHING
+
 =head2 Using joins and prefetch
 
 You can use the C<join> attribute to allow searching on, or sorting your
@@ -392,7 +392,7 @@
 definitely use data from a related table. Pre-fetching related tables when you
 only need columns from the main table will make performance worse!
 
-=head3 Multi-step joins
+=head2 Multi-step joins
 
 Sometimes you want to join more than one relationship deep. In this example,
 we want to find all C<Artist> objects who have C<CD>s whose C<LinerNotes>
@@ -473,8 +473,249 @@
   my $tag = $rs->first;
   print $tag->cd->artist->name;
 
-=head2 Columns of data
+=head1 ROW-LEVEL OPERATIONS
 
+=head2 Retrieving a row object's Schema
+
+It is possible to get a Schema object from a row object like so:
+
+  my $schema = $cd->result_source->schema;
+  # use the schema as normal:
+  my $artist_rs = $schema->resultset('Artist'); 
+
+This can be useful when you don't want to pass around a Schema object to every
+method.
+
+=head2 Getting the value of the primary key for the last database insert
+
+AKA getting last_insert_id
+
+If you are using PK::Auto (which is a core component as of 0.07), this is 
+straightforward:
+
+  my $foo = $rs->create(\%blah);
+  # do more stuff
+  my $id = $foo->id; # foo->my_primary_key_field will also work.
+
+If you are not using autoincrementing primary keys, this will probably
+not work, but then you already know the value of the last primary key anyway.
+
+=head2 Stringification
+
+Employ the standard stringification technique by using the C<overload>
+module.
+
+To make an object stringify itself as a single column, use something
+like this (replace C<foo> with the column/method of your choice):
+
+  use overload '""' => sub { shift->name}, fallback => 1;
+
+For more complex stringification, you can use an anonymous subroutine:
+
+  use overload '""' => sub { $_[0]->name . ", " .
+                             $_[0]->address }, fallback => 1;
+
+=head3 Stringification Example
+
+Suppose we have two tables: C<Product> and C<Category>. The table
+specifications are:
+
+  Product(id, Description, category)
+  Category(id, Description)
+
+C<category> is a foreign key into the Category table.
+
+If you have a Product object C<$obj> and write something like
+
+  print $obj->category
+
+things will not work as expected.
+
+To obtain, for example, the category description, you should add this
+method to the class defining the Category table:
+
+  use overload "" => sub {
+      my $self = shift;
+
+      return $self->Description;
+  }, fallback => 1;
+
+=head2 Want to know if find_or_create found or created a row?
+
+Just use C<find_or_new> instead, then check C<in_storage>:
+
+  my $obj = $rs->find_or_new({ blah => 'blarg' });
+  unless ($obj->in_storage) {
+    $obj->insert;
+    # do whatever else you wanted if it was a new row
+  }
+
+=head2 Dynamic Sub-classing DBIx::Class proxy classes 
+
+AKA multi-class object inflation from one table
+ 
+L<DBIx::Class> classes are proxy classes; therefore some different
+techniques need to be employed for more than basic subclassing.  In
+this example we have a single user table that carries a boolean bit
+for admin.  We would like to give the admin users'
+objects (L<DBIx::Class::Row>) the same methods as a regular user, but
+also special admin-only methods.  It doesn't make sense to create two
+separate proxy-class files for this.  We would be copying all the user
+methods into the Admin class.  There is a cleaner way to accomplish
+this.
+
+Overriding the C<inflate_result> method within the User proxy-class
+gives us the effect we want.  This method is called by
+L<DBIx::Class::ResultSet> when inflating a result from storage.  So we
+grab the object being returned, inspect the values we are looking for,
+bless it if it's an admin object, and then return it.  See the example
+below:
+ 
+B<Schema Definition> 
+ 
+    package DB::Schema; 
+     
+    use base qw/DBIx::Class::Schema/; 
+ 
+    __PACKAGE__->load_classes(qw/User/); 
+ 
+ 
+B<Proxy-Class definitions> 
+ 
+    package DB::Schema::User; 
+     
+    use strict; 
+    use warnings; 
+    use base qw/DBIx::Class/; 
+     
+    ### Define what our admin class is for ensure_class_loaded
+    my $admin_class = __PACKAGE__ . '::Admin'; 
+     
+    __PACKAGE__->load_components(qw/Core/); 
+     
+    __PACKAGE__->table('users'); 
+     
+    __PACKAGE__->add_columns(qw/user_id   email    password  
+                                firstname lastname active 
+                                admin/); 
+     
+    __PACKAGE__->set_primary_key('user_id'); 
+     
+    sub inflate_result { 
+        my $self = shift;  
+        my $ret = $self->next::method(@_); 
+        if( $ret->admin ) {### If this is an admin rebless for extra functions  
+            $self->ensure_class_loaded( $admin_class ); 
+            bless $ret, $admin_class; 
+        } 
+        return $ret; 
+    } 
+     
+    sub hello { 
+        print "I am a regular user.\n"; 
+        return ; 
+    } 
+     
+     
+    package DB::Schema::User::Admin; 
+     
+    use strict; 
+    use warnings; 
+    use base qw/DB::Schema::User/; 
+     
+    sub hello 
+    { 
+        print "I am an admin.\n"; 
+        return; 
+    } 
+     
+    sub do_admin_stuff 
+    { 
+        print "I am doing admin stuff\n"; 
+        return ; 
+    } 
+ 
+B<Test File> test.pl 
+ 
+    use warnings; 
+    use strict; 
+    use DB::Schema; 
+     
+    my $user_data = { email    => 'someguy@place.com',
+                      password => 'pass1',  
+                      admin    => 0 }; 
+                           
+    my $admin_data = { email    => 'someadmin@adminplace.com',
+                       password => 'pass2',  
+                       admin    => 1 }; 
+                           
+    my $schema = DB::Schema->connection('dbi:Pg:dbname=test'); 
+     
+    $schema->resultset('User')->create( $user_data ); 
+    $schema->resultset('User')->create( $admin_data ); 
+     
+    ### Now we search for them 
+    my $user = $schema->resultset('User')->single( $user_data ); 
+    my $admin = $schema->resultset('User')->single( $admin_data ); 
+     
+    print ref $user, "\n"; 
+    print ref $admin, "\n"; 
+     
+    print $user->password , "\n"; # pass1 
+    print $admin->password , "\n";# pass2; inherited from User 
+    print $user->hello , "\n";# I am a regular user. 
+    print $admin->hello, "\n";# I am an admin. 
+ 
+    ### The statement below will NOT print 
+    print "I can do admin stuff\n" if $user->can('do_admin_stuff'); 
+    ### The statement below will print 
+    print "I can do admin stuff\n" if $admin->can('do_admin_stuff'); 
+
+=head2 Skip object creation for faster results
+
+DBIx::Class is not built for speed; it's built for convenience and
+ease of use. But sometimes you just need to get the data and skip the
+fancy objects.
+
+To do this, simply use L<DBIx::Class::ResultClass::HashRefInflator>.
+  
+ my $rs = $schema->resultset('CD');
+ 
+ $rs->result_class('DBIx::Class::ResultClass::HashRefInflator');
+ 
+ my $hash_ref = $rs->find(1);
+  
+Wasn't that easy?
+  
+=head2 Get raw data for blindingly fast results
+
+If the L<HashRefInflator|DBIx::Class::ResultClass::HashRefInflator> solution
+above is not fast enough for you, you can use a DBIx::Class cursor to return
+values exactly as they come out of the database with none of the convenience
+methods wrapped round them.
+
+This is used like so:
+
+  my $cursor = $rs->cursor;
+  while (my @vals = $cursor->next) {
+      # use $vals[0..n] here
+  }
+
+You will need to map the array offsets to particular columns (you can
+use the I<select> attribute of C<search()> to force ordering).
+
+=head1 RESULTSET OPERATIONS
+
+=head2 Getting Schema from a ResultSet
+
+To get the schema object from a result set, do the following:
+
+ $rs->result_source->schema
+
+=head2 Getting Columns Of Data
+
+AKA Aggregating Data
+
 If you want to find the sum of a particular column there are several
 ways, the obvious one is to use search:
 
@@ -522,37 +763,70 @@
 Which will of course only work if your database supports this function.
 See L<DBIx::Class::ResultSetColumn> for more documentation.
 
-=head2 Using relationships
+=head1 USING RELATIONSHIPS
 
-=head3 Create a new row in a related table
+=head2 Create a new row in a related table
 
-  my $book->create_related('author', { name => 'Fred'});
+  my $author = $book->create_related('author', { name => 'Fred'});
 
-=head3 Search in a related table
+=head2 Search in a related table
 
 Only searches for books named 'Titanic' by the author in $author.
 
-  my $author->search_related('books', { name => 'Titanic' });
+  my $books_rs = $author->search_related('books', { name => 'Titanic' });
 
-=head3 Delete data in a related table
+=head2 Delete data in a related table
 
 Deletes only the book named Titanic by the author in $author.
 
-  my $author->delete_related('books', { name => 'Titanic' });
+  $author->delete_related('books', { name => 'Titanic' });
 
-=head3 Ordering a relationship result set
+=head2 Ordering a relationship result set
 
 If you always want a relation to be ordered, you can specify this when you 
 create the relationship.
 
-To order C<< $book->pages >> by descending page_number.
+To order C<< $book->pages >> by descending page_number, create the relation
+as follows:
 
-  Book->has_many('pages' => 'Page', 'book', { order_by => \'page_number DESC'} );
+  __PACKAGE__->has_many('pages' => 'Page', 'book', { order_by => \'page_number DESC'} );
 
+=head2 Many-to-many relationships
 
+This is straightforward using L<ManyToMany|DBIx::Class::Relationship/many_to_many>:
 
-=head2 Transactions
+  package My::User;
+  use base 'DBIx::Class';
+  __PACKAGE__->load_components('Core');
+  __PACKAGE__->table('user');
+  __PACKAGE__->add_columns(qw/id name/);
+  __PACKAGE__->set_primary_key('id');
+  __PACKAGE__->has_many('user_address' => 'My::UserAddress', 'user');
+  __PACKAGE__->many_to_many('addresses' => 'user_address', 'address');
 
+  package My::UserAddress;
+  use base 'DBIx::Class';
+  __PACKAGE__->load_components('Core');
+  __PACKAGE__->table('user_address');
+  __PACKAGE__->add_columns(qw/user address/);
+  __PACKAGE__->set_primary_key(qw/user address/);
+  __PACKAGE__->belongs_to('user' => 'My::User');
+  __PACKAGE__->belongs_to('address' => 'My::Address');
+
+  package My::Address;
+  use base 'DBIx::Class';
+  __PACKAGE__->load_components('Core');
+  __PACKAGE__->table('address');
+  __PACKAGE__->add_columns(qw/id street town area_code country/);
+  __PACKAGE__->set_primary_key('id');
+  __PACKAGE__->has_many('user_address' => 'My::UserAddress', 'address');
+  __PACKAGE__->many_to_many('users' => 'user_address', 'user');
+
+  $rs = $user->addresses(); # get all addresses for a user
+  $rs = $address->users(); # get all users for an address
+
+=head1 TRANSACTIONS
+
 As of version 0.04001, there is improved transaction support in
 L<DBIx::Class::Storage> and L<DBIx::Class::Schema>.  Here is an
 example of the recommended way to use it:
@@ -568,7 +842,7 @@
     $genus->add_to_species({ name => 'troglodyte' });
     $genus->wings(2);
     $genus->update;
-    $schema->txn_do($coderef2); # Can have a nested transaction
+    $schema->txn_do($coderef2); # Can have a nested transaction. Only the outer will actually commit
     return $genus->species;
   };
 
@@ -591,119 +865,24 @@
 transactions (for databases that support them) will hopefully be added
 in the future.
 
-=head2 Many-to-many relationships
+=head1 SQL 
 
-This is straightforward using L<ManyToMany|DBIx::Class::Relationship/many_to_many>:
+=head2 Creating Schemas From An Existing Database
 
-  package My::DB;
-  # ... set up connection ...
+L<DBIx::Class::Schema::Loader> will connect to a database and create a 
+L<DBIx::Class::Schema> and associated sources by examining the database.
 
-  package My::User;
-  use base 'My::DB';
-  __PACKAGE__->table('user');
-  __PACKAGE__->add_columns(qw/id name/);
-  __PACKAGE__->set_primary_key('id');
-  __PACKAGE__->has_many('user_address' => 'My::UserAddress', 'user');
-  __PACKAGE__->many_to_many('addresses' => 'user_address', 'address');
+The recommended way of achieving this is to use the
+L<make_schema_at|DBIx::Class::Schema::Loader/make_schema_at> method:
 
-  package My::UserAddress;
-  use base 'My::DB';
-  __PACKAGE__->table('user_address');
-  __PACKAGE__->add_columns(qw/user address/);
-  __PACKAGE__->set_primary_key(qw/user address/);
-  __PACKAGE__->belongs_to('user' => 'My::User');
-  __PACKAGE__->belongs_to('address' => 'My::Address');
+  perl -MDBIx::Class::Schema::Loader=make_schema_at,dump_to_dir:./lib \
+    -e 'make_schema_at("My::Schema", { debug => 1 }, [ "dbi:Pg:dbname=foo","postgres" ])'
 
-  package My::Address;
-  use base 'My::DB';
-  __PACKAGE__->table('address');
-  __PACKAGE__->add_columns(qw/id street town area_code country/);
-  __PACKAGE__->set_primary_key('id');
-  __PACKAGE__->has_many('user_address' => 'My::UserAddress', 'address');
-  __PACKAGE__->many_to_many('users' => 'user_address', 'user');
+This will create a tree of files rooted at C<./lib/My/Schema/> containing
+source definitions for all the tables found in the C<foo> database.
 
-  $rs = $user->addresses(); # get all addresses for a user
-  $rs = $address->users(); # get all users for an address
+=head2 Creating DDL SQL
 
-=head2 Setting default values for a row
-
-It's as simple as overriding the C<new> method.  Note the use of
-C<next::method>.
-
-  sub new {
-    my ( $class, $attrs ) = @_;
-
-    $attrs->{foo} = 'bar' unless defined $attrs->{foo};
-
-    my $new = $class->next::method($attrs);
-
-    return $new;
-  }
-
-For more information about C<next::method>, look in the L<Class::C3> 
-documentation. See also L<DBIx::Class::Manual::Component> for more
-ways to write your own base classes to do this.
-
-People looking for ways to do "triggers" with DBIx::Class are probably
-just looking for this. 
-
-=head2 Stringification
-
-Employ the standard stringification technique by using the C<overload>
-module.
-
-To make an object stringify itself as a single column, use something
-like this (replace C<foo> with the column/method of your choice):
-
-  use overload '""' => sub { shift->name}, fallback => 1;
-
-For more complex stringification, you can use an anonymous subroutine:
-
-  use overload '""' => sub { $_[0]->name . ", " .
-                             $_[0]->address }, fallback => 1;
-
-=head3 Stringification Example
-
-Suppose we have two tables: C<Product> and C<Category>. The table
-specifications are:
-
-  Product(id, Description, category)
-  Category(id, Description)
-
-C<category> is a foreign key into the Category table.
-
-If you have a Product object C<$obj> and write something like
-
-  print $obj->category
-
-things will not work as expected.
-
-To obtain, for example, the category description, you should add this
-method to the class defining the Category table:
-
-  use overload "" => sub {
-      my $self = shift;
-
-      return $self->Description;
-  }, fallback => 1;
-
-=head2 Disconnecting cleanly
-
-If you find yourself quitting an app with Control-C a lot during
-development, you might like to put the following signal handler in
-your main database class to make sure it disconnects cleanly:
-
-  $SIG{INT} = sub {
-    __PACKAGE__->storage->disconnect;
-  };
-
-=head2 Schema import/export
-
-To create a DBIx::Class schema from an existing database, use
-L<DBIx::Class::Schema::Loader>'s C<make_schema_at>:
-
-  perl -MDBIx::Class::Schema::Loader=make_schema_at,dump_to_dir:./lib -e 'make_schema_at("My::Schema", { debug => 1 }, [ "dbi:Pg:dbname=foo","postgres" ])'
-
 The following functionality requires you to have L<SQL::Translator>
 (also known as "SQL Fairy") installed.
 
@@ -712,7 +891,7 @@
  my $schema = My::Schema->connect($dsn);
  $schema->create_ddl_dir(['MySQL', 'SQLite', 'PostgreSQL'],
                         '0.1',
-                        '/dbscriptdir/'
+                        './dbscriptdir/'
                         );
 
 By default this will create schema files in the current directory, for
@@ -729,7 +908,7 @@
 
 To create C<ALTER TABLE> conversion scripts to update a database to a
 newer version of your schema at a later point, first set a new
-$VERSION in your Schema file, then:
+C<$VERSION> in your Schema file, then:
 
  my $schema = My::Schema->connect($dsn);
  $schema->create_ddl_dir(['MySQL', 'SQLite', 'PostgreSQL'],
@@ -743,45 +922,140 @@
 requires that the files for 0.1 as created above are available in the
 given directory to diff against.
 
+=head2 Select from dual
 
-=head2 Easy migration from class-based to schema-based setup
+Dummy tables are needed by some databases to allow calling functions
+or expressions that aren't based on table content. For examples of how
+this applies to various database types, see:
+L<http://troels.arvin.dk/db/rdbms/#other-dummy_table>.
 
-You want to start using the schema-based approach to L<DBIx::Class>
-(see L<SchemaIntro.pod>), but have an established class-based setup with lots
-of existing classes that you don't want to move by hand. Try this nifty script
-instead:
+Note: If you're using Oracle's dual table, don't B<ever> do anything
+other than a select; if you CRUD on your dual table you B<will> break
+your database.
 
-  use MyDB;
-  use SQL::Translator;
-  
-  my $schema = MyDB->schema_instance;
-  
-  my $translator           =  SQL::Translator->new( 
-      debug                => $debug          ||  0,
-      trace                => $trace          ||  0,
-      no_comments          => $no_comments    ||  0,
-      show_warnings        => $show_warnings  ||  0,
-      add_drop_table       => $add_drop_table ||  0,
-      validate             => $validate       ||  0,
-      parser_args          => {
-         'DBIx::Schema'    => $schema,
-                              },
-      producer_args   => {
-          'prefix'         => 'My::Schema',
-                         },
+Make a table class as you would for any other table:
+
+  package MyAppDB::Dual;
+  use strict;
+  use warnings;
+  use base 'DBIx::Class';
+  __PACKAGE__->load_components("Core");
+  __PACKAGE__->table("Dual");
+  __PACKAGE__->add_columns(
+    "dummy",
+    { data_type => "VARCHAR2", is_nullable => 0, size => 1 },
   );
-  
-  $translator->parser('SQL::Translator::Parser::DBIx::Class');
-  $translator->producer('SQL::Translator::Producer::DBIx::Class::File');
-  
-  my $output = $translator->translate(@args) or die
-          "Error: " . $translator->error;
-  
-  print $output;
+ 
+Once you've loaded your table class, select from it using C<select>
+and C<as> instead of C<columns>:
+ 
+  my $rs = $schema->resultset('Dual')->search(undef,
+    { select => [ 'sysdate' ],
+      as     => [ 'now' ]
+    },
+  );
+ 
+All you have to do now is be careful how you access your resultset; the below
+will not work because there is no column called 'now' in the Dual table class:
+ 
+  while (my $dual = $rs->next) {
+    print $dual->now."\n";
+  }
+  # Can't locate object method "now" via package "MyAppDB::Dual" at headshot.pl line 23.
+ 
+You could of course use 'dummy' in C<as> instead of 'now', or C<add_columns> to
+your Dual class for whatever you wanted to select from dual, but that's just
+silly; instead, use C<get_column>:
+ 
+  while (my $dual = $rs->next) {
+    print $dual->get_column('now')."\n";
+  }
+ 
+Or use C<cursor>
+ 
+  my $cursor = $rs->cursor;
+  while (my @vals = $cursor->next) {
+    print $vals[0]."\n";
+  }
+ 
+Or use L<DBIx::Class::ResultClass::HashRefInflator>
+ 
+  $rs->result_class('DBIx::Class::ResultClass::HashRefInflator');
+  while ( my $dual = $rs->next ) {
+    print $dual->{now}."\n";
+  }
+ 
+Here are some example C<select> conditions to illustrate the different syntax
+you could use for doing stuff like 
+C<oracles.heavily(nested(functions_can('take', 'lots'), OF), 'args')>
+ 
+  # get a sequence value
+  select => [ 'A_SEQ.nextval' ],
+ 
+  # get create table sql
+  select => [ { 'dbms_metadata.get_ddl' => [ "'TABLE'", "'ARTIST'" ]} ],
+ 
+  # get a random num between 0 and 100
+  select => [ { "trunc" => [ { "dbms_random.value" => [0,100] } ]} ],
+ 
+  # what year is it?
+  select => [ { 'extract' => [ \'year from sysdate' ] } ],
+ 
+  # do some math
+  select => [ {'round' => [{'cos' => [ \'180 * 3.14159265359/180' ]}]}],
+ 
+  # which day of the week were you born on?
+  select => [{'to_char' => [{'to_date' => [ "'25-DEC-1980'", "'dd-mon-yyyy'" ]}, "'day'"]}],
+ 
+  # select 16 rows from dual
+  select   => [ "'hello'" ],
+  as       => [ 'world' ],
+  group_by => [ 'cube( 1, 2, 3, 4 )' ],
+ 
+ 
 
-You could use L<Module::Find> to search for all subclasses in the MyDB::*
-namespace, which is currently left as an exercise for the reader.
+=head2 Adding Indexes And Functions To Your SQL
 
+Often you will want indexes on columns in your table to speed up searching. To
+do this, create a method called C<sqlt_deploy_hook> in the relevant source 
+class:
+
+ package My::Schema::Artist;
+
+ __PACKAGE__->table('artist');
+ __PACKAGE__->add_columns(id => { ... }, name => { ... });
+
+ sub sqlt_deploy_hook {
+   my ($self, $sqlt_table) = @_;
+
+   $sqlt_table->add_index(name => 'idx_name', fields => ['name']);
+ }
+
+ 1;
+
+Sometimes you might want to change the index depending on the type of the 
+database for which SQL is being generated:
+
+  (my $db_type = $sqlt_table->schema->translator->producer_type)
+    =~ s/^SQL::Translator::Producer:://;
+
+You can also add hooks at the schema level to stop certain tables from
+being created:
+
+ package My::Schema;
+
+ ...
+
+ sub sqlt_deploy_hook {
+   my ($self, $sqlt_schema) = @_;
+
+   $sqlt_schema->drop_table('table_name');
+ }
+
+You could also add views or procedures to the output using 
+L<SQL::Translator::Schema/add_view> or 
+L<SQL::Translator::Schema/add_procedure>.
+
 =head2 Schema versioning
 
 The following example shows simplistically how you might use DBIx::Class to
@@ -811,17 +1085,16 @@
 
 =back
 
-=head3 Create a DBIx::Class schema
+B<Create a DBIx::Class schema>
 
 This can either be done manually, or generated from an existing database as
-described under C<Schema import/export>.
+described under L</Creating Schemas From An Existing Database>.
 
-=head3 Save the schema
+B<Save the schema>
 
-Call L<DBIx::Class::Schema/create_ddl_dir> as above under L<Schema
-import/export>.
+Call L<DBIx::Class::Schema/create_ddl_dir> as above under L</Creating DDL SQL>.
 
-=head3 Deploy to customers
+B<Deploy to customers>
 
 There are several ways you could deploy your schema. These are probably
 beyond the scope of this recipe, but might include:
@@ -839,14 +1112,14 @@
 
 =back
 
-=head3 Modify the schema to change functionality
+B<Modify the schema to change functionality>
 
 As your application evolves, it may be necessary to modify your schema
 to change functionality. Once the changes are made to your schema in
 DBIx::Class, export the modified schema and the conversion scripts as
-in L<Schema import/export>.
+in L</Creating DDL SQL>.
 
-=head3 Deploy update to customers
+B<Deploy update to customers>
 
 Add the L<DBIx::Class::Schema::Versioned> schema component to your
 Schema class. This will add a new table to your database called
@@ -857,6 +1130,36 @@
 Alternatively, you can send the conversion sql scripts to your
 customers as above.
 
+=head2 Setting quoting for the generated SQL. 
+
+If the database contains column names with spaces and/or reserved words, they
+need to be quoted in the SQL queries. This is done using:
+
+ __PACKAGE__->storage->sql_maker->quote_char([ qw/[ ]/] );
+ __PACKAGE__->storage->sql_maker->name_sep('.');
+
+The first call sets the quote characters, either a pair of matching
+brackets or a C<"> or C<'>:
+
+ __PACKAGE__->storage->sql_maker->quote_char('"');
+
+Check the documentation of your database for the correct quote
+characters to use. C<name_sep> needs to be set to allow the SQL
+generator to put the quotes in the correct place.
+
+In most cases you should set these as part of the arguments passed to 
+L<DBIx::Class::Schema/connect>:
+
+ my $schema = My::Schema->connect(
+  'dbi:mysql:my_db',
+  'db_user',
+  'db_password',
+  {
+    quote_char => '"',
+    name_sep   => '.'
+  }
+ );
+
 =head2 Setting limit dialect for SQL::Abstract::Limit
 
 In some cases, SQL::Abstract::Limit cannot determine the dialect of
@@ -872,24 +1175,50 @@
 The JDBC bridge is one way of getting access to a MSSQL server from a platform
 that Microsoft doesn't deliver native client libraries for. (e.g. Linux)
 
-=head2 Setting quoting for the generated SQL. 
+The limit dialect can also be set at connect time by specifying a 
+C<limit_dialect> key in the final hash as shown above.
 
-If the database contains column names with spaces and/or reserved words, they
-need to be quoted in the SQL queries. This is done using:
+=head1 BOOTSTRAPPING/MIGRATING 
 
-  __PACKAGE__->storage->sql_maker->quote_char([ qw/[ ]/] );
-  __PACKAGE__->storage->sql_maker->name_sep('.');
+=head2 Easy migration from class-based to schema-based setup
 
-The first sets the quote characters. Either a pair of matching
-brackets, or a C<"> or C<'>:
+You want to start using the schema-based approach to L<DBIx::Class>
+(see L<SchemaIntro.pod>), but have an established class-based setup with lots
+of existing classes that you don't want to move by hand. Try this nifty script
+instead:
+
+  use MyDB;
+  use SQL::Translator;
   
-  __PACKAGE__->storage->sql_maker->quote_char('"');
+  my $schema = MyDB->schema_instance;
+  
+  my $translator           =  SQL::Translator->new( 
+      debug                => $debug          ||  0,
+      trace                => $trace          ||  0,
+      no_comments          => $no_comments    ||  0,
+      show_warnings        => $show_warnings  ||  0,
+      add_drop_table       => $add_drop_table ||  0,
+      validate             => $validate       ||  0,
+      parser_args          => {
+         'DBIx::Schema'    => $schema,
+                              },
+      producer_args   => {
+          'prefix'         => 'My::Schema',
+                         },
+  );
+  
+  $translator->parser('SQL::Translator::Parser::DBIx::Class');
+  $translator->producer('SQL::Translator::Producer::DBIx::Class::File');
+  
+  my $output = $translator->translate(@args) or die
+          "Error: " . $translator->error;
+  
+  print $output;
 
-Check the documentation of your database for the correct quote
-characters to use. C<name_sep> needs to be set to allow the SQL
-generator to put the quotes the correct place.
+You could use L<Module::Find> to search for all subclasses in the MyDB::*
+namespace, which is currently left as an exercise for the reader.
 
-=head2 Overloading methods
+=head1 OVERLOADING METHODS
 
 L<DBIx::Class> uses the L<Class::C3> package, which provides for redispatch of
 method calls, useful for things like default values and triggers. You have to
@@ -897,8 +1226,30 @@
 L<Class::C3> with L<DBIx::Class> can be found in
 L<DBIx::Class::Manual::Component>.
 
-=head3 Changing one field whenever another changes
+=head2 Setting default values for a row
 
+It's as simple as overriding the C<new> method.  Note the use of
+C<next::method>.
+
+  sub new {
+    my ( $class, $attrs ) = @_;
+
+    $attrs->{foo} = 'bar' unless defined $attrs->{foo};
+
+    my $new = $class->next::method($attrs);
+
+    return $new;
+  }
+
+For more information about C<next::method>, look in the L<Class::C3> 
+documentation. See also L<DBIx::Class::Manual::Component> for more
+ways to write your own base classes to do this.
+
+People looking for ways to do "triggers" with DBIx::Class are probably
+just looking for this. 
+
+=head2 Changing one field whenever another changes
+
 For example, say that you have three columns, C<id>, C<number>, and 
 C<squared>.  You would like to make changes to C<number> and have
 C<squared> be automagically set to the value of C<number> squared.
@@ -915,7 +1266,7 @@
 Note that the hard work is done by the call to C<next::method>, which
 redispatches your call to store_column in the superclass(es).
 
-=head3 Automatically creating related objects
+=head2 Automatically creating related objects
 
 You might have a class C<Artist> which has many C<CD>s.  Further, if you
 want to create a C<CD> object every time you insert an C<Artist> object.
@@ -931,8 +1282,51 @@
 where C<fill_from_artist> is a method you specify in C<CD> which sets
 values in C<CD> based on the data in the C<Artist> object you pass in.
 
-=head2 Debugging DBIx::Class objects with Data::Dumper
+=head2 Wrapping/overloading a column accessor
 
+B<Problem:>
+
+Say you have a table "Camera" and want to associate a description
+with each camera. For most cameras, you'll be able to generate the description from
+the other columns. However, in a few special cases you may want to associate a
+custom description with a camera.
+
+B<Solution:>
+
+In your database schema, define a description field in the "Camera" table that
+can contain text and null values.
+
+In DBIC, we'll overload the column accessor to provide a sane default if no
+custom description is defined. The accessor will either return or generate the
+description, depending on whether the field is null or not.
+
+First, in your "Camera" schema class, define the description field as follows:
+
+  __PACKAGE__->add_columns(description => { accessor => '_description' });
+
+Next, we'll define the accessor-wrapper subroutine:
+
+  sub description {
+      my $self = shift;
+
+      # If there is an update to the column, we'll let the original accessor
+      # deal with it.
+      return $self->_description(@_) if @_;
+
+      # Fetch the column value.
+      my $description = $self->_description;
+
+      # If there's something in the description field, then just return that.
+      return $description if defined $description && length $description;
+
+      # Otherwise, generate a description.
+      return $self->generate_description;
+  }
+
+=head1 DEBUGGING AND PROFILING
+
+=head2 DBIx::Class objects with Data::Dumper
+
 L<Data::Dumper> can be a very useful tool for debugging, but sometimes it can
 be hard to find the pertinent data in all the data it can generate.
 Specifically, if one naively tries to use it like so,
@@ -975,17 +1369,6 @@
 will automagically clean up your data before printing it. See
 L<Data::Dumper/EXAMPLES> for more information.
 
-=head2 Retrieving a row object's Schema
-
-It is possible to get a Schema object from a row object like so:
-
-  my $schema = $cd->result_source->schema;
-  # use the schema as normal:
-  my $artist_rs = $schema->resultset('Artist'); 
-
-This can be useful when you don't want to pass around a Schema object to every
-method.
-
 =head2 Profiling
 
 When you enable L<DBIx::Class::Storage>'s debugging it prints the SQL
@@ -1048,218 +1431,5 @@
 statement and dig down to see if certain parameters cause aberrant behavior.
 You might want to check out L<DBIx::Class::QueryLog> as well.
 
-=head2 Getting the value of the primary key for the last database insert
 
-AKA getting last_insert_id
-
-If you are using PK::Auto, this is straightforward:
-
-  my $foo = $rs->create(\%blah);
-  # do more stuff
-  my $id = $foo->id; # foo->my_primary_key_field will also work.
-
-If you are not using autoincrementing primary keys, this will probably
-not work, but then you already know the value of the last primary key anyway.
-
-=head2 Dynamic Sub-classing DBIx::Class proxy classes 
-(AKA multi-class object inflation from one table) 
- 
-L<DBIx::Class> classes are proxy classes, therefore some different
-techniques need to be employed for more than basic subclassing.  In
-this example we have a single user table that carries a boolean bit
-for admin.  We would like like to give the admin users
-objects(L<DBIx::Class::Row>) the same methods as a regular user but
-also special admin only methods.  It doesn't make sense to create two
-seperate proxy-class files for this.  We would be copying all the user
-methods into the Admin class.  There is a cleaner way to accomplish
-this.
-
-Overriding the C<inflate_result> method within the User proxy-class
-gives us the effect we want.  This method is called by
-L<DBIx::Class::ResultSet> when inflating a result from storage.  So we
-grab the object being returned, inspect the values we are looking for,
-bless it if it's an admin object, and then return it.  See the example
-below:
- 
-B<Schema Definition> 
- 
-    package DB::Schema; 
-     
-    use base qw/DBIx::Class::Schema/; 
- 
-    __PACKAGE__->load_classes(qw/User/); 
- 
- 
-B<Proxy-Class definitions> 
- 
-    package DB::Schema::User; 
-     
-    use strict; 
-    use warnings; 
-    use base qw/DBIx::Class/; 
-     
-    ### Defined what our admin class is for ensure_class_loaded 
-    my $admin_class = __PACKAGE__ . '::Admin'; 
-     
-    __PACKAGE__->load_components(qw/Core/); 
-     
-    __PACKAGE__->table('users'); 
-     
-    __PACKAGE__->add_columns(qw/user_id   email    password  
-                                firstname lastname active 
-                                admin/); 
-     
-    __PACKAGE__->set_primary_key('user_id'); 
-     
-    sub inflate_result { 
-        my $self = shift;  
-        my $ret = $self->next::method(@_); 
-        if( $ret->admin ) {### If this is an admin rebless for extra functions  
-            $self->ensure_class_loaded( $admin_class ); 
-            bless $ret, $admin_class; 
-        } 
-        return $ret; 
-    } 
-     
-    sub hello { 
-        print "I am a regular user.\n"; 
-        return ; 
-    } 
-     
-     
-    package DB::Schema::User::Admin; 
-     
-    use strict; 
-    use warnings; 
-    use base qw/DB::Schema::User/; 
-     
-    sub hello 
-    { 
-        print "I am an admin.\n"; 
-        return; 
-    } 
-     
-    sub do_admin_stuff 
-    { 
-        print "I am doing admin stuff\n"; 
-        return ; 
-    } 
- 
-B<Test File> test.pl 
- 
-    use warnings; 
-    use strict; 
-    use DB::Schema; 
-     
-    my $user_data = { email    => 'someguy at place.com',  
-                      password => 'pass1',  
-                      admin    => 0 }; 
-                           
-    my $admin_data = { email    => 'someadmin at adminplace.com',  
-                       password => 'pass2',  
-                       admin    => 1 }; 
-                           
-    my $schema = DB::Schema->connection('dbi:Pg:dbname=test'); 
-     
-    $schema->resultset('User')->create( $user_data ); 
-    $schema->resultset('User')->create( $admin_data ); 
-     
-    ### Now we search for them 
-    my $user = $schema->resultset('User')->single( $user_data ); 
-    my $admin = $schema->resultset('User')->single( $admin_data ); 
-     
-    print ref $user, "\n"; 
-    print ref $admin, "\n"; 
-     
-    print $user->password , "\n"; # pass1 
-    print $admin->password , "\n";# pass2; inherited from User 
-    print $user->hello , "\n";# I am a regular user. 
-    print $admin->hello, "\n";# I am an admin. 
- 
-    ### The statement below will NOT print 
-    print "I can do admin stuff\n" if $user->can('do_admin_stuff'); 
-    ### The statement below will print 
-    print "I can do admin stuff\n" if $admin->can('do_admin_stuff'); 
-
-=head2 Skip object creation for faster results
-
-DBIx::Class is not built for speed; it's built for convenience and
-ease of use.  But sometimes you just need to get the data and skip the
-fancy objects.
-  
-To do this simply use L<DBIx::Class::ResultClass::HashRefInflator>.
-  
- my $rs = $schema->resultset('CD');
- 
- $rs->result_class('DBIx::Class::ResultClass::HashRefInflator');
- 
- my $hash_ref = $rs->find(1);
-  
-Wasn't that easy?
-  
-=head2 Get raw data for blindingly fast results
-
-If the C<inflate_result> solution above is not fast enough for you, you
-can use a DBIx::Class cursor to return values exactly as they come out of
-the database with none of the convenience methods wrapped around them.
-
-This is used like so:
-
-  my $cursor = $rs->cursor;
-  while (my @vals = $cursor->next) {
-      # use $vals[0..n] here
-  }
-
-You will need to map the array offsets to particular columns (you can
-use the I<select> attribute of C<search()> to force ordering).
-
-=head2 Want to know if find_or_create found or created a row?
-
-Just use C<find_or_new> instead, then check C<in_storage>:
-
-  my $obj = $rs->find_or_new({ blah => 'blarg' });
-  unless ($obj->in_storage) {
-    $obj->insert;
-    # do whatever else you wanted if it was a new row
-  }
-
-=head2 Wrapping/overloading a column accessor
-
-Problem: Say you have a table "Camera" and want to associate a description
-with each camera. For most cameras, you'll be able to generate the description from
-the other columns. However, in a few special cases you may want to associate a
-custom description with a camera.
-
-Solution:
-
-In your database schema, define a description field in the "Camera" table that
-can contain text and null values.
-
-In DBIC, we'll overload the column accessor to provide a sane default if no
-custom description is defined. The accessor will either return or generate the
-description, depending on whether the field is null or not.
-
-First, in your "Camera" schema class, define the description field as follows:
-
-  __PACKAGE__->add_columns(description => { accessor => '_description' });
-
-Next, we'll define the accessor-wrapper subroutine:
-
-  sub description {
-      my $self = shift;
-
-      # If there is an update to the column, we'll let the original accessor
-      # deal with it.
-      return $self->_description(@_) if @_;
-
-      # Fetch the column value.
-      my $description = $self->_description;
-
-      # If there's something in the description field, then just return that.
-      return $description if defined $description && length $description;
-
-      # Otherwise, generate a description.
-      return $self->generate_description;
-  }
-
 =cut

Modified: DBIx-Class/0.09/trunk/lib/DBIx/Class/Manual/FAQ.pod
===================================================================
--- DBIx-Class/0.09/trunk/lib/DBIx/Class/Manual/FAQ.pod	2008-02-05 23:31:30 UTC (rev 4038)
+++ DBIx-Class/0.09/trunk/lib/DBIx/Class/Manual/FAQ.pod	2008-02-07 11:29:23 UTC (rev 4039)
@@ -419,6 +419,17 @@
 search again or relationship accessors. The SQL query is only run when
 you ask the resultset for an actual row object.
 
+=item How do I deal with tables that lack a primary key?
+
+If your table lacks a primary key, DBIx::Class can't work out which row
+it should operate on, for example to delete or update.  However, a
+UNIQUE constraint on one or more columns allows DBIx::Class to uniquely
+identify the row, so you can tell L<DBIx::Class::ResultSource> these
+columns act as a primary key, even if they don't from the database's
+point of view.  In your table class:
+
+ __PACKAGE__->set_primary_key(@columns);
+
 =back
 
 =head2 Notes for CDBI users

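For a concrete illustration of the FAQ entry added above, here is a minimal
sketch of declaring a surrogate primary key in a table class (the class, table
and column names below are hypothetical, not part of this patch):

    package MyApp::Schema::SessionLog;
    use base qw/DBIx::Class/;

    __PACKAGE__->load_components(qw/Core/);
    __PACKAGE__->table('session_log');
    __PACKAGE__->add_columns(qw/session_id logged_at message/);

    # The table has no real primary key, but (session_id, logged_at) is
    # covered by a UNIQUE constraint, so declare those columns as the
    # primary key from DBIx::Class's point of view:
    __PACKAGE__->set_primary_key(qw/session_id logged_at/);
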
Modified: DBIx-Class/0.09/trunk/lib/DBIx/Class/Manual/Intro.pod
===================================================================
--- DBIx-Class/0.09/trunk/lib/DBIx/Class/Manual/Intro.pod	2008-02-05 23:31:30 UTC (rev 4038)
+++ DBIx-Class/0.09/trunk/lib/DBIx/Class/Manual/Intro.pod	2008-02-07 11:29:23 UTC (rev 4039)
@@ -373,6 +373,16 @@
 For a complete overview of the available attributes, see
 L<DBIx::Class::ResultSet/ATTRIBUTES>.
 
+=head1 NOTES
+
+=head2 Problems on RHEL5/CentOS5
+
+There is a problem with slow performance of certain DBIx::Class operations in
+perl-5.8.8-10 and later on RedHat and related systems, due to a bad backport of
+a "use overload" related bug.  The problem is in the Perl binary itself, not in
+DBIx::Class.  If your system has this problem, you will see a warning on
+startup, with some options as to what to do about it.
+
 =head1 SEE ALSO
 
 =over 4

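If the warning described above is expected and cannot be fixed locally, the
startup check introduced in this commit honours an environment variable; a
minimal sketch of silencing it from Perl before DBIx::Class is loaded:

    # relies on the DBIC_NO_WARN_BAD_PERL check added in StartupCheck.pm below
    BEGIN { $ENV{DBIC_NO_WARN_BAD_PERL} = 1 }
    use DBIx::Class;

Setting the variable in the shell environment works just as well.
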
Modified: DBIx-Class/0.09/trunk/lib/DBIx/Class/Manual/Joining.pod
===================================================================
--- DBIx-Class/0.09/trunk/lib/DBIx/Class/Manual/Joining.pod	2008-02-05 23:31:30 UTC (rev 4038)
+++ DBIx-Class/0.09/trunk/lib/DBIx/Class/Manual/Joining.pod	2008-02-07 11:29:23 UTC (rev 4039)
@@ -165,7 +165,7 @@
 
   join => [ 'room', 'room' ]
 
-The aliases are: C<room_1> and C<room_2>.
+The aliases are: C<room> and C<room_2>.
 
 =cut
 

Modified: DBIx-Class/0.09/trunk/lib/DBIx/Class/Relationship/BelongsTo.pm
===================================================================
--- DBIx-Class/0.09/trunk/lib/DBIx/Class/Relationship/BelongsTo.pm	2008-02-05 23:31:30 UTC (rev 4038)
+++ DBIx-Class/0.09/trunk/lib/DBIx/Class/Relationship/BelongsTo.pm	2008-02-07 11:29:23 UTC (rev 4039)
@@ -41,20 +41,25 @@
     );
   }
   # explicit join condition
-  elsif (ref $cond eq 'HASH') {
-    my $cond_rel;
-    for (keys %$cond) {
-      if (m/\./) { # Explicit join condition
-        $cond_rel = $cond;
-        last;
+  elsif (ref $cond) {
+    if (ref $cond eq 'HASH') { # ARRAY is also valid
+      my $cond_rel;
+      for (keys %$cond) {
+        if (m/\./) { # Explicit join condition
+          $cond_rel = $cond;
+          last;
+        }
+        $cond_rel->{"foreign.$_"} = "self.".$cond->{$_};
       }
-      $cond_rel->{"foreign.$_"} = "self.".$cond->{$_};
+      $cond = $cond_rel;
     }
-    my $acc_type = (keys %$cond_rel == 1 and $class->has_column($rel))
-      ? 'filter'
-      : 'single';
+    my $acc_type = ((ref $cond eq 'HASH')
+                       && keys %$cond == 1
+                       && $class->has_column($rel))
+                     ? 'filter'
+                     : 'single';
     $class->add_relationship($rel, $f_class,
-      $cond_rel,
+      $cond,
       { accessor => $acc_type, %{$attrs || {}} }
     );
   }

Modified: DBIx-Class/0.09/trunk/lib/DBIx/Class/ResultClass/HashRefInflator.pm
===================================================================
--- DBIx-Class/0.09/trunk/lib/DBIx/Class/ResultClass/HashRefInflator.pm	2008-02-05 23:31:30 UTC (rev 4038)
+++ DBIx-Class/0.09/trunk/lib/DBIx/Class/ResultClass/HashRefInflator.pm	2008-02-07 11:29:23 UTC (rev 4039)
@@ -1,5 +1,8 @@
 package DBIx::Class::ResultClass::HashRefInflator;
 
+use strict;
+use warnings;
+
 =head1 NAME
 
 DBIx::Class::ResultClass::HashRefInflator
@@ -61,7 +64,7 @@
     # related sources.
 
     # to avoid empty has_many rels containing one empty hashref
-    return if (not keys %$me);
+    return undef if (not keys %$me);
 
     my $def;
 
@@ -71,13 +74,14 @@
             last;
         }
     }
-    return unless $def;
+    return undef unless $def;
 
     return { %$me,
         map {
           ( $_ =>
-             ref($rest->{$_}[0]) eq 'ARRAY' ? [ map { mk_hash(@$_) } @{$rest->{$_}} ]
-                                            : mk_hash( @{$rest->{$_}} )
+             ref($rest->{$_}[0]) eq 'ARRAY'
+                 ? [ grep defined, map mk_hash(@$_), @{$rest->{$_}} ]
+                 : mk_hash( @{$rest->{$_}} )
           )
         } keys %$rest
     };

Modified: DBIx-Class/0.09/trunk/lib/DBIx/Class/ResultSet.pm
===================================================================
--- DBIx-Class/0.09/trunk/lib/DBIx/Class/ResultSet.pm	2008-02-05 23:31:30 UTC (rev 4038)
+++ DBIx-Class/0.09/trunk/lib/DBIx/Class/ResultSet.pm	2008-02-07 11:29:23 UTC (rev 4039)
@@ -11,6 +11,7 @@
 use Storable;
 use DBIx::Class::ResultSetColumn;
 use DBIx::Class::ResultSourceHandle;
+use List::Util ();
 use base qw/DBIx::Class/;
 
 __PACKAGE__->mk_group_accessors('simple' => qw/result_class _source_handle/);
@@ -95,6 +96,8 @@
 
   $attrs->{alias} ||= 'me';
 
+  # Creation of {} and bless separated to mitigate RH perl bug
+  # see https://bugzilla.redhat.com/show_bug.cgi?id=196836
   my $self = {
     _source_handle => $source,
     result_class => $attrs->{result_class} || $source->resolve->result_class,
@@ -138,6 +141,8 @@
 L<Searching|DBIx::Class::Manual::Cookbook/Searching>. For a complete
 documentation for the first argument, see L<SQL::Abstract>.
 
+For more help on using joins with search, see L<DBIx::Class::Manual::Joining>.
+
 =cut
 
 sub search {
@@ -164,18 +169,26 @@
 sub search_rs {
   my $self = shift;
 
-  my $rows;
-
-  unless (@_) {                 # no search, effectively just a clone
-    $rows = $self->get_cache;
-  }
-
   my $attrs = {};
   $attrs = pop(@_) if @_ > 1 and ref $_[$#_] eq 'HASH';
   my $our_attrs = { %{$self->{attrs}} };
   my $having = delete $our_attrs->{having};
   my $where = delete $our_attrs->{where};
 
+  my $rows;
+
+  my %safe = (alias => 1, cache => 1);
+
+  unless (
+    (@_ && defined($_[0])) # @_ == () or (undef)
+    || 
+    (keys %$attrs # empty attrs or only 'safe' attrs
+    && List::Util::first { !$safe{$_} } keys %$attrs)
+  ) {
+    # no search, effectively just a clone
+    $rows = $self->get_cache;
+  }
+
   my $new_attrs = { %{$our_attrs}, %{$attrs} };
 
   # merge new attrs into inherited
@@ -268,7 +281,7 @@
 only be used in that context. There are known problems using C<search_literal>
 in chained queries; it can result in bind values in the wrong order.  See
 L<DBIx::Class::Manual::Cookbook/Searching> and
-L<DBIx::Class::Manual::FAQ/Searching> for seaching techniques that do not
+L<DBIx::Class::Manual::FAQ/Searching> for searching techniques that do not
 require C<search_literal>.
 
 =cut
@@ -925,7 +938,7 @@
 with to find the number of elements. If passed arguments, does a search
 on the resultset and counts the results of that.
 
-Note: When using C<count> with C<group_by>, L<DBIX::Class> emulates C<GROUP BY>
+Note: When using C<count> with C<group_by>, L<DBIx::Class> emulates C<GROUP BY>
 using C<COUNT( DISTINCT( columns ) )>. Some databases (notably SQLite) do
 not support C<DISTINCT> with multiple columns. If you are using such a
 database, you should only use columns from the main table in your C<group_by>
@@ -1559,7 +1572,7 @@
 
 =item Arguments: \%vals
 
-=item Return Value: $object
+=item Return Value: a L<DBIx::Class::Row> $object
 
 =back
 
@@ -2074,10 +2087,10 @@
       $position++;
     }
     my ($b_key) = ( ref $b_element eq 'HASH' ) ? keys %{$b_element} : ($b_element);
+
     if ($best_candidate->{score} == 0 || exists $seen_keys->{$b_key}) {
       push( @{$a}, $b_element );
     } else {
-      $seen_keys->{$b_key} = 1; # don't merge the same key twice
       my $a_best = $a->[$best_candidate->{position}];
       # merge a_best and b_element together and replace original with merged
       if (ref $a_best ne 'HASH') {
@@ -2087,6 +2100,7 @@
         $a->[$best_candidate->{position}] = { $key => $self->_merge_attr($a_best->{$key}, $b_element->{$key}) };
       }
     }
+    $seen_keys->{$b_key} = 1; # don't merge the same key twice
   }
 
   return $a;
@@ -2199,7 +2213,7 @@
 =over 4
 
 Indicates additional columns to be selected from storage.  Works the same as
-L<select> but adds columns to the selection.
+L</select> but adds columns to the selection.
 
 =back
 
@@ -2207,7 +2221,7 @@
 
 =over 4
 
-Indicates additional column names for those added via L<+select>.
+Indicates additional column names for those added via L</+select>.
 
 =back
 
@@ -2328,6 +2342,8 @@
 If you want to fetch related objects from other tables as well, see C<prefetch>
 below.
 
+For more help on using joins with search, see L<DBIx::Class::Manual::Joining>.
+
 =head2 prefetch
 
 =over 4

Modified: DBIx-Class/0.09/trunk/lib/DBIx/Class/ResultSource.pm
===================================================================
--- DBIx-Class/0.09/trunk/lib/DBIx/Class/ResultSource.pm	2008-02-05 23:31:30 UTC (rev 4038)
+++ DBIx-Class/0.09/trunk/lib/DBIx/Class/ResultSource.pm	2008-02-07 11:29:23 UTC (rev 4039)
@@ -640,7 +640,7 @@
     my $otherrel_info = $othertable->relationship_info($otherrel);
 
     my $back = $othertable->related_source($otherrel);
-    next unless $back->name eq $self->name;
+    next unless $back->source_name eq $self->source_name;
 
     my @othertestconds;
 
@@ -980,7 +980,9 @@
 
 Set the class of the resultset, this is useful if you want to create your
 own resultset methods. Create your own class derived from
-L<DBIx::Class::ResultSet>, and set it here. 
+L<DBIx::Class::ResultSet>, and set it here. If called with no arguments,
+this method returns the name of the existing resultset class, if one
+exists.
 
 =head2 resultset_attributes
 
@@ -1057,6 +1059,15 @@
   }
 }
 
+=head2 sqlt_deploy_hook($sqlt_table)
+
+An optional sub which you can declare in your own table (result) class that
+will get passed the L<SQL::Translator::Schema::Table> object when you deploy
+the schema via L</create_ddl_dir> or L</deploy>.
+
+For an example of what you can do with this, see 
+L<DBIx::Class::Manual::Cookbook/Adding Indexes And Functions To Your SQL>.
+
 =head1 AUTHORS
 
 Matt S. Trout <mst at shadowcatsystems.co.uk>

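As an illustration of the table-level hook documented above, here is a sketch
along the lines of the one this commit adds to DBICTest::Schema::Artist (the
index name is hypothetical):

    sub sqlt_deploy_hook {
      my ($self, $sqlt_table) = @_;

      # add an index on the 'name' column whenever DDL is generated
      $sqlt_table->add_index( name => 'artist_name_idx', fields => ['name'] )
        or die $sqlt_table->error;
    }
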
Modified: DBIx-Class/0.09/trunk/lib/DBIx/Class/ResultSourceHandle.pm
===================================================================
--- DBIx-Class/0.09/trunk/lib/DBIx/Class/ResultSourceHandle.pm	2008-02-05 23:31:30 UTC (rev 4038)
+++ DBIx-Class/0.09/trunk/lib/DBIx/Class/ResultSourceHandle.pm	2008-02-07 11:29:23 UTC (rev 4039)
@@ -7,11 +7,16 @@
 use base qw/DBIx::Class/;
 
 use overload
+    # on some RH perls the following line causes a serious performance problem
+    # see https://bugzilla.redhat.com/show_bug.cgi?id=196836
     q/""/ => sub { __PACKAGE__ . ":" . shift->source_moniker; },
     fallback => 1;
 
 __PACKAGE__->mk_group_accessors('simple' => qw/schema source_moniker/);
 
+# Schema to use when thawing.
+our $thaw_schema;
+
 =head1 NAME
 
 DBIx::Class::ResultSourceHandle
@@ -69,20 +74,32 @@
 
 sub STORABLE_freeze {
     my ($self, $cloning) = @_;
+
     my $to_serialize = { %$self };
+    
     delete $to_serialize->{schema};
     return (Storable::freeze($to_serialize));
 }
 
 =head2 STORABLE_thaw
 
-Thaws frozen handle.
+Thaws frozen handle. Resets the internal schema reference to the package
+variable C<$thaw_schema>. The recommended way of setting this is to use
+C<< $schema->thaw($ice) >>, which handles it for you.
 
 =cut
 
+
 sub STORABLE_thaw {
     my ($self, $cloning,$ice) = @_;
     %$self = %{ Storable::thaw($ice) };
+    $self->{schema} = $thaw_schema;
 }
 
+=head1 AUTHOR
+
+Ash Berlin C<< <ash at cpan.org> >>
+
+=cut
+
 1;

Modified: DBIx-Class/0.09/trunk/lib/DBIx/Class/Row.pm
===================================================================
--- DBIx-Class/0.09/trunk/lib/DBIx/Class/Row.pm	2008-02-05 23:31:30 UTC (rev 4038)
+++ DBIx-Class/0.09/trunk/lib/DBIx/Class/Row.pm	2008-02-07 11:29:23 UTC (rev 4039)
@@ -116,7 +116,6 @@
           next;
         }
       }
-      use Data::Dumper;
       $new->throw_exception("No such column $key on $class")
         unless $class->has_column($key);
       $new->store_column($key => $attrs->{$key});          
@@ -243,7 +242,7 @@
         my $reverse = $source->reverse_relationship_info($relname);
         foreach my $obj (@cands) {
           $obj->set_from_related($_, $self) for keys %$reverse;
-          $obj->insert() if(!$obj->in_storage);
+          $obj->insert() unless ($obj->in_storage || $obj->result_source->resultset->search({$obj->get_columns})->count);
         }
       }
     }
@@ -282,9 +281,10 @@
 required.
 
 Also takes an options hashref of C<< column_name => value >> pairs to update
-first. But be aware that this hashref might be edited in place, so dont rely on
-it being the same after a call to C<update>. If you need to preserve the hashref,
-it is sufficient to pass a shallow copy to C<update>, e.g. ( { %{ $href } } )
+first. But be aware that the hashref will be passed to
+C<set_inflated_columns>, which might edit it in place, so don't rely on it
+being the same after a call to C<update>.  If you need to preserve the hashref,
+it is sufficient to pass a shallow copy to C<update>, e.g. C<( { %{ $href } } )>.
 
 =cut
 
@@ -295,38 +295,7 @@
   $self->throw_exception("Cannot safely update a row in a PK-less table")
     if ! keys %$ident_cond;
 
-  if ($upd) {
-    foreach my $key (keys %$upd) {
-      if (ref $upd->{$key}) {
-        my $info = $self->relationship_info($key);
-        if ($info && $info->{attrs}{accessor}
-          && $info->{attrs}{accessor} eq 'single')
-        {
-          my $rel = delete $upd->{$key};
-          $self->set_from_related($key => $rel);
-          $self->{_relationship_data}{$key} = $rel;          
-        } elsif ($info && $info->{attrs}{accessor}
-            && $info->{attrs}{accessor} eq 'multi'
-            && ref $upd->{$key} eq 'ARRAY') {
-            my $others = delete $upd->{$key};
-            foreach my $rel_obj (@$others) {
-              if(!Scalar::Util::blessed($rel_obj)) {
-                $rel_obj = $self->create_related($key, $rel_obj);
-              }
-            }
-            $self->{_relationship_data}{$key} = $others; 
-#            $related->{$key} = $others;
-            next;
-        }
-        elsif ($self->has_column($key)
-          && exists $self->column_info($key)->{_inflate_info})
-        {
-          $self->set_inflated_column($key, delete $upd->{$key});          
-        }
-      }
-    }
-    $self->set_columns($upd);    
-  }
+  $self->set_inflated_columns($upd) if $upd;
   my %to_update = $self->get_dirty_columns;
   return $self unless keys %to_update;
   my $rows = $self->result_source->storage->update(
@@ -353,7 +322,7 @@
 reinserted using C<< ->insert() >> before C<< ->update() >> can be used
 on it. If you delete an object in a class with a C<has_many>
 relationship, all the related objects will be deleted as well. To turn
-this behavior off, pass C<cascade_delete => 0> in the C<$attr>
+this behavior off, pass C<< cascade_delete => 0 >> in the C<$attr>
 hashref. Any database-level cascade or restrict will take precedence
 over a DBIx-Class-based cascading delete. See also L<DBIx::Class::ResultSet/delete>.
 
@@ -509,6 +478,52 @@
   return $self;
 }
 
+=head2 set_inflated_columns
+
+  $row->set_inflated_columns({ $col => $val, $rel => $obj, ... });
+
+Sets more than one column value at once, taking care to respect inflations and
+relationships if relevant. Be aware that this hashref might be edited in place,
+so don't rely on it being the same after a call to C<set_inflated_columns>. If
+you need to preserve the hashref, it is sufficient to pass a shallow copy to
+C<set_inflated_columns>, e.g. C<( { %{ $href } } )>.
+
+=cut
+
+sub set_inflated_columns {
+  my ( $self, $upd ) = @_;
+  foreach my $key (keys %$upd) {
+    if (ref $upd->{$key}) {
+      my $info = $self->relationship_info($key);
+      if ($info && $info->{attrs}{accessor}
+        && $info->{attrs}{accessor} eq 'single')
+      {
+        my $rel = delete $upd->{$key};
+        $self->set_from_related($key => $rel);
+        $self->{_relationship_data}{$key} = $rel;          
+      } elsif ($info && $info->{attrs}{accessor}
+        && $info->{attrs}{accessor} eq 'multi'
+        && ref $upd->{$key} eq 'ARRAY') {
+        my $others = delete $upd->{$key};
+        foreach my $rel_obj (@$others) {
+          if(!Scalar::Util::blessed($rel_obj)) {
+            $rel_obj = $self->create_related($key, $rel_obj);
+          }
+        }
+        $self->{_relationship_data}{$key} = $others; 
+#            $related->{$key} = $others;
+        next;
+      }
+      elsif ($self->has_column($key)
+        && exists $self->column_info($key)->{_inflate_info})
+      {
+        $self->set_inflated_column($key, delete $upd->{$key});          
+      }
+    }
+  }
+  $self->set_columns($upd);    
+}
+
 =head2 copy
 
   my $copy = $orig->copy({ change => $to, ... });
@@ -530,17 +545,31 @@
   bless $new, ref $self;
 
   $new->result_source($self->result_source);
-  $new->set_columns($changes);
+  $new->set_inflated_columns($changes);
   $new->insert;
+
+  # It's possible we'll have 2 relations to the same Source. We need to make
+  # sure we don't try to insert the same row twice else we'll violate unique
+  # constraints
+  my $rels_copied = {};
+
   foreach my $rel ($self->result_source->relationships) {
     my $rel_info = $self->result_source->relationship_info($rel);
-    if ($rel_info->{attrs}{cascade_copy}) {
-      my $resolved = $self->result_source->resolve_condition(
-       $rel_info->{cond}, $rel, $new);
-      foreach my $related ($self->search_related($rel)) {
-        $related->copy($resolved);
-      }
+
+    next unless $rel_info->{attrs}{cascade_copy};
+  
+    my $resolved = $self->result_source->resolve_condition(
+      $rel_info->{cond}, $rel, $new
+    );
+
+    my $copied = $rels_copied->{ $rel_info->{source} } ||= {};
+    foreach my $related ($self->search_related($rel)) {
+      my $id_str = join("\0", $related->id);
+      next if $copied->{$id_str};
+      $copied->{$id_str} = 1;
+      my $rel_copy = $related->copy($resolved);
     }
+ 
   }
   return $new;
 }
@@ -735,6 +764,22 @@
   }
 }
 
+=head2 id
+
+Returns the primary key(s) for a row. Can't be called as a class method.
+Actually implemented in L<DBIx::Class::PK>.
+
+=head2 discard_changes
+
+Re-selects the row from the database, losing any changes that had
+been made.
+
+This method can also be used to refresh from storage, retrieving any
+changes made since the row was last read from storage. Actually
+implemented in L<DBIx::Class::PK>.
+
+=cut
+
 1;
 
 =head1 AUTHORS

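For context, a minimal usage sketch of the C<set_inflated_columns> method
factored out above (assuming C<$cd> is an existing row object and C<$artist>
a related row, with 'artist' being a 'single' relationship accessor):

    $cd->set_inflated_columns({
        title  => 'Renamed Album',  # plain column, handled via set_columns
        artist => $artist,          # relationship value, via set_from_related
    });
    $cd->update;   # update($hashref) now simply delegates to this method
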
Modified: DBIx-Class/0.09/trunk/lib/DBIx/Class/Schema.pm
===================================================================
--- DBIx-Class/0.09/trunk/lib/DBIx/Class/Schema.pm	2008-02-05 23:31:30 UTC (rev 4038)
+++ DBIx-Class/0.09/trunk/lib/DBIx/Class/Schema.pm	2008-02-07 11:29:23 UTC (rev 4039)
@@ -1050,6 +1050,53 @@
     return $filename;
 }
 
+=head2 sqlt_deploy_hook($sqlt_schema)
+
+An optional sub which you can declare in your own Schema class that will get 
+passed the L<SQL::Translator::Schema> object when you deploy the schema via
+L</create_ddl_dir> or L</deploy>.
+
+For an example of what you can do with this, see 
+L<DBIx::Class::Manual::Cookbook/Adding Indexes And Functions To Your SQL>.
+
+=head2 thaw
+
+Provided as the recommened way of thawing schema objects. You can call 
+C<Storable::thaw> directly if you wish, but the thawed objects will not have a
+reference to any schema, so are rather useless
+
+=cut
+
+sub thaw {
+  my ($self, $obj) = @_;
+  local $DBIx::Class::ResultSourceHandle::thaw_schema = $self;
+  return Storable::thaw($obj);
+}
+
+=head2 freeze
+
+This doesn't actually do anything more than call L<Storable/freeze>; it is just
+provided here for symmetry.
+
+=cut
+
+sub freeze {
+  return Storable::freeze($_[1]);
+}
+
+=head2 dclone
+
+Recommended way of dcloning objects. This is needed to properly maintain
+references to the schema object (which itself is B<not> cloned).
+
+=cut
+
+sub dclone {
+  my ($self, $obj) = @_;
+  local $DBIx::Class::ResultSourceHandle::thaw_schema = $self;
+  return Storable::dclone($obj);
+}
+
 1;
 
 =head1 AUTHORS

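A short sketch of the intended freeze/thaw round trip (assuming an
already-connected $schema; the updated t/84serialize.t below exercises
essentially this):

    my $artist = $schema->resultset('Artist')->find(1);

    my $ice  = $schema->freeze($artist);  # thin wrapper around Storable::freeze
    # ... stash $ice in a cache, a job queue, etc. ...
    my $copy = $schema->thaw($ice);       # re-attaches the schema reference

    $copy->discard_changes;  # the thawed row can talk to storage again
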
Added: DBIx-Class/0.09/trunk/lib/DBIx/Class/StartupCheck.pm
===================================================================
--- DBIx-Class/0.09/trunk/lib/DBIx/Class/StartupCheck.pm	                        (rev 0)
+++ DBIx-Class/0.09/trunk/lib/DBIx/Class/StartupCheck.pm	2008-02-07 11:29:23 UTC (rev 4039)
@@ -0,0 +1,70 @@
+package DBIx::Class::StartupCheck;
+
+BEGIN {
+
+    { package TestRHBug; use overload bool => sub { 0 } }
+
+    sub _has_bug_34925 {
+	my %thing;
+	my $r1 = \%thing;
+	my $r2 = \%thing;
+	bless $r1 => 'TestRHBug';
+	return !!$r2;
+    }
+
+    sub _possibly_has_bad_overload_performance {
+	return $] < 5.008009 && ! _has_bug_34925();
+    }
+
+    unless ($ENV{DBIC_NO_WARN_BAD_PERL}) {
+	if (_possibly_has_bad_overload_performance()) {
+	    print STDERR "\n\nWARNING: " . __PACKAGE__ . ": This version of Perl is likely to exhibit\n" .
+		"extremely slow performance for certain critical operations.\n" .
+		"Please consider recompiling Perl.  For more information, see\n" .
+		"https://bugzilla.redhat.com/show_bug.cgi?id=196836 and/or\n" .
+		"http://lists.scsys.co.uk/pipermail/dbix-class/2007-October/005119.html.\n" .
+		"You can suppress this message by setting DBIC_NO_WARN_BAD_PERL=1 in your\n" .
+		"environment.\n\n";
+	}
+    }
+}
+
+=head1 NAME
+
+DBIx::Class::StartupCheck - Run environment checks on startup
+
+=head1 SYNOPSIS
+
+  use DBIx::Class::StartupCheck;
+  
+=head1 DESCRIPTION
+
+Currently this module checks for, and if necessary issues a warning for, a
+particular bug found on RedHat systems from perl-5.8.8-10 and up.  Other checks
+may be added from time to time.
+
+Any checks herein can be disabled by setting an appropriate environment
+variable.  If your system suffers from a particular bug, you will get a warning
+message on startup sent to STDERR, explaining what to do about it and how to
+suppress the message.  If you don't see any messages, you have nothing to worry
+about.
+
+=head1 CONTRIBUTORS
+
+Nigel Metheringham
+
+Brandon Black
+
+Matt S. Trout
+
+=head1 AUTHOR
+
+Jon Schutz
+
+=head1 LICENSE
+
+You may distribute this code under the same terms as Perl itself.
+
+=cut
+
+1;

Added: DBIx-Class/0.09/trunk/lib/DBIx/Class/Storage/DBI/ODBC/Microsoft_SQL_Server.pm
===================================================================
--- DBIx-Class/0.09/trunk/lib/DBIx/Class/Storage/DBI/ODBC/Microsoft_SQL_Server.pm	                        (rev 0)
+++ DBIx-Class/0.09/trunk/lib/DBIx/Class/Storage/DBI/ODBC/Microsoft_SQL_Server.pm	2008-02-07 11:29:23 UTC (rev 4039)
@@ -0,0 +1,96 @@
+package DBIx::Class::Storage::DBI::ODBC::Microsoft_SQL_Server;
+use strict;
+use warnings;
+
+use base qw/DBIx::Class::Storage::DBI/;
+
+sub _prep_for_execute {
+    my $self = shift;
+    my ($op, $extra_bind, $ident, $args) = @_;
+
+    my ($sql, $bind) = $self->SUPER::_prep_for_execute(@_);
+    $sql .= ';SELECT SCOPE_IDENTITY()' if $op eq 'insert';
+
+    return ($sql, $bind);
+}
+
+sub insert {
+    my ($self, $source, $to_insert) = @_;
+
+    my $bind_attributes = $self->source_bind_attributes($source);
+    my (undef, $sth) = $self->_execute( 'insert' => [], $source, $bind_attributes, $to_insert);
+    $self->{_scope_identity} = $sth->fetchrow_array;
+
+    return $to_insert;
+}
+
+sub last_insert_id { shift->{_scope_identity} }
+
+sub sqlt_type { 'SQLServer' }
+
+sub _sql_maker_opts {
+    my ( $self, $opts ) = @_;
+
+    if ( $opts ) {
+        $self->{_sql_maker_opts} = { %$opts };
+    }
+
+    return { limit_dialect => 'Top', %{$self->{_sql_maker_opts}||{}} };
+}
+
+sub build_datetime_parser {
+  my $self = shift;
+  my $type = "DateTime::Format::Strptime";
+  eval "use ${type}";
+  $self->throw_exception("Couldn't load ${type}: $@") if $@;
+  return $type->new( pattern => '%F %T' );
+}
+
+1;
+
+__END__
+
+=head1 NAME
+
+DBIx::Class::Storage::DBI::ODBC::Microsoft_SQL_Server - Support specific to
+Microsoft SQL Server over ODBC
+
+=head1 DESCRIPTION
+
+This class implements support specific to Microsoft SQL Server over ODBC,
+including auto-increment primary keys and SQL::Abstract::Limit dialect.  It
+is loaded automatically by DBIx::Class::Storage::DBI::ODBC when it
+detects a MSSQL back-end.
+
+=head1 IMPLEMENTATION NOTES
+
+Microsoft SQL Server supports three methods of retrieving the IDENTITY
+value for an inserted row: IDENT_CURRENT, @@IDENTITY, and SCOPE_IDENTITY().
+SCOPE_IDENTITY is used here because it is the safest.  However, it must
+be called in the same execute statement, not just the same connection.
+
+So, this implementation appends a SELECT SCOPE_IDENTITY() statement
+onto each INSERT to accommodate that requirement.
+
+=head1 METHODS
+
+=head2 insert
+
+=head2 last_insert_id
+
+=head2 sqlt_type
+
+=head2 build_datetime_parser
+
+The resulting parser handles the MSSQL C<DATETIME> type, but is almost
+certainly not sufficient for the other MSSQL 2008 date/time types.
+
+=head1 AUTHORS
+
+Marc Mims C<< <marc at questright.com> >>
+
+=head1 LICENSE
+
+You may distribute this code under the same terms as Perl itself.
+
+=cut

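For context, a sketch of how the new storage class ends up being used (the
schema class and DSN are placeholders; t/746mssql.t below does essentially
this against DBICTest::Schema):

    my $schema = MyApp::Schema->connect(
        'dbi:ODBC:my_mssql_dsn', $user, $pass, { AutoCommit => 1 },
    );

    # the ODBC storage detects the MSSQL back-end and loads this subclass
    my $new = $schema->resultset('Artist')->create({ name => 'foo' });
    print $new->artistid, "\n";  # IDENTITY value fetched via SCOPE_IDENTITY()
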
Modified: DBIx-Class/0.09/trunk/lib/DBIx/Class/Storage/DBI/Replication.pm
===================================================================
--- DBIx-Class/0.09/trunk/lib/DBIx/Class/Storage/DBI/Replication.pm	2008-02-05 23:31:30 UTC (rev 4038)
+++ DBIx-Class/0.09/trunk/lib/DBIx/Class/Storage/DBI/Replication.pm	2008-02-07 11:29:23 UTC (rev 4039)
@@ -131,22 +131,12 @@
     shift->read_source->build_datetime_parser( @_ );
 }
 
-sub limit_dialect {
-    my $self = shift;
-    $self->$_->limit_dialect( @_ ) for( $self->all_sources );
-}
-sub quote_char {
-    my $self = shift;
-    $self->$_->quote_char( @_ ) for( $self->all_sources );
-}
-sub name_sep {
-    my $self = shift;
-    $self->$_->quote_char( @_ ) for( $self->all_sources );
-}
-sub disconnect {
-    my $self = shift;
-    $self->$_->disconnect( @_ ) for( $self->all_sources );
-}
+sub limit_dialect { $_->limit_dialect( @_ ) for( shift->all_sources ) }
+sub quote_char { $_->quote_char( @_ ) for( shift->all_sources ) }
+sub name_sep { $_->quote_char( @_ ) for( shift->all_sources ) }
+sub disconnect { $_->disconnect( @_ ) for( shift->all_sources ) }
+sub set_schema { $_->set_schema( @_ ) for( shift->all_sources ) }
+
 sub DESTROY {
     my $self = shift;
 

Modified: DBIx-Class/0.09/trunk/lib/DBIx/Class/Storage/Statistics.pm
===================================================================
--- DBIx-Class/0.09/trunk/lib/DBIx/Class/Storage/Statistics.pm	2008-02-05 23:31:30 UTC (rev 4038)
+++ DBIx-Class/0.09/trunk/lib/DBIx/Class/Storage/Statistics.pm	2008-02-07 11:29:23 UTC (rev 4039)
@@ -3,6 +3,7 @@
 use warnings;
 
 use base qw/Class::Accessor::Grouped/;
+use IO::File;
 
 __PACKAGE__->mk_group_accessors(simple => qw/callback debugfh/);
 

Modified: DBIx-Class/0.09/trunk/lib/DBIx/Class/Storage.pm
===================================================================
--- DBIx-Class/0.09/trunk/lib/DBIx/Class/Storage.pm	2008-02-05 23:31:30 UTC (rev 4038)
+++ DBIx-Class/0.09/trunk/lib/DBIx/Class/Storage.pm	2008-02-07 11:29:23 UTC (rev 4039)
@@ -419,6 +419,10 @@
 
 Old name for DBIC_TRACE
 
+=head1 SEE ALSO
+
+L<DBIx::Class::Storage::DBI> - reference storage implementation using SQL::Abstract and DBI.
+
 =head1 AUTHORS
 
 Matt S. Trout <mst at shadowcatsystems.co.uk>

Modified: DBIx-Class/0.09/trunk/lib/DBIx/Class.pm
===================================================================
--- DBIx-Class/0.09/trunk/lib/DBIx/Class.pm	2008-02-05 23:31:30 UTC (rev 4038)
+++ DBIx-Class/0.09/trunk/lib/DBIx/Class.pm	2008-02-07 11:29:23 UTC (rev 4039)
@@ -5,6 +5,7 @@
 
 use vars qw($VERSION);
 use base qw/DBIx::Class::Componentised Class::Accessor::Grouped/;
+use DBIx::Class::StartupCheck;
 
 
 sub mk_classdata { 
@@ -23,7 +24,7 @@
 # i.e. first release of 0.XX *must* be 0.XX000. This avoids fBSD ports
 # brain damage and presumably various other packaging systems too
 
-$VERSION = '0.08007';
+$VERSION = '0.08009';
 
 sub MODIFY_CODE_ATTRIBUTES {
   my ($class,$code,@attrs) = @_;
@@ -201,6 +202,8 @@
 
 ash: Ash Berlin <ash at cpan.org>
 
+bert: Norbert Csongradi <bert at cpan.org>
+
 blblack: Brandon L. Black <blblack at gmail.com>
 
 bluefeet: Aran Deltac <bluefeet at cpan.org>
@@ -233,6 +236,8 @@
 
 jnapiorkowski: John Napiorkowski <jjn1056 at yahoo.com>
 
+jon: Jon Schutz <jjschutz at cpan.org>
+
 jshirley: J. Shirley <jshirley at gmail.com>
 
 konobi: Scott McWhirter
@@ -249,10 +254,14 @@
 
 Numa: Dan Sully <daniel at cpan.org>
 
+oyse: Øystein Torget <oystein.torget at dnv.com>
+
 paulm: Paul Makepeace
 
 penguin: K J Cheetham
 
+perigrin: Chris Prather <chris at prather.org>
+
 phaylon: Robert Sedlacek <phaylon at dunkelheit.at>
 
 quicksilver: Jules Bean

Modified: DBIx-Class/0.09/trunk/lib/SQL/Translator/Parser/DBIx/Class.pm
===================================================================
--- DBIx-Class/0.09/trunk/lib/SQL/Translator/Parser/DBIx/Class.pm	2008-02-05 23:31:30 UTC (rev 4038)
+++ DBIx-Class/0.09/trunk/lib/SQL/Translator/Parser/DBIx/Class.pm	2008-02-07 11:29:23 UTC (rev 4039)
@@ -5,6 +5,8 @@
 
 # Some mistakes the fault of Matt S Trout
 
+# Others the fault of Ash Berlin
+
 use strict;
 use warnings;
 use vars qw($DEBUG $VERSION @EXPORT_OK);
@@ -63,10 +65,8 @@
     }
 
 
-    foreach my $moniker (@monikers)
+    foreach my $moniker (sort @monikers)
     {
-        #eval "use $tableclass";
-        #print("Can't load $tableclass"), next if($@);
         my $source = $dbixschema->source($moniker);
 
         next if $seen_tables{$source->name}++;
@@ -78,7 +78,7 @@
         my $colcount = 0;
         foreach my $col ($source->columns)
         {
-            # assuming column_info in dbix is the same as DBI (?)
+            # assuming column_info in dbic is the same as DBI (?)
             # data_type is a number, column_type is text?
             my %colinfo = (
               name => $col,
@@ -111,7 +111,7 @@
 
         my %created_FK_rels;
 
-        foreach my $rel (@rels)
+        foreach my $rel (sort @rels)
         {
             my $rel_info = $source->relationship_info($rel);
 
@@ -149,10 +149,10 @@
                 # us to another table.
                 # OR: If is_foreign_key_constraint attr is explicity set (or set to false) on the relation
                 if ( ! exists $created_FK_rels{$rel_table}->{$key_test} &&
-                     ( exists $rel_info->{attrs}{is_foreign_key_constraint} && 
-                       $rel_info->{attrs}{is_foreign_key_constraint} ||
+                     ( exists $rel_info->{attrs}{is_foreign_key_constraint} ?
+                       $rel_info->{attrs}{is_foreign_key_constraint} :
                        !$source->compare_relationship_keys(\@keys, \@primary)
-                     )
+		     )
                    )
                 {
                     $created_FK_rels{$rel_table}->{$key_test} = 1;
@@ -168,7 +168,16 @@
                 }
             }
         }
+
+        if ($source->result_class->can('sqlt_deploy_hook')) {
+          $source->result_class->sqlt_deploy_hook($table);
+        }
     }
+
+    if ($dbixschema->can('sqlt_deploy_hook')) {
+      $dbixschema->sqlt_deploy_hook($schema);
+    }
+
     return 1;
 }
 

Modified: DBIx-Class/0.09/trunk/script/dbicadmin
===================================================================
--- DBIx-Class/0.09/trunk/script/dbicadmin	2008-02-05 23:31:30 UTC (rev 4038)
+++ DBIx-Class/0.09/trunk/script/dbicadmin	2008-02-07 11:29:23 UTC (rev 4039)
@@ -4,11 +4,11 @@
 
 use Getopt::Long;
 use Pod::Usage;
-use JSON qw( jsonToObj );
+use JSON::Any;
 
-$JSON::BareKey = 1;
-$JSON::QuotApos = 1;
 
+my $json = JSON::Any->new(allow_barekey => 1, allow_singlequote => 1);
+
 GetOptions(
     'schema=s'  => \my $schema_class,
     'class=s'   => \my $resultset_class,
@@ -50,7 +50,7 @@
 die('No schema specified') if(!$schema_class);
 eval("require $schema_class");
 die('Unable to load schema') if ($@);
-$connect = jsonToObj( $connect ) if ($connect);
+$connect = $json->jsonToObj( $connect ) if ($connect);
 my $schema = $schema_class->connect(
     ( $connect ? @$connect : () )
 );
@@ -59,15 +59,15 @@
 my $resultset = eval{ $schema->resultset($resultset_class) };
 die('Unable to load the class with the schema') if ($@);
 
-$set = jsonToObj( $set ) if ($set);
-$where = jsonToObj( $where ) if ($where);
-$attrs = jsonToObj( $attrs ) if ($attrs);
+$set = $json->jsonToObj( $set ) if ($set);
+$where = $json->jsonToObj( $where ) if ($where);
+$attrs = $json->jsonToObj( $attrs ) if ($attrs);
 
 if ($op eq 'insert') {
     die('Do not use the where option with the insert op') if ($where);
     die('Do not use the attrs option with the insert op') if ($attrs);
     my $obj = $resultset->create( $set );
-    print ''.ref($resultset).' ID: '.join(',',$obj->id())."\n";
+    print ''.ref($resultset).' ID: '.join(',',$obj->id())."\n" if (!$quiet);
 }
 elsif ($op eq 'update') {
     $resultset = $resultset->search( ($where||{}) );

Modified: DBIx-Class/0.09/trunk/t/68inflate.t
===================================================================
--- DBIx-Class/0.09/trunk/t/68inflate.t	2008-02-05 23:31:30 UTC (rev 4038)
+++ DBIx-Class/0.09/trunk/t/68inflate.t	2008-02-07 11:29:23 UTC (rev 4039)
@@ -10,7 +10,7 @@
 eval { require DateTime };
 plan skip_all => "Need DateTime for inflation tests" if $@;
 
-plan tests => 20;
+plan tests => 21;
 
 $schema->class('CD')
 #DBICTest::Schema::CD
@@ -99,6 +99,10 @@
 $cd->discard_changes;
 
 is($cd->year->year, $before_year + 1, 'discard_changes clears the inflated value');
+
+my $copy = $cd->copy({ year => $now, title => "zemoose" });
+
+isnt( $copy->year->year, $before_year, "copy" );
  
 # eval { $cd->store_inflated_column('year', \'year + 1') };
 # print STDERR "ERROR: $@" if($@);

Modified: DBIx-Class/0.09/trunk/t/73oracle_inflate.t
===================================================================
--- DBIx-Class/0.09/trunk/t/73oracle_inflate.t	2008-02-05 23:31:30 UTC (rev 4038)
+++ DBIx-Class/0.09/trunk/t/73oracle_inflate.t	2008-02-07 11:29:23 UTC (rev 4039)
@@ -54,9 +54,6 @@
 
 # clean up our mess
 END {
-    # Set the metadata back for the last_updated_on column
-    $schema->class('Track')->add_column( 'last_updated_on' => $col_metadata );
-
     if($dbh) {
         $dbh->do("DROP TABLE track");
     }

Modified: DBIx-Class/0.09/trunk/t/746db2_400.t
===================================================================
--- DBIx-Class/0.09/trunk/t/746db2_400.t	2008-02-05 23:31:30 UTC (rev 4038)
+++ DBIx-Class/0.09/trunk/t/746db2_400.t	2008-02-07 11:29:23 UTC (rev 4039)
@@ -21,7 +21,7 @@
 
 my $dbh = $schema->storage->dbh;
 
-$dbh->do("DROP TABLE artist", { RaiseError => 0, PrintError => 0 });
+eval { $dbh->do("DROP TABLE artist") };
 
 $dbh->do("CREATE TABLE artist (artistid INTEGER GENERATED BY DEFAULT AS IDENTITY (START WITH 1, INCREMENT BY 1), name VARCHAR(255), charfield CHAR(10))");
 

Added: DBIx-Class/0.09/trunk/t/746mssql.t
===================================================================
--- DBIx-Class/0.09/trunk/t/746mssql.t	                        (rev 0)
+++ DBIx-Class/0.09/trunk/t/746mssql.t	2008-02-07 11:29:23 UTC (rev 4039)
@@ -0,0 +1,63 @@
+use strict;
+use warnings;  
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+
+my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_MSSQL_ODBC_${_}" } qw/DSN USER PASS/};
+
+plan skip_all => 'Set $ENV{DBICTEST_MSSQL_ODBC_DSN}, _USER and _PASS to run this test'
+  unless ($dsn && $user);
+
+plan tests => 12;
+
+my $schema = DBICTest::Schema->connect($dsn, $user, $pass, {AutoCommit => 1});
+
+$schema->storage->ensure_connected;
+isa_ok( $schema->storage, 'DBIx::Class::Storage::DBI::ODBC::Microsoft_SQL_Server' );
+
+my $dbh = $schema->storage->dbh;
+
+eval { $dbh->do("DROP TABLE artist") };
+
+    $dbh->do(<<'');
+CREATE TABLE artist (
+   artistid INT IDENTITY NOT NULL,
+   name VARCHAR(255),
+   charfield CHAR(10),
+   primary key(artistid)
+)
+
+my %seen_id;
+
+# test primary key handling
+my $new = $schema->resultset('Artist')->create({ name => 'foo' });
+ok($new->artistid > 0, "Auto-PK worked");
+
+$seen_id{$new->artistid}++;
+
+# test LIMIT support
+for (1..6) {
+    $new = $schema->resultset('Artist')->create({ name => 'Artist ' . $_ });
+    is ( $seen_id{$new->artistid}, undef, "id for Artist $_ is unique" );
+    $seen_id{$new->artistid}++;
+}
+
+my $it = $schema->resultset('Artist')->search( {}, {
+    rows => 3,
+    order_by => 'artistid',
+});
+
+is( $it->count, 3, "LIMIT count ok" );
+is( $it->next->name, "foo", "iterator->next ok" );
+$it->next;
+is( $it->next->name, "Artist 2", "iterator->next ok" );
+is( $it->next, undef, "next past end of resultset ok" );
+
+
+# clean up our mess
+END {
+    $dbh->do('DROP TABLE artist') if $dbh;
+}
+

Modified: DBIx-Class/0.09/trunk/t/76joins.t
===================================================================
--- DBIx-Class/0.09/trunk/t/76joins.t	2008-02-05 23:31:30 UTC (rev 4038)
+++ DBIx-Class/0.09/trunk/t/76joins.t	2008-02-07 11:29:23 UTC (rev 4039)
@@ -247,12 +247,8 @@
 
 is($cd->producers->first->name, 'Matt S Trout', 'many_to_many accessor ok');
 
-TODO: {
-  local $TODO = 'use prefetched values for many_to_many accessor';
+is($queries, 1, 'many_to_many accessor with nested prefetch ran exactly 1 query');
 
-  is($queries, 1, 'many_to_many accessor with nested prefetch ran exactly 1 query');
-}
-
 $queries = 0;
 
 my $producers = $cd->search_related('cd_to_producer')->search_related('producer');

Modified: DBIx-Class/0.09/trunk/t/84serialize.t
===================================================================
--- DBIx-Class/0.09/trunk/t/84serialize.t	2008-02-05 23:31:30 UTC (rev 4038)
+++ DBIx-Class/0.09/trunk/t/84serialize.t	2008-02-07 11:29:23 UTC (rev 4039)
@@ -8,9 +8,25 @@
 
 my $schema = DBICTest->init_schema();
 
-plan tests => 1;
+plan tests => 6;
 
 my $artist = $schema->resultset('Artist')->find(1);
-my $copy = eval { Storable::dclone($artist) };
-is_deeply($copy, $artist, 'serialize row object works');
 
+{
+  my $copy = $schema->dclone($artist);
+  is_deeply($copy, $artist, "dclone row object works");
+  eval { $copy->discard_changes };
+  ok( !$@, "discard_changes okay" );
+  is($copy->id, $artist->id, "IDs still match ");
+}
+
+{
+  my $ice = $schema->freeze($artist);
+  my $copy = $schema->thaw($ice);
+  is_deeply($copy, $artist, 'freeze/thaw row object works');
+
+  eval { $copy->discard_changes };
+  ok( !$@, "discard_changes okay" );
+  is($copy->id, $artist->id, "IDs still okay");
+}
+

Modified: DBIx-Class/0.09/trunk/t/86sqlt.t
===================================================================
--- DBIx-Class/0.09/trunk/t/86sqlt.t	2008-02-05 23:31:30 UTC (rev 4038)
+++ DBIx-Class/0.09/trunk/t/86sqlt.t	2008-02-07 11:29:23 UTC (rev 4039)
@@ -10,7 +10,7 @@
 
 my $schema = DBICTest->init_schema;
 
-plan tests => 54;
+plan tests => 60;
 
 my $translator = SQL::Translator->new( 
   parser_args => {
@@ -176,6 +176,16 @@
       on_delete => '', on_update => '',
     },
   ],
+  # ForceForeign
+  forceforeign => [
+    {
+      'display' => 'forceforeign->artist',
+      'selftable' => 'forceforeign', 'foreigntable' => 'artist', 
+      'selfcols'  => ['artist'], 'foreigncols' => ['artist_id'], 
+      on_delete => '', on_update => '',
+    },
+  ],
+
 );
 
 my %unique_constraints = (
@@ -213,13 +223,26 @@
 #  ],
 );
 
+my %indexes = (
+  artist => [
+    {
+      'fields' => ['name']
+    },
+  ]
+);
+
 my $tschema = $translator->schema();
+# Test that the $schema->sqlt_deploy_hook was called okay and that it removed
+# the 'link' table
+ok( !defined($tschema->get_table('link')), "Link table was removed by hook");
 
 # Test that nonexistent constraints are not found
 my $constraint = get_constraint('FOREIGN KEY', 'cd', ['title'], 'cd', ['year']);
 ok( !defined($constraint), 'nonexistent FOREIGN KEY constraint not found' );
 $constraint = get_constraint('UNIQUE', 'cd', ['artist']);
 ok( !defined($constraint), 'nonexistent UNIQUE constraint not found' );
+$constraint = get_constraint('FOREIGN KEY', 'forceforeign', ['cd'], 'cd', ['cdid']);
+ok( !defined($constraint), 'forced nonexistent FOREIGN KEY constraint not found' );
 
 for my $expected_constraints (keys %fk_constraints) {
   for my $expected_constraint (@{ $fk_constraints{$expected_constraints} }) {
@@ -244,6 +267,13 @@
   }
 }
 
+for my $table_index (keys %indexes) {
+  for my $expected_index ( @{ $indexes{$table_index} } ) {
+
+    ok ( get_index($table_index, $expected_index), "Got a matching index on $table_index table");
+  }
+}
+
 # Returns the Constraint object for the specified constraint type, table and
 # columns from the SQL::Translator schema, or undef if no matching constraint
 # is found.
@@ -293,6 +323,34 @@
   return undef; # didn't find a matching constraint
 }
 
+sub get_index {
+  my ($table_name, $index) = @_;
+
+  my $table = $tschema->get_table($table_name);
+
+ CAND_INDEX:
+  for my $cand_index ( $table->get_indices ) {
+   
+    next CAND_INDEX if $index->{name} && $cand_index->name ne $index->{name}
+                    || $index->{type} && $cand_index->type ne $index->{type};
+
+    my %idx_fields = map { $_ => 1 } $cand_index->fields;
+
+    for my $field ( @{ $index->{fields} } ) {
+      next CAND_INDEX unless $idx_fields{$field};
+    }
+
+    %idx_fields = map { $_ => 1 } @{$index->{fields}};
+    for my $field ( $cand_index->fields) {
+      next CAND_INDEX unless $idx_fields{$field};
+    }
+
+    return $cand_index;
+  }
+
+  return undef; # No matching idx
+}
+
 # Test parameters in a FOREIGN KEY constraint other than columns
 sub test_fk {
   my ($expected, $got) = @_;

Modified: DBIx-Class/0.09/trunk/t/89dbicadmin.t
===================================================================
--- DBIx-Class/0.09/trunk/t/89dbicadmin.t	2008-02-05 23:31:30 UTC (rev 4038)
+++ DBIx-Class/0.09/trunk/t/89dbicadmin.t	2008-02-07 11:29:23 UTC (rev 4039)
@@ -8,8 +8,8 @@
 
 my $schema = DBICTest->init_schema();
 
-eval 'require JSON';
-plan skip_all => 'Install JSON to run this test' if ($@);
+eval 'require JSON::Any';
+plan skip_all => 'Install JSON::Any to run this test' if ($@);
 
 eval 'require Text::CSV_XS';
 if ($@) {
@@ -19,26 +19,31 @@
 
 plan tests => 5;
 
-# double quotes round the arguments and single-quote within to make sure the
-# tests run on windows as well
+# the script supports double quotes around the arguments and single quotes within
+# to make sure it runs on Windows as well, but only if JSON::Any picks the right module
 
+
+
 my $employees = $schema->resultset('Employee');
-my $cmd = qq|$^X script/dbicadmin --schema=DBICTest::Schema --class=Employee --tlibs --connect="['dbi:SQLite:dbname=t/var/DBIxClass.db','','',{AutoCommit:1}]" --force --tlibs|;
+my @cmd = ($^X, qw|script/dbicadmin --quiet --schema=DBICTest::Schema --class=Employee --tlibs|, q|--connect=["dbi:SQLite:dbname=t/var/DBIxClass.db","","",{"AutoCommit":1}]|, qw|--force --tlibs|);
 
-`$cmd --op=insert --set="{name:'Matt'}"`;
+system(@cmd, qw|--op=insert --set={"name":"Matt"}|);
 ok( ($employees->count()==1), 'insert count' );
 
 my $employee = $employees->find(1);
 ok( ($employee->name() eq 'Matt'), 'insert valid' );
 
-`$cmd --op=update --set="{name:'Trout'}"`;
+system(@cmd, qw|--op=update --set={"name":"Trout"}|);
 $employee = $employees->find(1);
 ok( ($employee->name() eq 'Trout'), 'update' );
 
-`$cmd --op=insert --set="{name:'Aran'}"`;
-my $data = `$cmd --op=select --attrs="{order_by:'name'}"`;
+system(@cmd, qw|--op=insert --set={"name":"Aran"}|);
+
+open(my $fh, "-|", @cmd, qw|--op=select --attrs={"order_by":"name"}|) or die $!;
+my $data = do { local $/; <$fh> };
+close($fh);
 ok( ($data=~/Aran.*Trout/s), 'select with attrs' );
 
-`$cmd --op=delete --where="{name:'Trout'}"`;
+system(@cmd, qw|--op=delete --where={"name":"Trout"}|);
 ok( ($employees->count()==1), 'delete' );
 

Modified: DBIx-Class/0.09/trunk/t/89inflate_datetime.t
===================================================================
--- DBIx-Class/0.09/trunk/t/89inflate_datetime.t	2008-02-05 23:31:30 UTC (rev 4038)
+++ DBIx-Class/0.09/trunk/t/89inflate_datetime.t	2008-02-07 11:29:23 UTC (rev 4039)
@@ -10,7 +10,7 @@
 eval { require DateTime::Format::MySQL };
 plan skip_all => "Need DateTime::Format::MySQL for inflation tests" if $@;
 
-plan tests => 8;
+plan tests => 17;
 
 # inflation test
 my $event = $schema->resultset("Event")->find(1);
@@ -42,3 +42,31 @@
 
 isa_ok($created->created_on, 'DateTime', 'DateTime returned');
 is("$created_cron", '2006-06-23T00:00:00', 'Correct date/time');
+
+
+# Test "timezone" parameter
+my $event_tz = $schema->resultset('EventTZ')->create({
+    starts_at => DateTime->new(year=>2007, month=>12, day=>31, time_zone => "America/Chicago" ),
+    created_on => DateTime->new(year=>2006, month=>1, day=>31,
+        hour => 13, minute => 34, second => 56, time_zone => "America/New_York" ),
+});
+
+my $starts_at = $event_tz->starts_at;
+is("$starts_at", '2007-12-31T00:00:00', 'Correct date/time using timezone');
+
+my $created_on = $event_tz->created_on;
+is("$created_on", '2006-01-31T12:34:56', 'Correct timestamp using timezone');
+is($event_tz->created_on->time_zone->name, "America/Chicago", "Correct timezone");
+
+my $loaded_event = $schema->resultset('EventTZ')->find( $event_tz->id );
+
+isa_ok($loaded_event->starts_at, 'DateTime', 'DateTime returned');
+$starts_at = $loaded_event->starts_at;
+is("$starts_at", '2007-12-31T00:00:00', 'Loaded correct date/time using timezone');
+is($starts_at->time_zone->name, 'America/Chicago', 'Correct timezone');
+
+isa_ok($loaded_event->created_on, 'DateTime', 'DateTime returned');
+$created_on = $loaded_event->created_on;
+is("$created_on", '2006-01-31T12:34:56', 'Loaded correct timestamp using timezone');
+is($created_on->time_zone->name, 'America/Chicago', 'Correct timezone');
+

Modified: DBIx-Class/0.09/trunk/t/90join_torture.t
===================================================================
--- DBIx-Class/0.09/trunk/t/90join_torture.t	2008-02-05 23:31:30 UTC (rev 4038)
+++ DBIx-Class/0.09/trunk/t/90join_torture.t	2008-02-07 11:29:23 UTC (rev 4039)
@@ -6,7 +6,7 @@
 use DBICTest;
 my $schema = DBICTest->init_schema();
 
-plan tests => 20;
+plan tests => 22;
 
  {
    my $rs = $schema->resultset( 'CD' )->search(
@@ -119,4 +119,10 @@
 
 ok(!$@, "pathological prefetch ok");
 
+my $rs = $schema->resultset("Artist")->search({}, { join => 'twokeys' });
+my $second_search_rs = $rs->search({ 'cds_2.cdid' => '2' }, { join =>
+['cds', 'cds'] });
+is(scalar(@{$second_search_rs->{attrs}->{join}}), 3, 'both joins kept');
+ok($second_search_rs->next, 'query on double joined rel runs okay');
+
 1;

Modified: DBIx-Class/0.09/trunk/t/91merge_attr.t
===================================================================
--- DBIx-Class/0.09/trunk/t/91merge_attr.t	2008-02-05 23:31:30 UTC (rev 4038)
+++ DBIx-Class/0.09/trunk/t/91merge_attr.t	2008-02-07 11:29:23 UTC (rev 4039)
@@ -6,7 +6,7 @@
 use DBICTest;
 use Test::More;
 
-plan tests => 14;
+plan tests => 15;
 
 my $schema = DBICTest->init_schema();
 my $rs = $schema->resultset( 'CD' );
@@ -52,6 +52,14 @@
 }
 
 {
+  my $a = [ 'twokeys' ];
+  my $b = [ 'cds', 'cds' ];
+  my $expected = [ 'twokeys', 'cds', 'cds' ];
+  my $result = $rs->_merge_attr($a, $b);
+  is_deeply( $result, $expected );
+}
+
+{
   my $a = [ 'artist', 'cd', { 'artist' => 'manager' } ];
   my $b = 'artist';
   my $expected = [ 'artist', 'cd', { 'artist' => 'manager' } ];

Modified: DBIx-Class/0.09/trunk/t/96multi_create.t
===================================================================
--- DBIx-Class/0.09/trunk/t/96multi_create.t	2008-02-05 23:31:30 UTC (rev 4038)
+++ DBIx-Class/0.09/trunk/t/96multi_create.t	2008-02-07 11:29:23 UTC (rev 4039)
@@ -51,7 +51,24 @@
 
 is($newartist2->name, 'Fred 3', 'Created new artist with cds via find_or_create');
 
+my $artist2 = $schema->resultset('Artist')->create({ artistid => 1000,
+                                                    name => 'Fred 3',
+                                                     cds => [
+                                                             { artist => 1000,
+                                                               title => 'Music to code by',
+                                                               year => 2007,
+                                                             },
+                                                             ],
+                                                    cds_unordered => [
+                                                             { artist => 1000,
+                                                               title => 'Music to code by',
+                                                               year => 2007,
+                                                             },
+                                                             ]
+                                                     });
 
+is($artist2->in_storage, 1, 'artist with duplicate rels inserted okay');
+
 CREATE_RELATED1 :{
 
 	my $artist = $schema->resultset('Artist')->first;

Modified: DBIx-Class/0.09/trunk/t/lib/DBICTest/Schema/Artist.pm
===================================================================
--- DBIx-Class/0.09/trunk/t/lib/DBICTest/Schema/Artist.pm	2008-02-05 23:31:30 UTC (rev 4038)
+++ DBIx-Class/0.09/trunk/t/lib/DBICTest/Schema/Artist.pm	2008-02-07 11:29:23 UTC (rev 4039)
@@ -31,6 +31,9 @@
     cds => 'DBICTest::Schema::CD', undef,
     { order_by => 'year' },
 );
+__PACKAGE__->has_many(
+    cds_unordered => 'DBICTest::Schema::CD'
+);
 
 __PACKAGE__->has_many( twokeys => 'DBICTest::Schema::TwoKeys' );
 __PACKAGE__->has_many( onekeys => 'DBICTest::Schema::OneKey' );
@@ -41,4 +44,14 @@
   { cascade_copy => 0 } # this would *so* not make sense
 );
 
+sub sqlt_deploy_hook {
+  my ($self, $sqlt_table) = @_;
+
+
+  if ($sqlt_table->schema->translator->producer_type =~ /SQLite$/ ) {
+    $sqlt_table->add_index( name => 'artist_name', fields => ['name'] )
+      or die $sqlt_table->error;
+  }
+}
+
 1;

Copied: DBIx-Class/0.09/trunk/t/lib/DBICTest/Schema/EventTZ.pm (from rev 3608, DBIx-Class/0.09/trunk/t/lib/DBICTest/Schema/Event.pm)
===================================================================
--- DBIx-Class/0.09/trunk/t/lib/DBICTest/Schema/EventTZ.pm	                        (rev 0)
+++ DBIx-Class/0.09/trunk/t/lib/DBICTest/Schema/EventTZ.pm	2008-02-07 11:29:23 UTC (rev 4039)
@@ -0,0 +1,19 @@
+package DBICTest::Schema::EventTZ;
+
+use strict;
+use warnings;
+use base qw/DBIx::Class::Core/;
+
+__PACKAGE__->load_components(qw/InflateColumn::DateTime/);
+
+__PACKAGE__->table('event');
+
+__PACKAGE__->add_columns(
+  id => { data_type => 'integer', is_auto_increment => 1 },
+  starts_at => { data_type => 'datetime', extra => { timezone => "America/Chicago" } },
+  created_on => { data_type => 'timestamp', extra => { timezone => "America/Chicago" } },
+);
+
+__PACKAGE__->set_primary_key('id');
+
+1;

Added: DBIx-Class/0.09/trunk/t/lib/DBICTest/Schema/ForceForeign.pm
===================================================================
--- DBIx-Class/0.09/trunk/t/lib/DBICTest/Schema/ForceForeign.pm	                        (rev 0)
+++ DBIx-Class/0.09/trunk/t/lib/DBICTest/Schema/ForceForeign.pm	2008-02-07 11:29:23 UTC (rev 4039)
@@ -0,0 +1,32 @@
+package # hide from PAUSE
+    DBICTest::Schema::ForceForeign;
+
+use base 'DBIx::Class::Core';
+
+__PACKAGE__->table('forceforeign');
+__PACKAGE__->add_columns(
+  'artist' => { data_type => 'integer' },
+  'cd' => { data_type => 'integer' },
+);
+__PACKAGE__->set_primary_key(qw/artist/);
+
+# Normally this would not appear as a FK constraint
+# since it uses the PK
+__PACKAGE__->might_have(
+			'artist_1', 'DBICTest::Schema::Artist', {
+			    'foreign.artist_id' => 'self.artist',
+			}, {
+			    is_foreign_key_constraint => 1,
+			},
+);
+
+# Normally this would appear as a FK constraint
+__PACKAGE__->might_have(
+			'cd_1', 'DBICTest::Schema::CD', {
+			    'foreign.cdid' => 'self.cd',
+			}, {
+			    is_foreign_key_constraint => 0,
+			},
+);
+
+1;

Modified: DBIx-Class/0.09/trunk/t/lib/DBICTest/Schema.pm
===================================================================
--- DBIx-Class/0.09/trunk/t/lib/DBICTest/Schema.pm	2008-02-05 23:31:30 UTC (rev 4038)
+++ DBIx-Class/0.09/trunk/t/lib/DBICTest/Schema.pm	2008-02-07 11:29:23 UTC (rev 4039)
@@ -34,9 +34,16 @@
     'Producer',
     'CD_to_Producer',
   ),
-  qw/SelfRefAlias TreeLike TwoKeyTreeLike Event NoPrimaryKey/,
+  qw/SelfRefAlias TreeLike TwoKeyTreeLike Event EventTZ NoPrimaryKey/,
   qw/Collection CollectionObject TypedObject/,
-  qw/Owners BooksInLibrary/
+  qw/Owners BooksInLibrary/,
+  qw/ForceForeign/  
 );
 
+sub sqlt_deploy_hook {
+  my ($self, $sqlt_schema) = @_;
+
+  $sqlt_schema->drop_table('link');
+}
+
 1;




More information about the Bast-commits mailing list