[Bast-commits] r9557 - in DBIx-Class/0.08/branches/extended_rels: . lib/DBIx lib/DBIx/Class lib/DBIx/Class/CDBICompat lib/DBIx/Class/InflateColumn lib/DBIx/Class/Manual lib/DBIx/Class/Optional lib/DBIx/Class/Relationship lib/DBIx/Class/ResultSourceProxy lib/DBIx/Class/SQLAHacks lib/DBIx/Class/Schema lib/DBIx/Class/Storage lib/DBIx/Class/Storage/DBI lib/DBIx/Class/Storage/DBI/ODBC lib/DBIx/Class/Storage/DBI/Oracle lib/DBIx/Class/Storage/DBI/Replicated lib/DBIx/Class/Storage/DBI/Sybase lib/DBIx/Class/Storage/DBI/Sybase/ASE lib/SQL/Translator/Parser/DBIx maint script t t/admin t/bind t/cdbi t/count t/inflate t/lib t/lib/DBIC t/lib/DBICNSTest/Result t/lib/DBICNSTest/ResultSet t/lib/DBICTest t/lib/DBICTest/Schema t/lib/testinclude t/ordered t/prefetch t/resultset t/row t/search t/sqlahacks t/sqlahacks/limit_dialects t/sqlahacks/sql_maker t/storage t/var

ribasushi at dev.catalyst.perl.org ribasushi at dev.catalyst.perl.org
Wed Jun 2 17:41:37 GMT 2010


Author: ribasushi
Date: 2010-06-02 18:41:37 +0100 (Wed, 02 Jun 2010)
New Revision: 9557

Added:
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/FilterColumn.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/SQLAHacks/Oracle.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/UniqueIdentifier.pm
   DBIx-Class/0.08/branches/extended_rels/maint/benchmark_datafetch.pl
   DBIx-Class/0.08/branches/extended_rels/t/inflate/datetime_informix.t
   DBIx-Class/0.08/branches/extended_rels/t/lib/DBICNSTest/Result/D.pm
   DBIx-Class/0.08/branches/extended_rels/t/lib/DBICNSTest/ResultSet/D.pm
   DBIx-Class/0.08/branches/extended_rels/t/lib/DBICTest/Schema/TimestampPrimaryKey.pm
   DBIx-Class/0.08/branches/extended_rels/t/lib/testinclude/
   DBIx-Class/0.08/branches/extended_rels/t/lib/testinclude/DBICTestAdminInc.pm
   DBIx-Class/0.08/branches/extended_rels/t/row/filter_column.t
   DBIx-Class/0.08/branches/extended_rels/t/row/inflate_result.t
   DBIx-Class/0.08/branches/extended_rels/t/sqlahacks/limit_dialects/generic_subq.t
   DBIx-Class/0.08/branches/extended_rels/t/sqlahacks/limit_dialects/rno.t
   DBIx-Class/0.08/branches/extended_rels/t/sqlahacks/limit_dialects/rownum.t
   DBIx-Class/0.08/branches/extended_rels/t/sqlahacks/oracle.t
   DBIx-Class/0.08/branches/extended_rels/t/sqlahacks/oraclejoin.t
   DBIx-Class/0.08/branches/extended_rels/t/sqlahacks/order_by_func.t
   DBIx-Class/0.08/branches/extended_rels/t/storage/dbi_env.t
   DBIx-Class/0.08/branches/extended_rels/t/storage/deploy.t
   DBIx-Class/0.08/branches/extended_rels/t/storage/global_destruction.t
   DBIx-Class/0.08/branches/extended_rels/t/storage/txn.t
   DBIx-Class/0.08/branches/extended_rels/t/var/
Removed:
   DBIx-Class/0.08/branches/extended_rels/t/41orrible.t
   DBIx-Class/0.08/branches/extended_rels/t/81transactions.t
Modified:
   DBIx-Class/0.08/branches/extended_rels/
   DBIx-Class/0.08/branches/extended_rels/Changes
   DBIx-Class/0.08/branches/extended_rels/Makefile.PL
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Admin.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/CDBICompat/ColumnCase.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Componentised.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/DB.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Exception.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/InflateColumn.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/InflateColumn/DateTime.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Manual/Cookbook.pod
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Manual/FAQ.pod
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Manual/Intro.pod
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Optional/Dependencies.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Relationship/Base.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Relationship/BelongsTo.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Relationship/HasMany.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Relationship/HasOne.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/ResultSet.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/ResultSetColumn.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/ResultSource.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/ResultSourceProxy/Table.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Row.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/SQLAHacks.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/SQLAHacks/MySQL.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/SQLAHacks/OracleJoins.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/SQLAHacks/SQLite.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Schema.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Schema/Versioned.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/ADO.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/AutoCast.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Cursor.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Informix.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/InterBase.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/MSSQL.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/ODBC.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/ODBC/Firebird.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/ODBC/Microsoft_SQL_Server.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Oracle.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Oracle/Generic.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Pg.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Replicated.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Replicated/Pool.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Replicated/WithDSN.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/SQLAnywhere.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/SQLite.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Sybase.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Sybase/ASE.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Sybase/ASE/NoBindVars.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Sybase/Microsoft_SQL_Server.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/mysql.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBIHacks.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/TxnScopeGuard.pm
   DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/UTF8Columns.pm
   DBIx-Class/0.08/branches/extended_rels/lib/SQL/Translator/Parser/DBIx/Class.pm
   DBIx-Class/0.08/branches/extended_rels/maint/joint_deps.pl
   DBIx-Class/0.08/branches/extended_rels/maint/svn-log.perl
   DBIx-Class/0.08/branches/extended_rels/script/dbicadmin
   DBIx-Class/0.08/branches/extended_rels/t/
   DBIx-Class/0.08/branches/extended_rels/t/03podcoverage.t
   DBIx-Class/0.08/branches/extended_rels/t/39load_namespaces_1.t
   DBIx-Class/0.08/branches/extended_rels/t/39load_namespaces_3.t
   DBIx-Class/0.08/branches/extended_rels/t/51threads.t
   DBIx-Class/0.08/branches/extended_rels/t/52cycle.t
   DBIx-Class/0.08/branches/extended_rels/t/60core.t
   DBIx-Class/0.08/branches/extended_rels/t/71mysql.t
   DBIx-Class/0.08/branches/extended_rels/t/72pg.t
   DBIx-Class/0.08/branches/extended_rels/t/73oracle.t
   DBIx-Class/0.08/branches/extended_rels/t/746mssql.t
   DBIx-Class/0.08/branches/extended_rels/t/746sybase.t
   DBIx-Class/0.08/branches/extended_rels/t/748informix.t
   DBIx-Class/0.08/branches/extended_rels/t/749sybase_asa.t
   DBIx-Class/0.08/branches/extended_rels/t/74mssql.t
   DBIx-Class/0.08/branches/extended_rels/t/750firebird.t
   DBIx-Class/0.08/branches/extended_rels/t/85utf8.t
   DBIx-Class/0.08/branches/extended_rels/t/90join_torture.t
   DBIx-Class/0.08/branches/extended_rels/t/93single_accessor_object.t
   DBIx-Class/0.08/branches/extended_rels/t/94versioning.t
   DBIx-Class/0.08/branches/extended_rels/t/admin/02ddl.t
   DBIx-Class/0.08/branches/extended_rels/t/admin/10script.t
   DBIx-Class/0.08/branches/extended_rels/t/bind/order_by.t
   DBIx-Class/0.08/branches/extended_rels/t/cdbi/02-Film.t
   DBIx-Class/0.08/branches/extended_rels/t/cdbi/columns_as_hashes.t
   DBIx-Class/0.08/branches/extended_rels/t/count/count_rs.t
   DBIx-Class/0.08/branches/extended_rels/t/count/prefetch.t
   DBIx-Class/0.08/branches/extended_rels/t/inflate/datetime_firebird.t
   DBIx-Class/0.08/branches/extended_rels/t/inflate/datetime_mssql.t
   DBIx-Class/0.08/branches/extended_rels/t/inflate/datetime_sybase.t
   DBIx-Class/0.08/branches/extended_rels/t/inflate/datetime_sybase_asa.t
   DBIx-Class/0.08/branches/extended_rels/t/inflate/hri.t
   DBIx-Class/0.08/branches/extended_rels/t/lib/DBIC/DebugObj.pm
   DBIx-Class/0.08/branches/extended_rels/t/lib/DBICTest/AuthorCheck.pm
   DBIx-Class/0.08/branches/extended_rels/t/lib/DBICTest/Schema.pm
   DBIx-Class/0.08/branches/extended_rels/t/lib/DBICTest/Schema/BooksInLibrary.pm
   DBIx-Class/0.08/branches/extended_rels/t/lib/DBICTest/Schema/EventTZPg.pm
   DBIx-Class/0.08/branches/extended_rels/t/lib/DBICVersion_v1.pm
   DBIx-Class/0.08/branches/extended_rels/t/lib/DBICVersion_v2.pm
   DBIx-Class/0.08/branches/extended_rels/t/lib/DBICVersion_v3.pm
   DBIx-Class/0.08/branches/extended_rels/t/lib/sqlite.sql
   DBIx-Class/0.08/branches/extended_rels/t/ordered/cascade_delete.t
   DBIx-Class/0.08/branches/extended_rels/t/prefetch/grouped.t
   DBIx-Class/0.08/branches/extended_rels/t/prefetch/via_search_related.t
   DBIx-Class/0.08/branches/extended_rels/t/resultset/as_subselect_rs.t
   DBIx-Class/0.08/branches/extended_rels/t/resultset/update_delete.t
   DBIx-Class/0.08/branches/extended_rels/t/search/subquery.t
   DBIx-Class/0.08/branches/extended_rels/t/sqlahacks/limit_dialects/toplimit.t
   DBIx-Class/0.08/branches/extended_rels/t/sqlahacks/sql_maker/sql_maker_quote.t
   DBIx-Class/0.08/branches/extended_rels/t/storage/dbh_do.t
Log:
 r9005 at Thesaurus (orig r8992):  caelum | 2010-03-13 00:47:40 +0100
 update Firebird docs
 r9006 at Thesaurus (orig r8993):  mo | 2010-03-13 10:03:24 +0100
 test the dynamic subclassing example
 r9008 at Thesaurus (orig r8995):  mo | 2010-03-13 13:09:59 +0100
 call inflate_result on new_result, but not from the CDBI compat layer
 r9009 at Thesaurus (orig r8996):  mo | 2010-03-13 13:37:40 +0100
 reverting 8995, was supposed to go to a branch
 r9010 at Thesaurus (orig r8997):  nigel | 2010-03-14 18:09:26 +0100
 Corrected a link to connect_info in Manual::Intro
 r9018 at Thesaurus (orig r9005):  rabbit | 2010-03-15 14:55:17 +0100
 Proper fix for RETURNING with default insert
 r9026 at Thesaurus (orig r9013):  nigel | 2010-03-15 18:36:44 +0100
 Documentation on Unicode use with DBIC
 r9027 at Thesaurus (orig r9014):  rabbit | 2010-03-16 02:55:27 +0100
 Horrible horrible rewrite of the aliastype scanner, but folks are starting to complain that their unqualified columns are making joins go away (this was the initial idea). Hopefully this code will silently die some day. /me can haz shame
 r9028 at Thesaurus (orig r9015):  rabbit | 2010-03-16 16:49:45 +0100
 Regenerate test DDL
 r9029 at Thesaurus (orig r9016):  caelum | 2010-03-16 22:01:21 +0100
 _ping for MSSQL
 r9030 at Thesaurus (orig r9017):  caelum | 2010-03-17 11:49:51 +0100
 add connect_call_use_foreign_keys for SQLite
 r9031 at Thesaurus (orig r9018):  abraxxa | 2010-03-17 16:36:13 +0100
 fixed Alexander Hartmaier's mail address
 
 r9039 at Thesaurus (orig r9026):  frew | 2010-03-18 15:59:55 +0100
 use update instead of set_columns in update_all
 r9040 at Thesaurus (orig r9027):  frew | 2010-03-18 20:53:28 +0100
 Ch Ch Ch Ch Changes!
 r9041 at Thesaurus (orig r9028):  caelum | 2010-03-19 16:03:41 +0100
 POD fixups
 r9042 at Thesaurus (orig r9029):  rabbit | 2010-03-19 18:39:02 +0100
 Fix UTF8Column out of order loading warning
 r9043 at Thesaurus (orig r9030):  rabbit | 2010-03-20 09:00:00 +0100
 Something is wrong with HRI inflation - too slow
 r9044 at Thesaurus (orig r9031):  rabbit | 2010-03-20 09:26:12 +0100
 Extend benchmark
 r9045 at Thesaurus (orig r9032):  rabbit | 2010-03-20 09:41:30 +0100
 MOAR bench
 r9048 at Thesaurus (orig r9035):  caelum | 2010-03-22 16:10:38 +0100
 redo Pg auto-columns using INSERT RETURNING
 r9049 at Thesaurus (orig r9036):  caelum | 2010-03-22 16:45:55 +0100
 move INSERT ... RETURNING code into ::DBI::InsertReturning component for Pg and Firebird
 r9050 at Thesaurus (orig r9037):  rabbit | 2010-03-22 18:03:13 +0100
 Even cleaner way of handling returning (no column interrogation in storage)
 r9051 at Thesaurus (orig r9038):  caelum | 2010-03-22 23:43:19 +0100
 update proxied methods for DBI::Replicated
 r9052 at Thesaurus (orig r9039):  caelum | 2010-03-23 06:56:12 +0100
 fix sort
 r9056 at Thesaurus (orig r9043):  rabbit | 2010-03-24 11:27:37 +0100
 A better illustration how to add relationships at runtime
 r9057 at Thesaurus (orig r9044):  rabbit | 2010-03-24 11:33:04 +0100
 Clearer 'no such rel' errors, correct exception on pkless prefetch
 r9058 at Thesaurus (orig r9045):  rabbit | 2010-03-24 11:44:50 +0100
 One missed step
 r9059 at Thesaurus (orig r9046):  ribasushi | 2010-03-24 12:11:12 +0100
 Straight_join support RT55579
 r9060 at Thesaurus (orig r9047):  rabbit | 2010-03-24 12:43:02 +0100
 bump SQLA dep
 r9061 at Thesaurus (orig r9048):  ribasushi | 2010-03-24 14:10:33 +0100
 Really fix INSERT RETURNING - simply make it a flag on the storage and keep the machinery in core
 r9062 at Thesaurus (orig r9049):  rabbit | 2010-03-24 14:30:17 +0100
 Cosmetics + changes
 r9063 at Thesaurus (orig r9050):  caelum | 2010-03-24 20:44:15 +0100
 Pg version check for can_insert_returning
 r9064 at Thesaurus (orig r9051):  caelum | 2010-03-24 21:25:24 +0100
 collect _server_info on connection
 r9065 at Thesaurus (orig r9052):  caelum | 2010-03-24 21:49:38 +0100
 s/_get_server_info/_populate_server_info/
 r9066 at Thesaurus (orig r9053):  caelum | 2010-03-25 01:24:09 +0100
 remove _get_mssql_version
 r9067 at Thesaurus (orig r9054):  caelum | 2010-03-25 06:32:51 +0100
 minor fix for SQLite version check
 r9068 at Thesaurus (orig r9055):  caelum | 2010-03-25 07:37:36 +0100
 add storage->_server_info->{dbms_ver_normalized}
 r9069 at Thesaurus (orig r9056):  caelum | 2010-03-26 09:55:46 +0100
 a couple minor Informix fixes
 r9070 at Thesaurus (orig r9057):  caelum | 2010-03-26 10:55:55 +0100
 savepoints for Informix
 r9071 at Thesaurus (orig r9058):  caelum | 2010-03-26 12:23:26 +0100
 InflateColumn::DateTime support for Informix
 r9072 at Thesaurus (orig r9059):  caelum | 2010-03-26 15:08:16 +0100
 with_deferred_fk_checks for Informix
 r9073 at Thesaurus (orig r9060):  caelum | 2010-03-26 15:28:24 +0100
 minor cleanups
 r9074 at Thesaurus (orig r9061):  castaway | 2010-03-26 21:16:44 +0100
 Added clarification of quoting to cookbook pod for sql funcs, from metaperl
 
 r9075 at Thesaurus (orig r9062):  caelum | 2010-03-27 00:12:37 +0100
 missing local
 r9076 at Thesaurus (orig r9063):  caelum | 2010-03-27 00:19:56 +0100
 move warning suppression into ::DBI::InterBase
 r9077 at Thesaurus (orig r9064):  caelum | 2010-03-27 00:30:02 +0100
 a bit cleaner warning suppression for DBD::InterBase only
 r9083 at Thesaurus (orig r9070):  rabbit | 2010-03-29 10:12:44 +0200
 pod error
 r9092 at Thesaurus (orig r9079):  boghead | 2010-04-02 22:44:32 +0200
 - Minor language cleanup in some of the Cookbook documentation
   (thanks metaperl and jester)
 - Fix the synopsis for DBIC::Storage::DBI.  ->datetime_parser returns a class,
   so you need to call a method on it in order to transform a DateTime object
 
 
 r9096 at Thesaurus (orig r9083):  ribasushi | 2010-04-05 21:53:13 +0200
 Minor test cleanups
 r9097 at Thesaurus (orig r9084):  caelum | 2010-04-05 22:08:48 +0200
 fix test count
 r9098 at Thesaurus (orig r9085):  ribasushi | 2010-04-06 05:36:04 +0200
 Fix embarrassing join optimizer bug
 r9112 at Thesaurus (orig r9099):  caelum | 2010-04-07 02:13:38 +0200
 UUID support for SQL Anywhere
 r9114 at Thesaurus (orig r9101):  caelum | 2010-04-07 19:23:53 +0200
 clean up UUID stringification for SQL Anywhere
 r9115 at Thesaurus (orig r9102):  rabbit | 2010-04-08 11:36:35 +0200
 Fix utf8columns loading-order test/code (really just as POC at this point)
 r9116 at Thesaurus (orig r9103):  ribasushi | 2010-04-08 12:10:12 +0200
 Make the insert_returning capability private (and saner naming)
 r9117 at Thesaurus (orig r9104):  rabbit | 2010-04-08 12:36:06 +0200
 Refactor the version handling
 Clean up normalization wrt non-numeric version parts (i.e. mysql)
 r9118 at Thesaurus (orig r9105):  ribasushi | 2010-04-08 12:56:33 +0200
 Even safer version normalization
 r9119 at Thesaurus (orig r9106):  rabbit | 2010-04-08 13:16:19 +0200
 Changes
 r9121 at Thesaurus (orig r9108):  caelum | 2010-04-08 18:17:29 +0200
 syntax error
 r9122 at Thesaurus (orig r9109):  caelum | 2010-04-08 18:38:59 +0200
 use min dbms_version for ::Replicated
 r9123 at Thesaurus (orig r9110):  matthewt | 2010-04-08 19:19:58 +0200
 fix POD links
 r9126 at Thesaurus (orig r9113):  rabbit | 2010-04-09 13:29:38 +0200
 Test to show utf8columns being indeed broken (sqlite papers over it)
 r9127 at Thesaurus (orig r9114):  rabbit | 2010-04-09 14:16:23 +0200
 Use a sloppy but recommended fix for Test warnings
 r9128 at Thesaurus (orig r9115):  ribasushi | 2010-04-11 10:43:56 +0200
 RT 55865
 r9135 at Thesaurus (orig r9122):  frew | 2010-04-11 19:28:54 +0200
 bump SQLA dep
 r9136 at Thesaurus (orig r9123):  rabbit | 2010-04-11 19:32:20 +0200
 Warn about both UTF8Columns and ForceUTF8 when loaded improperly
 r9137 at Thesaurus (orig r9124):  rabbit | 2010-04-11 20:35:53 +0200
 Deprecate UTF8Columns with a lot of warning whistles
 r9138 at Thesaurus (orig r9125):  frew | 2010-04-11 20:51:23 +0200
 Release 0.08121
 r9139 at Thesaurus (orig r9126):  frew | 2010-04-11 20:54:43 +0200
 set version for dev users
 r9146 at Thesaurus (orig r9133):  caelum | 2010-04-12 20:23:11 +0200
 better way to find minimal dbms version in ::Replicated
 r9155 at Thesaurus (orig r9142):  rabbit | 2010-04-14 15:41:51 +0200
 Add forgotten changes
 r9156 at Thesaurus (orig r9143):  caelum | 2010-04-14 17:04:00 +0200
 support $ENV{DBI_DSN} and $ENV{DBI_DRIVER} (patch from Possum)
 r9157 at Thesaurus (orig r9144):  rabbit | 2010-04-14 17:50:58 +0200
 Fix exception message
 r9190 at Thesaurus (orig r9177):  caelum | 2010-04-15 01:41:26 +0200
 datetime millisecond precision for MSSQL
 r9200 at Thesaurus (orig r9187):  ribasushi | 2010-04-18 23:06:29 +0200
 Fix leftover tabs
 r9201 at Thesaurus (orig r9188):  castaway | 2010-04-20 08:06:26 +0200
 Warn if a class found in ResultSet/ is not a subclass of ::ResultSet
 
 r9203 at Thesaurus (orig r9190):  rbuels | 2010-04-20 21:12:22 +0200
 create_ddl_dir mkpaths its dir if necessary.  also, added storage/deploy.t as place to put deployment tests
 r9204 at Thesaurus (orig r9191):  rbuels | 2010-04-20 21:20:06 +0200
 do not croak, rbuels!  jeez.
 r9205 at Thesaurus (orig r9192):  castaway | 2010-04-21 08:03:08 +0200
 Added missing test file (oops)
 
 r9213 at Thesaurus (orig r9200):  rabbit | 2010-04-24 02:23:05 +0200
 10% speed up on quoted statement generation
 r9215 at Thesaurus (orig r9202):  rabbit | 2010-04-24 02:27:47 +0200
 Revert bogus commit
 r9216 at Thesaurus (orig r9203):  ribasushi | 2010-04-24 02:31:06 +0200
 _quote is now properly handled in SQLA
 r9217 at Thesaurus (orig r9204):  caelum | 2010-04-24 02:32:58 +0200
 add "IMPROVING PERFORMANCE" section to Cookbook
 r9231 at Thesaurus (orig r9218):  ribasushi | 2010-04-26 13:13:13 +0200
 Bump CAG and SQLA dependencies
 r9232 at Thesaurus (orig r9219):  ribasushi | 2010-04-26 15:27:38 +0200
 Bizarre fork failure
 r9233 at Thesaurus (orig r9220):  castaway | 2010-04-26 21:45:32 +0200
 Add tests using select/as to sqlahacks
 
 r9234 at Thesaurus (orig r9221):  castaway | 2010-04-26 21:49:10 +0200
 Add test for fetching related obj/col as well
 
 r9245 at Thesaurus (orig r9232):  abraxxa | 2010-04-27 15:58:56 +0200
 fixed missing ' in update_or_create with key attr example
 
 r9247 at Thesaurus (orig r9234):  ribasushi | 2010-04-27 16:53:06 +0200
 Better concurrency in test (parent blocks)
 r9248 at Thesaurus (orig r9235):  ribasushi | 2010-04-27 16:53:34 +0200
 Reformat tests/comments a bit
 r9249 at Thesaurus (orig r9236):  ribasushi | 2010-04-27 18:40:10 +0200
 Better comment
 r9250 at Thesaurus (orig r9237):  ribasushi | 2010-04-27 18:40:31 +0200
 Rename test
 r9251 at Thesaurus (orig r9238):  ribasushi | 2010-04-27 19:11:45 +0200
 Fix global destruction problems
 r9271 at Thesaurus (orig r9258):  ribasushi | 2010-04-28 11:10:00 +0200
 Refactor SQLA/select interaction (in reality just cleanup)
 r9272 at Thesaurus (orig r9259):  caelum | 2010-04-28 11:20:08 +0200
 update ::DBI::Replicated
 r9273 at Thesaurus (orig r9260):  caelum | 2010-04-28 12:20:01 +0200
 add _verify_pid and _verify_tid to methods that croak in ::Replicated
 r9274 at Thesaurus (orig r9261):  ribasushi | 2010-04-28 14:39:02 +0200
 Fix failing test and some warnings
 r9288 at Thesaurus (orig r9275):  rabbit | 2010-04-29 10:32:10 +0200
 Allow limit syntax change in-flight without digging into internals
 r9292 at Thesaurus (orig r9279):  castaway | 2010-04-30 12:26:52 +0200
 Argh.. committing missing test file for load_namespaces tests
 
 r9295 at Thesaurus (orig r9282):  rabbit | 2010-05-01 11:06:21 +0200
 The final version of the test
 r9309 at Thesaurus (orig r9296):  rabbit | 2010-05-04 09:44:51 +0200
 Test for RT#56257
 r9310 at Thesaurus (orig r9297):  rabbit | 2010-05-04 10:00:11 +0200
 Refactor count handling, make count-resultset attribute lists inclusive rather than exclusive (side effect - solves RT#56257)
 r9318 at Thesaurus (orig r9305):  rabbit | 2010-05-05 11:49:51 +0200
  r9296 at Thesaurus (orig r9283):  ribasushi | 2010-05-01 11:51:15 +0200
  Branch to clean up various limit dialects
  r9297 at Thesaurus (orig r9284):  rabbit | 2010-05-01 11:55:04 +0200
  Preliminary version
  r9301 at Thesaurus (orig r9288):  rabbit | 2010-05-03 18:31:24 +0200
  Fix incorrect comparison
  r9302 at Thesaurus (orig r9289):  rabbit | 2010-05-03 18:32:36 +0200
  Do not add TOP prefixes to queries already containing it
  r9303 at Thesaurus (orig r9290):  rabbit | 2010-05-03 18:33:15 +0200
  Add an as selector to a prefetch subquery to aid the subselecting-limit analyzer
  r9304 at Thesaurus (orig r9291):  rabbit | 2010-05-03 18:34:49 +0200
  Rewrite mssql test to verify both types of limit dialects with and without quoting, rewrite the RNO, Top and RowNum dialects to rely on a factored out column re-aliaser
  r9305 at Thesaurus (orig r9292):  rabbit | 2010-05-03 21:06:01 +0200
  Fix Top tests, make extra col selector order consistent
  r9307 at Thesaurus (orig r9294):  ribasushi | 2010-05-04 00:50:35 +0200
  Fix test warning
  r9308 at Thesaurus (orig r9295):  ribasushi | 2010-05-04 01:04:32 +0200
  Some databases (db2) do not like leading __s - use a different weird identifier for extra selector names
  r9313 at Thesaurus (orig r9300):  rabbit | 2010-05-05 11:08:33 +0200
  Rename test
  r9314 at Thesaurus (orig r9301):  rabbit | 2010-05-05 11:11:32 +0200
  If there was no offset, there is no sense in reordering
  r9315 at Thesaurus (orig r9302):  rabbit | 2010-05-05 11:12:19 +0200
  Split and fix oracle tests
  r9317 at Thesaurus (orig r9304):  rabbit | 2010-05-05 11:49:33 +0200
  Changes
 
 r9321 at Thesaurus (orig r9308):  rabbit | 2010-05-05 13:01:35 +0200
 Changes
 r9322 at Thesaurus (orig r9309):  rabbit | 2010-05-05 13:02:39 +0200
 Fix obscure bug with as_subselect_rs (gah wrong commit msg)
 r9323 at Thesaurus (orig r9310):  rabbit | 2010-05-05 14:56:38 +0200
 Forgotten pieces
 r9329 at Thesaurus (orig r9316):  rabbit | 2010-05-07 10:15:52 +0200
 Failure to determine dbms version is *not* a fatal error - trap exceptions
 r9330 at Thesaurus (orig r9317):  caelum | 2010-05-07 11:57:24 +0200
 detect row_number() over support in MSSQL if version detection fails
 r9331 at Thesaurus (orig r9318):  caelum | 2010-05-07 14:56:57 +0200
 minor change
 r9332 at Thesaurus (orig r9319):  nigel | 2010-05-07 15:03:00 +0200
 empty update OK even if row is not in database
 r9333 at Thesaurus (orig r9320):  nigel | 2010-05-07 15:28:06 +0200
 Added reference to cascade_* in relationship attributes
 r9334 at Thesaurus (orig r9321):  nigel | 2010-05-07 15:39:37 +0200
 empty update OK even if row is not in database (fixed)
 r9335 at Thesaurus (orig r9322):  nigel | 2010-05-07 15:48:19 +0200
 empty update OK even if row is not in database (fixed2)
 r9336 at Thesaurus (orig r9323):  nigel | 2010-05-07 15:54:36 +0200
 Clarification to cascade_update attribute documentation
 r9337 at Thesaurus (orig r9324):  nigel | 2010-05-07 16:08:17 +0200
 Clarification cascade_* attribute defaults documentation
 r9350 at Thesaurus (orig r9337):  rabbit | 2010-05-08 11:23:56 +0200
 Make sure missing author-deps do not kill makefile creation
 r9358 at Thesaurus (orig r9344):  rabbit | 2010-05-11 16:46:47 +0200
  r9147 at Thesaurus (orig r9134):  frew | 2010-04-13 16:54:24 +0200
  branch for FilterColumn
  r9148 at Thesaurus (orig r9135):  frew | 2010-04-13 18:09:57 +0200
  change names wrap accessors
  r9158 at Thesaurus (orig r9145):  frew | 2010-04-14 17:55:14 +0200
  basic tests and a tiny fix
  r9159 at Thesaurus (orig r9146):  frew | 2010-04-14 19:30:46 +0200
  working filter column impl
  r9160 at Thesaurus (orig r9147):  frew | 2010-04-14 19:31:18 +0200
  useless var
  r9161 at Thesaurus (orig r9148):  frew | 2010-04-14 20:10:57 +0200
  MultiCreate test
  r9163 at Thesaurus (orig r9150):  frew | 2010-04-14 20:22:10 +0200
  test db in MC
  r9178 at Thesaurus (orig r9165):  rabbit | 2010-04-14 23:35:00 +0200
  Not sure how this was never noticed, but it definitely doesn't seem right and all tests pass...
  r9191 at Thesaurus (orig r9178):  frew | 2010-04-15 06:34:16 +0200
  better namiology
  r9193 at Thesaurus (orig r9180):  frew | 2010-04-15 16:14:28 +0200
  method and arg rename
  r9194 at Thesaurus (orig r9181):  frew | 2010-04-15 16:35:25 +0200
  use result source for filtering instead of result
  r9195 at Thesaurus (orig r9182):  frew | 2010-04-15 17:04:38 +0200
  initial stab at incomplete docs
  r9278 at Thesaurus (orig r9265):  frew | 2010-04-28 22:05:36 +0200
  doc, removal of source stuff, and Changes
  r9324 at Thesaurus (orig r9311):  frew | 2010-05-06 01:49:25 +0200
  test caching
  r9327 at Thesaurus (orig r9314):  rabbit | 2010-05-06 16:30:36 +0200
  Play nicer with lower-level methods
  r9328 at Thesaurus (orig r9315):  frew | 2010-05-07 04:27:18 +0200
  no filter and inflate column
  r9352 at Thesaurus (orig r9339):  rabbit | 2010-05-10 13:40:00 +0200
  Maintain full coherence between filtered cache and unfiltered results, including store_column
  r9353 at Thesaurus (orig r9340):  rabbit | 2010-05-10 13:40:48 +0200
  Fix typo
  r9357 at Thesaurus (orig r9343):  rabbit | 2010-05-11 16:45:50 +0200
  Comment weird looking code
 
 r9360 at Thesaurus (orig r9346):  caelum | 2010-05-11 17:44:15 +0200
 clearer logic
 r9364 at Thesaurus (orig r9350):  wreis | 2010-05-12 03:44:39 +0200
 add failing test for order_by using a function
 r9378 at Thesaurus (orig r9364):  rabbit | 2010-05-14 11:57:45 +0200
 cleanup test by wreis
 r9396 at Thesaurus (orig r9382):  rabbit | 2010-05-15 17:50:58 +0200
 Fix stupid typo-bug
 r9397 at Thesaurus (orig r9383):  rabbit | 2010-05-15 18:04:59 +0200
 Revert erroneous commit (belongs in a branch)
 r9402 at Thesaurus (orig r9388):  ash | 2010-05-16 12:28:13 +0200
 Fix how Schema::Versioned gets connection attributes
 r9408 at Thesaurus (orig r9394):  caelum | 2010-05-16 19:29:14 +0200
 add sql_maker to @rdbms_specific_methods
 r9420 at Thesaurus (orig r9406):  caelum | 2010-05-20 16:28:18 +0200
 support INSERT OR UPDATE triggers for Oracle
 r9421 at Thesaurus (orig r9407):  matthewt | 2010-05-20 19:19:14 +0200
 don't try and ensure_class_loaded an object. this doesn't work.
 r9422 at Thesaurus (orig r9408):  matthewt | 2010-05-20 19:36:01 +0200
 fix result_class setter behaviour to not stuff attrs (line commented out to prevent this regression being mistakenly re-introduced)
 r9423 at Thesaurus (orig r9409):  matthewt | 2010-05-20 19:49:32 +0200
 forgot to commit fixes
 r9424 at Thesaurus (orig r9410):  matthewt | 2010-05-20 20:09:52 +0200
 fix find() since that was also broken in r8754
 r9435 at Thesaurus (orig r9421):  rabbit | 2010-05-25 11:14:29 +0200
 Fix undef warning
 r9436 at Thesaurus (orig r9422):  rabbit | 2010-05-25 11:15:01 +0200
 Rewrite test as to not propagate several ways to do the same thing
 r9452 at Thesaurus (orig r9438):  caelum | 2010-05-25 21:33:37 +0200
  r24317 at hlagh (orig r9367):  tonvoon | 2010-05-14 12:24:35 -0400
  Branch for converting eval {} to Try::Tiny
  
  r24319 at hlagh (orig r9369):  tonvoon | 2010-05-14 17:25:02 -0400
  Conversion of eval => try (part 1)
  
  r24325 at hlagh (orig r9375):  tonvoon | 2010-05-14 18:03:03 -0400
  Add eval => try
  
  r24326 at hlagh (orig r9376):  tonvoon | 2010-05-14 18:22:57 -0400
  Another eval => try
  
  r24327 at hlagh (orig r9377):  tonvoon | 2010-05-14 18:45:27 -0400
  Corrected usage of $@ in catch block
  
  r24328 at hlagh (orig r9378):  tonvoon | 2010-05-14 19:29:52 -0400
  txn_do's eval => try
  
  r24329 at hlagh (orig r9379):  tonvoon | 2010-05-14 19:46:44 -0400
  eval => try where tests for $@ done
  
  r24330 at hlagh (orig r9380):  tonvoon | 2010-05-14 20:38:43 -0400
  All expected evals converted to try, except where no test is done,
  runtime evaluation, or base perl (such as "require"). Only one test
  failure due to string difference in output
  
  r24346 at hlagh (orig r9396):  tonvoon | 2010-05-17 08:52:28 -0400
  Fix missing $@ in try::tiny conversion
  
  r24347 at hlagh (orig r9397):  tonvoon | 2010-05-17 08:55:13 -0400
  Revert to eval instead of try::tiny because no check for $@
  
  r24348 at hlagh (orig r9398):  tonvoon | 2010-05-17 08:55:45 -0400
  Added myself to contributors
  
  r24349 at hlagh (orig r9399):  tonvoon | 2010-05-17 10:23:57 -0400
  Fixed exception logic due to not being able to use return with a catch{}
  
  r24350 at hlagh (orig r9400):  tonvoon | 2010-05-17 10:31:32 -0400
  Removed tab
  
  r24430 at hlagh (orig r9424):  ribasushi | 2010-05-25 10:09:39 -0400
  More try::tiny conversions
  r24432 at hlagh (orig r9426):  ribasushi | 2010-05-25 11:40:45 -0400
  Try::Tiny conversion finished
  r24433 at hlagh (orig r9427):  ribasushi | 2010-05-25 11:46:52 -0400
  Missed use
  r24440 at hlagh (orig r9434):  rkitover | 2010-05-25 13:47:25 -0400
  fix Oracle
  r24441 at hlagh (orig r9435):  rkitover | 2010-05-25 14:04:10 -0400
  fix odbc/mssql dynamic cursors
  r24442 at hlagh (orig r9436):  rkitover | 2010-05-25 14:32:41 -0400
  fix hang in SQLAnywhere DateTime tests
 
 r9454 at Thesaurus (orig r9440):  rabbit | 2010-05-26 11:28:37 +0200
 Simplify oracle retrial logic
 r9455 at Thesaurus (orig r9441):  rabbit | 2010-05-26 12:00:20 +0200
 Can not return from within a try block
 r9456 at Thesaurus (orig r9442):  rabbit | 2010-05-26 12:17:55 +0200
 Really fix logic
 r9464 at Thesaurus (orig r9450):  jester | 2010-05-27 16:06:43 +0200
 Light doc tweaks
 
 r9475 at Thesaurus (orig r9461):  ribasushi | 2010-05-31 00:17:29 +0200
 Rewrite GenericSubQ from SQLA::L to be actually useful
 Since it now works it is no longer necessary to turn on softlimit when genericsubq is detected
 Switch all sprintf()ed limit/offset specs to unsigned integers
 Lower the default rows-without-offset to 2^32
 r9476 at Thesaurus (orig r9462):  rabbit | 2010-05-31 00:25:01 +0200
 New format of changelog (easier to read)
 r9477 at Thesaurus (orig r9463):  rabbit | 2010-05-31 00:27:18 +0200
 Fix MC double-object creation (important for e.g. IC::FS which otherwise leaves orphaned files)
 r9479 at Thesaurus (orig r9465):  rabbit | 2010-05-31 00:37:23 +0200
 Fix tests to survive the new SQLA bindtype checks
 r9483 at Thesaurus (orig r9469):  rabbit | 2010-05-31 13:21:04 +0200
 Skip tests segfaulting with ancient DBD::Sybase versions
 r9488 at Thesaurus (orig r9474):  frew | 2010-05-31 17:11:51 +0200
 use namespace::clean w/ Try::Tiny
 r9489 at Thesaurus (orig r9475):  rabbit | 2010-05-31 17:13:29 +0200
 Fix Top-limit problem of missed bindvars
 r9490 at Thesaurus (orig r9476):  rabbit | 2010-05-31 17:21:20 +0200
 Skip failing tests on old DBD
 r9491 at Thesaurus (orig r9477):  frew | 2010-05-31 17:23:49 +0200
 add namespace::clean as regular dep
 r9501 at Thesaurus (orig r9487):  rabbit | 2010-05-31 19:45:27 +0200
 Fix RT57467, simplify test
 r9503 at Thesaurus (orig r9489):  rabbit | 2010-05-31 23:52:17 +0200
 Fix Schema::Versioned borkage
 r9506 at Thesaurus (orig r9492):  rabbit | 2010-06-01 00:08:45 +0200
  r9306 at Thesaurus (orig r9293):  edenc | 2010-05-03 21:20:21 +0200
 braching for bug fixes (rt 54939) [sic: "branching"]
  r9339 at Thesaurus (orig r9326):  edenc | 2010-05-07 18:15:47 +0200
  added failing test case for non-versioned schema deploy attempt
  r9340 at Thesaurus (orig r9327):  edenc | 2010-05-07 18:16:03 +0200
  dbicadmin can now install non-versioned schemas
  r9342 at Thesaurus (orig r9329):  rabbit | 2010-05-07 18:28:27 +0200
  Trap erroneous warnings
  r9345 at Thesaurus (orig r9332):  edenc | 2010-05-08 00:02:00 +0200
  test for the dbicadmin -I option
  r9346 at Thesaurus (orig r9333):  edenc | 2010-05-08 00:02:25 +0200
  fixes to dbicadmin -I test
  r9347 at Thesaurus (orig r9334):  edenc | 2010-05-08 00:02:41 +0200
  -I option functional and passing tests
  r9348 at Thesaurus (orig r9335):  edenc | 2010-05-08 01:39:52 +0200
  moved mock schema out of t/var
  r9375 at Thesaurus (orig r9361):  edenc | 2010-05-14 04:02:41 +0200
  added debug option
  r9376 at Thesaurus (orig r9362):  edenc | 2010-05-14 04:03:00 +0200
  debug and include_dirs integration between dbicadmin and DBIx::Class::Admin
  r9377 at Thesaurus (orig r9363):  edenc | 2010-05-14 04:03:21 +0200
  testing dbicadmin/DBIx::Class::Admin integration
  r9494 at Thesaurus (orig r9480):  rabbit | 2010-05-31 18:03:08 +0200
  Simplify includedir testing
  r9496 at Thesaurus (orig r9482):  rabbit | 2010-05-31 18:47:35 +0200
  Some comments
  r9497 at Thesaurus (orig r9483):  rabbit | 2010-05-31 18:50:50 +0200
  Properly ignore contents of var
  r9498 at Thesaurus (orig r9484):  rabbit | 2010-05-31 18:59:49 +0200
  Remove leftovers
  r9499 at Thesaurus (orig r9485):  rabbit | 2010-05-31 19:24:55 +0200
  Cleanup debug output
  r9500 at Thesaurus (orig r9486):  rabbit | 2010-05-31 19:35:31 +0200
  Fix RT#57732
  r9502 at Thesaurus (orig r9488):  rabbit | 2010-05-31 19:48:41 +0200
  typos
  r9505 at Thesaurus (orig r9491):  rabbit | 2010-06-01 00:08:29 +0200
  Changes
 
 r9514 at Thesaurus (orig r9500):  rabbit | 2010-06-01 00:25:35 +0200
  r9365 at Thesaurus (orig r9351):  ribasushi | 2010-05-12 10:09:54 +0200
  New branch to cleanup resultset-wide update/delete
  r9419 at Thesaurus (orig r9405):  wreis | 2010-05-19 02:49:47 +0200
  failing tests for RS->update
  r9511 at Thesaurus (orig r9497):  rabbit | 2010-06-01 00:20:39 +0200
  Fix update/delete on prefetching resultsets
  r9512 at Thesaurus (orig r9498):  rabbit | 2010-06-01 00:24:54 +0200
  Test cleanup
  r9513 at Thesaurus (orig r9499):  rabbit | 2010-06-01 00:25:14 +0200
  test replication test fail
 
 r9520 at Thesaurus (orig r9506):  frew | 2010-06-01 03:35:36 +0200
 do not lazily set up include dirs and do not localize
 r9523 at Thesaurus (orig r9509):  rabbit | 2010-06-01 13:36:42 +0200
 Versioned compat code connects too early
 r9524 at Thesaurus (orig r9510):  rabbit | 2010-06-01 15:04:49 +0200
 Move order outside of the GenSubQ subquery, to appease retarded Sybase
 r9525 at Thesaurus (orig r9511):  caelum | 2010-06-01 15:08:08 +0200
 fix uninitialized warning
 r9527 at Thesaurus (orig r9513):  rabbit | 2010-06-01 15:46:41 +0200
 Try::Tiny is a part of the clan
 r9529 at Thesaurus (orig r9515):  rabbit | 2010-06-01 16:02:42 +0200
  r7935 at Thesaurus (orig r7923):  ribasushi | 2009-11-19 11:05:04 +0100
  Branches for RTs
  r7936 at Thesaurus (orig r7924):  ribasushi | 2009-11-19 11:10:18 +0100
  Patch by kalex
  r8152 at Thesaurus (orig r8140):  rbo | 2009-12-18 12:51:16 +0100
 Add PRIOR as special and unary op to SQLAHacks::Oracle and use _recurse_where to create the connect_by SQL statement
  
  r8968 at Thesaurus (orig r8955):  rabbit | 2010-03-10 10:24:50 +0100
  Merge fallout
  r9004 at Thesaurus (orig r8991):  rbo | 2010-03-12 09:00:54 +0100
  Use SQL::Abstract 1.61_01, add some tests.
  
  r9016 at Thesaurus (orig r9003):  rbo | 2010-03-15 12:48:03 +0100
  Fixed 73oracle.t because of new implementation of connect_by
  
  r9019 at Thesaurus (orig r9006):  rbo | 2010-03-15 15:06:10 +0100
  fixed requirements
  
  r9020 at Thesaurus (orig r9007):  ribasushi | 2010-03-15 15:49:26 +0100
  Rewrite hierarchical query tests
  r9021 at Thesaurus (orig r9008):  ribasushi | 2010-03-15 16:02:54 +0100
  Whoops
  r9022 at Thesaurus (orig r9009):  ribasushi | 2010-03-15 16:03:50 +0100
  Now really fixed
  r9023 at Thesaurus (orig r9010):  rbo | 2010-03-15 16:52:24 +0100
  fixed sql mistakes from hierarchical query tests
  
  r9024 at Thesaurus (orig r9011):  rbo | 2010-03-15 18:24:47 +0100
  Update _order_siblings_by, use _order_by_chunks instead of own stuff
  
  r9025 at Thesaurus (orig r9012):  rbo | 2010-03-15 18:25:56 +0100
  fixed hierarchical query tests, not all pass yet
  
  r9078 at Thesaurus (orig r9065):  rbo | 2010-03-27 10:50:33 +0100
  Fixed typo
  
  r9079 at Thesaurus (orig r9066):  rbo | 2010-03-27 11:50:38 +0100
 Update POD, because of new connect_by implementation
  
  r9080 at Thesaurus (orig r9067):  rbo | 2010-03-27 12:03:23 +0100
  Support NOCYCLE parameter.
  
  
  r9082 at Thesaurus (orig r9069):  rbo | 2010-03-27 19:10:26 +0100
  Test with the correct arrangement of the tree.
  (oracle hierarchical queries)
  
  
  r9094 at Thesaurus (orig r9081):  rbo | 2010-04-04 12:56:37 +0200
  Add missing nocycle test for hierarchical queries with oracle
  
  r9095 at Thesaurus (orig r9082):  rbo | 2010-04-04 12:57:45 +0200
  Remove todo: 'Check the parameter syntax of connect_by' fixed with new SQLA release
  
  r9129 at Thesaurus (orig r9116):  ribasushi | 2010-04-11 12:53:20 +0200
  Fix rogue tabs
  r9130 at Thesaurus (orig r9117):  ribasushi | 2010-04-11 14:52:07 +0200
  Fix top-level PRIOR with missing '='
  Fix weird AND hardcoded in tests
  Test quotes as well
  r9131 at Thesaurus (orig r9118):  ribasushi | 2010-04-11 14:54:59 +0200
  Move oracle offline test where it belongs
  r9144 at Thesaurus (orig r9131):  rabbit | 2010-04-12 09:06:28 +0200
  Botched merge
  r9145 at Thesaurus (orig r9132):  rabbit | 2010-04-12 09:10:39 +0200
  Pesky tabs
  r9162 at Thesaurus (orig r9149):  rbo | 2010-04-14 20:19:20 +0200
  Fixed t/73oracle.t remove braces
  
  r9312 at Thesaurus (orig r9299):  rabbit | 2010-05-04 10:13:47 +0200
  _select_args processing no longer necessary
  r9382 at Thesaurus (orig r9368):  rbo | 2010-05-14 23:01:42 +0200
  Update TODO
  
  r9384 at Thesaurus (orig r9370):  rbo | 2010-05-14 23:27:13 +0200
  Fixed broken count query, because of new count query implementation.
  
  r9385 at Thesaurus (orig r9371):  rbo | 2010-05-14 23:35:33 +0200
  Rename option nocycle to connect_by_nocycle
  
  r9386 at Thesaurus (orig r9372):  rbo | 2010-05-14 23:43:29 +0200
  Fixed count subquery from disabled test case 
  
  r9387 at Thesaurus (orig r9373):  rbo | 2010-05-14 23:55:28 +0200
  Enabled count sub query test, isn't broken anymore
  
  r9388 at Thesaurus (orig r9374):  rbo | 2010-05-15 00:02:41 +0200
  Use connect_by OR connect_by_nocycle
  
  r9395 at Thesaurus (orig r9381):  rabbit | 2010-05-15 17:01:10 +0200
  Use DDC instead of DD
  r9463 at Thesaurus (orig r9449):  rbo | 2010-05-27 10:17:35 +0200
 Fixed group_by bind position problem, reported and patched by Alexander Keusch. Patch has been adjusted.
  
  r9517 at Thesaurus (orig r9503):  rabbit | 2010-06-01 01:54:18 +0200
  Switch away from explicit count-attr lists - just delete what we do not need
  r9518 at Thesaurus (orig r9504):  rabbit | 2010-06-01 02:26:49 +0200
  Do not pollute sqlmaker while scanning raw sql
  r9519 at Thesaurus (orig r9505):  rabbit | 2010-06-01 02:28:06 +0200
  Minor touches/changes
  r9521 at Thesaurus (orig r9507):  rbo | 2010-06-01 07:13:55 +0200
  Fixed typo in Changes file. Adding myself to the CONTRIBUTORS list
  
  r9528 at Thesaurus (orig r9514):  rabbit | 2010-06-01 16:02:06 +0200
 Add A.Keusch to contrib list
 
 r9532 at Thesaurus (orig r9518):  caelum | 2010-06-01 16:56:48 +0200
 pass postgres_version in producer_args when deploying to Pg
 r9535 at Thesaurus (orig r9521):  caelum | 2010-06-01 17:24:21 +0200
  r24518 at hlagh (orig r9512):  rkitover | 2010-06-01 09:16:09 -0400
  branch to use namespace::clean
  r24523 at hlagh (orig r9517):  rkitover | 2010-06-01 10:47:03 -0400
  use namespace::cleaned out imports for some common utilities
  r24526 at hlagh (orig r9520):  rkitover | 2010-06-01 11:23:36 -0400
  remove useless use of n::c
 
 r9537 at Thesaurus (orig r9523):  rabbit | 2010-06-01 17:46:05 +0200
 Rewrite test (no functional changes)
 r9538 at Thesaurus (orig r9524):  caelum | 2010-06-01 17:58:44 +0200
 fix hang in SQLAHacks
 r9539 at Thesaurus (orig r9525):  caelum | 2010-06-01 19:48:24 +0200
 pass sqlite and pg version properly on ->deploy for SQLT trunk
 r9540 at Thesaurus (orig r9526):  rabbit | 2010-06-01 21:57:46 +0200
 Preserve @_ aliasing semantics on coderefs within try{} blocks
 r9551 at Thesaurus (orig r9537):  frew | 2010-06-02 07:05:39 +0200
perl modules shouldn't munge @INC; that's the job of the script
 r9552 at Thesaurus (orig r9538):  caelum | 2010-06-02 09:03:33 +0200
 use SET ROWCOUNT for Sybase ASE limits without an offset
 r9553 at Thesaurus (orig r9539):  caelum | 2010-06-02 10:44:44 +0200
 update Changes
 r9554 at Thesaurus (orig r9540):  rabbit | 2010-06-02 11:44:25 +0200
 lib expects a list, not an arrayref
 r9555 at Thesaurus (orig r9541):  rabbit | 2010-06-02 11:53:13 +0200
 Do not run test without sqlt
 r9556 at Thesaurus (orig r9542):  rabbit | 2010-06-02 11:55:48 +0200
 Do not use 2**32 directly - causes %u differences between 32 and 64bit
 r9557 at Thesaurus (orig r9543):  rabbit | 2010-06-02 12:43:31 +0200
 Really fix uninit warning
 r9558 at Thesaurus (orig r9544):  rabbit | 2010-06-02 12:44:59 +0200
 Relax developer policy, allowing to skip optional dependencies when in a checkout
 r9559 at Thesaurus (orig r9545):  rabbit | 2010-06-02 12:55:54 +0200
 Trash unnecessary attributes
 r9560 at Thesaurus (orig r9546):  rabbit | 2010-06-02 13:22:21 +0200
 Make sure ddl_dir is created even if a dir-object is supplied
 r9561 at Thesaurus (orig r9547):  rabbit | 2010-06-02 13:41:40 +0200
 More changelogging
 r9562 at Thesaurus (orig r9548):  rabbit | 2010-06-02 14:14:05 +0200
 Auto-fill rdbms version for sqlt
 r9563 at Thesaurus (orig r9549):  rabbit | 2010-06-02 15:11:09 +0200
 Codify result_class accessor/attribute behavior
 r9564 at Thesaurus (orig r9550):  rabbit | 2010-06-02 15:21:59 +0200
 clarify choice of value



Property changes on: DBIx-Class/0.08/branches/extended_rels
___________________________________________________________________
Modified: svk:merge
   - 168d5346-440b-0410-b799-f706be625ff1:/DBIx-Class-current:2207
462d4d0c-b505-0410-bf8e-ce8f877b3390:/local/bast/DBIx-Class:3159
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/cookbook_fixes:7657
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/prefetch_bug-unqualified_column_in_search_related_cond:7959
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/resultsetcolumn_custom_columns:5160
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/sqla_1.50_compat:5414
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/void_populate_resultset_cond:7935
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/trunk:7982
9c88509d-e914-0410-b01c-b9530614cbfe:/local/DBIx-Class:32260
9c88509d-e914-0410-b01c-b9530614cbfe:/local/DBIx-Class-CDBICompat:54993
9c88509d-e914-0410-b01c-b9530614cbfe:/vendor/DBIx-Class:31122
ab17426e-7cd3-4704-a2a2-80b7c0a611bb:/local/dbic_column_attr:10946
ab17426e-7cd3-4704-a2a2-80b7c0a611bb:/local/dbic_trunk:11788
bd5ac9a7-f185-4d95-9186-dbb8b392a572:/local/os/bast/DBIx-Class/0.08/trunk:2798
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/_abandoned_but_possibly_useful/table_name_ref:7266
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/ado_mssql:7886
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/autocast:7418
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/belongs_to_null_col_fix:5244
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/cdbicompat_integration:4160
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/chaining_fixes:8626
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/column_attr:5074
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/complex_join_rels:4589
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/connect_info_hash:7435
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/connected_schema_leak:8264
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/cookbook_fixes:7479
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/count_distinct:6218
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/count_rs:6741
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/create_scalarref_rt51559:8027
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/dbicadmin_pod:8786
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/dbicadmin_refactor:8691
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/dephandling:8674
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/diamond_relationships:6310
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/discard_changes_replication_fix:7252
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/file_column:3920
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/fix-update-and-delete-as_query:6162
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/get_inflated_columns_rt46953:7964
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/grouped_has_many_join:7382
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/grouped_prefetch:6885
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/handle_all_storage_methods_in_replicated:8612
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/ic_dt_post_inflate:8517
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/informix:8434
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/is_resultset_paginated:7769
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/joined_count:6323
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mc_fixes:6645
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_limit_regression:8278
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_money_type:7096
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_rno_pagination:8054
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_storage_minor_refactor:7210
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_top_fixes:6971
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/multi_stuff:5565
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/multicreate_fixes:7275
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/multiple_version_upgrade:8429
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mysql_ansi:7175
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mystery_join:6589
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/new_replication_transaction_fixup:7058
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/no_duplicate_indexes_for_pk_cols:8373
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/normalize_connect_info:8274
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/null_column_regression:8314
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/on_connect_call:6854
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/on_disconnect_do:3694
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle-tweaks:6222
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle_quotes:8812
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle_sequence:4173
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle_shorten_aliases:8234
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/order_by_refactor:6475
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/parser_fk_index:4485
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/pg_unqualified_schema:7842
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/pod_fixes:8902
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch:5699
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch-group_by:7917
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_bug-unqualified_column_in_search_related_cond:7900
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_limit:6724
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_pager:8431
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_redux:7206
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/pri_key_refactor:8751
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/reduce_pings:7261
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/replication_dedux:4600
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/rsrc_in_storage:6577
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/rt_bug_41083:5437
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/savepoints:4223
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/search_related_prefetch:6818
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sqla_1.50_compat:5321
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sqlt_parser_view:8145
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/storage-interbase:8929
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/storage-ms-access:4142
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/storage-tweaks:6262
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/subclassed_rsset:5930
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/subquery:5617
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/syb_connected:6919
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase:7682
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_asa:8513
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_bulk_insert:7679
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_bulkinsert_support:7796
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_computed_columns:8496
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_mssql:6125
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_refactor:7940
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_support:7797
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/table_name_ref:7132
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/top_limit_altfix:6429
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/type_aware_update:6619
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/unresolvable_prefetch:6949
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/versioned_enhancements:4125
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/versioning:4578
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/view_rels:7908
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/views:5585
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/void_populate_resultset_cond:7944
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/tags/0.08108_prerelease_please_do_not_pull_into_it:7008
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/tags/pre_0.08109_please_do_not_merge:7336
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/trunk:8961
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-C3:318
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-current:2222
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-joins:173
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-resultset:570
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/datetime:1716
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/find_compat:1855
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/find_unique_query_fixes:2142
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/inflate:1988
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/many_to_many:2025
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/re_refactor_bugfix:1944
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/reorganize_tests:1827
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset-new-refactor:1766
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset_2_electric_boogaloo:2175
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset_cleanup:2102
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/sqlt_tests_refactor:2043
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/trunk/DBIx-Class:3606
fe160bb6-dc1c-0410-9f2b-d64a711b54a5:/local/DBIC-trunk-0.08:10510
   + 168d5346-440b-0410-b799-f706be625ff1:/DBIx-Class-current:2207
462d4d0c-b505-0410-bf8e-ce8f877b3390:/local/bast/DBIx-Class:3159
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/cookbook_fixes:7657
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/prefetch_bug-unqualified_column_in_search_related_cond:7959
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/resultsetcolumn_custom_columns:5160
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/sqla_1.50_compat:5414
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/void_populate_resultset_cond:7935
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/trunk:7982
9c88509d-e914-0410-b01c-b9530614cbfe:/local/DBIx-Class:32260
9c88509d-e914-0410-b01c-b9530614cbfe:/local/DBIx-Class-CDBICompat:54993
9c88509d-e914-0410-b01c-b9530614cbfe:/vendor/DBIx-Class:31122
ab17426e-7cd3-4704-a2a2-80b7c0a611bb:/local/dbic_column_attr:10946
ab17426e-7cd3-4704-a2a2-80b7c0a611bb:/local/dbic_trunk:11788
bd5ac9a7-f185-4d95-9186-dbb8b392a572:/local/os/bast/DBIx-Class/0.08/trunk:2798
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/_abandoned_but_possibly_useful/table_name_ref:7266
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/ado_mssql:7886
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/autocast:7418
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/belongs_to_null_col_fix:5244
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/cdbicompat_integration:4160
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/chaining_fixes:8626
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/column_attr:5074
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/complex_join_rels:4589
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/connect_info_hash:7435
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/connected_schema_leak:8264
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/cookbook_fixes:7479
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/count_distinct:6218
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/count_rs:6741
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/create_scalarref_rt51559:8027
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/dbicadmin-non-versioned:9491
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/dbicadmin_pod:8786
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/dbicadmin_refactor:8691
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/dephandling:8674
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/diamond_relationships:6310
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/discard_changes_replication_fix:7252
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/file_column:3920
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/filter_column:9343
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/fix-update-and-delete-as_query:6162
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/get_inflated_columns_rt46953:7964
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/grouped_has_many_join:7382
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/grouped_prefetch:6885
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/handle_all_storage_methods_in_replicated:8612
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/ic_dt_post_inflate:8517
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/informix:8434
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/is_resultset_paginated:7769
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/joined_count:6323
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mc_fixes:6645
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_limit_regression:8278
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_money_type:7096
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_rno_pagination:8054
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_storage_minor_refactor:7210
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_top_fixes:6971
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/multi_stuff:5565
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/multicreate_fixes:7275
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/multiple_version_upgrade:8429
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mysql_ansi:7175
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mystery_join:6589
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/namespace-clean:9520
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/new_replication_transaction_fixup:7058
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/no_duplicate_indexes_for_pk_cols:8373
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/normalize_connect_info:8274
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/null_column_regression:8314
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/on_connect_call:6854
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/on_disconnect_do:3694
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle-tweaks:6222
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle_hierarchical_queries_rt39121:9514
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle_quotes:8812
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle_sequence:4173
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle_shorten_aliases:8234
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/order_by_refactor:6475
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/parser_fk_index:4485
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/pg_unqualified_schema:7842
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/pod_fixes:8902
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch:5699
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch-group_by:7917
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_bug-unqualified_column_in_search_related_cond:7900
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_limit:6724
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_pager:8431
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_redux:7206
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/pri_key_refactor:8751
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/reduce_pings:7261
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/replication_dedux:4600
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/rs_UD_cleanup:9499
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/rsrc_in_storage:6577
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/rt_bug_41083:5437
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/savepoints:4223
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/search_related_prefetch:6818
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sqla_1.50_compat:5321
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sqlt_parser_view:8145
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/storage-interbase:8929
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/storage-ms-access:4142
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/storage-tweaks:6262
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/subclassed_rsset:5930
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/subqueried_limit_fixes:9304
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/subquery:5617
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/syb_connected:6919
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase:7682
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_asa:8513
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_bulk_insert:7679
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_bulkinsert_support:7796
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_computed_columns:8496
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_mssql:6125
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_refactor:7940
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_support:7797
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/table_name_ref:7132
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/top_limit_altfix:6429
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/try-tiny:9436
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/type_aware_update:6619
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/unresolvable_prefetch:6949
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/versioned_enhancements:4125
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/versioning:4578
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/view_rels:7908
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/views:5585
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/void_populate_resultset_cond:7944
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/tags/0.08108_prerelease_please_do_not_pull_into_it:7008
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/tags/pre_0.08109_please_do_not_merge:7336
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/trunk:9550
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-C3:318
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-current:2222
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-joins:173
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-resultset:570
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/datetime:1716
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/find_compat:1855
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/find_unique_query_fixes:2142
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/inflate:1988
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/many_to_many:2025
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/re_refactor_bugfix:1944
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/reorganize_tests:1827
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset-new-refactor:1766
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset_2_electric_boogaloo:2175
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset_cleanup:2102
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/sqlt_tests_refactor:2043
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/trunk/DBIx-Class:3606
fe160bb6-dc1c-0410-9f2b-d64a711b54a5:/local/DBIC-trunk-0.08:10510

Modified: DBIx-Class/0.08/branches/extended_rels/Changes
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/Changes	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/Changes	2010-06-02 17:41:37 UTC (rev 9557)
@@ -1,19 +1,105 @@
 Revision history for DBIx::Class
 
+    * New Features
+        - Add DBIx::Class::FilterColumn for non-ref filtering
+        - ::Storage::DBI now correctly preserves a parent $dbh from
+          terminating children, even during interpreter-global
+          out-of-order destruction
+        - dbicadmin supports an -I option with the same semantics as
+          perl itself
+        - InflateColumn::DateTime support for MSSQL via DBD::Sybase
+        - Millisecond precision support for MSSQL datetimes for
+          InflateColumn::DateTime
+        - Oracle-specific hierarchical query syntax support:
+          CONNECT BY (NOCYCLE) / START WITH / ORDER SIBLINGS BY
+        - Support connecting using $ENV{DBI_DSN} and $ENV{DBI_DRIVER}
+        - current_source_alias method on ResultSet objects to
+          determine the alias to use in programmatically assembled
+          search()es (originally added in 0.08100 but unmentioned)
+        - Rewrite/unification of all subselecting limit emulations
+          (RNO, Top, RowNum) to be much more robust wrt complex joined
+          resultsets
+        - MSSQL limits now don't require nearly as many applications of
+          the unsafe_subselect_ok attribute, due to optimized queries
+        - Support for Generic Subquery limit "emulation" - awfully slow
+          and inefficient but works on almost any db, and is preferred
+          to software limit emulation
+        - Sybase ASE driver now uses SET ROWCOUNT where possible, and
+          Generic Subquery otherwise for limit support instead of always
+          using software limit emulation
+        - create_ddl_dir (and derivatives) now attempt to create the given
+          $ddl_dir if it does not already exist
+        - deployment_statements now automatically supplies the current RDBMS
+          version to SQLT producer_args for MySQL, Pg, SQLite and Oracle
+
+    * Fixes
+        - Fix nasty potentially data-eating bug when deleting/updating
+          a limited resultset
+        - Fix find() to use result_class set on object
+        - Fix result_class setter behaviour to not mistakenly stuff attrs.
+        - Don't try and ensure_class_loaded an object. This doesn't work.
+        - Fix as_subselect_rs to not inject resultset class-wide where
+          conditions outside of the resulting subquery
+        - Fix count() failing with {for} resultset attribute (RT#56257)
+        - Fixed incorrect detection of Limit dialect on unconnected $schema
+        - update() on row not in_storage no longer throws an exception
+          if there are no dirty columns to update (fixes cascaded update
+          annoyances)
+        - update()/delete() on prefetching resultsets no longer results
+          in malformed SQL (some $rs attributes were erroneously left in)
+        - Fix dbicadmin to allow deploy() on non-versioned schema
+        - Fix dbicadmin to respect sql_dir on upgrade() (RT#57732)
+        - Update Schema::Versioned to respect hashref style of
+          connection_info
+        - Do not recreate the same related object twice during MultiCreate
+          (solves the problem of orphaned IC::FS files)
+        - Fully qualify xp_msver selector when using DBD::Sybase with
+          MSSQL (RT#57467)
+        - Fix ::DBI::Storage to always be able to present a full set of
+          connect() attributes to e.g. Schema::Versioned
+        - Fix Oracle auto-inc trigger detection of "INSERT OR UPDATE"-type
+          triggers
+
+    * Misc
+        - Reformatted Changelog \o/
+        - Allow developers to skip optional dependency forcing when working
+          from a checkout
+        - Add a warning to load_namespaces if a class in ResultSet/ is not
+          a subclass of DBIx::Class::ResultSet
+        - All DBIC exception-handling switched to Try::Tiny
+        - All DBIC modules are now free of imports via namespace::clean
+        - Depend on optimized SQL::Abstract (faster SQL generation)
+        - Depend on new Class::Accessor::Grouped reintroducing optional use
+          of Class::XSAccessor (just install C::XSA and get lightning fast
+          column accessors)
+
+0.08121 2010-04-11 18:43:00 (UTC)
         - Support for Firebird RDBMS with DBD::InterBase and ODBC
+        - Add core support for INSERT RETURNING (for storages that
+          supports this syntax, currently PostgreSQL and Firebird)
+        - Fix spurious warnings on multiple UTF8Columns component loads
+        - DBIx::Class::UTF8Columns entered deprecated state
         - DBIx::Class::InflateColumn::File entered deprecated state
         - DBIx::Class::Optional::Dependencies left experimental state
         - Add req_group_list to Opt::Deps (RT#55211)
+        - Add support for mysql-specific STRAIGHT_JOIN (RT#55579)
         - Cascading delete/update are now wrapped in a transaction
           for atomicity
+        - Fix accidental autovivification of ENV vars
+        - Fix update_all and delete_all to be wrapped in a transaction
         - Fix multiple deficiencies when using MultiCreate with
           data-encoder components (e.g. ::EncodedColumn)
         - Fix regression where SQL files with comments were not
           handled properly by ::Schema::Versioned.
         - Fix regression on not properly throwing when $obj->relationship
           is unresolvable
+        - Fix the join-optimiser to consider unqualified column names
+          whenever possible
+        - Fix an issue with multiple same-table joins confusing the join
+          optimizer
         - Add has_relationship method to row objects
         - Fix regression in set_column on PK-less objects
+        - Better error text on malformed/missing relationships
         - Add POD about the significance of PK columns
         - Fix for SQLite to ignore the (unsupported) { for => ... }
           attribute

Modified: DBIx-Class/0.08/branches/extended_rels/Makefile.PL
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/Makefile.PL	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/Makefile.PL	2010-06-02 17:41:37 UTC (rev 9557)
@@ -12,13 +12,20 @@
 use Config;
 $ENV{PERL5LIB} = join ($Config{path_sep}, @INC);
 
+use Getopt::Long qw/:config gnu_getopt bundling_override no_ignore_case pass_through/;
+my $args = {
+  skip_author_deps => undef,
+};
+GetOptions ($args, 'skip_author_deps');
+if (@ARGV) {
+  warn "\nIgnoring unrecognized option(s): @ARGV\n\n";
+}
 
 ###
 ### DO NOT ADD OPTIONAL DEPENDENCIES HERE, EVEN AS recommends()
 ### All of them should go to DBIx::Class::Optional::Dependencies
 ###
 
-
 name     'DBIx-Class';
 perl_version '5.008001';
 all_from 'lib/DBIx/Class.pm';
@@ -37,7 +44,7 @@
 
 my $runtime_requires = {
   'Carp::Clan'               => '6.0',
-  'Class::Accessor::Grouped' => '0.09002',
+  'Class::Accessor::Grouped' => '0.09003',
   'Class::C3::Componentised' => '1.0005',
   'Class::Inspector'         => '1.24',
   'Data::Page'               => '2.00',
@@ -45,12 +52,14 @@
   'MRO::Compat'              => '0.09',
   'Module::Find'             => '0.06',
   'Path::Class'              => '0.18',
-  'SQL::Abstract'            => '1.61',
+  'SQL::Abstract'            => '1.67',
   'SQL::Abstract::Limit'     => '0.13',
   'Sub::Name'                => '0.04',
   'Data::Dumper::Concise'    => '1.000',
   'Scope::Guard'             => '0.03',
   'Context::Preserve'        => '0.01',
+  'Try::Tiny'                => '0.04',
+  'namespace::clean'         => '0.14',
 };
 
 # this is so we can order requires alphabetically
@@ -63,23 +72,45 @@
 
 
 # require extra modules for testing if we're in a checkout
+my $optdep_msg;
 if ($Module::Install::AUTHOR) {
-  warn <<'EOW';
+  if ($args->{skip_author_deps}) {
+    $optdep_msg = <<'EOW';
+
 ******************************************************************************
 ******************************************************************************
 ***                                                                        ***
+*** IGNORING AUTHOR MODE: no optional test dependencies will be forced.    ***
+***                                                                        ***
+*** If you are using this checkout with the intention of submitting a DBIC ***
+*** patch, you are *STRONGLY ENCOURAGED* to install all dependencies, so   ***
+*** that every possible unit-test will run.                                ***
+***                                                                        ***
+******************************************************************************
+******************************************************************************
+
+EOW
+  }
+  else {
+    $optdep_msg = <<'EOW';
+
+******************************************************************************
+******************************************************************************
+***                                                                        ***
 *** AUTHOR MODE: all optional test dependencies converted to hard requires ***
+***      ( to disable, re-run Makefile.PL with --skip_author_deps )        ***
 ***                                                                        ***
 ******************************************************************************
 ******************************************************************************
 
 EOW
 
-  require DBIx::Class::Optional::Dependencies;
-  $reqs->{test_requires} = {
-    %{$reqs->{test_requires}},
-    map { %$_ } (values %{DBIx::Class::Optional::Dependencies->req_group_list}),
-  };
+    require DBIx::Class::Optional::Dependencies;
+    $reqs->{test_requires} = {
+      %{$reqs->{test_requires}},
+      map { %$_ } (values %{DBIx::Class::Optional::Dependencies->req_group_list}),
+    };
+  }
 }
 
 # compose final req list, for alphabetical ordering
@@ -103,7 +134,10 @@
   $rtype->($mod, $ver);
 }
 
+# output twice since the deplist is > 70 lines
+warn $optdep_msg;
 auto_install();
+warn $optdep_msg;
 
 # re-create various autogenerated documentation bits
 if ($Module::Install::AUTHOR) {
@@ -179,7 +213,7 @@
 
 
 # Re-write META.yml to _exclude_ all forced requires (we do not want to ship this)
-if ($Module::Install::AUTHOR) {
+if ($Module::Install::AUTHOR && ! $args->{skip_author_deps} ) {
 
   # FIXME test_requires is not yet part of META
   my %original_build_requires = ( %$build_requires, %$test_requires );

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Admin.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Admin.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Admin.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -87,14 +87,13 @@
 
 sub _build_schema {
   my ($self)  = @_;
+
   require Class::MOP;
   Class::MOP::load_class($self->schema_class);
-
-  $self->connect_info->[3]->{ignore_version} =1;
-  return $self->schema_class->connect(@{$self->connect_info()} ); # ,  $self->connect_info->[3], { ignore_version => 1} );
+  $self->connect_info->[3]{ignore_version} = 1;
+  return $self->schema_class->connect(@{$self->connect_info});
 }
 
-
 =head2 resultset
 
 a resultset from the schema to operate on
@@ -195,7 +194,7 @@
 
 =head2 config
 
-Instead of loading from a file the configuration can be provided directly as a hash ref.  Please note 
+Instead of loading from a file the configuration can be provided directly as a hash ref.  Please note
 config_stanza will still be required.
 
 =cut
@@ -209,8 +208,8 @@
 sub _build_config {
   my ($self) = @_;
 
-  eval { require Config::Any }
-    or die ("Config::Any is required to parse the config file.\n");
+  try { require Config::Any }
+    catch { die ("Config::Any is required to parse the config file.\n") };
 
   my $cfg = Config::Any->load_files ( {files => [$self->config_file], use_ext =>1, flatten_to_hash=>1});
 
@@ -296,8 +295,9 @@
 
 =back
 
-L<create> will generate sql for the supplied schema_class in sql_dir.  The flavour of sql to 
-generate can be controlled by suppling a sqlt_type which should be a L<SQL::Translator> name.  
+L<create> will generate sql for the supplied schema_class in sql_dir. The
+flavour of sql to generate can be controlled by supplying a sqlt_type which
+should be a L<SQL::Translator> name.
 
 Arguments for L<SQL::Translator> can be supplied in the sqlt_args hashref.
 
@@ -334,10 +334,12 @@
 sub upgrade {
   my ($self) = @_;
   my $schema = $self->schema();
+
   if (!$schema->get_db_version()) {
     # schema is unversioned
     $schema->throw_exception ("Could not determin current schema version, please either install() or deploy().\n");
   } else {
+    $schema->upgrade_directory ($self->sql_dir) if $self->sql_dir;  # this will override whatever default the schema has
     my $ret = $schema->upgrade();
     return $ret;
   }
@@ -352,9 +354,9 @@
 
 =back
 
-install is here to help when you want to move to L<DBIx::Class::Schema::Versioned> and have an existing 
-database.  install will take a version and add the version tracking tables and 'install' the version.  No 
-further ddl modification takes place.  Setting the force attribute to a true value will allow overriding of 
+install is here to help when you want to move to L<DBIx::Class::Schema::Versioned> and have an existing
+database.  install will take a version and add the version tracking tables and 'install' the version.  No
+further ddl modification takes place.  Setting the force attribute to a true value will allow overriding of
 already versioned databases.
 
 =cut
@@ -366,9 +368,9 @@
   $version ||= $self->version();
   if (!$schema->get_db_version() ) {
     # schema is unversioned
-    print "Going to install schema version\n";
+    print "Going to install schema version\n" if (!$self->quiet);
     my $ret = $schema->install($version);
-    print "retun is $ret\n";
+    print "return is $ret\n" if (!$self->quiet);
   }
   elsif ($schema->get_db_version() and $self->force ) {
     carp "Forcing install may not be a good idea";
@@ -391,7 +393,7 @@
 
 =back
 
-deploy will create the schema at the connected database.  C<$args> are passed straight to 
+deploy will create the schema at the connected database.  C<$args> are passed straight to
 L<DBIx::Class::Schema/deploy>.
 
 =cut
@@ -399,13 +401,7 @@
 sub deploy {
   my ($self, $args) = @_;
   my $schema = $self->schema();
-  if (!$schema->get_db_version() ) {
-    # schema is unversioned
-    $schema->deploy( $args, $self->sql_dir)
-      or $schema->throw_exception ("Could not deploy schema.\n"); # FIXME deploy() does not return 1/0 on success/fail
-  } else {
-    $schema->throw_exception("A versioned schema has already been deployed, try upgrade instead.\n");
-  }
+  $schema->deploy( $args, $self->sql_dir );
 }
 
 =head2 insert
@@ -502,7 +498,7 @@
 
 =back
 
-select takes the name of a resultset from the schema_class, a where hashref and a attrs to pass to ->search. 
+select takes the name of a resultset from the schema_class, a where hashref and a attrs to pass to ->search.
 The found data is returned in a array ref where the first row will be the columns list.
 
 =cut
@@ -518,7 +514,7 @@
 
   my @data;
   my @columns = $resultset->result_source->columns();
-  push @data, [@columns];# 
+  push @data, [@columns];#
 
   while (my $row = $resultset->next()) {
     my @fields;
@@ -533,12 +529,14 @@
 
 sub _confirm {
   my ($self) = @_;
-  print "Are you sure you want to do this? (type YES to confirm) \n";
+
   # mainly here for testing
   return 1 if ($self->meta->get_attribute('_confirm')->get_value($self));
+
+  print "Are you sure you want to do this? (type YES to confirm) \n";
   my $response = <STDIN>;
-  return 1 if ($response=~/^YES/);
-  return;
+
+  return ($response=~/^YES/);
 }
 
 sub _find_stanza {

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/CDBICompat/ColumnCase.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/CDBICompat/ColumnCase.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/CDBICompat/ColumnCase.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -25,9 +25,15 @@
 
 sub has_many {
   my ($class, $rel, $f_class, $f_key, @rest) = @_;
-  return $class->next::method($rel, $f_class, ( ref($f_key) ?
-                                                          $f_key :
-                                                          lc($f_key) ), @rest);
+  return $class->next::method(
+    $rel,
+    $f_class,
+    (ref($f_key) ?
+      $f_key :
+      lc($f_key||'')
+    ),
+    @rest
+  );
 }
 
 sub get_inflated_column {

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Componentised.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Componentised.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Componentised.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -8,36 +8,71 @@
 use Carp::Clan qw/^DBIx::Class|^Class::C3::Componentised/;
 use mro 'c3';
 
+my $warned;
+
 # this warns of subtle bugs introduced by UTF8Columns hacky handling of store_column
+# if and only if it is placed before something overriding store_column
 sub inject_base {
   my $class = shift;
-  my $target = shift;
+  my ($target, @complist) = @_;
 
-  my @present_components = (@{mro::get_linear_isa ($target)||[]});
+  # we already did load the component
+  my $keep_checking = ! (
+    $target->isa ('DBIx::Class::UTF8Columns')
+      ||
+    $target->isa ('DBIx::Class::ForceUTF8')
+  );
 
-  no strict 'refs';
-  for my $comp (reverse @_) {
+  my @target_isa;
 
-    if ($comp->isa ('DBIx::Class::UTF8Columns') ) {
-      require B;
+  while ($keep_checking && @complist) {
+
+    @target_isa = do { no strict 'refs'; @{"$target\::ISA"} }
+      unless @target_isa;
+
+    my $comp = pop @complist;
+
+    # warn here on use of either component, as we have no access to ForceUTF8,
+    # the author does not respond, and the Catalyst wiki used to recommend it
+    for (qw/DBIx::Class::UTF8Columns DBIx::Class::ForceUTF8/) {
+      if ($comp->isa ($_) ) {
+        $keep_checking = 0; # no use to check from this point on
+        carp "Use of $_ is strongly discouraged. See documentation of DBIx::Class::UTF8Columns for more info\n"
+          unless ($warned->{UTF8Columns}++ || $ENV{DBIC_UTF8COLUMNS_OK});
+        last;
+      }
+    }
+
+    # something unset $keep_checking - we got a unicode mangler
+    if (! $keep_checking) {
+
+      my $base_store_column = do { require DBIx::Class::Row; DBIx::Class::Row->can ('store_column') };
+
       my @broken;
+      for my $existing_comp (@target_isa) {
+        my $sc = $existing_comp->can ('store_column')
+          or next;
 
-      for (@present_components) {
-        my $cref = $_->can ('store_column')
-         or next;
-        push @broken, $_ if B::svref_2object($cref)->STASH->NAME ne 'DBIx::Class::Row';
+        if ($sc ne $base_store_column) {
+          require B;
+          my $definer = B::svref_2object($sc)->STASH->NAME;
+          push @broken, ($definer eq $existing_comp)
+            ? $existing_comp
+            : "$existing_comp (via $definer)"
+          ;
+        }
       }
 
-      carp "Incorrect loading order of $comp by ${target} will affect other components overriding store_column ("
+      carp "Incorrect loading order of $comp by $target will affect other components overriding 'store_column' ("
           . join (', ', @broken)
           .'). Refer to the documentation of DBIx::Class::UTF8Columns for more info'
-       if @broken;
+        if @broken;
     }
 
-    unshift @present_components, $comp;
+    unshift @target_isa, $comp;
   }
 
-  $class->next::method($target, @_);
+  $class->next::method(@_);
 }
 
 1;

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/DB.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/DB.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/DB.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -8,7 +8,8 @@
 use DBIx::Class::Storage::DBI;
 use DBIx::Class::ClassResolver::PassThrough;
 use DBI;
-use Scalar::Util;
+use Scalar::Util 'blessed';
+use namespace::clean;
 
 unless ($INC{"DBIx/Class/CDBICompat.pm"}) {
   warn "IMPORTANT: DBIx::Class::DB is DEPRECATED AND *WILL* BE REMOVED. DO NOT USE.\n";
@@ -183,7 +184,7 @@
   }
 
   my($source, $result_class) = @{$class->_result_source_instance};
-  return unless Scalar::Util::blessed($source);
+  return unless blessed $source;
 
   if ($result_class ne $class) {  # new class
     # Give this new class its own source and register it.

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Exception.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Exception.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Exception.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -3,8 +3,10 @@
 use strict;
 use warnings;
 
-use Carp::Clan qw/^DBIx::Class/;
+use Carp::Clan qw/^DBIx::Class|^Try::Tiny/;
 use Scalar::Util qw/blessed/;
+use Try::Tiny;
+use namespace::clean;
 
 use overload
     '""' => sub { shift->{msg} },
@@ -42,7 +44,7 @@
 L<Carp::Clan/croak>.
 
   DBIx::Class::Exception->throw('Foo');
-  eval { ... }; DBIx::Class::Exception->throw($@) if $@;
+  try { ... } catch { DBIx::Class::Exception->throw(shift) }
 
 =cut
 
@@ -54,9 +56,7 @@
 
     # use Carp::Clan's croak if we're not stack tracing
     if(!$stacktrace) {
-        local $@;
-        eval { croak $msg };
-        $msg = $@
+        try { croak $msg } catch { $msg = shift };
     }
     else {
         $msg = Carp::longmess($msg);

Added: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/FilterColumn.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/FilterColumn.pm	                        (rev 0)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/FilterColumn.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -0,0 +1,205 @@
+package DBIx::Class::FilterColumn;
+use strict;
+use warnings;
+
+use base qw/DBIx::Class::Row/;
+
+sub filter_column {
+  my ($self, $col, $attrs) = @_;
+
+  $self->throw_exception("FilterColumn does not work with InflateColumn")
+    if $self->isa('DBIx::Class::InflateColumn') &&
+      defined $self->column_info($col)->{_inflate_info};
+
+  $self->throw_exception("No such column $col to filter")
+    unless $self->has_column($col);
+
+  $self->throw_exception("filter_column needs attr hashref")
+    unless ref $attrs eq 'HASH';
+
+  $self->column_info($col)->{_filter_info} = $attrs;
+  my $acc = $self->column_info($col)->{accessor};
+  $self->mk_group_accessors(filtered_column => [ (defined $acc ? $acc : $col), $col]);
+  return 1;
+}
+
+sub _column_from_storage {
+  my ($self, $col, $value) = @_;
+
+  return $value unless defined $value;
+
+  my $info = $self->column_info($col)
+    or $self->throw_exception("No column info for $col");
+
+  return $value unless exists $info->{_filter_info};
+
+  my $filter = $info->{_filter_info}{filter_from_storage};
+  $self->throw_exception("No filter for $col") unless defined $filter;
+
+  return $self->$filter($value);
+}
+
+sub _column_to_storage {
+  my ($self, $col, $value) = @_;
+
+  my $info = $self->column_info($col) or
+    $self->throw_exception("No column info for $col");
+
+  return $value unless exists $info->{_filter_info};
+
+  my $unfilter = $info->{_filter_info}{filter_to_storage};
+  $self->throw_exception("No unfilter for $col") unless defined $unfilter;
+  return $self->$unfilter($value);
+}
+
+sub get_filtered_column {
+  my ($self, $col) = @_;
+
+  $self->throw_exception("$col is not a filtered column")
+    unless exists $self->column_info($col)->{_filter_info};
+
+  return $self->{_filtered_column}{$col}
+    if exists $self->{_filtered_column}{$col};
+
+  my $val = $self->get_column($col);
+
+  return $self->{_filtered_column}{$col} = $self->_column_from_storage($col, $val);
+}
+
+sub get_column {
+  my ($self, $col) = @_;
+  if (exists $self->{_filtered_column}{$col}) {
+    return $self->{_column_data}{$col} ||= $self->_column_to_storage ($col, $self->{_filtered_column}{$col});
+  }
+
+  return $self->next::method ($col);
+}
+
+# sadly a separate codepath in Row.pm ( used by insert() )
+sub get_columns {
+  my $self = shift;
+
+  foreach my $col (keys %{$self->{_filtered_column}||{}}) {
+    $self->{_column_data}{$col} ||= $self->_column_to_storage ($col, $self->{_filtered_column}{$col})
+      if exists $self->{_filtered_column}{$col};
+  }
+
+  $self->next::method (@_);
+}
+
+sub store_column {
+  my ($self, $col) = (shift, @_);
+
+  # blow cache
+  delete $self->{_filtered_column}{$col};
+
+  $self->next::method(@_);
+}
+
+sub set_filtered_column {
+  my ($self, $col, $filtered) = @_;
+
+  # do not blow up the cache via set_column unless necessary
+  # (filtering may be expensive!)
+  if (exists $self->{_filtered_column}{$col}) {
+    return $filtered
+      if ($self->_eq_column_values ($col, $filtered, $self->{_filtered_column}{$col} ) );
+
+    $self->make_column_dirty ($col); # so the comparison won't run again
+  }
+
+  $self->set_column($col, $self->_column_to_storage($col, $filtered));
+
+  return $self->{_filtered_column}{$col} = $filtered;
+}
+
+sub update {
+  my ($self, $attrs, @rest) = @_;
+
+  foreach my $key (keys %{$attrs||{}}) {
+    if (
+      $self->has_column($key)
+        &&
+      exists $self->column_info($key)->{_filter_info}
+    ) {
+      $self->set_filtered_column($key, delete $attrs->{$key});
+
+      # FIXME update() reaches directly into the object-hash
+      # and we may *not* have a filtered value there - thus
+      # the void-ctx filter-trigger
+      $self->get_column($key) unless exists $self->{_column_data}{$key};
+    }
+  }
+
+  return $self->next::method($attrs, @rest);
+}
+
+sub new {
+  my ($class, $attrs, @rest) = @_;
+  my $source = $attrs->{-result_source}
+    or $class->throw_exception('Sourceless rows are not supported with DBIx::Class::FilterColumn');
+
+  my $obj = $class->next::method($attrs, @rest);
+  foreach my $key (keys %{$attrs||{}}) {
+    if ($obj->has_column($key) &&
+          exists $obj->column_info($key)->{_filter_info} ) {
+      $obj->set_filtered_column($key, $attrs->{$key});
+    }
+  }
+
+  return $obj;
+}
+
+1;
+
+=head1 NAME
+
+DBIx::Class::FilterColumn - Automatically convert column data
+
+=head1 SYNOPSIS
+
+ # In your result classes
+ __PACKAGE__->filter_column( money => {
+     filter_to_storage => 'to_pennies',
+     filter_from_storage => 'from_pennies',
+ });
+
+ sub to_pennies   { $_[1] * 100 }
+
+ sub from_pennies { $_[1] / 100 }
+
+ 1;
+
+=head1 DESCRIPTION
+
+This component is meant to be a more powerful, but less DWIM-y,
+L<DBIx::Class::InflateColumn>.  One of the major issues with said component is
+that it B<only> works with references.  Generally speaking anything that can
+be done with L<DBIx::Class::InflateColumn> can be done with this component.
+
+=head1 METHODS
+
+=head2 filter_column
+
+ __PACKAGE__->filter_column( colname => {
+     filter_from_storage => 'method',
+     filter_to_storage   => 'method',
+ })
+
+This is the method that you need to call to set up a filtered column.  It takes
+exactly two arguments; the first being the column name the second being a
+C<HashRef> with C<filter_from_storage> and C<filter_to_storage> having
+something that can be called as a method.  The method will be called with
+the value of the column as the first non-C<$self> argument.
+
+=head2 get_filtered_column
+
+ $obj->get_filtered_column('colname')
+
+Returns the filtered value of the column
+
+=head2 set_filtered_column
+
+ $obj->set_filtered_column(colname => 'new_value')
+
+Sets the filtered value of the column

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/InflateColumn/DateTime.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/InflateColumn/DateTime.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/InflateColumn/DateTime.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -4,6 +4,8 @@
 use warnings;
 use base qw/DBIx::Class/;
 use Carp::Clan qw/^DBIx::Class/;
+use Try::Tiny;
+use namespace::clean;
 
 =head1 NAME
 
@@ -11,7 +13,7 @@
 
 =head1 SYNOPSIS
 
-Load this component and then declare one or more 
+Load this component and then declare one or more
 columns to be of the datetime, timestamp or date datatype.
 
   package Event;
@@ -62,9 +64,9 @@
 
 =head1 DESCRIPTION
 
-This module figures out the type of DateTime::Format::* class to 
-inflate/deflate with based on the type of DBIx::Class::Storage::DBI::* 
-that you are using.  If you switch from one database to a different 
+This module figures out the type of DateTime::Format::* class to
+inflate/deflate with based on the type of DBIx::Class::Storage::DBI::*
+that you are using.  If you switch from one database to a different
 one your code should continue to work without modification (though note
 that this feature is new as of 0.07, so it may not be perfect yet - bug
 reports to the list very much welcome).
@@ -132,7 +134,7 @@
       $info->{_ic_dt_method} ||= "timestamp_without_timezone";
     } elsif ($type eq "smalldatetime") {
       $type = "datetime";
-      $info->{_ic_dt_method} ||= "datetime";
+      $info->{_ic_dt_method} ||= "smalldatetime";
     }
   }
 
@@ -167,13 +169,18 @@
           inflate => sub {
             my ($value, $obj) = @_;
 
-            my $dt = eval { $obj->_inflate_to_datetime( $value, \%info ) };
-            if (my $err = $@ ) {
-              return undef if ($undef_if_invalid);
-              $self->throw_exception ("Error while inflating ${value} for ${column} on ${self}: $err");
-            }
+            my $dt = try
+              { $obj->_inflate_to_datetime( $value, \%info ) }
+              catch {
+                $self->throw_exception ("Error while inflating ${value} for ${column} on ${self}: $_")
+                  unless $undef_if_invalid;
+                undef;  # rv
+              };
 
-            return $obj->_post_inflate_datetime( $dt, \%info );
+            return (defined $dt)
+              ? $obj->_post_inflate_datetime( $dt, \%info )
+              : undef
+            ;
           },
           deflate => sub {
             my ($value, $obj) = @_;
@@ -290,11 +297,11 @@
 
 =over 4
 
-=item More information about the add_columns method, and column metadata, 
+=item More information about the add_columns method, and column metadata,
       can be found in the documentation for L<DBIx::Class::ResultSource>.
 
 =item Further discussion of problems inherent to the Floating timezone:
-      L<Floating DateTimes|DateTime/Floating_DateTimes> 
+      L<Floating DateTimes|DateTime/Floating_DateTimes>
       and L<< $dt->set_time_zone|DateTime/"Set" Methods >>
 
 =back

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/InflateColumn.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/InflateColumn.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/InflateColumn.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -37,7 +37,7 @@
 to work.
 
 If you want to filter plain scalar values and replace them with
-something else, contribute a filtering component.
+something else, see L<DBIx::Class::FilterColumn>.
 
 =head1 METHODS
 
@@ -74,6 +74,11 @@
 
 sub inflate_column {
   my ($self, $col, $attrs) = @_;
+
+  $self->throw_exception("InflateColumn does not work with FilterColumn")
+    if $self->isa('DBIx::Class::FilterColumn') &&
+      defined $self->column_info($col)->{_filter_info};
+
   $self->throw_exception("No such column $col to inflate")
     unless $self->has_column($col);
   $self->throw_exception("inflate_column needs attr hashref")
@@ -146,9 +151,9 @@
   $self->set_column($col, $self->_deflated_column($col, $inflated));
 #  if (blessed $inflated) {
   if (ref $inflated && ref($inflated) ne 'SCALAR') {
-    $self->{_inflated_column}{$col} = $inflated; 
+    $self->{_inflated_column}{$col} = $inflated;
   } else {
-    delete $self->{_inflated_column}{$col};      
+    delete $self->{_inflated_column}{$col};
   }
   return $inflated;
 }

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Manual/Cookbook.pod
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Manual/Cookbook.pod	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Manual/Cookbook.pod	2010-06-02 17:41:37 UTC (rev 9557)
@@ -292,7 +292,7 @@
   my $count = $rs->count;
 
   # Equivalent SQL:
-  # SELECT COUNT( * ) FROM (SELECT me.name FROM artist me GROUP BY me.name) count_subq:
+  # SELECT COUNT( * ) FROM (SELECT me.name FROM artist me GROUP BY me.name) me:
 
 =head2 Grouping results
 
@@ -367,8 +367,8 @@
 
 =head2 Predefined searches
 
-You can write your own L<DBIx::Class::ResultSet> class by inheriting from it
-and defining often used searches as methods:
+You can define frequently used searches as methods by subclassing
+L<DBIx::Class::ResultSet>:
 
   package My::DBIC::ResultSet::CD;
   use strict;
@@ -415,6 +415,12 @@
 you create an index on the return value of the function in question.) However,
 it can be accomplished with C<DBIx::Class> when necessary.
 
+Your approach for doing so will depend on whether you have turned
+quoting on via the C<quote_char> and C<name_sep> attributes. If you
+explicitly defined C<quote_char> and C<name_sep> in your
+C<connect_info> (see L<DBIx::Class::Storage::DBI/"connect_info">) then
+you are using quoting, otherwise not.
+
 If you do not have quoting on, simply include the function in your search
 specification as you would any column:
 
@@ -1238,17 +1244,17 @@
     return $genus->species;
   };
 
+  use Try::Tiny;
   my $rs;
-  eval {
+  try {
     $rs = $schema->txn_do($coderef1);
-  };
-
-  if ($@) {                             # Transaction failed
+  } catch {
+    # Transaction failed
     die "the sky is falling!"           #
-      if ($@ =~ /Rollback failed/);     # Rollback failed
+      if ($_ =~ /Rollback failed/);     # Rollback failed
 
     deal_with_failed_transaction();
-  }
+  };
 
 Note: by default C<txn_do> will re-run the coderef one more time if an
 error occurs due to client disconnection (e.g. the server is bounced).
@@ -1275,8 +1281,10 @@
   my $schema = MySchema->connect("dbi:Pg:dbname=my_db");
 
   # Start a transaction. Every database change from here on will only be 
-  # committed into the database if the eval block succeeds.
-  eval {
+  # committed into the database if the try block succeeds.
+  use Try::Tiny;
+  my $exception;
+  try {
     $schema->txn_do(sub {
       # SQL: BEGIN WORK;
 
@@ -1286,7 +1294,7 @@
       for (1..10) {
 
         # Start a nested transaction, which in fact sets a savepoint.
-        eval {
+        try {
           $schema->txn_do(sub {
             # SQL: SAVEPOINT savepoint_0;
 
@@ -1301,8 +1309,7 @@
               #      WHERE ( id = 42 );
             }
           });
-        };
-        if ($@) {
+        } catch {
           # SQL: ROLLBACK TO SAVEPOINT savepoint_0;
 
           # There was an error while creating a $thing. Depending on the error
@@ -1310,14 +1317,14 @@
           # changes related to the creation of this $thing
 
           # Abort the whole job
-          if ($@ =~ /horrible_problem/) {
+          if ($_ =~ /horrible_problem/) {
             print "something horrible happend, aborting job!";
-            die $@;                # rethrow error
+            die $_;                # rethrow error
           }
 
           # Ignore this $thing, report the error, and continue with the
           # next $thing
-          print "Cannot create thing: $@";
+          print "Cannot create thing: $_";
         }
         # There was no error, so save all changes since the last 
         # savepoint.
@@ -1325,8 +1332,11 @@
         # SQL: RELEASE SAVEPOINT savepoint_0;
       }
     });
-  };
-  if ($@) {
+  } catch {
+    $exception = $_;
+  };
+
+  if ($exception) {
     # There was an error while handling the $job. Rollback all changes
     # since the transaction started, including the already committed
     # ('released') savepoints. There will be neither a new $job nor any
@@ -1334,7 +1344,7 @@
 
     # SQL: ROLLBACK;
 
-    print "ERROR: $@\n";
+    print "ERROR: $exception\n";
   }
   else {
     # There was no error while handling the $job. Commit all changes.
@@ -1348,7 +1358,7 @@
 
 In this example it might be hard to see where the rollbacks, releases and
 commits are happening, but it works just the same as for plain L<<txn_do>>: If
-the C<eval>-block around C<txn_do> fails, a rollback is issued. If the C<eval>
+the C<try>-block around C<txn_do> fails, a rollback is issued. If the C<try>
 succeeds, the transaction is committed (or the savepoint released).
 
 While you can get more fine-grained control using C<svp_begin>, C<svp_release>
@@ -1741,6 +1751,75 @@
 arrayrefs together with the column name, like this: C<< [column_name => value]
 >>.
 
+=head2 Using Unicode
+
+When using unicode character data there are two alternatives -
+either your database supports unicode characters (including setting
+the utf8 flag on the returned string), or you need to encode/decode
+data appropriately each time a string field is inserted into or
+retrieved from the database. It is better to avoid
+encoding/decoding data and to use your database's own unicode
+capabilities if at all possible.
+
+The L<DBIx::Class::UTF8Columns> component handles storing selected
+unicode columns in a database that does not directly support
+unicode. If used with a database that does correctly handle unicode
+then strange and unexpected data corruption B<will> occur.
+
+The Catalyst Wiki Unicode page at
+L<http://wiki.catalystframework.org/wiki/tutorialsandhowtos/using_unicode>
+has additional information on the use of Unicode with Catalyst and
+DBIx::Class.
+
+The following databases do correctly handle unicode data:-
+
+=head3 MySQL
+
+MySQL supports unicode, and will correctly flag utf8 data from the
+database if the C<mysql_enable_utf8> attribute is set in the connect options.
+
+  my $schema = My::Schema->connection('dbi:mysql:dbname=test',
+                                      $user, $pass,
+                                      { mysql_enable_utf8 => 1} );
+  
+
+When set, data retrieved from a textual column type (char,
+varchar, etc) will have the UTF-8 flag turned on if necessary. This
+enables character semantics on that string. You will also need to
+ensure that your database / table / column is configured to use
+UTF8. See Chapter 10 of the mysql manual for details.
+
+See L<DBD::mysql> for further details.
+
+=head3 Oracle
+
+Information about Oracle support for unicode can be found in
+L<DBD::Oracle/Unicode>.
+
+=head3 PostgreSQL
+
+PostgreSQL supports unicode if the character set is correctly set
+at database creation time. Additionally the C<pg_enable_utf8>
+attribute should be set to ensure unicode data is correctly marked.
+
+  my $schema = My::Schema->connection('dbi:Pg:dbname=test',
+                                      $user, $pass,
+                                      { pg_enable_utf8 => 1} );
+
+Further information can be found in L<DBD::Pg>.
+
+=head3 SQLite
+
+SQLite version 3 and above natively use unicode internally. To
+correctly mark unicode strings taken from the database, the
+C<sqlite_unicode> flag should be set at connect time (in versions
+of L<DBD::SQLite> prior to 1.27 this attribute was named
+C<unicode>).
+
+  my $schema = My::Schema->connection('dbi:SQLite:/tmp/test.db',
+                                      '', '',
+                                      { sqlite_unicode => 1} );
+
 =head1 BOOTSTRAPPING/MIGRATING
 
 =head2 Easy migration from class-based to schema-based setup
@@ -2009,6 +2088,47 @@
 statement and dig down to see if certain parameters cause aberrant behavior.
 You might want to check out L<DBIx::Class::QueryLog> as well.
 
+=head1 IMPROVING PERFORMANCE
+
+=over
+
+=item *
+
+Install L<Class::XSAccessor> to speed up L<Class::Accessor::Grouped>.
+
+=item *
+
+On Perl 5.8 install L<Class::C3::XS>.
+
+=item *
+
+L<prefetch|DBIx::Class::ResultSet/prefetch> relationships, where possible. See
+L</Using joins and prefetch>.
+
+=item *
+
+Use L<populate|DBIx::Class::ResultSet/populate> in void context to insert data
+when you don't need the resulting L<DBIx::Class::Row> objects, if possible, but
+see the caveats.
+
+When inserting many rows, for best results, populate a large number of rows at a
+time, but not so large that the table is locked for an unacceptably long time.
+
+If using L<create|DBIx::Class::ResultSet/create> instead, use a transaction and
+commit every C<X> rows; where C<X> gives you the best performance without
+locking the table for too long. 
+
+=item *
+
+When selecting many rows, if you don't need full-blown L<DBIx::Class::Row>
+objects, consider using L<DBIx::Class::ResultClass::HashRefInflator>.
+
+=item *
+
+See also L</STARTUP SPEED> and L</MEMORY USAGE> in this document.
+
+=back
+
 =head1 STARTUP SPEED
 
 L<DBIx::Class|DBIx::Class> programs can have a significant startup delay

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Manual/FAQ.pod
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Manual/FAQ.pod	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Manual/FAQ.pod	2010-06-02 17:41:37 UTC (rev 9557)
@@ -56,6 +56,12 @@
 L<DBIx::Class::Schema/deploy>. See there for details, or the
 L<DBIx::Class::Manual::Cookbook>.
 
+=item .. store/retrieve Unicode data in my database?
+
+Make sure your database supports Unicode and set the connect
+attributes appropriately - see
+L<DBIx::Class::Manual::Cookbook/Using Unicode>.
+
 =item .. connect to my database?
 
 Once you have created all the appropriate table/source classes, and an

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Manual/Intro.pod
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Manual/Intro.pod	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Manual/Intro.pod	2010-06-02 17:41:37 UTC (rev 9557)
@@ -240,7 +240,7 @@
       { on_connect_do => \@on_connect_sql_statments }
   );
 
-See L<DBIx::Class::Schema::Storage::DBI/connect_info> for more information about
+See L<DBIx::Class::Storage::DBI/connect_info> for more information about
 this and other special C<connect>-time options.
 
 =head3 Via a database handle

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Optional/Dependencies.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Optional/Dependencies.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Optional/Dependencies.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -32,7 +32,6 @@
   replicated => {
     req => {
       %$moose_basic,
-      'namespace::clean'          => '0.11',
       'Hash::Merge'               => '0.12',
     },
     pod => {
@@ -66,7 +65,7 @@
 
   deploy => {
     req => {
-      'SQL::Translator'           => '0.11005',
+      'SQL::Translator'           => '0.11006',
     },
     pod => {
       title => 'Storage::DBI::deploy()',
@@ -283,12 +282,29 @@
 
 # This is to be called by the author only (automatically in Makefile.PL)
 sub _gen_pod {
+
   my $class = shift;
   my $modfn = __PACKAGE__ . '.pm';
   $modfn =~ s/\:\:/\//g;
 
-  require DBIx::Class;
-  my $distver = DBIx::Class->VERSION;
+  my $podfn = __FILE__;
+  $podfn =~ s/\.pm$/\.pod/;
+
+  my $distver =
+    eval { require DBIx::Class; DBIx::Class->VERSION; }
+      ||
+    do {
+      warn
+"\n\n---------------------------------------------------------------------\n" .
+'Unable to load core DBIx::Class module to determine current version, '.
+'possibly due to missing dependencies. Author-mode autodocumentation ' .
+"halted\n\n" . $@ .
+"\n\n---------------------------------------------------------------------\n"
+      ;
+      '*UNKNOWN*';  # rv
+    }
+  ;
+
   my $sqltver = $class->req_list_for ('deploy')->{'SQL::Translator'}
     or die "Hrmm? No sqlt dep?";
 
@@ -341,7 +357,7 @@
     <<'EOD',
 Dependencies are organized in C<groups> and each group can list one or more
 required modules, with an optional minimum version (or 0 for any version).
-The group name can be used in the 
+The group name can be used in the
 EOD
   );
 
@@ -431,10 +447,7 @@
     'You may distribute this code under the same terms as Perl itself',
   );
 
-  my $fn = __FILE__;
-  $fn =~ s/\.pm$/\.pod/;
-
-  open (my $fh, '>', $fn) or croak "Unable to write to $fn: $!";
+  open (my $fh, '>', $podfn) or croak "Unable to write to $podfn: $!";
   print $fh join ("\n\n", @chunks);
   close ($fh);
 }

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Relationship/Base.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Relationship/Base.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Relationship/Base.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -3,9 +3,12 @@
 use strict;
 use warnings;
 
-use Scalar::Util ();
 use base qw/DBIx::Class/;
 
+use Scalar::Util qw/weaken blessed/;
+use Try::Tiny;
+use namespace::clean;
+
 =head1 NAME
 
 DBIx::Class::Relationship::Base - Inter-table relationships
@@ -118,21 +121,55 @@
 =item is_foreign_key_constraint
 
 If you are using L<SQL::Translator> to create SQL for you and you find that it
-is creating constraints where it shouldn't, or not creating them where it 
+is creating constraints where it shouldn't, or not creating them where it
 should, set this attribute to a true or false value to override the detection
 of when to create constraints.
 
+=item cascade_copy
+
+If C<cascade_copy> is true on a C<has_many> relationship for an
+object, then when you copy the object all the related objects will
+be copied too. To turn this behaviour off, pass C<< cascade_copy => 0 >>
+in the C<$attr> hashref.
+
+The behaviour defaults to C<< cascade_copy => 1 >> for C<has_many>
+relationships.
+
+=item cascade_delete
+
+By default, DBIx::Class cascades deletes across C<has_many>,
+C<has_one> and C<might_have> relationships. You can disable this
+behaviour on a per-relationship basis by supplying
+C<< cascade_delete => 0 >> in the relationship attributes.
+
+The cascaded operations are performed after the requested delete,
+so if your database has a constraint on the relationship, it will
+have deleted/updated the related records or raised an exception
+before DBIx::Class gets to perform the cascaded operation.
+
+=item cascade_update
+
+By default, DBIx::Class cascades updates across C<has_one> and
+C<might_have> relationships. You can disable this behaviour on a
+per-relationship basis by supplying C<< cascade_update => 0 >> in
+the relationship attributes.
+
+This is not an RDBMS style cascade update - it purely means that when
+an object has update called on it, all the related objects also
+have update called. It will not change foreign keys automatically -
+you must arrange to do this yourself.
+
 =item on_delete / on_update
 
 If you are using L<SQL::Translator> to create SQL for you, you can use these
-attributes to explicitly set the desired C<ON DELETE> or C<ON UPDATE> constraint 
-type. If not supplied the SQLT parser will attempt to infer the constraint type by 
+attributes to explicitly set the desired C<ON DELETE> or C<ON UPDATE> constraint
+type. If not supplied the SQLT parser will attempt to infer the constraint type by
 interrogating the attributes of the B<opposite> relationship. For any 'multi'
-relationship with C<< cascade_delete => 1 >>, the corresponding belongs_to 
-relationship will be created with an C<ON DELETE CASCADE> constraint. For any 
+relationship with C<< cascade_delete => 1 >>, the corresponding belongs_to
+relationship will be created with an C<ON DELETE CASCADE> constraint. For any
 relationship bearing C<< cascade_copy => 1 >> the resulting belongs_to constraint
 will be C<ON UPDATE CASCADE>. If you wish to disable this autodetection, and just
-use the RDBMS' default constraint type, pass C<< on_delete => undef >> or 
+use the RDBMS' default constraint type, pass C<< on_delete => undef >> or
 C<< on_delete => '' >>, and the same for C<on_update> respectively.
 
 =item is_deferrable
@@ -201,35 +238,33 @@
 
     my $source = $self->result_source;
 
-    # condition resolution may fail if an incomplete master-object prefetch
-    # is encountered - that is ok during prefetch construction (not yet in_storage)
-
     # if $rel_info->{cond} is a CODE, we might need to join from the
     # current resultsource instead of just querying the target
     # resultsource, in that case, the condition might provide an
     # additional condition in order to avoid an unecessary join if
     # that is at all possible.
-    my ($cond, $cond2) =
-      eval { $source->_resolve_condition( $rel_info->{cond}, $rel, $self ) };
-
-    if (my $err = $@) {
+    my ($cond, $cond2) = try {
+      $source->_resolve_condition( $rel_info->{cond}, $rel, $self )
+    }
+    catch {
       if ($self->in_storage) {
-        $self->throw_exception ($err);
+        $self->throw_exception ($_);
       }
-      else {
-        $cond = $DBIx::Class::ResultSource::UNRESOLVABLE_CONDITION;
-      }
-    }
 
+      # condition resolution may fail if an incomplete master-object prefetch
+      # is encountered - that is ok during prefetch construction (not yet in_storage)
+      $DBIx::Class::ResultSource::UNRESOLVABLE_CONDITION;  # RV
+    };
+
     if ($cond eq $DBIx::Class::ResultSource::UNRESOLVABLE_CONDITION) {
       my $reverse = $source->reverse_relationship_info($rel);
       foreach my $rev_rel (keys %$reverse) {
         if ($reverse->{$rev_rel}{attrs}{accessor} && $reverse->{$rev_rel}{attrs}{accessor} eq 'multi') {
           $attrs->{related_objects}{$rev_rel} = [ $self ];
-          Scalar::Util::weaken($attrs->{related_object}{$rev_rel}[0]);
+          weaken $attrs->{related_object}{$rev_rel}[0];
         } else {
           $attrs->{related_objects}{$rev_rel} = $self;
-          Scalar::Util::weaken($attrs->{related_object}{$rev_rel});
+          weaken $attrs->{related_object}{$rev_rel};
         }
       }
     }
@@ -298,7 +333,7 @@
 
   ( $objects_rs ) = $rs->search_related_rs('relname', $cond, $attrs);
 
-This method works exactly the same as search_related, except that 
+This method works exactly the same as search_related, except that
 it guarantees a resultset, even in list context.
 
 =cut
@@ -328,9 +363,9 @@
   my $new_obj = $obj->new_related('relname', \%col_data);
 
 Create a new item of the related foreign class. If called on a
-L<Row|DBIx::Class::Manual::Glossary/"Row"> object, it will magically 
-set any foreign key columns of the new object to the related primary 
-key columns of the source object for you.  The newly created item will 
+L<Row|DBIx::Class::Manual::Glossary/"Row"> object, it will magically
+set any foreign key columns of the new object to the related primary
+key columns of the source object for you.  The newly created item will
 not be saved into your storage until you call L<DBIx::Class::Row/insert>
 on it.
 
@@ -451,7 +486,7 @@
   if (defined $f_obj) {
     my $f_class = $rel_info->{class};
     $self->throw_exception( "Object $f_obj isn't a ".$f_class )
-      unless Scalar::Util::blessed($f_obj) and $f_obj->isa($f_class);
+      unless blessed $f_obj and $f_obj->isa($f_class);
   }
 
   # _resolve_condition might return two hashrefs, specially in the
@@ -532,7 +567,7 @@
 =back
 
   my $actor = $schema->resultset('Actor')->find(1);
-  my @roles = $schema->resultset('Role')->search({ role => 
+  my @roles = $schema->resultset('Role')->search({ role =>
      { '-in' => ['Fred', 'Barney'] } } );
 
   $actor->set_roles(\@roles);

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Relationship/BelongsTo.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Relationship/BelongsTo.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Relationship/BelongsTo.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -6,8 +6,10 @@
 
 use strict;
 use warnings;
+use Try::Tiny;
+use namespace::clean;
 
-our %_pod_inherit_config = 
+our %_pod_inherit_config =
   (
    class_map => { 'DBIx::Class::Relationship::BelongsTo' => 'DBIx::Class::Relationship' }
   );
@@ -16,7 +18,7 @@
   my ($class, $rel, $f_class, $cond, $attrs) = @_;
 
   # assume a foreign key contraint unless defined otherwise
-  $attrs->{is_foreign_key_constraint} = 1 
+  $attrs->{is_foreign_key_constraint} = 1
     if not exists $attrs->{is_foreign_key_constraint};
   $attrs->{undef_on_null_fk} = 1
     if not exists $attrs->{undef_on_null_fk};
@@ -24,10 +26,10 @@
   # no join condition or just a column name
   if (!ref $cond) {
     $class->ensure_class_loaded($f_class);
-    my %f_primaries = map { $_ => 1 } eval { $f_class->_pri_cols };
-    $class->throw_exception(
-      "Can't infer join condition for ${rel} on ${class}: $@"
-    ) if $@;
+    my %f_primaries = map { $_ => 1 } try { $f_class->_pri_cols }
+      catch {
+        $class->throw_exception( "Can't infer join condition for ${rel} on ${class}: $_");
+      };
 
     my ($pri, $too_many) = keys %f_primaries;
     $class->throw_exception(

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Relationship/HasMany.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Relationship/HasMany.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Relationship/HasMany.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -3,8 +3,10 @@
 
 use strict;
 use warnings;
+use Try::Tiny;
+use namespace::clean;
 
-our %_pod_inherit_config = 
+our %_pod_inherit_config =
   (
    class_map => { 'DBIx::Class::Relationship::HasMany' => 'DBIx::Class::Relationship' }
   );
@@ -14,10 +16,10 @@
 
   unless (ref $cond) {
     $class->ensure_class_loaded($f_class);
-    my ($pri, $too_many) = eval { $class->_pri_cols };
-    $class->throw_exception(
-      "Can't infer join condition for ${rel} on ${class}: $@"
-    ) if $@;
+    my ($pri, $too_many) = try { $class->_pri_cols }
+      catch {
+        $class->throw_exception("Can't infer join condition for ${rel} on ${class}: $_");
+      };
 
     $class->throw_exception(
       "has_many can only infer join for a single primary key; ".
@@ -39,7 +41,7 @@
       $guess = "using our class name '$class' as foreign key";
     }
 
-    my $f_class_loaded = eval { $f_class->columns };
+    my $f_class_loaded = try { $f_class->columns };
     $class->throw_exception(
       "No such column ${f_key} on foreign class ${f_class} ($guess)"
     ) if $f_class_loaded && !$f_class->has_column($f_key);

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Relationship/HasOne.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Relationship/HasOne.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Relationship/HasOne.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -4,8 +4,10 @@
 use strict;
 use warnings;
 use Carp::Clan qw/^DBIx::Class/;
+use Try::Tiny;
+use namespace::clean;
 
-our %_pod_inherit_config = 
+our %_pod_inherit_config =
   (
    class_map => { 'DBIx::Class::Relationship::HasOne' => 'DBIx::Class::Relationship' }
   );
@@ -30,7 +32,7 @@
       "${class} has none"
     ) if !defined $pri && (!defined $cond || !length $cond);
 
-    my $f_class_loaded = eval { $f_class->columns };
+    my $f_class_loaded = try { $f_class->columns };
     my ($f_key,$too_many,$guess);
     if (defined $cond && length $cond) {
       $f_key = $cond;
@@ -60,10 +62,10 @@
 sub _get_primary_key {
   my ( $class, $target_class ) = @_;
   $target_class ||= $class;
-  my ($pri, $too_many) = eval { $target_class->_pri_cols };
-  $class->throw_exception(
-    "Can't infer join condition on ${target_class}: $@"
-  ) if $@;
+  my ($pri, $too_many) = try { $target_class->_pri_cols }
+    catch {
+      $class->throw_exception("Can't infer join condition on ${target_class}: $_");
+    };
 
   $class->throw_exception(
     "might_have/has_one can only infer join for a single primary key; ".

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/ResultSet.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/ResultSet.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/ResultSet.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -2,10 +2,7 @@
 
 use strict;
 use warnings;
-use overload
-        '0+'     => "count",
-        'bool'   => "_bool",
-        fallback => 1;
+use base qw/DBIx::Class/;
 use Carp::Clan qw/^DBIx::Class/;
 use DBIx::Class::Exception;
 use Data::Page;
@@ -13,9 +10,14 @@
 use DBIx::Class::ResultSetColumn;
 use DBIx::Class::ResultSourceHandle;
 use List::Util ();
-use Scalar::Util ();
-use base qw/DBIx::Class/;
+use Scalar::Util 'blessed';
+use namespace::clean;
 
+use overload
+        '0+'     => "count",
+        'bool'   => "_bool",
+        fallback => 1;
+
 __PACKAGE__->mk_group_accessors('simple' => qw/_result_class _source_handle/);
 
 =head1 NAME
@@ -57,8 +59,13 @@
 
 The query that the ResultSet represents is B<only> executed against
 the database when these methods are called:
-L</find> L</next> L</all> L</first> L</single> L</count>
+L</find>, L</next>, L</all>, L</first>, L</single>, L</count>.
 
+If a resultset is used in a numeric context it returns the L</count>.
+However, if it is used in a boolean context it is B<always> true.  So if
+you want to check if a resultset has any results, you must use C<if $rs
+!= 0>.
+
 =head1 EXAMPLES
 
 =head2 Chaining resultsets
@@ -101,7 +108,7 @@
 L</join>, L</prefetch>, L</+select>, L</+as> attributes are merged
 into the existing ones from the original resultset.
 
-The L</where>, L</having> attribute, and any search conditions are
+The L</where> and L</having> attributes, and any search conditions, are
 merged with an SQL C<AND> to the existing condition from the original
 resultset.
 
@@ -142,13 +149,6 @@
 
 See: L</search>, L</count>, L</get_column>, L</all>, L</create>.
 
-=head1 OVERLOADING
-
-If a resultset is used in a numeric context it returns the L</count>.
-However, if it is used in a boolean context it is always true.  So if
-you want to check if a resultset has any results use C<if $rs != 0>.
-C<if $rs> will always be true.
-
 =head1 METHODS
 
 =head2 new
@@ -199,7 +199,6 @@
   my $self = {
     _source_handle => $source,
     cond => $attrs->{where},
-    count => undef,
     pager => undef,
     attrs => $attrs
   };
@@ -538,8 +537,8 @@
       : $self->_add_alias($input_query, $alias);
   }
 
-  # Run the query
-  my $rs = $self->search ($query, $attrs);
+  # Run the query, passing the result_class since it should propagate for find
+  my $rs = $self->search ($query, {result_class => $self->result_class, %$attrs});
   if (keys %{$rs->_resolved_attrs->{collapse}}) {
     my $row = $rs->next;
     carp "Query returned more than one row" if $rs->next;
@@ -1007,7 +1006,7 @@
   # without having to contruct the full hash
 
   if (keys %collapse) {
-    my %pri = map { ($_ => 1) } $self->result_source->primary_columns;
+    my %pri = map { ($_ => 1) } $self->result_source->_pri_cols;
     foreach my $i (0 .. $#construct_as) {
       next if defined($construct_as[$i][0]); # only self table
       if (delete $pri{$construct_as[$i][1]}) {
@@ -1138,9 +1137,14 @@
 sub result_class {
   my ($self, $result_class) = @_;
   if ($result_class) {
-    $self->ensure_class_loaded($result_class);
+    unless (ref $result_class) { # don't fire this for an object
+      $self->ensure_class_loaded($result_class);
+    }
     $self->_result_class($result_class);
-    $self->{attrs}{result_class} = $result_class if ref $self;
+    # THIS LINE WOULD BE A BUG - this accessor specifically exists to
+    # permit the user to set result class on one result set only; it only
+    # chains if provided to search()
+    #$self->{attrs}{result_class} = $result_class if ref $self;
   }
   $self->_result_class;
 }
@@ -1236,12 +1240,11 @@
   $attrs ||= $self->_resolved_attrs;
 
   my $tmp_attrs = { %$attrs };
+  # take off any limits, record_filter is cdbi, and no point of ordering nor locking a count
+  delete @{$tmp_attrs}{qw/rows offset order_by record_filter for/};
 
-  # take off any limits, record_filter is cdbi, and no point of ordering a count
-  delete $tmp_attrs->{$_} for (qw/select as rows offset order_by record_filter/);
-
   # overwrite the selector (supplied by the storage)
-  $tmp_attrs->{select} = $rsrc->storage->_count_select ($rsrc, $tmp_attrs);
+  $tmp_attrs->{select} = $rsrc->storage->_count_select ($rsrc, $attrs);
   $tmp_attrs->{as} = 'count';
 
   my $tmp_rs = $rsrc->resultset_class->new($rsrc, $tmp_attrs)->get_column ('count');
@@ -1256,37 +1259,42 @@
   my ($self, $attrs) = @_;
 
   my $rsrc = $self->result_source;
-  $attrs ||= $self->_resolved_attrs_copy;
+  $attrs ||= $self->_resolved_attrs;
 
   my $sub_attrs = { %$attrs };
+  # extra selectors do not go in the subquery and there is no point of ordering it, nor locking it
+  delete @{$sub_attrs}{qw/collapse select _prefetch_select as order_by for/};
 
-  # extra selectors do not go in the subquery and there is no point of ordering it
-  delete $sub_attrs->{$_} for qw/collapse select _prefetch_select as order_by/;
-
   # if we multi-prefetch we group_by primary keys only as this is what we would
   # get out of the rs via ->next/->all. We *DO WANT* to clobber old group_by regardless
   if ( keys %{$attrs->{collapse}}  ) {
     $sub_attrs->{group_by} = [ map { "$attrs->{alias}.$_" } ($rsrc->_pri_cols) ]
   }
 
-  $sub_attrs->{select} = $rsrc->storage->_subq_count_select ($rsrc, $attrs);
+  # Calculate subquery selector
+  if (my $g = $sub_attrs->{group_by}) {
 
-  # this is so that the query can be simplified e.g.
-  # * ordering can be thrown away in things like Top limit
-  $sub_attrs->{-for_count_only} = 1;
+    # necessary as the group_by may refer to aliased functions
+    my $sel_index;
+    for my $sel (@{$attrs->{select}}) {
+      $sel_index->{$sel->{-as}} = $sel
+        if (ref $sel eq 'HASH' and $sel->{-as});
+    }
 
-  my $sub_rs = $rsrc->resultset_class->new ($rsrc, $sub_attrs);
+    for my $g_part (@$g) {
+      push @{$sub_attrs->{select}}, $sel_index->{$g_part} || $g_part;
+    }
+  }
+  else {
+    my @pcols = map { "$attrs->{alias}.$_" } ($rsrc->primary_columns);
+    $sub_attrs->{select} = @pcols ? \@pcols : [ 1 ];
+  }
 
-  $attrs->{from} = [{
-    -alias => 'count_subq',
-    -source_handle => $rsrc->handle,
-    count_subq => $sub_rs->as_query,
-  }];
-
-  # the subquery replaces this
-  delete $attrs->{$_} for qw/where bind collapse group_by having having_bind rows offset/;
-
-  return $self->_count_rs ($attrs);
+  return $rsrc->resultset_class
+               ->new ($rsrc, $sub_attrs)
+                ->as_subselect_rs
+                 ->search ({}, { columns => { count => $rsrc->storage->_count_select ($rsrc, $attrs) } })
+                  ->get_column ('count');
 }
 
 sub _bool {
@@ -1417,14 +1425,15 @@
   my $cond = $rsrc->schema->storage->_strip_cond_qualifiers ($self->{cond});
 
   my $needs_group_by_subq = $self->_has_resolved_attr (qw/collapse group_by -join/);
-  my $needs_subq = $needs_group_by_subq || (not defined $cond) || $self->_has_resolved_attr(qw/row offset/);
+  my $needs_subq = $needs_group_by_subq || (not defined $cond) || $self->_has_resolved_attr(qw/rows offset/);
 
   if ($needs_group_by_subq or $needs_subq) {
 
     # make a new $rs selecting only the PKs (that's all we really need)
     my $attrs = $self->_resolved_attrs_copy;
 
-    delete $attrs->{$_} for qw/collapse select as/;
+
+    delete $attrs->{$_} for qw/collapse _collapse_order_by select _prefetch_select as/;
     $attrs->{columns} = [ map { "$attrs->{alias}.$_" } ($self->result_source->_pri_cols) ];
 
     if ($needs_group_by_subq) {
@@ -1458,7 +1467,6 @@
     }
 
     my $subrs = (ref $self)->new($rsrc, $attrs);
-
     return $self->result_source->storage->_subq_update_delete($subrs, $op, $values);
   }
   else {
@@ -1513,9 +1521,10 @@
   my ($self, $values) = @_;
   $self->throw_exception('Values for update_all must be a hash')
     unless ref $values eq 'HASH';
-  foreach my $obj ($self->all) {
-    $obj->set_columns($values)->update;
-  }
+
+  my $guard = $self->result_source->schema->txn_scope_guard;
+  $_->update($values) for $self->all;
+  $guard->commit;
   return 1;
 }
 
@@ -1566,7 +1575,9 @@
   $self->throw_exception('delete_all does not accept any arguments')
     if @_;
 
+  my $guard = $self->result_source->schema->txn_scope_guard;
   $_->delete for $self->all;
+  $guard->commit;
   return 1;
 }
 
@@ -1921,7 +1932,7 @@
   my $value = shift;
   my $ref_type = ref $value;
   return 1 if $ref_type eq '' || $ref_type eq 'SCALAR';
-  return 1 if Scalar::Util::blessed($value);
+  return 1 if blessed $value;
   return 0;
 }
 
@@ -2295,7 +2306,7 @@
     producer => $producer,
     name => 'harry',
   }, {
-    key => 'primary,
+    key => 'primary',
   });
 
 
@@ -2665,16 +2676,26 @@
 =cut
 
 sub as_subselect_rs {
-   my $self = shift;
+  my $self = shift;
 
-   return $self->result_source->resultset->search( undef, {
-      alias => $self->current_source_alias,
-      from => [{
-            $self->current_source_alias => $self->as_query,
-            -alias         => $self->current_source_alias,
-            -source_handle => $self->result_source->handle,
-         }]
-   });
+  my $attrs = $self->_resolved_attrs;
+
+  my $fresh_rs = (ref $self)->new (
+    $self->result_source
+  );
+
+  # these pieces will be locked in the subquery
+  delete $fresh_rs->{cond};
+  delete @{$fresh_rs->{attrs}}{qw/where bind/};
+
+  return $fresh_rs->search( {}, {
+    from => [{
+      $attrs->{alias} => $self->as_query,
+      -alias         => $attrs->{alias},
+      -source_handle => $self->result_source->handle,
+    }],
+    alias => $attrs->{alias},
+  });
 }
 
 # This code is called by search_related, and makes sure there
@@ -2699,7 +2720,7 @@
   # ->_resolve_join as otherwise they get lost - captainL
   my $join = $self->_merge_attr( $attrs->{join}, $attrs->{prefetch} );
 
-  delete @{$attrs}{qw/join prefetch collapse distinct select as columns +select +as +columns/};
+  delete @{$attrs}{qw/join prefetch collapse group_by distinct select as columns +select +as +columns/};
 
   my $seen = { %{ (delete $attrs->{seen_join}) || {} } };
 
@@ -2725,7 +2746,7 @@
       -alias => $attrs->{alias},
       $attrs->{alias} => $rs_copy->as_query,
     }];
-    delete @{$attrs}{@force_subq_attrs, 'where'};
+    delete @{$attrs}{@force_subq_attrs, qw/where bind/};
     $seen->{-relation_chain_depth} = 0;
   }
   elsif ($attrs->{from}) {  #shallow copy suffices

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/ResultSetColumn.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/ResultSetColumn.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/ResultSetColumn.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -7,7 +7,7 @@
 
 use Carp::Clan qw/^DBIx::Class/;
 use DBIx::Class::Exception;
-use List::Util;
+use List::Util ();
 
 =head1 NAME
 

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/ResultSource.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/ResultSource.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/ResultSource.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -8,6 +8,9 @@
 
 use DBIx::Class::Exception;
 use Carp::Clan qw/^DBIx::Class/;
+use Try::Tiny;
+use List::Util 'first';
+use namespace::clean;
 
 use base qw/DBIx::Class/;
 
@@ -142,7 +145,7 @@
 If a column name beginning with a plus sign ('+col1') is provided, the
 attributes provided will be merged with any existing attributes for the
 column, with the new attributes taking precedence in the case that an
-attribute already exists. Using this without a hashref 
+attribute already exists. Using this without a hashref
 (C<< $source->add_columns(qw/+col1 +col2/) >>) is legal, but useless --
 it does the same thing it would do without the plus.
 
@@ -174,7 +177,7 @@
 
 This contains the column type. It is automatically filled if you use the
 L<SQL::Translator::Producer::DBIx::Class::File> producer, or the
-L<DBIx::Class::Schema::Loader> module. 
+L<DBIx::Class::Schema::Loader> module.
 
 Currently there is no standard set of values for the data_type. Use
 whatever your database supports.
@@ -367,9 +370,10 @@
     $self->{_columns_info_loaded}++;
     my $info = {};
     my $lc_info = {};
-    # eval for the case of storage without table
-    eval { $info = $self->storage->columns_info_for( $self->from ) };
-    unless ($@) {
+
+    # try for the case of storage without table
+    try {
+      $info = $self->storage->columns_info_for( $self->from );
       for my $realcol ( keys %{$info} ) {
         $lc_info->{lc $realcol} = $info->{$realcol};
       }
@@ -379,7 +383,7 @@
           %{ $info->{$col} || $lc_info->{lc $col} || {} }
         };
       }
-    }
+    };
   }
   return $self->_columns->{$column};
 }
@@ -519,12 +523,15 @@
   return @{shift->_primaries||[]};
 }
 
+# a helper method that will automatically die with a descriptive message if
+# no pk is defined on the source in question. For internal use to save
+# on if @pks... boilerplate
 sub _pri_cols {
   my $self = shift;
   my @pcols = $self->primary_columns
     or $self->throw_exception (sprintf(
-      'Operation requires a primary key to be declared on %s via set_primary_key',
-      ref $self,
+      "Operation requires a primary key to be declared on '%s' via set_primary_key",
+      $self->source_name,
     ));
   return @pcols;
 }
@@ -894,7 +901,7 @@
 
   my $schema = $source->schema();
 
-Returns the L<DBIx::Class::Schema> object that this result source 
+Returns the L<DBIx::Class::Schema> object that this result source
 belongs to.
 
 =head2 storage
@@ -1019,7 +1026,7 @@
 
   return $self;
 
-  # XXX disabled. doesn't work properly currently. skip in tests.
+# XXX disabled. doesn't work properly currently. skip in tests.
 
   my $f_source = $self->schema->source($f_source_name);
   unless ($f_source) {
@@ -1032,13 +1039,14 @@
   }
   return unless $f_source; # Can't test rel without f_source
 
-  eval { $self->_resolve_join($rel, 'me', {}, []) };
+  try { $self->_resolve_join($rel, 'me', {}, []) }
+  catch {
+    # If the resolve failed, back out and re-throw the error
+    delete $rels{$rel};
+    $self->_relationships(\%rels);
+    $self->throw_exception("Error creating relationship $rel: $_");
+  };
 
-  if ($@) { # If the resolve failed, back out and re-throw the error
-    delete $rels{$rel}; #
-    $self->_relationships(\%rels);
-    $self->throw_exception("Error creating relationship $rel: $@");
-  }
   1;
 }
 
@@ -1242,7 +1250,7 @@
     for my $rel (keys %$join) {
 
       my $rel_info = $self->relationship_info($rel)
-        or $self->throw_exception("No such relationship ${rel}");
+        or $self->throw_exception("No such relationship '$rel' on " . $self->source_name);
 
       my $force_left = $parent_force_left;
       $force_left ||= lc($rel_info->{attrs}{join_type}||'') eq 'left';
@@ -1272,7 +1280,7 @@
     );
 
     my $rel_info = $self->relationship_info($join)
-      or $self->throw_exception("No such relationship ${join}");
+      or $self->throw_exception("No such relationship $join on " . $self->source_name);
 
     my $rel_src = $self->related_source($join);
     return [ { $as => $rel_src->from,
@@ -1285,7 +1293,7 @@
                -is_single => (
                   $rel_info->{attrs}{accessor}
                     &&
-                  List::Util::first { $rel_info->{attrs}{accessor} eq $_ } (qw/single filter/)
+                  first { $rel_info->{attrs}{accessor} eq $_ } (qw/single filter/)
                 ),
                -alias => $as,
                -relation_chain_depth => $seen->{-relation_chain_depth} || 0,
@@ -1458,7 +1466,7 @@
     my $as = shift @{$p->{-join_aliases}};
 
     my $rel_info = $self->relationship_info( $pre );
-    $self->throw_exception( $self->name . " has no such relationship '$pre'" )
+    $self->throw_exception( $self->source_name . " has no such relationship '$pre'" )
       unless $rel_info;
     my $as_prefix = ($alias =~ /^.*?\.(.+)$/ ? $1.'.' : '');
     my $rel_source = $self->related_source($pre);
@@ -1483,14 +1491,14 @@
       }
       #my @col = map { (/^self\.(.+)$/ ? ("${as_prefix}.$1") : ()); }
       #              values %{$rel_info->{cond}};
-      $collapse->{".${as_prefix}${pre}"} = [ $rel_source->primary_columns ];
+      $collapse->{".${as_prefix}${pre}"} = [ $rel_source->_pri_cols ];
         # action at a distance. prepending the '.' allows simpler code
         # in ResultSet->_collapse_result
       my @key = map { (/^foreign\.(.+)$/ ? ($1) : ()); }
                     keys %{$rel_info->{cond}};
       my @ord = (ref($rel_info->{attrs}{order_by}) eq 'ARRAY'
                    ? @{$rel_info->{attrs}{order_by}}
-   
+
                 : (defined $rel_info->{attrs}{order_by}
                        ? ($rel_info->{attrs}{order_by})
                        : ()));
@@ -1519,7 +1527,7 @@
 sub related_source {
   my ($self, $rel) = @_;
   if( !$self->has_relationship( $rel ) ) {
-    $self->throw_exception("No such relationship '$rel'");
+    $self->throw_exception("No such relationship '$rel' on " . $self->source_name);
   }
   return $self->schema->source($self->relationship_info($rel)->{source});
 }
@@ -1541,14 +1549,14 @@
 sub related_class {
   my ($self, $rel) = @_;
   if( !$self->has_relationship( $rel ) ) {
-    $self->throw_exception("No such relationship '$rel'");
+    $self->throw_exception("No such relationship '$rel' on " . $self->source_name);
   }
   return $self->schema->class($self->relationship_info($rel)->{source});
 }
 
 =head2 handle
 
-Obtain a new handle to this source. Returns an instance of a 
+Obtain a new handle to this source. Returns an instance of a
 L<DBIx::Class::ResultSourceHandle>.
 
 =cut

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/ResultSourceProxy/Table.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/ResultSourceProxy/Table.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/ResultSourceProxy/Table.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -6,7 +6,8 @@
 use base qw/DBIx::Class::ResultSourceProxy/;
 
 use DBIx::Class::ResultSource::Table;
-use Scalar::Util ();
+use Scalar::Util 'blessed';
+use namespace::clean;
 
 __PACKAGE__->mk_classdata(table_class => 'DBIx::Class::ResultSource::Table');
 
@@ -80,7 +81,7 @@
   my ($class, $table) = @_;
   return $class->result_source_instance->name unless $table;
 
-  unless (Scalar::Util::blessed($table) && $table->isa($class->table_class)) {
+  unless (blessed $table && $table->isa($class->table_class)) {
 
     my $table_class = $class->table_class;
     $class->ensure_class_loaded($table_class);

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Row.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Row.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Row.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -6,7 +6,9 @@
 use base qw/DBIx::Class/;
 
 use DBIx::Class::Exception;
-use Scalar::Util ();
+use Scalar::Util 'blessed';
+use Try::Tiny;
+use namespace::clean;
 
 ###
 ### Internal method
@@ -106,10 +108,10 @@
 sub __new_related_find_or_new_helper {
   my ($self, $relname, $data) = @_;
 
+  my $rsrc = $self->result_source;
+
   # create a mock-object so all new/set_column component overrides will run:
-  my $rel_rs = $self->result_source
-                    ->related_source($relname)
-                    ->resultset;
+  my $rel_rs = $rsrc->related_source($relname)->resultset;
   my $new_rel_obj = $rel_rs->new_result($data);
   my $proc_data = { $new_rel_obj->get_columns };
 
@@ -117,7 +119,7 @@
     MULTICREATE_DEBUG and warn "MC $self constructing $relname via new_result";
     return $new_rel_obj;
   }
-  elsif ($self->result_source->_pk_depends_on($relname, $proc_data )) {
+  elsif ($rsrc->_pk_depends_on($relname, $proc_data )) {
     if (! keys %$proc_data) {
       # there is nothing to search for - blind create
       MULTICREATE_DEBUG and warn "MC $self constructing default-insert $relname";
@@ -132,7 +134,7 @@
     return $new_rel_obj;
   }
   else {
-    my $us = $self->source_name;
+    my $us = $rsrc->source_name;
     $self->throw_exception ("'$us' neither depends nor is depended on by '$relname', something is wrong...");
   }
 }
@@ -188,7 +190,7 @@
         my $acc_type = $info->{attrs}{accessor} || '';
         if ($acc_type eq 'single') {
           my $rel_obj = delete $attrs->{$key};
-          if(!Scalar::Util::blessed($rel_obj)) {
+          if(!blessed $rel_obj) {
             $rel_obj = $new->__new_related_find_or_new_helper($key, $rel_obj);
           }
 
@@ -208,7 +210,7 @@
           my @objects;
           foreach my $idx (0 .. $#$others) {
             my $rel_obj = $others->[$idx];
-            if(!Scalar::Util::blessed($rel_obj)) {
+            if(!blessed $rel_obj) {
               $rel_obj = $new->__new_related_find_or_new_helper($key, $rel_obj);
             }
 
@@ -226,7 +228,7 @@
         elsif ($acc_type eq 'filter') {
           ## 'filter' should disappear and get merged in with 'single' above!
           my $rel_obj = delete $attrs->{$key};
-          if(!Scalar::Util::blessed($rel_obj)) {
+          if(!blessed $rel_obj) {
             $rel_obj = $new->__new_related_find_or_new_helper($key, $rel_obj);
           }
           if ($rel_obj->in_storage) {
@@ -302,8 +304,7 @@
     my $rel_obj = $related_stuff{$relname};
 
     if (! $self->{_rel_in_storage}{$relname}) {
-      next unless (Scalar::Util::blessed($rel_obj)
-                    && $rel_obj->isa('DBIx::Class::Row'));
+      next unless (blessed $rel_obj && $rel_obj->isa('DBIx::Class::Row'));
 
       next unless $source->_pk_depends_on(
                     $relname, { $rel_obj->get_columns }
@@ -314,7 +315,7 @@
 
       MULTICREATE_DEBUG and warn "MC $self pre-reconstructing $relname $rel_obj\n";
 
-      my $them = { %{$rel_obj->{_relationship_data} || {} }, $rel_obj->get_inflated_columns };
+      my $them = { %{$rel_obj->{_relationship_data} || {} }, $rel_obj->get_columns };
       my $existing;
 
       # if there are no keys - nothing to search for
@@ -342,31 +343,48 @@
     $rollback_guard ||= $source->storage->txn_scope_guard
   }
 
+  ## PK::Auto
+  my %auto_pri;
+  my $auto_idx = 0;
+  for ($self->primary_columns) {
+    if (
+      not defined $self->get_column($_)
+        ||
+      (ref($self->get_column($_)) eq 'SCALAR')
+    ) {
+      my $col_info = $source->column_info($_);
+      $auto_pri{$_} = $auto_idx++ unless $col_info->{auto_nextval};   # auto_nextval's are pre-fetched in the storage
+    }
+  }
+
   MULTICREATE_DEBUG and do {
     no warnings 'uninitialized';
     warn "MC $self inserting (".join(', ', $self->get_columns).")\n";
   };
-  my $updated_cols = $source->storage->insert($source, { $self->get_columns });
+  my $updated_cols = $source->storage->insert(
+    $source,
+    { $self->get_columns },
+    (keys %auto_pri) && $source->storage->_supports_insert_returning
+      ? { returning => [ sort { $auto_pri{$a} <=> $auto_pri{$b} } keys %auto_pri ] }
+      : ()
+    ,
+  );
+
   foreach my $col (keys %$updated_cols) {
     $self->store_column($col, $updated_cols->{$col});
+    delete $auto_pri{$col};
   }
 
-  ## PK::Auto
-  my @auto_pri = grep {
-                  (not defined $self->get_column($_))
-                    ||
-                  (ref($self->get_column($_)) eq 'SCALAR')
-                 } $self->primary_columns;
-
-  if (@auto_pri) {
-    MULTICREATE_DEBUG and warn "MC $self fetching missing PKs ".join(', ', @auto_pri)."\n";
+  if (keys %auto_pri) {
+    my @missing = sort { $auto_pri{$a} <=> $auto_pri{$b} } keys %auto_pri;
+    MULTICREATE_DEBUG and warn "MC $self fetching missing PKs ".join(', ', @missing )."\n";
     my $storage = $self->result_source->storage;
     $self->throw_exception( "Missing primary key but Storage doesn't support last_insert_id" )
       unless $storage->can('last_insert_id');
-    my @ids = $storage->last_insert_id($self->result_source,@auto_pri);
+    my @ids = $storage->last_insert_id($self->result_source, @missing);
     $self->throw_exception( "Can't get last insert id" )
-      unless (@ids == @auto_pri);
-    $self->store_column($auto_pri[$_] => $ids[$_]) for 0 .. $#ids;
+      unless (@ids == @missing);
+    $self->store_column($missing[$_] => $ids[$_]) for 0 .. $#missing;
   }
 
   $self->{_dirty_columns} = {};
@@ -380,26 +398,19 @@
       : $related_stuff{$relname}
     ;
 
-    if (@cands
-          && Scalar::Util::blessed($cands[0])
-            && $cands[0]->isa('DBIx::Class::Row')
+    if (@cands && blessed $cands[0] && $cands[0]->isa('DBIx::Class::Row')
     ) {
       my $reverse = $source->reverse_relationship_info($relname);
       foreach my $obj (@cands) {
         $obj->set_from_related($_, $self) for keys %$reverse;
-        my $them = { %{$obj->{_relationship_data} || {} }, $obj->get_inflated_columns };
         if ($self->__their_pk_needs_us($relname)) {
           if (exists $self->{_ignore_at_insert}{$relname}) {
             MULTICREATE_DEBUG and warn "MC $self skipping post-insert on $relname";
-          } else {
-            MULTICREATE_DEBUG and warn "MC $self re-creating $relname $obj";
-            my $re = $self->result_source
-                          ->related_source($relname)
-                          ->resultset
-                          ->create($them);
-            %{$obj} = %{$re};
-            MULTICREATE_DEBUG and warn "MC $self new $relname $obj";
           }
+          else {
+            MULTICREATE_DEBUG and warn "MC $self inserting $relname $obj";
+            $obj->insert;
+          }
         } else {
           MULTICREATE_DEBUG and warn "MC $self post-inserting $obj";
           $obj->insert();
@@ -467,7 +478,7 @@
 L<significance of primary keys|DBIx::Class::Manual::Intro/The Significance and Importance of Primary Keys>
 for more details).
 
-Also takes an optional hashref of C<< column_name => value> >> pairs
+Also takes an optional hashref of C<< column_name => value >> pairs
 to update on the object first. Be aware that the hashref will be
 passed to C<set_inflated_columns>, which might edit it in place, so
 don't rely on it being the same after a call to C<update>.  If you
@@ -501,16 +512,18 @@
 
 sub update {
   my ($self, $upd) = @_;
-  $self->throw_exception( "Not in database" ) unless $self->in_storage;
 
   my $ident_cond = $self->{_orig_ident} || $self->ident_condition;
 
+  $self->set_inflated_columns($upd) if $upd;
+  my %to_update = $self->get_dirty_columns;
+  return $self unless keys %to_update;
+
+  $self->throw_exception( "Not in database" ) unless $self->in_storage;
+
   $self->throw_exception('Unable to update a row with incomplete or no identity')
     if ! keys %$ident_cond;
 
-  $self->set_inflated_columns($upd) if $upd;
-  my %to_update = $self->get_dirty_columns;
-  return $self unless keys %to_update;
   my $rows = $self->result_source->storage->update(
     $self->result_source, \%to_update, $ident_cond
   );
@@ -843,34 +856,20 @@
   my ($self, $column, $new_value) = @_;
 
   # if we can't get an ident condition on first try - mark the object as unidentifiable
-  $self->{_orig_ident} ||= (eval { $self->ident_condition }) || {};
+  $self->{_orig_ident} ||= (try { $self->ident_condition }) || {};
 
   my $old_value = $self->get_column($column);
   $new_value = $self->store_column($column, $new_value);
 
-  my $dirty;
-  if (!$self->in_storage) { # no point tracking dirtyness on uninserted data
-    $dirty = 1;
-  }
-  elsif (defined $old_value xor defined $new_value) {
-    $dirty = 1;
-  }
-  elsif (not defined $old_value) {  # both undef
-    $dirty = 0;
-  }
-  elsif ($old_value eq $new_value) {
-    $dirty = 0;
-  }
-  else {  # do a numeric comparison if datatype allows it
-    if ($self->_is_column_numeric($column)) {
-      $dirty = $old_value != $new_value;
-    }
-    else {
-      $dirty = 1;
-    }
-  }
+  my $dirty =
+    $self->{_dirty_columns}{$column}
+      ||
+    $self->in_storage # no point tracking dirtyness on uninserted data
+      ? ! $self->_eq_column_values ($column, $old_value, $new_value)
+      : 1
+  ;
 
-  # sadly the update code just checks for keys, not for their value
+  # FIXME sadly the update code just checks for keys, not for their value
   $self->{_dirty_columns}{$column} = 1 if $dirty;
 
   # XXX clear out the relation cache for this column
@@ -879,6 +878,26 @@
   return $new_value;
 }
 
+sub _eq_column_values {
+  my ($self, $col, $old, $new) = @_;
+
+  if (defined $old xor defined $new) {
+    return 0;
+  }
+  elsif (not defined $old) {  # both undef
+    return 1;
+  }
+  elsif ($old eq $new) {
+    return 1;
+  }
+  elsif ($self->_is_column_numeric($col)) {  # do a numeric comparison if datatype allows it
+    return $old == $new;
+  }
+  else {
+    return 0;
+  }
+}
+
 =head2 set_columns
 
   $row->set_columns({ $col => $val, ... });
@@ -1094,7 +1113,7 @@
 
   if ($source->isa('DBIx::Class::ResultSourceHandle')) {
     $source = $source_handle->resolve
-  } 
+  }
   else {
     $source_handle = $source->handle
   }
@@ -1358,7 +1377,6 @@
 
 sub discard_changes {
   my ($self, $attrs) = @_;
-  delete $self->{_dirty_columns};
   return unless $self->in_storage; # Don't reload if we aren't real!
 
   # add a replication default to read from the master only

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/SQLAHacks/MySQL.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/SQLAHacks/MySQL.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/SQLAHacks/MySQL.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -21,4 +21,14 @@
   return $self->SUPER::insert (@_);
 }
 
+# Allow STRAIGHT_JOIN's
+sub _generate_join_clause {
+    my ($self, $join_type) = @_;
+
+    if( $join_type && $join_type =~ /^STRAIGHT\z/i ) {
+        return ' STRAIGHT_JOIN '
+    }
+
+    return $self->SUPER::_generate_join_clause( $join_type );
+}
 1;

Added: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/SQLAHacks/Oracle.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/SQLAHacks/Oracle.pm	                        (rev 0)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/SQLAHacks/Oracle.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -0,0 +1,120 @@
+package # Hide from PAUSE
+  DBIx::Class::SQLAHacks::Oracle;
+
+use warnings;
+use strict;
+
+use base qw( DBIx::Class::SQLAHacks );
+use Carp::Clan qw/^DBIx::Class|^SQL::Abstract/;
+
+sub new {
+  my $self = shift;
+  my %opts = (ref $_[0] eq 'HASH') ? %{$_[0]} : @_;
+  push @{$opts{special_ops}}, {
+    regex => qr/^prior$/i,
+    handler => '_where_field_PRIOR',
+  };
+
+  $self->SUPER::new (\%opts);
+}
+
+sub _assemble_binds {
+  my $self = shift;
+  return map { @{ (delete $self->{"${_}_bind"}) || [] } } (qw/from where oracle_connect_by having order/);
+}
+
+
+sub _parse_rs_attrs {
+    my $self = shift;
+    my ($rs_attrs) = @_;
+
+    my ($cb_sql, @cb_bind) = $self->_connect_by($rs_attrs);
+    push @{$self->{oracle_connect_by_bind}}, @cb_bind;
+
+    my $sql = $self->SUPER::_parse_rs_attrs(@_);
+
+    return "$cb_sql $sql";
+}
+
+sub _connect_by {
+    my ($self, $attrs) = @_;
+
+    my $sql = '';
+    my @bind;
+
+    if ( ref($attrs) eq 'HASH' ) {
+        if ( $attrs->{'start_with'} ) {
+            my ($ws, @wb) = $self->_recurse_where( $attrs->{'start_with'} );
+            $sql .= $self->_sqlcase(' start with ') . $ws;
+            push @bind, @wb;
+        }
+        if ( my $connect_by = $attrs->{'connect_by'} || $attrs->{'connect_by_nocycle'} ) {
+            my ($connect_by_sql, @connect_by_sql_bind) = $self->_recurse_where( $connect_by );
+            $sql .= sprintf(" %s %s",
+                ( $attrs->{'connect_by_nocycle'} ) ? $self->_sqlcase('connect by nocycle')
+                    : $self->_sqlcase('connect by'),
+                $connect_by_sql,
+            );
+            push @bind, @connect_by_sql_bind;
+        }
+        if ( $attrs->{'order_siblings_by'} ) {
+            $sql .= $self->_order_siblings_by( $attrs->{'order_siblings_by'} );
+        }
+    }
+
+    return wantarray ? ($sql, @bind) : $sql;
+}
+
+sub _order_siblings_by {
+    my ( $self, $arg ) = @_;
+
+    my ( @sql, @bind );
+    for my $c ( $self->_order_by_chunks($arg) ) {
+        $self->_SWITCH_refkind(
+            $c,
+            {
+                SCALAR   => sub { push @sql, $c },
+                ARRAYREF => sub { push @sql, shift @$c; push @bind, @$c },
+            }
+        );
+    }
+
+    my $sql =
+      @sql
+      ? sprintf( '%s %s', $self->_sqlcase(' order siblings by'), join( ', ', @sql ) )
+      : '';
+
+    return wantarray ? ( $sql, @bind ) : $sql;
+}
+
+# we need to add a '=' only when PRIOR is used against a column directly
+# i.e. when it is invoked by a special_op callback
+sub _where_field_PRIOR {
+  my ($self, $lhs, $op, $rhs) = @_;
+  my ($sql, @bind) = $self->_recurse_where ($rhs);
+
+  $sql = sprintf ('%s = %s %s ',
+    $self->_convert($self->_quote($lhs)),
+    $self->_sqlcase ($op),
+    $sql
+  );
+
+  return ($sql, @bind);
+}
+
+1;
+
+__END__
+
+=pod
+
+=head1 NAME
+
+DBIx::Class::SQLAHacks::Oracle - adds hierarchical query support for Oracle to SQL::Abstract
+
+=head1 DESCRIPTION
+
+See L<DBIx::Class::Storage::DBI::Oracle::Generic> for more information about
+how to use hierarchical queries with DBIx::Class.
+
+=cut

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/SQLAHacks/OracleJoins.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/SQLAHacks/OracleJoins.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/SQLAHacks/OracleJoins.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -5,13 +5,13 @@
 use Carp::Clan qw/^DBIx::Class|^SQL::Abstract/;
 
 sub select {
-  my ($self, $table, $fields, $where, $order, @rest) = @_;
+  my ($self, $table, $fields, $where, $rs_attrs, @rest) = @_;
 
   if (ref($table) eq 'ARRAY') {
     $where = $self->_oracle_joins($where, @{ $table });
   }
 
-  return $self->SUPER::select($table, $fields, $where, $order, @rest);
+  return $self->SUPER::select($table, $fields, $where, $rs_attrs, @rest);
 }
 
 sub _recurse_from {

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/SQLAHacks/SQLite.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/SQLAHacks/SQLite.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/SQLAHacks/SQLite.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -6,12 +6,16 @@
 
 #
 # SQLite does not understand SELECT ... FOR UPDATE
-# Adjust SQL here instead
+# Disable it here
 #
-sub select {
-  my $self = shift;
-  local $self->{_dbic_rs_attrs}{for} = undef;
-  return $self->SUPER::select (@_);
+sub _parse_rs_attrs {
+  my ($self, $attrs) = @_;
+
+  return $self->SUPER::_parse_rs_attrs ($attrs)
+    if ref $attrs ne 'HASH';
+
+  local $attrs->{for};
+  return $self->SUPER::_parse_rs_attrs ($attrs);
 }
 
 1;

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/SQLAHacks.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/SQLAHacks.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/SQLAHacks.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -8,8 +8,10 @@
 use base qw/SQL::Abstract::Limit/;
 use strict;
 use warnings;
-use Carp::Clan qw/^DBIx::Class|^SQL::Abstract/;
-use Sub::Name();
+use List::Util 'first';
+use Sub::Name 'subname';
+use namespace::clean;
+use Carp::Clan qw/^DBIx::Class|^SQL::Abstract|^Try::Tiny/;
 
 BEGIN {
   # reinstall the carp()/croak() functions imported into SQL::Abstract
@@ -19,7 +21,7 @@
   for my $f (qw/carp croak/) {
 
     my $orig = \&{"SQL::Abstract::$f"};
-    *{"SQL::Abstract::$f"} = Sub::Name::subname "SQL::Abstract::$f" =>
+    *{"SQL::Abstract::$f"} = subname "SQL::Abstract::$f" =>
       sub {
         if (Carp::longmess() =~ /DBIx::Class::SQLAHacks::[\w]+ .+? called \s at/x) {
           __PACKAGE__->can($f)->(@_);
@@ -31,7 +33,12 @@
   }
 }
 
+# the "oh noes offset/top without limit" constant
+# limited to 32 bits for sanity (and since it is fed
+# to sprintf %u)
+sub __max_int { 0xFFFFFFFF };
 
+
 # Tries to determine limit dialect.
 #
 sub new {
@@ -46,32 +53,165 @@
   $self;
 }
 
+# !!! THIS IS ALSO HORRIFIC !!! /me ashamed
+#
+# Generates inner/outer select lists for various limit dialects
+# which result in one or more subqueries (e.g. RNO, Top, RowNum)
+# Any non-root-table columns need to have their table qualifier
+# turned into a column alias (otherwise names in subqueries clash
+# and/or lose their source table)
+#
+# Returns inner/outer strings of SQL QUOTED selectors with aliases
+# (to be used in whatever select statement), and an alias index hashref
+# of QUOTED SEL => QUOTED ALIAS pairs (to maybe be used for string-subst
+# higher up).
+# If an order_by is supplied, the inner select needs to bring out columns
+# used in implicit (non-selected) orders, and the order condition itself
+# needs to be realiased to the proper names in the outer query. Thus we
+# also return a hashref (order doesn't matter) of QUOTED EXTRA-SEL =>
+# QUOTED ALIAS pairs, which is a list of extra selectors that do *not*
+# exist in the original select list
 
-# ANSI standard Limit/Offset implementation. DB2 and MSSQL use this
+sub _subqueried_limit_attrs {
+  my ($self, $rs_attrs) = @_;
+
+  croak 'Limit dialect implementation usable only in the context of DBIC (missing $rs_attrs)'
+    unless ref ($rs_attrs) eq 'HASH';
+
+  my ($re_sep, $re_alias) = map { quotemeta $_ } (
+    $self->name_sep || '.',
+    $rs_attrs->{alias},
+  );
+
+  # correlate select and as, build selection index
+  my (@sel, $in_sel_index);
+  for my $i (0 .. $#{$rs_attrs->{select}}) {
+
+    my $s = $rs_attrs->{select}[$i];
+    my $sql_sel = $self->_recurse_fields ($s);
+    my $sql_alias = (ref $s) eq 'HASH' ? $s->{-as} : undef;
+
+
+    push @sel, {
+      sql => $sql_sel,
+      unquoted_sql => do { local $self->{quote_char}; $self->_recurse_fields ($s) },
+      as =>
+        $sql_alias
+          ||
+        $rs_attrs->{as}[$i]
+          ||
+        croak "Select argument $i ($s) without corresponding 'as'"
+      ,
+    };
+
+    $in_sel_index->{$sql_sel}++;
+    $in_sel_index->{$self->_quote ($sql_alias)}++ if $sql_alias;
+
+    # record unqualified versions too, so we do not have
+    # to reselect the same column twice (in qualified and
+    # unqualified form)
+    if (! ref $s && $sql_sel =~ / $re_sep (.+) $/x) {
+      $in_sel_index->{$1}++;
+    }
+  }
+
+
+  # re-alias and remove any name separators from aliases,
+  # unless we are dealing with the current source alias
+  # (which will transcend the subqueries as it is necessary
+  # for possible further chaining)
+  my (@in_sel, @out_sel, %renamed);
+  for my $node (@sel) {
+    if (first { $_ =~ / (?<! $re_alias ) $re_sep /x } ($node->{as}, $node->{unquoted_sql}) )  {
+      $node->{as} =~ s/ $re_sep /__/xg;
+      my $quoted_as = $self->_quote($node->{as});
+      push @in_sel, sprintf '%s AS %s', $node->{sql}, $quoted_as;
+      push @out_sel, $quoted_as;
+      $renamed{$node->{sql}} = $quoted_as;
+    }
+    else {
+      push @in_sel, $node->{sql};
+      push @out_sel, $self->_quote ($node->{as});
+    }
+  }
+
+  # see if the order gives us anything
+  my %extra_order_sel;
+  for my $chunk ($self->_order_by_chunks ($rs_attrs->{order_by})) {
+    # order with bind
+    $chunk = $chunk->[0] if (ref $chunk) eq 'ARRAY';
+    $chunk =~ s/\s+ (?: ASC|DESC ) \s* $//ix;
+
+    next if $in_sel_index->{$chunk};
+
+    $extra_order_sel{$chunk} ||= $self->_quote (
+      'ORDER__BY__' . scalar keys %extra_order_sel
+    );
+  }
+
+  return (
+    (map { join (', ', @$_ ) } (
+      \@in_sel,
+      \@out_sel)
+    ),
+    \%renamed,
+    keys %extra_order_sel ? \%extra_order_sel : (),
+  );
+}
+
+# ANSI standard Limit/Offset implementation. DB2 and MSSQL >= 2005 use this
 sub _RowNumberOver {
-  my ($self, $sql, $order, $rows, $offset ) = @_;
+  my ($self, $sql, $rs_attrs, $rows, $offset ) = @_;
 
-  # get the select to make the final amount of columns equal the original one
-  my ($select) = $sql =~ /^ \s* SELECT \s+ (.+?) \s+ FROM/ix
+  # mangle the input sql as we will be replacing the selector
+  $sql =~ s/^ \s* SELECT \s+ .+? \s+ (?= \b FROM \b )//ix
     or croak "Unrecognizable SELECT: $sql";
 
-  # get the order_by only (or make up an order if none exists)
-  my $order_by = $self->_order_by(
-    (delete $order->{order_by}) || $self->_rno_default_order
-  );
+  # get selectors, and scan the order_by (if any)
+  my ($in_sel, $out_sel, $alias_map, $extra_order_sel)
+    = $self->_subqueried_limit_attrs ( $rs_attrs );
 
-  # whatever is left of the order_by
-  my $group_having = $self->_order_by($order);
+  # make up an order if none exists
+  my $requested_order = (delete $rs_attrs->{order_by}) || $self->_rno_default_order;
+  my $rno_ord = $self->_order_by ($requested_order);
 
-  my $qalias = $self->_quote ($self->{_dbic_rs_attrs}{alias});
+  # this is the order supplement magic
+  my $mid_sel = $out_sel;
+  if ($extra_order_sel) {
+    for my $extra_col (sort
+      { $extra_order_sel->{$a} cmp $extra_order_sel->{$b} }
+      keys %$extra_order_sel
+    ) {
+      $in_sel .= sprintf (', %s AS %s',
+        $extra_col,
+        $extra_order_sel->{$extra_col},
+      );
 
+      $mid_sel .= ', ' . $extra_order_sel->{$extra_col};
+    }
+  }
+
+  # and this is order re-alias magic
+  for ($extra_order_sel, $alias_map) {
+    for my $col (keys %$_) {
+      my $re_col = quotemeta ($col);
+      $rno_ord =~ s/$re_col/$_->{$col}/;
+    }
+  }
+
+  # whatever is left of the order_by (only where is processed at this point)
+  my $group_having = $self->_parse_rs_attrs($rs_attrs);
+
+  my $qalias = $self->_quote ($rs_attrs->{alias});
+  my $idx_name = $self->_quote ('rno__row__index');
+
   $sql = sprintf (<<EOS, $offset + 1, $offset + $rows, );
 
-SELECT $select FROM (
-  SELECT $qalias.*, ROW_NUMBER() OVER($order_by ) AS rno__row__index FROM (
-    ${sql}${group_having}
+SELECT $out_sel FROM (
+  SELECT $mid_sel, ROW_NUMBER() OVER( $rno_ord ) AS $idx_name FROM (
+    SELECT $in_sel ${sql}${group_having}
   ) $qalias
-) $qalias WHERE rno__row__index BETWEEN %d AND %d
+) $qalias WHERE $idx_name BETWEEN %u AND %u
 
 EOS
 
@@ -86,233 +226,300 @@
 
 # Informix specific limit, almost like LIMIT/OFFSET
 sub _SkipFirst {
-  my ($self, $sql, $order, $rows, $offset) = @_;
+  my ($self, $sql, $rs_attrs, $rows, $offset) = @_;
 
   $sql =~ s/^ \s* SELECT \s+ //ix
     or croak "Unrecognizable SELECT: $sql";
 
   return sprintf ('SELECT %s%s%s%s',
     $offset
-      ? sprintf ('SKIP %d ', $offset)
+      ? sprintf ('SKIP %u ', $offset)
       : ''
     ,
-    sprintf ('FIRST %d ', $rows),
+    sprintf ('FIRST %u ', $rows),
     $sql,
-    $self->_order_by ($order),
+    $self->_parse_rs_attrs ($rs_attrs),
   );
 }
 
 # Firebird specific limit, reverse of _SkipFirst for Informix
 sub _FirstSkip {
-  my ($self, $sql, $order, $rows, $offset) = @_;
+  my ($self, $sql, $rs_attrs, $rows, $offset) = @_;
 
   $sql =~ s/^ \s* SELECT \s+ //ix
     or croak "Unrecognizable SELECT: $sql";
 
   return sprintf ('SELECT %s%s%s%s',
-    sprintf ('FIRST %d ', $rows),
+    sprintf ('FIRST %u ', $rows),
     $offset
-      ? sprintf ('SKIP %d ', $offset)
+      ? sprintf ('SKIP %u ', $offset)
       : ''
     ,
     $sql,
-    $self->_order_by ($order),
+    $self->_parse_rs_attrs ($rs_attrs),
   );
 }
 
-# Crappy Top based Limit/Offset support. Legacy from MSSQL.
+# WhOracle limits
+sub _RowNum {
+  my ( $self, $sql, $rs_attrs, $rows, $offset ) = @_;
+
+  # mangle the input sql as we will be replacing the selector
+  $sql =~ s/^ \s* SELECT \s+ .+? \s+ (?= \b FROM \b )//ix
+    or croak "Unrecognizable SELECT: $sql";
+
+  my ($insel, $outsel) = $self->_subqueried_limit_attrs ($rs_attrs);
+
+  my $qalias = $self->_quote ($rs_attrs->{alias});
+  my $idx_name = $self->_quote ('rownum__index');
+  my $order_group_having = $self->_parse_rs_attrs($rs_attrs);
+
+  $sql = sprintf (<<EOS, $offset + 1, $offset + $rows, );
+
+SELECT $outsel FROM (
+  SELECT $outsel, ROWNUM $idx_name FROM (
+    SELECT $insel ${sql}${order_group_having}
+  ) $qalias
+) $qalias WHERE $idx_name BETWEEN %u AND %u
+
+EOS
+
+  $sql =~ s/\s*\n\s*/ /g;   # easier to read in the debugger
+  return $sql;
+}
+
+# Crappy Top based Limit/Offset support. Legacy for MSSQL < 2005
 sub _Top {
-  my ( $self, $sql, $order, $rows, $offset ) = @_;
+  my ( $self, $sql, $rs_attrs, $rows, $offset ) = @_;
 
-  # mangle the input sql so it can be properly aliased in the outer queries
-  $sql =~ s/^ \s* SELECT \s+ (.+?) \s+ (?=FROM)//ix
+  # mangle the input sql as we will be replacing the selector
+  $sql =~ s/^ \s* SELECT \s+ .+? \s+ (?= \b FROM \b )//ix
     or croak "Unrecognizable SELECT: $sql";
-  my $sql_select = $1;
-  my @sql_select = split (/\s*,\s*/, $sql_select);
 
-  # we can't support subqueries (in fact MSSQL can't) - croak
-  if (@sql_select != @{$self->{_dbic_rs_attrs}{select}}) {
-    croak (sprintf (
-      'SQL SELECT did not parse cleanly - retrieved %d comma separated elements, while '
-    . 'the resultset select attribure contains %d elements: %s',
-      scalar @sql_select,
-      scalar @{$self->{_dbic_rs_attrs}{select}},
-      $sql_select,
-    ));
-  }
+  # get selectors
+  my ($in_sel, $out_sel, $alias_map, $extra_order_sel)
+    = $self->_subqueried_limit_attrs ($rs_attrs);
 
-  my $name_sep = $self->name_sep || '.';
-  my $esc_name_sep = "\Q$name_sep\E";
-  my $col_re = qr/ ^ (?: (.+) $esc_name_sep )? ([^$esc_name_sep]+) $ /x;
+  my $requested_order = delete $rs_attrs->{order_by};
 
-  my $rs_alias = $self->{_dbic_rs_attrs}{alias};
-  my $quoted_rs_alias = $self->_quote ($rs_alias);
+  my $order_by_requested = $self->_order_by ($requested_order);
 
-  # construct the new select lists, rename(alias) some columns if necessary
-  my (@outer_select, @inner_select, %seen_names, %col_aliases, %outer_col_aliases);
+  # make up an order unless supplied
+  my $inner_order = ($order_by_requested
+    ? $requested_order
+    : [ map
+      { join ('', $rs_attrs->{alias}, $self->{name_sep}||'.', $_ ) }
+      ( $rs_attrs->{_rsroot_source_handle}->resolve->_pri_cols )
+    ]
+  );
 
-  for (@{$self->{_dbic_rs_attrs}{select}}) {
-    next if ref $_;
-    my ($table, $orig_colname) = ( $_ =~ $col_re );
-    next unless $table;
-    $seen_names{$orig_colname}++;
-  }
+  my ($order_by_inner, $order_by_reversed);
 
-  for my $i (0 .. $#sql_select) {
+  # localise as we already have all the bind values we need
+  {
+    local $self->{order_bind};
+    $order_by_inner = $self->_order_by ($inner_order);
 
-    my $colsel_arg = $self->{_dbic_rs_attrs}{select}[$i];
-    my $colsel_sql = $sql_select[$i];
+    my @out_chunks;
+    for my $ch ($self->_order_by_chunks ($inner_order)) {
+      $ch = $ch->[0] if ref $ch eq 'ARRAY';
 
-    # this may or may not work (in case of a scalarref or something)
-    my ($table, $orig_colname) = ( $colsel_arg =~ $col_re );
+      $ch =~ s/\s+ ( ASC|DESC ) \s* $//ix;
+      my $dir = uc ($1||'ASC');
 
-    my $quoted_alias;
-    # do not attempt to understand non-scalar selects - alias numerically
-    if (ref $colsel_arg) {
-      $quoted_alias = $self->_quote ('column_' . (@inner_select + 1) );
+      push @out_chunks, \join (' ', $ch, $dir eq 'ASC' ? 'DESC' : 'ASC' );
     }
-    # column name seen more than once - alias it
-    elsif ($orig_colname &&
-          ($seen_names{$orig_colname} && $seen_names{$orig_colname} > 1) ) {
-      $quoted_alias = $self->_quote ("${table}__${orig_colname}");
-    }
 
-    # we did rename - make a record and adjust
-    if ($quoted_alias) {
-      # alias inner
-      push @inner_select, "$colsel_sql AS $quoted_alias";
+    $order_by_reversed = $self->_order_by (\@out_chunks);
+  }
 
-      # push alias to outer
-      push @outer_select, $quoted_alias;
+  # this is the order supplement magic
+  my $mid_sel = $out_sel;
+  if ($extra_order_sel) {
+    for my $extra_col (sort
+      { $extra_order_sel->{$a} cmp $extra_order_sel->{$b} }
+      keys %$extra_order_sel
+    ) {
+      $in_sel .= sprintf (', %s AS %s',
+        $extra_col,
+        $extra_order_sel->{$extra_col},
+      );
 
-      # Any aliasing accumulated here will be considered
-      # both for inner and outer adjustments of ORDER BY
-      $self->__record_alias (
-        \%col_aliases,
-        $quoted_alias,
-        $colsel_arg,
-        $table ? $orig_colname : undef,
-      );
+      $mid_sel .= ', ' . $extra_order_sel->{$extra_col};
     }
 
-    # otherwise just leave things intact inside, and use the abbreviated one outside
-    # (as we do not have table names anymore)
-    else {
-      push @inner_select, $colsel_sql;
+    # since whatever order bindvals there are, they will be realiased
+    # and need to show up in front of the entire initial inner subquery
+    # Unshift *from_bind* to make this happen (horrible, horrible, but
+    # we don't have another mechanism yet)
+    unshift @{$self->{from_bind}}, @{$self->{order_bind}};
+  }
 
-      my $outer_quoted = $self->_quote ($orig_colname);  # it was not a duplicate so should just work
-      push @outer_select, $outer_quoted;
-      $self->__record_alias (
-        \%outer_col_aliases,
-        $outer_quoted,
-        $colsel_arg,
-        $table ? $orig_colname : undef,
-      );
+  # and this is order re-alias magic
+  for my $map ($extra_order_sel, $alias_map) {
+    for my $col (keys %$map) {
+      my $re_col = quotemeta ($col);
+      $_ =~ s/$re_col/$map->{$col}/
+        for ($order_by_reversed, $order_by_requested);
     }
   }
 
-  my $outer_select = join (', ', @outer_select );
-  my $inner_select = join (', ', @inner_select );
+  # generate the rest of the sql
+  my $grpby_having = $self->_parse_rs_attrs ($rs_attrs);
 
-  %outer_col_aliases = (%outer_col_aliases, %col_aliases);
+  my $quoted_rs_alias = $self->_quote ($rs_attrs->{alias});
 
-  # deal with order
-  croak '$order supplied to SQLAHacks limit emulators must be a hash'
-    if (ref $order ne 'HASH');
+  $sql = sprintf ('SELECT TOP %u %s %s %s %s',
+    $rows + ($offset||0),
+    $in_sel,
+    $sql,
+    $grpby_having,
+    $order_by_inner,
+  );
 
-  $order = { %$order }; #copy
+  $sql = sprintf ('SELECT TOP %u %s FROM ( %s ) %s %s',
+    $rows,
+    $mid_sel,
+    $sql,
+    $quoted_rs_alias,
+    $order_by_reversed,
+  ) if $offset;
 
-  my $req_order = $order->{order_by};
+  $sql = sprintf ('SELECT TOP %u %s FROM ( %s ) %s %s',
+    $rows,
+    $out_sel,
+    $sql,
+    $quoted_rs_alias,
+    $order_by_requested,
+  ) if ( ($offset && $order_by_requested) || ($mid_sel ne $out_sel) );
 
-  # examine normalized version, collapses nesting
-  my $limit_order;
-  if (scalar $self->_order_by_chunks ($req_order)) {
-    $limit_order = $req_order;
-  }
-  else {
-    $limit_order = [ map
-      { join ('', $rs_alias, $name_sep, $_ ) }
-      ( $self->{_dbic_rs_attrs}{_source_handle}->resolve->primary_columns )
-    ];
-  }
+  $sql =~ s/\s*\n\s*/ /g;   # easier to read in the debugger
+  return $sql;
+}
 
-  my ( $order_by_inner, $order_by_outer ) = $self->_order_directions($limit_order);
-  my $order_by_requested = $self->_order_by ($req_order);
+# This for Sybase ASE, to use SET ROWCOUNT when there is no offset, and
+# GenericSubQ otherwise.
+sub _RowCountOrGenericSubQ {
+  my $self = shift;
+  my ($sql, $rs_attrs, $rows, $offset) = @_;
 
-  # generate the rest
-  delete $order->{order_by};
-  my $grpby_having = $self->_order_by ($order);
+  return $self->_GenericSubQ(@_) if $offset;
 
-  # short circuit for counts - the ordering complexity is needless
-  if ($self->{_dbic_rs_attrs}{-for_count_only}) {
-    return "SELECT TOP $rows $inner_select $sql $grpby_having $order_by_outer";
-  }
+  return sprintf <<"EOF", $rows, $sql;
+SET ROWCOUNT %d
+%s
+SET ROWCOUNT 0
+EOF
+}
 
-  # we can't really adjust the order_by columns, as introspection is lacking
-  # resort to simple substitution
-  for my $col (keys %outer_col_aliases) {
-    for ($order_by_requested, $order_by_outer) {
-      $_ =~ s/\s+$col\s+/ $outer_col_aliases{$col} /g;
-    }
+# This is the most evil limit "dialect" (more of a hack) for *really*
+# stupid databases. It works by ordering the set by some unique column,
+# and calculating amount of rows that have a less-er value (thus
+# emulating a RowNum-like index). Of course this implies the set can
+# only be ordered by a single unique column.
+sub _GenericSubQ {
+  my ($self, $sql, $rs_attrs, $rows, $offset) = @_;
+
+  my $root_rsrc = $rs_attrs->{_rsroot_source_handle}->resolve;
+  my $root_tbl_name = $root_rsrc->name;
+
+  # mangle the input sql as we will be replacing the selector
+  $sql =~ s/^ \s* SELECT \s+ .+? \s+ (?= \b FROM \b )//ix
+    or croak "Unrecognizable SELECT: $sql";
+
+  my ($order_by, @rest) = do {
+    local $self->{quote_char};
+    $self->_order_by_chunks ($rs_attrs->{order_by})
+  };
+
+  unless (
+    $order_by
+      &&
+    ! @rest
+      &&
+    ( ! ref $order_by
+        ||
+      ( ref $order_by eq 'ARRAY' and @$order_by == 1 )
+    )
+  ) {
+    croak (
+      'Generic Subquery Limit does not work on resultsets without an order, or resultsets '
+    . 'with complex order criteria (multicolumn and/or functions). Provide a single, '
+    . 'unique-column order criteria.'
+    );
   }
-  for my $col (keys %col_aliases) {
-    $order_by_inner =~ s/\s+$col\s+/ $col_aliases{$col} /g;
-  }
 
+  ($order_by) = @$order_by if ref $order_by;
 
-  my $inner_lim = $rows + $offset;
+  $order_by =~ s/\s+ ( ASC|DESC ) \s* $//ix;
+  my $direction = lc ($1 || 'asc');
 
-  $sql = "SELECT TOP $inner_lim $inner_select $sql $grpby_having $order_by_inner";
+  my ($unq_sort_col) = $order_by =~ /(?:^|\.)([^\.]+)$/;
 
-  if ($offset) {
-    $sql = <<"SQL";
+  my $inf = $root_rsrc->storage->_resolve_column_info (
+    $rs_attrs->{from}, [$order_by, $unq_sort_col]
+  );
 
-    SELECT TOP $rows $outer_select FROM
-    (
-      $sql
-    ) $quoted_rs_alias
-    $order_by_outer
-SQL
+  my $ord_colinfo = $inf->{$order_by} || croak "Unable to determine source of order-criteria '$order_by'";
 
+  if ($ord_colinfo->{-result_source}->name ne $root_tbl_name) {
+    croak "Generic Subquery Limit order criteria can be only based on the root-source '"
+        . $root_rsrc->source_name . "' (aliased as '$rs_attrs->{alias}')";
   }
 
-  if ($order_by_requested) {
-    $sql = <<"SQL";
+  # make sure order column is qualified
+  $order_by = "$rs_attrs->{alias}.$order_by"
+    unless $order_by =~ /^$rs_attrs->{alias}\./;
 
-    SELECT $outer_select FROM
-      ( $sql ) $quoted_rs_alias
-    $order_by_requested
-SQL
-
+  my $is_u;
+  my $ucs = { $root_rsrc->unique_constraints };
+  for (values %$ucs ) {
+    if (@$_ == 1 && "$rs_attrs->{alias}.$_->[0]" eq $order_by) {
+      $is_u++;
+      last;
+    }
   }
+  croak "Generic Subquery Limit order criteria column '$order_by' must be unique (no unique constraint found)"
+    unless $is_u;
 
-  $sql =~ s/\s*\n\s*/ /g; # parsing out multiline statements is harder than a single line
-  return $sql;
-}
+  my ($in_sel, $out_sel, $alias_map, $extra_order_sel)
+    = $self->_subqueried_limit_attrs ($rs_attrs);
 
-# action at a distance to shorten Top code above
-sub __record_alias {
-  my ($self, $register, $alias, $fqcol, $col) = @_;
+  my $cmp_op = $direction eq 'desc' ? '>' : '<';
+  my $count_tbl_alias = 'rownum__emulation';
 
-  # record qualified name
-  $register->{$fqcol} = $alias;
-  $register->{$self->_quote($fqcol)} = $alias;
+  my $order_sql = $self->_order_by (delete $rs_attrs->{order_by});
+  my $group_having_sql = $self->_parse_rs_attrs($rs_attrs);
 
-  return unless $col;
+  # add the order supplement (if any) as this is what will be used for the outer WHERE
+  $in_sel .= ", $_" for keys %{$extra_order_sel||{}};
 
-  # record unqualified name, undef (no adjustment) if a duplicate is found
-  if (exists $register->{$col}) {
-    $register->{$col} = undef;
-  }
-  else {
-    $register->{$col} = $alias;
-  }
+  $sql = sprintf (<<EOS,
+SELECT $out_sel
+  FROM (
+    SELECT $in_sel ${sql}${group_having_sql}
+  ) %s
+WHERE ( SELECT COUNT(*) FROM %s %s WHERE %s $cmp_op %s ) %s
+$order_sql
+EOS
+    ( map { $self->_quote ($_) } (
+      $rs_attrs->{alias},
+      $root_tbl_name,
+      $count_tbl_alias,
+      "$count_tbl_alias.$unq_sort_col",
+      $order_by,
+    )),
+    $offset
+      ? sprintf ('BETWEEN %u AND %u', $offset, $offset + $rows - 1)
+      : sprintf ('< %u', $rows )
+    ,
+  );
 
-  $register->{$self->_quote($col)} = $register->{$col};
+  $sql =~ s/\s*\n\s*/ /g;   # easier to read in the debugger
+  return $sql;
 }
 
 
-
 # While we're at it, this should make LIMIT queries more efficient,
 #  without digging into things too deeply
 sub _find_syntax {
@@ -320,36 +527,35 @@
   return $self->{_cached_syntax} ||= $self->SUPER::_find_syntax($syntax);
 }
 
-my $for_syntax = {
-  update => 'FOR UPDATE',
-  shared => 'FOR SHARE',
-};
 # Quotes table names, handles "limit" dialects (e.g. where rownum between x and
-# y), supports SELECT ... FOR UPDATE and SELECT ... FOR SHARE.
+# y)
 sub select {
-  my ($self, $table, $fields, $where, $order, @rest) = @_;
+  my ($self, $table, $fields, $where, $rs_attrs, @rest) = @_;
 
-  $self->{"${_}_bind"} = [] for (qw/having from order/);
-
   if (not ref($table) or ref($table) eq 'SCALAR') {
     $table = $self->_quote($table);
   }
 
-  local $self->{rownum_hack_count} = 1
-    if (defined $rest[0] && $self->{limit_dialect} eq 'RowNum');
   @rest = (-1) unless defined $rest[0];
   croak "LIMIT 0 Does Not Compute" if $rest[0] == 0;
     # and anyway, SQL::Abstract::Limit will cause a barf if we don't first
-  my ($sql, @where_bind) = $self->SUPER::select(
-    $table, $self->_recurse_fields($fields), $where, $order, @rest
+
+  my ($sql, @bind) = $self->SUPER::select(
+    $table, $self->_recurse_fields($fields), $where, $rs_attrs, @rest
   );
-  if (my $for = delete $self->{_dbic_rs_attrs}{for}) {
-    $sql .= " $for_syntax->{$for}" if $for_syntax->{$for};
-  }
+  push @{$self->{where_bind}}, @bind;
 
-  return wantarray ? ($sql, @{$self->{from_bind}}, @where_bind, @{$self->{having_bind}}, @{$self->{order_bind}} ) : $sql;
+# this *must* be called, otherwise extra binds will remain in the sql-maker
+  my @all_bind = $self->_assemble_binds;
+
+  return wantarray ? ($sql, @all_bind) : $sql;
 }
 
+sub _assemble_binds {
+  my $self = shift;
+  return map { @{ (delete $self->{"${_}_bind"}) || [] } } (qw/from where having order/);
+}
+
 # Quotes table names, and handles default inserts
 sub insert {
   my $self = shift;
@@ -362,9 +568,8 @@
   if (! $_[0] or (ref $_[0] eq 'HASH' and !keys %{$_[0]} ) ) {
     my $sql = "INSERT INTO ${table} DEFAULT VALUES";
 
-    if (my @returning = @{ ($_[1]||{})->{returning} || [] }) {
-      $sql .= ' RETURNING (' . (join ', ' => map $self->_quote($_), @returning)
-            . ')';
+    if (my $ret = ($_[1]||{})->{returning} ) {
+      $sql .= $self->_insert_returning ($ret);
     }
 
     return $sql;
@@ -391,35 +596,36 @@
 
 sub _emulate_limit {
   my $self = shift;
+  # my ( $syntax, $sql, $order, $rows, $offset ) = @_;
+
   if ($_[3] == -1) {
-    return $_[1].$self->_order_by($_[2]);
+    return $_[1] . $self->_parse_rs_attrs($_[2]);
   } else {
     return $self->SUPER::_emulate_limit(@_);
   }
 }
 
 sub _recurse_fields {
-  my ($self, $fields, $params) = @_;
+  my ($self, $fields) = @_;
   my $ref = ref $fields;
   return $self->_quote($fields) unless $ref;
   return $$fields if $ref eq 'SCALAR';
 
   if ($ref eq 'ARRAY') {
-    return join(', ', map {
-      $self->_recurse_fields($_)
-        .(exists $self->{rownum_hack_count} && !($params && $params->{no_rownum_hack})
-          ? ' AS col'.$self->{rownum_hack_count}++
-          : '')
-      } @$fields);
+    return join(', ', map { $self->_recurse_fields($_) } @$fields);
   }
   elsif ($ref eq 'HASH') {
-    my %hash = %$fields;
+    my %hash = %$fields;  # shallow copy
 
     my $as = delete $hash{-as};   # if supplied
 
-    my ($func, $args) = each %hash;
-    delete $hash{$func};
+    my ($func, $args, @toomany) = %hash;
 
+    # there should be only one pair
+    if (@toomany) {
+      croak "Malformed select argument - too many keys in hash: " . join (',', keys %$fields );
+    }
+
     if (lc ($func) eq 'distinct' && ref $args eq 'ARRAY' && @$args > 1) {
       croak (
         'The select => { distinct => ... } syntax is not supported for multiple columns.'
@@ -436,11 +642,6 @@
         : ''
     );
 
-    # there should be nothing left
-    if (keys %hash) {
-      croak "Malformed select argument - too many keys in hash: " . join (',', keys %$fields );
-    }
-
     return $select;
   }
   # Is the second check absolutely necessary?
@@ -452,34 +653,55 @@
   }
 }
 
-sub _order_by {
+my $for_syntax = {
+  update => 'FOR UPDATE',
+  shared => 'FOR SHARE',
+};
+
+# this used to be a part of _order_by but is broken out for clarity.
+# What we have been doing forever is hijacking the $order arg of
+# SQLA::select to pass in arbitrary pieces of data (first the group_by,
+# then pretty much the entire resultset attr-hash, as more and more
+# things in the SQLA space need to have mopre info about the $rs they
+# create SQL for. The alternative would be to keep expanding the
+# signature of _select with more and more positional parameters, which
+# is just gross. All hail SQLA2!
+sub _parse_rs_attrs {
   my ($self, $arg) = @_;
 
-  if (ref $arg eq 'HASH' and keys %$arg and not grep { $_ =~ /^-(?:desc|asc)/i } keys %$arg ) {
+  my $sql = '';
 
-    my $ret = '';
+  if (my $g = $self->_recurse_fields($arg->{group_by}) ) {
+    $sql .= $self->_sqlcase(' group by ') . $g;
+  }
 
-    if (my $g = $self->_recurse_fields($arg->{group_by}, { no_rownum_hack => 1 }) ) {
-      $ret = $self->_sqlcase(' group by ') . $g;
-    }
+  if (defined $arg->{having}) {
+    my ($frag, @bind) = $self->_recurse_where($arg->{having});
+    push(@{$self->{having_bind}}, @bind);
+    $sql .= $self->_sqlcase(' having ') . $frag;
+  }
 
-    if (defined $arg->{having}) {
-      my ($frag, @bind) = $self->_recurse_where($arg->{having});
-      push(@{$self->{having_bind}}, @bind);
-      $ret .= $self->_sqlcase(' having ').$frag;
-    }
+  if (defined $arg->{order_by}) {
+    $sql .= $self->_order_by ($arg->{order_by});
+  }
 
-    if (defined $arg->{order_by}) {
-      my ($frag, @bind) = $self->SUPER::_order_by($arg->{order_by});
-      push(@{$self->{order_bind}}, @bind);
-      $ret .= $frag;
-    }
+  if (my $for = $arg->{for}) {
+    $sql .= " $for_syntax->{$for}" if $for_syntax->{$for};
+  }
 
-    return $ret;
+  return $sql;
+}
+
+sub _order_by {
+  my ($self, $arg) = @_;
+
+  # check that we are not called in legacy mode (order_by as 4th argument)
+  if (ref $arg eq 'HASH' and not grep { $_ =~ /^-(?:desc|asc)/i } keys %$arg ) {
+    return $self->_parse_rs_attrs ($arg);
   }
   else {
     my ($sql, @bind) = $self->SUPER::_order_by ($arg);
-    push(@{$self->{order_bind}}, @bind);
+    push @{$self->{order_bind}}, @bind;
     return $sql;
   }
 }
@@ -510,6 +732,14 @@
   }
 }
 
+sub _generate_join_clause {
+    my ($self, $join_type) = @_;
+
+    return sprintf ('%s JOIN ',
+      $join_type ?  ' ' . uc($join_type) : ''
+    );
+}
+
 sub _recurse_from {
   my ($self, $from, @join) = @_;
   my @sqlf;
@@ -528,10 +758,7 @@
 
     $join_type = $self->{_default_jointype} if not defined $join_type;
 
-    my $join_clause = sprintf ('%s JOIN ',
-      $join_type ?  ' ' . uc($join_type) : ''
-    );
-    push @sqlf, $join_clause;
+    push @sqlf, $self->_generate_join_clause( $join_type );
 
     if (ref $to eq 'ARRAY') {
       push(@sqlf, '(', $self->_recurse_from(@$to), ')');
@@ -592,26 +819,12 @@
   }
 }
 
-sub _quote {
-  my ($self, $label) = @_;
-  return '' unless defined $label;
-  return $$label if ref($label) eq 'SCALAR';
-  return "*" if $label eq '*';
-  return $label unless $self->{quote_char};
-  if(ref $self->{quote_char} eq "ARRAY"){
-    return $self->{quote_char}->[0] . $label . $self->{quote_char}->[1]
-      if !defined $self->{name_sep};
-    my $sep = $self->{name_sep};
-    return join($self->{name_sep},
-        map { $self->{quote_char}->[0] . $_ . $self->{quote_char}->[1]  }
-       split(/\Q$sep\E/,$label));
-  }
-  return $self->SUPER::_quote($label);
-}
-
 sub limit_dialect {
     my $self = shift;
-    $self->{limit_dialect} = shift if @_;
+    if (@_) {
+      $self->{limit_dialect} = shift;
+      undef $self->{_cached_syntax};
+    }
     return $self->{limit_dialect};
 }
 

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Schema/Versioned.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Schema/Versioned.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Schema/Versioned.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -182,6 +182,8 @@
 
 use Carp::Clan qw/^DBIx::Class/;
 use Time::HiRes qw/gettimeofday/;
+use Try::Tiny;
+use namespace::clean;
 
 __PACKAGE__->mk_classdata('_filedata');
 __PACKAGE__->mk_classdata('upgrade_directory');
@@ -225,7 +227,7 @@
 
   # must be called on a fresh database
   if ($self->get_db_version()) {
-    carp 'Install not possible as versions table already exists in database';
+      $self->throw_exception("A versioned schema has already been deployed, try upgrade instead.\n");
   }
 
   # default to current version if none passed
@@ -503,7 +505,7 @@
     my ($self, $rs) = @_;
 
     my $vtable = $self->{vschema}->resultset('Table');
-    my $version = eval {
+    my $version = try {
       $vtable->search({}, { order_by => { -desc => 'installed' }, rows => 1 } )
               ->get_column ('version')
                ->next;
@@ -558,30 +560,30 @@
 sub connection {
   my $self = shift;
   $self->next::method(@_);
-  $self->_on_connect($_[3]);
+  $self->_on_connect();
   return $self;
 }
 
 sub _on_connect
 {
-  my ($self, $args) = @_;
+  my ($self) = @_;
 
-  $args = {} unless $args;
+  my $conn_info = $self->storage->connect_info;
+  $self->{vschema} = DBIx::Class::Version->connect(@$conn_info);
+  my $conn_attrs = $self->{vschema}->storage->_dbic_connect_attributes || {};
 
-  $self->{vschema} = DBIx::Class::Version->connect(@{$self->storage->connect_info()});
   my $vtable = $self->{vschema}->resultset('Table');
 
   # useful when connecting from scripts etc
-  return if ($args->{ignore_version} || ($ENV{DBIC_NO_VERSION_CHECK} && !exists $args->{ignore_version}));
+  return if ($conn_attrs->{ignore_version} || ($ENV{DBIC_NO_VERSION_CHECK} && !exists $conn_attrs->{ignore_version}));
 
   # check for legacy versions table and move to new if exists
-  my $vschema_compat = DBIx::Class::VersionCompat->connect(@{$self->storage->connect_info()});
   unless ($self->_source_exists($vtable)) {
-    my $vtable_compat = $vschema_compat->resultset('TableCompat');
+    my $vtable_compat = DBIx::Class::VersionCompat->connect(@$conn_info)->resultset('TableCompat');
     if ($self->_source_exists($vtable_compat)) {
       $self->{vschema}->deploy;
       map { $vtable->create({ installed => $_->Installed, version => $_->Version }) } $vtable_compat->all;
-      $self->storage->dbh->do("DROP TABLE " . $vtable_compat->result_source->from);
+      $self->storage->_get_dbh->do("DROP TABLE " . $vtable_compat->result_source->from);
     }
   }
 
@@ -681,13 +683,13 @@
   # This is necessary since there are legitimate cases when upgrades can happen
   # back to back within the same second. This breaks things since we relay on the
   # ability to sort by the 'installed' value. The logical choice of an autoinc
-  # is not possible, as it will break multiple legacy installations. Also it is 
+  # is not possible, as it will break multiple legacy installations. Also it is
   # not possible to format the string sanely, as the column is a varchar(20).
   # The 'v' character is added to the front of the string, so that any version
   # formatted by this new function will sort _after_ any existing 200... strings.
   my @tm = gettimeofday();
   my @dt = gmtime ($tm[0]);
-  my $o = $vtable->create({ 
+  my $o = $vtable->create({
     version => $version,
     installed => sprintf("v%04d%02d%02d_%02d%02d%02d.%03.0f",
       $dt[5] + 1900,
@@ -723,12 +725,9 @@
 {
     my ($self, $rs) = @_;
 
-    my $c = eval {
-        $rs->search({ 1, 0 })->count;
-    };
-    return 0 if $@ || !defined $c;
+    my $c = try { $rs->search({ 1, 0 })->count };
 
-    return 1;
+    return (defined $c) ? 1 : 0;
 }
 
 1;

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Schema.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Schema.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Schema.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -5,10 +5,13 @@
 
 use DBIx::Class::Exception;
 use Carp::Clan qw/^DBIx::Class/;
-use Scalar::Util ();
+use Try::Tiny;
+use Scalar::Util 'weaken';
 use File::Spec;
-use Sub::Name ();
+use Sub::Name 'subname';
 use Module::Find();
+use Storable();
+use namespace::clean;
 
 use base qw/DBIx::Class/;
 
@@ -98,7 +101,7 @@
 If a Result class is found to already have a ResultSet class set using
 L</resultset_class> to some other class, you will be warned like this:
 
-  We found ResultSet class '$rs_class' for '$result', but it seems 
+  We found ResultSet class '$rs_class' for '$result', but it seems
   that you had already set '$result' to use '$rs_set' instead
 
 Both of the sub-namespaces are configurable if you don't like the defaults,
@@ -271,6 +274,10 @@
       }
       elsif($rs_class ||= $default_resultset_class) {
         $class->ensure_class_loaded($rs_class);
+        if(!$rs_class->isa("DBIx::Class::ResultSet")) {
+            carp "load_namespaces found ResultSet class $rs_class that does not subclass DBIx::Class::ResultSet";
+        }
+
         $class->_ns_get_rsrc_instance ($result_class)->resultset_class($rs_class);
       }
 
@@ -315,7 +322,7 @@
 If any classes found do not appear to be Result class files, you will
 get the following warning:
 
-   Failed to load $comp_class. Can't find source_name method. Is 
+   Failed to load $comp_class. Can't find source_name method. Is
    $comp_class really a full DBIC result class? Fix it, move it elsewhere,
    or make your load_classes call more specific.
 
@@ -475,11 +482,11 @@
 
 =back
 
-An optional sub which you can declare in your own Schema class that will get 
+An optional sub which you can declare in your own Schema class that will get
 passed the L<SQL::Translator::Schema> object when you deploy the schema via
 L</create_ddl_dir> or L</deploy>.
 
-For an example of what you can do with this, see 
+For an example of what you can do with this, see
 L<DBIx::Class::Manual::Cookbook/Adding Indexes And Functions To Your SQL>.
 
 Note that sqlt_deploy_hook is called by L</deployment_statements>, which in turn
@@ -651,7 +658,7 @@
 
 =head2 txn_scope_guard
 
-Runs C<txn_scope_guard> on the schema's storage. See 
+Runs C<txn_scope_guard> on the schema's storage. See
 L<DBIx::Class::Storage/txn_scope_guard>.
 
 =cut
@@ -669,7 +676,7 @@
 
 Begins a transaction (does nothing if AutoCommit is off). Equivalent to
 calling $schema->storage->txn_begin. See
-L<DBIx::Class::Storage::DBI/"txn_begin"> for more information.
+L<DBIx::Class::Storage/"txn_begin"> for more information.
 
 =cut
 
@@ -685,7 +692,7 @@
 =head2 txn_commit
 
 Commits the current transaction. Equivalent to calling
-$schema->storage->txn_commit. See L<DBIx::Class::Storage::DBI/"txn_commit">
+$schema->storage->txn_commit. See L<DBIx::Class::Storage/"txn_commit">
 for more information.
 
 =cut
@@ -703,7 +710,7 @@
 
 Rolls back the current transaction. Equivalent to calling
 $schema->storage->txn_rollback. See
-L<DBIx::Class::Storage::DBI/"txn_rollback"> for more information.
+L<DBIx::Class::Storage/"txn_rollback"> for more information.
 
 =cut
 
@@ -737,7 +744,7 @@
 
 Pass this method a resultsource name, and an arrayref of
 arrayrefs. The arrayrefs should contain a list of column names,
-followed by one or many sets of matching data for the given columns. 
+followed by one or many sets of matching data for the given columns.
 
 In void context, C<insert_bulk> in L<DBIx::Class::Storage::DBI> is used
 to insert the data, as this is a fast method. However, insert_bulk currently
@@ -757,16 +764,16 @@
     ...
   ]);
 
-Since wantarray context is basically the same as looping over $rs->create(...) 
+Since wantarray context is basically the same as looping over $rs->create(...)
 you won't see any performance benefits and in this case the method is more for
 convenience. Void context sends the column information directly to storage
-using <DBI>s bulk insert method. So the performance will be much better for 
+using <DBI>s bulk insert method. So the performance will be much better for
 storages that support this method.
 
-Because of this difference in the way void context inserts rows into your 
+Because of this difference in the way void context inserts rows into your
 database you need to note how this will effect any loaded components that
-override or augment insert.  For example if you are using a component such 
-as L<DBIx::Class::UUIDColumns> to populate your primary keys you MUST use 
+override or augment insert.  For example if you are using a component such
+as L<DBIx::Class::UUIDColumns> to populate your primary keys you MUST use
 wantarray context if you want the PKs automatically created.
 
 =cut
@@ -780,7 +787,7 @@
         $rs->populate($data);
     }
   } else {
-      $self->throw_exception("$name is not a resultset"); 
+      $self->throw_exception("$name is not a resultset");
   }
 }
 
@@ -808,15 +815,19 @@
   my ($self, @info) = @_;
   return $self if !@info && $self->storage;
 
-  my ($storage_class, $args) = ref $self->storage_type ? 
+  my ($storage_class, $args) = ref $self->storage_type ?
     ($self->_normalize_storage_type($self->storage_type),{}) : ($self->storage_type, {});
 
   $storage_class = 'DBIx::Class::Storage'.$storage_class
     if $storage_class =~ m/^::/;
-  eval { $self->ensure_class_loaded ($storage_class) };
-  $self->throw_exception(
-    "No arguments to load_classes and couldn't load ${storage_class} ($@)"
-  ) if $@;
+  try {
+    $self->ensure_class_loaded ($storage_class);
+  }
+  catch {
+    $self->throw_exception(
+      "No arguments to load_classes and couldn't load ${storage_class} ($_)"
+    );
+  };
   my $storage = $storage_class->new($self=>$args);
   $storage->connect_info(\@info);
   $self->storage($storage);
@@ -909,7 +920,7 @@
     no strict 'refs';
     no warnings 'redefine';
     foreach my $meth (qw/class source resultset/) {
-      *{"${target}::${meth}"} = Sub::Name::subname "${target}::${meth}" =>
+      *{"${target}::${meth}"} = subname "${target}::${meth}" =>
         sub { shift->schema->$meth(@_) };
     }
   }
@@ -925,9 +936,9 @@
 
 =head2 svp_begin
 
-Creates a new savepoint (does nothing outside a transaction). 
+Creates a new savepoint (does nothing outside a transaction).
 Equivalent to calling $schema->storage->svp_begin.  See
-L<DBIx::Class::Storage::DBI/"svp_begin"> for more information.
+L<DBIx::Class::Storage/"svp_begin"> for more information.
 
 =cut
 
@@ -942,9 +953,9 @@
 
 =head2 svp_release
 
-Releases a savepoint (does nothing outside a transaction). 
+Releases a savepoint (does nothing outside a transaction).
 Equivalent to calling $schema->storage->svp_release.  See
-L<DBIx::Class::Storage::DBI/"svp_release"> for more information.
+L<DBIx::Class::Storage/"svp_release"> for more information.
 
 =cut
 
@@ -959,9 +970,9 @@
 
 =head2 svp_rollback
 
-Rollback to a savepoint (does nothing outside a transaction). 
+Rollback to a savepoint (does nothing outside a transaction).
 Equivalent to calling $schema->storage->svp_rollback.  See
-L<DBIx::Class::Storage::DBI/"svp_rollback"> for more information.
+L<DBIx::Class::Storage/"svp_rollback"> for more information.
 
 =cut
 
@@ -1043,8 +1054,8 @@
 created. For quoting purposes supply C<quote_table_names> and
 C<quote_field_names>.
 
-Additionally, the DBIx::Class parser accepts a C<sources> parameter as a hash 
-ref or an array ref, containing a list of source to deploy. If present, then 
+Additionally, the DBIx::Class parser accepts a C<sources> parameter as a hash
+ref or an array ref, containing a list of source to deploy. If present, then
 only the sources listed will get deployed. Furthermore, you can use the
 C<add_fk_index> parser parameter to prevent the parser from creating an index for each
 FK.
@@ -1091,7 +1102,7 @@
 
 =back
 
-A convenient shortcut to 
+A convenient shortcut to
 C<< $self->storage->create_ddl_dir($self, @args) >>.
 
 Creates an SQL file based on the Schema, for each of the specified
@@ -1132,7 +1143,7 @@
     my $filename = $table->ddl_filename($type, $dir, $version, $preversion)
 
  In recent versions variables $dir and $version were reversed in order to
- bring the signature in line with other Schema/Storage methods. If you 
+ bring the signature in line with other Schema/Storage methods. If you
  really need to maintain backward compatibility, you can do the following
  in any overriding methods:
 
@@ -1153,7 +1164,7 @@
 
 =head2 thaw
 
-Provided as the recommended way of thawing schema objects. You can call 
+Provided as the recommended way of thawing schema objects. You can call
 C<Storable::thaw> directly if you wish, but the thawed objects will not have a
 reference to any schema, so are rather useless.
 
@@ -1230,7 +1241,7 @@
 
 =back
 
-This method is called by L</load_namespaces> and L</load_classes> to install the found classes into your Schema. You should be using those instead of this one. 
+This method is called by L</load_namespaces> and L</load_classes> to install the found classes into your Schema. You should be using those instead of this one.
 
 You will only need this method if you have your Result classes in
 files which are not named after the packages (or all in the same
@@ -1295,7 +1306,7 @@
 
 =back
 
-As L</register_source> but should be used if the result class already 
+As L</register_source> but should be used if the result class already
 has a source and you want to register an extra one.
 
 =cut
@@ -1313,7 +1324,7 @@
 
   $source = $source->new({ %$source, source_name => $moniker });
   $source->schema($self);
-  Scalar::Util::weaken($source->{schema}) if ref($self);
+  weaken $source->{schema} if ref($self);
 
   my $rs_class = $source->result_class;
 
@@ -1340,7 +1351,7 @@
 
 sub _unregister_source {
     my ($self, $moniker) = @_;
-    my %reg = %{$self->source_registrations}; 
+    my %reg = %{$self->source_registrations};
 
     my $source = delete $reg{$moniker};
     $self->source_registrations(\%reg);
@@ -1396,10 +1407,13 @@
       unless ($INC{"DBIx/Class/CDBICompat.pm"} || $warn++);
 
     my $base = 'DBIx::Class::ResultSetProxy';
-    eval "require ${base};";
-    $self->throw_exception
-      ("No arguments to load_classes and couldn't load ${base} ($@)")
-        if $@;
+    try {
+      eval "require ${base};"
+    }
+    catch {
+      $self->throw_exception
+        ("No arguments to load_classes and couldn't load ${base} ($_)")
+    };
 
     if ($self eq $target) {
       # Pathological case, largely caused by the docs on early C::M::DBIC::Plain
@@ -1418,7 +1432,7 @@
     {
       no strict 'refs';
       my $name = join '::', $target, 'schema';
-      *$name = Sub::Name::subname $name, sub { $schema };
+      *$name = subname $name, sub { $schema };
     }
 
     $schema->connection(@info);

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/ADO.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/ADO.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/ADO.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -2,6 +2,8 @@
     DBIx::Class::Storage::DBI::ADO;
 
 use base 'DBIx::Class::Storage::DBI';
+use Try::Tiny;
+use namespace::clean;
 
 sub _rebless {
   my $self = shift;
@@ -10,20 +12,18 @@
 # XXX This should be using an OpenSchema method of some sort, but I don't know
 # how.
 # Current version is stolen from Sybase.pm
-  my $dbtype = eval {
-    @{$self->_get_dbh
+  try {
+    my $dbtype = @{$self->_get_dbh
       ->selectrow_arrayref(qq{sp_server_info \@attribute_id=1})
-    }[2]
-  };
+    }[2];
 
-  unless ($@) {
     $dbtype =~ s/\W/_/gi;
     my $subclass = "DBIx::Class::Storage::DBI::ADO::${dbtype}";
     if ($self->load_optional_class($subclass) && !$self->isa($subclass)) {
       bless $self, $subclass;
       $self->_rebless;
     }
-  }
+  };
 }
 
 # Here I was just experimenting with ADO cursor types, left in as a comment in

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/AutoCast.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/AutoCast.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/AutoCast.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -29,7 +29,8 @@
 
   CAST(? as $mapped_type)
 
-This option can also be enabled in L<DBIx::Class::Storage::DBI/connect_info> as:
+This option can also be enabled in
+L<connect_info|DBIx::Class::Storage::DBI/connect_info> as:
 
   on_connect_call => ['set_auto_cast']
 
@@ -76,7 +77,7 @@
 
     on_connect_call => ['set_auto_cast']
 
-in L<DBIx::Class::Storage::DBI/connect_info>.
+in L<connect_info|DBIx::Class::Storage::DBI/connect_info>.
 
 =cut
 

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Cursor.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Cursor.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Cursor.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -5,6 +5,9 @@
 
 use base qw/DBIx::Class::Cursor/;
 
+use Try::Tiny;
+use namespace::clean;
+
 __PACKAGE__->mk_group_accessors('simple' =>
     qw/sth/
 );
@@ -150,7 +153,8 @@
   my ($self) = @_;
 
   # No need to care about failures here
-  eval { $self->sth->finish if $self->sth && $self->sth->{Active} };
+  try { $self->sth->finish }
+    if $self->sth && $self->sth->{Active};
   $self->_soft_reset;
   return undef;
 }
@@ -176,8 +180,8 @@
   my ($self) = @_;
 
   # None of the reasons this would die matter if we're in DESTROY anyways
-  local $@;
-  eval { $self->sth->finish if $self->sth && $self->sth->{Active} };
+  try { $self->sth->finish }
+    if $self->sth && $self->sth->{Active};
 }
 
 1;

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Informix.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Informix.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Informix.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -3,11 +3,26 @@
 use warnings;
 
 use base qw/DBIx::Class::Storage::DBI/;
-
 use mro 'c3';
 
+use Scope::Guard ();
+use Context::Preserve 'preserve_context';
+use namespace::clean;
+
 __PACKAGE__->mk_group_accessors('simple' => '__last_insert_id');
 
+=head1 NAME
+
+DBIx::Class::Storage::DBI::Informix - Base Storage Class for Informix Support
+
+=head1 DESCRIPTION
+
+This class implements storage-specific support for the Informix RDBMS
+
+=head1 METHODS
+
+=cut
+
 sub _execute {
   my $self = shift;
   my ($op) = @_;
@@ -32,24 +47,141 @@
   return { limit_dialect => 'SkipFirst', %{$self->{_sql_maker_opts}||{}} };
 }
 
-1;
+sub _svp_begin {
+    my ($self, $name) = @_;
 
-__END__
+    $self->_get_dbh->do("SAVEPOINT $name");
+}
 
-=head1 NAME
+# can't release savepoints
+sub _svp_release { 1 }
 
-DBIx::Class::Storage::DBI::Informix - Base Storage Class for INFORMIX Support
+sub _svp_rollback {
+    my ($self, $name) = @_;
 
-=head1 SYNOPSIS
+    $self->_get_dbh->do("ROLLBACK TO SAVEPOINT $name")
+}
 
-=head1 DESCRIPTION
+sub with_deferred_fk_checks {
+  my ($self, $sub) = @_;
 
-This class implements storage-specific support for Informix
+  my $txn_scope_guard = $self->txn_scope_guard;
 
-=head1 AUTHORS
+  $self->_do_query('SET CONSTRAINTS ALL DEFERRED');
 
-See L<DBIx::Class/CONTRIBUTORS>
+  my $sg = Scope::Guard->new(sub {
+    $self->_do_query('SET CONSTRAINTS ALL IMMEDIATE');
+  });
 
+  return preserve_context { $sub->() } after => sub { $txn_scope_guard->commit };
+}
+
+=head2 connect_call_datetime_setup
+
+Used as:
+
+  on_connect_call => 'datetime_setup'
+
+In L<connect_info|DBIx::Class::Storage::DBI/connect_info> to set the C<DATE> and
+C<DATETIME> formats.
+
+Sets the following environment variables:
+
+    GL_DATE="%m/%d/%Y"
+    GL_DATETIME="%Y-%m-%d %H:%M:%S%F5"
+
+The C<DBDATE> and C<DBCENTURY> environment variables are cleared.
+
+B<NOTE:> setting the C<GL_DATE> environment variable seems to have no effect
+after the process has started, so the default format is used. The C<GL_DATETIME>
+setting does take effect however.
+
+The C<DATETIME> data type supports up to 5 digits after the decimal point for
+second precision, depending on how you have declared your column. The full
+possible precision is used.
+
+The column declaration for a C<DATETIME> with maximum precision is:
+
+  column_name DATETIME YEAR TO FRACTION(5)
+
+The C<DATE> data type stores the date portion only, and it B<MUST> be declared
+with:
+
+  data_type => 'date'
+
+in your Result class.
+
+You will need the L<DateTime::Format::Strptime> module for inflation to work.
+
+=cut
+
+sub connect_call_datetime_setup {
+  my $self = shift;
+
+  delete @ENV{qw/DBDATE DBCENTURY/};
+
+  $ENV{GL_DATE}     = "%m/%d/%Y";
+  $ENV{GL_DATETIME} = "%Y-%m-%d %H:%M:%S%F5";
+}
+
+sub datetime_parser_type {
+  'DBIx::Class::Storage::DBI::Informix::DateTime::Format'
+}
+
+package # hide from PAUSE
+  DBIx::Class::Storage::DBI::Informix::DateTime::Format;
+
+my $timestamp_format = '%Y-%m-%d %H:%M:%S.%5N'; # %F %T
+my $date_format      = '%m/%d/%Y';
+
+my ($timestamp_parser, $date_parser);
+
+sub parse_datetime {
+  shift;
+  require DateTime::Format::Strptime;
+  $timestamp_parser ||= DateTime::Format::Strptime->new(
+    pattern  => $timestamp_format,
+    on_error => 'croak',
+  );
+  return $timestamp_parser->parse_datetime(shift);
+}
+
+sub format_datetime {
+  shift;
+  require DateTime::Format::Strptime;
+  $timestamp_parser ||= DateTime::Format::Strptime->new(
+    pattern  => $timestamp_format,
+    on_error => 'croak',
+  );
+  return $timestamp_parser->format_datetime(shift);
+}
+
+sub parse_date {
+  shift;
+  require DateTime::Format::Strptime;
+  $date_parser ||= DateTime::Format::Strptime->new(
+    pattern  => $date_format,
+    on_error => 'croak',
+  );
+  return $date_parser->parse_datetime(shift);
+}
+
+sub format_date {
+  shift;
+  require DateTime::Format::Strptime;
+  $date_parser ||= DateTime::Format::Strptime->new(
+    pattern  => $date_format,
+    on_error => 'croak',
+  );
+  return $date_parser->format_datetime(shift);
+}
+
+1;
+
+=head1 AUTHOR
+
+See L<DBIx::Class/AUTHOR> and L<DBIx::Class/CONTRIBUTORS>.
+
 =head1 LICENSE
 
 You may distribute this code under the same terms as Perl itself.

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/InterBase.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/InterBase.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/InterBase.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -1,17 +1,13 @@
 package DBIx::Class::Storage::DBI::InterBase;
 
-# partly stolen from DBIx::Class::Storage::DBI::MSSQL
-
 use strict;
 use warnings;
 use base qw/DBIx::Class::Storage::DBI/;
 use mro 'c3';
-use List::Util();
+use List::Util 'first';
+use Try::Tiny;
+use namespace::clean;
 
-__PACKAGE__->mk_group_accessors(simple => qw/
-  _auto_incs
-/);
-
 =head1 NAME
 
 DBIx::Class::Storage::DBI::InterBase - Driver for the Firebird RDBMS
@@ -26,66 +22,17 @@
 L<disable_sth_caching|DBIx::Class::Storage::DBI/disable_sth_caching> option or
 L</connect_call_use_softcommit> (see L</CAVEATS>) for your code to function
 correctly with this driver. Otherwise you will likely get bizarre error messages
-such as C<no statement executing>.
+such as C<no statement executing>. The alternative is to use the
+L<ODBC|DBIx::Class::Storage::DBI::ODBC::Firebird> driver, which is more suitable
+for long running processes such as under L<Catalyst>.
 
-For ODBC support, see L<DBIx::Class::Storage::DBI::ODBC::Firebird>.
-
 To turn on L<DBIx::Class::InflateColumn::DateTime> support, see
 L</connect_call_datetime_setup>.
 
 =cut
 
-sub _prep_for_execute {
-  my $self = shift;
-  my ($op, $extra_bind, $ident, $args) = @_;
+sub _supports_insert_returning { 1 }
 
-  if ($op eq 'insert') {
-    $self->_auto_incs([]);
-
-    my %pk;
-    @pk{$ident->primary_columns} = ();
-
-    my @auto_inc_cols = grep {
-      my $inserting = $args->[0]{$_};
-
-      ($ident->column_info($_)->{is_auto_increment}
-        || exists $pk{$_})
-      && (
-        (not defined $inserting)
-        ||
-        (ref $inserting eq 'SCALAR' && $$inserting =~ /^null\z/i)
-      )
-    } $ident->columns;
-
-    if (@auto_inc_cols) {
-      $args->[1]{returning} = \@auto_inc_cols;
-
-      $self->_auto_incs->[0] = \@auto_inc_cols;
-    }
-  }
-
-  return $self->next::method(@_);
-}
-
-sub _execute {
-  my $self = shift;
-  my ($op) = @_;
-
-  my ($rv, $sth, @bind) = $self->dbh_do($self->can('_dbh_execute'), @_);
-
-  if ($op eq 'insert' && $self->_auto_incs) {
-    local $@;
-    my (@auto_incs) = eval {
-      local $SIG{__WARN__} = sub {};
-      $sth->fetchrow_array
-    };
-    $self->_auto_incs->[1] = \@auto_incs;
-    $sth->finish;
-  }
-
-  return wantarray ? ($rv, $sth, @bind) : $rv;
-}
-
 sub _sequence_fetch {
   my ($self, $nextval, $sequence) = @_;
 
@@ -94,13 +41,13 @@
   }
 
   $self->throw_exception('No sequence to fetch') unless $sequence;
-  
+
   my ($val) = $self->_get_dbh->selectrow_array(
 'SELECT GEN_ID(' . $self->sql_maker->_quote($sequence) .
 ', 1) FROM rdb$database');
 
   return $val;
-} 
+}
 
 sub _dbh_get_autoinc_seq {
   my ($self, $dbh, $source, $col) = @_;
@@ -133,7 +80,7 @@
       $generator = uc $generator unless $quoted;
 
       return $generator
-        if List::Util::first {
+        if first {
           $self->sql_maker->quote_char ? ($_ eq $col) : (uc($_) eq uc($col))
         } @trig_cols;
     }
@@ -142,34 +89,6 @@
   return undef;
 }
 
-sub last_insert_id {
-  my ($self, $source, @cols) = @_;
-  my @result;
-
-  my %auto_incs;
-  @auto_incs{ @{ $self->_auto_incs->[0] } } =
-    @{ $self->_auto_incs->[1] };
-
-  push @result, $auto_incs{$_} for @cols;
-
-  return @result;
-}
-
-sub insert {
-  my $self = shift;
-
-  my $updated_cols = $self->next::method(@_);
-
-  if ($self->_auto_incs->[0]) {
-    my %auto_incs;
-    @auto_incs{ @{ $self->_auto_incs->[0] } } = @{ $self->_auto_incs->[1] };
-
-    $updated_cols = { %$updated_cols, %auto_incs };
-  }
-
-  return $updated_cols;
-}
-
 # this sub stolen from DB2
 
 sub _sql_maker_opts {
@@ -206,12 +125,14 @@
   my $dbh = $self->_dbh or return 0;
 
   local $dbh->{RaiseError} = 1;
+  local $dbh->{PrintError} = 0;
 
-  eval {
+  return try {
     $dbh->do('select 1 from rdb$database');
+    1;
+  } catch {
+    0;
   };
-
-  return $@ ? 0 : 1;
 }
 
 # We want dialect 3 for new features and quoting to work, DBD::InterBase uses
@@ -237,6 +158,16 @@
   }
 }
 
+sub _get_server_version {
+  my $self = shift;
+
+  return $self->next::method(@_) if ref $self ne __PACKAGE__;
+
+  local $SIG{__WARN__} = sub {}; # silence warning due to bug in DBD::InterBase
+
+  return $self->next::method(@_);
+}
+
 =head2 connect_call_use_softcommit
 
 Used as:
@@ -248,7 +179,8 @@
 
 You need either this option or C<< disable_sth_caching => 1 >> for
 L<DBIx::Class> code to function correctly (otherwise you may get C<no statement
-executing> errors.)
+executing> errors.) Or use the L<ODBC|DBIx::Class::Storage::DBI::ODBC::Firebird>
+driver.
 
 The downside of using this option is that your process will B<NOT> see UPDATEs,
 INSERTs and DELETEs from other processes for already open statements.
@@ -365,6 +297,8 @@
 workaround for the C<no statement executing> errors, this of course adversely
 affects performance.
 
+Alternately, use the L<ODBC|DBIx::Class::Storage::DBI::ODBC::Firebird> driver.
+
 =item *
 
 C<last_insert_id> support by default only works for Firebird versions 2 or
@@ -373,7 +307,8 @@
 
 =item *
 
-Sub-second precision for TIMESTAMPs is not currently available with ODBC.
+Sub-second precision for TIMESTAMPs is not currently available when using the
+L<ODBC|DBIx::Class::Storage::DBI::ODBC::Firebird> driver.
 
 =back
 

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/MSSQL.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/MSSQL.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/MSSQL.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -3,11 +3,12 @@
 use strict;
 use warnings;
 
-use base qw/DBIx::Class::Storage::DBI/;
+use base qw/DBIx::Class::Storage::DBI::UniqueIdentifier/;
 use mro 'c3';
+use Try::Tiny;
+use List::Util 'first';
+use namespace::clean;
 
-use List::Util();
-
 __PACKAGE__->mk_group_accessors(simple => qw/
   _identity _identity_method
 /);
@@ -23,13 +24,13 @@
   );
 
   my $dbh = $self->_get_dbh;
-  eval { $dbh->do ($sql) };
-  if ($@) {
+  try { $dbh->do ($sql) }
+  catch {
     $self->throw_exception (sprintf "Error executing '%s': %s",
       $sql,
       $dbh->errstr,
     );
-  }
+  };
 }
 
 sub _unset_identity_insert {
@@ -48,12 +49,8 @@
   my $self = shift;
   my ($source, $cols, $data) = @_;
 
-  my $is_identity_insert = (List::Util::first
-      { $source->column_info ($_)->{is_auto_increment} }
-      (@{$cols})
-  )
-     ? 1
-     : 0;
+  my $is_identity_insert =
+    (first { $source->column_info ($_)->{is_auto_increment} } @{$cols}) ? 1 : 0;
 
   if ($is_identity_insert) {
      $self->_set_identity_insert ($source->name);
@@ -66,58 +63,25 @@
   }
 }
 
-# support MSSQL GUID column types
-
 sub insert {
   my $self = shift;
   my ($source, $to_insert) = @_;
 
   my $supplied_col_info = $self->_resolve_column_info($source, [keys %$to_insert] );
 
-  my %guid_cols;
-  my @pk_cols = $source->primary_columns;
-  my %pk_cols;
-  @pk_cols{@pk_cols} = ();
+  my $is_identity_insert =
+    (first { $_->{is_auto_increment} } values %$supplied_col_info) ? 1 : 0;
 
-  my @pk_guids = grep {
-    $source->column_info($_)->{data_type}
-    &&
-    $source->column_info($_)->{data_type} =~ /^uniqueidentifier/i
-  } @pk_cols;
-
-  my @auto_guids = grep {
-    $source->column_info($_)->{data_type}
-    &&
-    $source->column_info($_)->{data_type} =~ /^uniqueidentifier/i
-    &&
-    $source->column_info($_)->{auto_nextval}
-  } grep { not exists $pk_cols{$_} } $source->columns;
-
-  my @get_guids_for =
-    grep { not exists $to_insert->{$_} } (@pk_guids, @auto_guids);
-
-  my $updated_cols = {};
-
-  for my $guid_col (@get_guids_for) {
-    my ($new_guid) = $self->_get_dbh->selectrow_array('SELECT NEWID()');
-    $updated_cols->{$guid_col} = $to_insert->{$guid_col} = $new_guid;
-  }
-
-  my $is_identity_insert = (List::Util::first { $_->{is_auto_increment} } (values %$supplied_col_info) )
-     ? 1
-     : 0;
-
   if ($is_identity_insert) {
      $self->_set_identity_insert ($source->name);
   }
 
-  $updated_cols = { %$updated_cols, %{ $self->next::method(@_) } };
+  my $updated_cols = $self->next::method(@_);
 
   if ($is_identity_insert) {
      $self->_unset_identity_insert ($source->name);
   }
 
-
   return $updated_cols;
 }
 
@@ -160,7 +124,7 @@
 
     # this should bring back the result of SELECT SCOPE_IDENTITY() we tacked
     # on in _prep_for_execute above
-    my ($identity) = eval { $sth->fetchrow_array };
+    my ($identity) = try { $sth->fetchrow_array };
 
     # SCOPE_IDENTITY failed, but we can do something else
     if ( (! $identity) && $self->_identity_method) {
@@ -190,11 +154,15 @@
 
   # see if this is an ordered subquery
   my $attrs = $_[3];
-  if ( scalar $self->_parse_order_by ($attrs->{order_by}) ) {
+  if (
+    $sql !~ /^ \s* SELECT \s+ TOP \s+ \d+ \s+ /xi
+      &&
+    scalar $self->_parse_order_by ($attrs->{order_by})
+  ) {
     $self->throw_exception(
       'An ordered subselect encountered - this is not safe! Please see "Ordered Subselects" in DBIx::Class::Storage::DBI::MSSQL
     ') unless $attrs->{unsafe_subselect_ok};
-    my $max = 2 ** 32;
+    my $max = $self->sql_maker->__max_int;
     $sql =~ s/^ \s* SELECT \s/SELECT TOP $max /xi;
   }
 
@@ -222,37 +190,35 @@
   $self->_get_dbh->do("ROLLBACK TRANSACTION $name");
 }
 
-sub build_datetime_parser {
-  my $self = shift;
-  my $type = "DateTime::Format::Strptime";
-  eval "use ${type}";
-  $self->throw_exception("Couldn't load ${type}: $@") if $@;
-  return $type->new( pattern => '%Y-%m-%d %H:%M:%S' );  # %F %T
+sub datetime_parser_type {
+  'DBIx::Class::Storage::DBI::MSSQL::DateTime::Format'
 }
 
 sub sqlt_type { 'SQLServer' }
 
-sub _get_mssql_version {
-  my $self = shift;
-
-  my $data = $self->_get_dbh->selectrow_hashref('xp_msver ProductVersion');
-
-  if ($data->{Character_Value} =~ /^(\d+)\./) {
-    return $1;
-  } else {
-    $self->throw_exception(q{Your ProductVersion's Character_Value is missing or malformed!});
-  }
-}
-
 sub sql_maker {
   my $self = shift;
 
   unless ($self->_sql_maker) {
     unless ($self->{_sql_maker_opts}{limit_dialect}) {
-      my $version = eval { $self->_get_mssql_version; } || 0;
+      my $have_rno = 0;
 
+      if (exists $self->_server_info->{normalized_dbms_version}) {
+        $have_rno = 1 if $self->_server_info->{normalized_dbms_version} >= 9;
+      }
+      else {
+        # User is connecting via DBD::Sybase and has no permission to run
+        # stored procedures like xp_msver, or version detection failed for some
+        # other reason.
+        # So, we use a query to check if RNO is implemented.
+        try {
+          $self->_get_dbh->selectrow_array('SELECT row_number() OVER (ORDER BY rand())');
+          $have_rno = 1;
+        };
+      }
+
       $self->{_sql_maker_opts} = {
-        limit_dialect => ($version >= 9 ? 'RowNumberOver' : 'Top'),
+        limit_dialect => ($have_rno ? 'RowNumberOver' : 'Top'),
         %{$self->{_sql_maker_opts}||{}}
       };
     }
@@ -263,6 +229,70 @@
   return $self->_sql_maker;
 }
 
+sub _ping {
+  my $self = shift;
+
+  my $dbh = $self->_dbh or return 0;
+
+  local $dbh->{RaiseError} = 1;
+  local $dbh->{PrintError} = 0;
+
+  return try {
+    $dbh->do('select 1');
+    1;
+  } catch {
+    0;
+  };
+}
+
+package # hide from PAUSE
+  DBIx::Class::Storage::DBI::MSSQL::DateTime::Format;
+
+my $datetime_format      = '%Y-%m-%d %H:%M:%S.%3N'; # %F %T
+my $smalldatetime_format = '%Y-%m-%d %H:%M:%S';
+
+my ($datetime_parser, $smalldatetime_parser);
+
+sub parse_datetime {
+  shift;
+  require DateTime::Format::Strptime;
+  $datetime_parser ||= DateTime::Format::Strptime->new(
+    pattern  => $datetime_format,
+    on_error => 'croak',
+  );
+  return $datetime_parser->parse_datetime(shift);
+}
+
+sub format_datetime {
+  shift;
+  require DateTime::Format::Strptime;
+  $datetime_parser ||= DateTime::Format::Strptime->new(
+    pattern  => $datetime_format,
+    on_error => 'croak',
+  );
+  return $datetime_parser->format_datetime(shift);
+}
+
+sub parse_smalldatetime {
+  shift;
+  require DateTime::Format::Strptime;
+  $smalldatetime_parser ||= DateTime::Format::Strptime->new(
+    pattern  => $smalldatetime_format,
+    on_error => 'croak',
+  );
+  return $smalldatetime_parser->parse_datetime(shift);
+}
+
+sub format_smalldatetime {
+  shift;
+  require DateTime::Format::Strptime;
+  $smalldatetime_parser ||= DateTime::Format::Strptime->new(
+    pattern  => $smalldatetime_format,
+    on_error => 'croak',
+  );
+  return $smalldatetime_parser->format_datetime(shift);
+}
+
 1;
 
 =head1 NAME
@@ -358,7 +388,7 @@
 
 =head1 AUTHOR
 
-See L<DBIx::Class/CONTRIBUTORS>.
+See L<DBIx::Class/AUTHOR> and L<DBIx::Class/CONTRIBUTORS>.
 
 =head1 LICENSE
 

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/ODBC/Firebird.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/ODBC/Firebird.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/ODBC/Firebird.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -10,7 +10,7 @@
 DBIx::Class::Storage::DBI::ODBC::Firebird - Driver for using the Firebird RDBMS
 through ODBC
 
-=head1 SYNOPSIS
+=head1 DESCRIPTION
 
 Most functionality is provided by L<DBIx::Class::Storage::DBI::Interbase>, see
 that module for details.
@@ -19,6 +19,11 @@
 
 L<http://www.firebirdnews.org/?p=1324>
 
+This driver does not suffer from the nested statement handles across commits
+issue that the L<DBD::InterBase|DBIx::Class::Storage::DBI::InterBase> based
+driver does. This makes it more suitable for long running processes such as
+under L<Catalyst>.
+
 =cut
 
 # XXX seemingly no equivalent to ib_time_all from DBD::InterBase via ODBC

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/ODBC/Microsoft_SQL_Server.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/ODBC/Microsoft_SQL_Server.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/ODBC/Microsoft_SQL_Server.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -4,10 +4,10 @@
 
 use base qw/DBIx::Class::Storage::DBI::MSSQL/;
 use mro 'c3';
+use Scalar::Util 'reftype';
+use Try::Tiny;
+use namespace::clean;
 
-use List::Util();
-use Scalar::Util ();
-
 __PACKAGE__->mk_group_accessors(simple => qw/
   _using_dynamic_cursors
 /);
@@ -37,7 +37,7 @@
 
   on_connect_call => 'use_dynamic_cursors'
 
-in your L<DBIx::Class::Storage::DBI/connect_info> as one way to enable multiple
+in your L<connect_info|DBIx::Class::Storage::DBI/connect_info> as one way to enable multiple
 concurrent statements.
 
 Will add C<< odbc_cursortype => 2 >> to your DBI connection attributes. See
@@ -66,7 +66,7 @@
 
   my $dbi_attrs = $self->_dbi_connect_info->[-1];
 
-  unless (ref($dbi_attrs) && Scalar::Util::reftype($dbi_attrs) eq 'HASH') {
+  unless (ref($dbi_attrs) && reftype $dbi_attrs eq 'HASH') {
     $dbi_attrs = {};
     push @{ $self->_dbi_connect_info }, $dbi_attrs;
   }
@@ -84,18 +84,17 @@
   my $self = shift;
   my $dbh  = $self->_get_dbh;
 
-  eval {
+  try {
     local $dbh->{RaiseError} = 1;
     local $dbh->{PrintError} = 0;
     $dbh->do('SELECT @@IDENTITY');
-  };
-  if ($@) {
+  } catch {
     $self->throw_exception (<<'EOF');
 
 Your drivers do not seem to support dynamic cursors (odbc_cursortype => 2),
 if you're using FreeTDS, make sure to set tds_version to 8.0 or greater.
 EOF
-  }
+  };
 
   $self->_using_dynamic_cursors(1);
   $self->_identity_method('@@identity');
@@ -175,14 +174,6 @@
   }
 }
 
-sub _get_mssql_version {
-  my $self = shift;
-
-  my ($version) = $self->_get_dbh->get_info(18) =~ /^(\d+)/;
-
-  return $version;
-}
-
 1;
 
 =head1 AUTHOR

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/ODBC.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/ODBC.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/ODBC.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -4,21 +4,24 @@
 
 use base qw/DBIx::Class::Storage::DBI/;
 use mro 'c3';
+use Try::Tiny;
+use namespace::clean;
 
 sub _rebless {
-    my ($self) = @_;
+  my ($self) = @_;
 
-    my $dbtype = eval { $self->_get_dbh->get_info(17) };
+  try {
+    my $dbtype = $self->_get_dbh->get_info(17);
 
-    unless ( $@ ) {
-        # Translate the backend name into a perl identifier
-        $dbtype =~ s/\W/_/gi;
-        my $subclass = "DBIx::Class::Storage::DBI::ODBC::${dbtype}";
-        if ($self->load_optional_class($subclass) && !$self->isa($subclass)) {
-            bless $self, $subclass;
-            $self->_rebless;
-        }
+    # Translate the backend name into a perl identifier
+    $dbtype =~ s/\W/_/gi;
+    my $subclass = "DBIx::Class::Storage::DBI::ODBC::${dbtype}";
+
+    if ($self->load_optional_class($subclass) && !$self->isa($subclass)) {
+      bless $self, $subclass;
+      $self->_rebless;
     }
+  };
 }
 
 1;

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Oracle/Generic.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Oracle/Generic.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Oracle/Generic.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -3,7 +3,9 @@
 use strict;
 use warnings;
 use Scope::Guard ();
-use Context::Preserve ();
+use Context::Preserve 'preserve_context';
+use Try::Tiny;
+use namespace::clean;
 
 =head1 NAME
 
@@ -17,6 +19,51 @@
   __PACKAGE__->set_primary_key('id');
   __PACKAGE__->sequence('mysequence');
 
+  # Somewhere in your Code
+  # add some data to a table with a hierarchical relationship
+  $schema->resultset('Person')->create ({
+        firstname => 'foo',
+        lastname => 'bar',
+        children => [
+            {
+                firstname => 'child1',
+                lastname => 'bar',
+                children => [
+                    {
+                        firstname => 'grandchild',
+                        lastname => 'bar',
+                    }
+                ],
+            },
+            {
+                firstname => 'child2',
+                lastname => 'bar',
+            },
+        ],
+    });
+
+  # select from the hierarchical relationship
+  my $rs = $schema->resultset('Person')->search({},
+    {
+      'start_with' => { 'firstname' => 'foo', 'lastname' => 'bar' },
+      'connect_by' => { 'parentid' => { '-prior' => \'persionid' } },
+      'order_siblings_by' => { -asc => 'firstname' },
+    },
+  );
+
+  # this will select the whole tree starting from person "foo bar", creating
+  # following query:
+  # SELECT
+  #     me.persionid me.firstname, me.lastname, me.parentid
+  # FROM
+  #     person me
+  # START WITH
+  #     firstname = 'foo' and lastname = 'bar'
+  # CONNECT BY
+  #     parentid = prior persionid
+  # ORDER SIBLINGS BY
+  #     firstname ASC
+
 =head1 DESCRIPTION
 
 This class implements base Oracle support. The subclass
@@ -30,6 +77,8 @@
 use base qw/DBIx::Class::Storage::DBI/;
 use mro 'c3';
 
+__PACKAGE__->sql_maker_class('DBIx::Class::SQLAHacks::Oracle');
+
 sub deployment_statements {
   my $self = shift;;
   my ($schema, $type, $version, $dir, $sqltargs, @rest) = @_;
@@ -39,10 +88,14 @@
   $sqltargs->{quote_table_names} = $quote_char ? 1 : 0;
   $sqltargs->{quote_field_names} = $quote_char ? 1 : 0;
 
-  my $oracle_version = eval { $self->_get_dbh->get_info(18) };
+  if (
+    ! exists $sqltargs->{producer_args}{oracle_version}
+      and
+    my $dver = $self->_server_info->{dbms_version}
+  ) {
+    $sqltargs->{producer_args}{oracle_version} = $dver;
+  }
 
-  $sqltargs->{producer_args}{oracle_version} = $oracle_version;
-
   $self->next::method($schema, $type, $version, $dir, $sqltargs, @rest);
 }
 
@@ -85,7 +138,7 @@
     {
       $schema ? (owner => $schema) : (),
       table_name => $table || $source_name,
-      triggering_event => 'INSERT',
+      triggering_event => { -like => '%INSERT%' },
       status => 'ENABLED',
      },
   );
@@ -110,46 +163,53 @@
   my $dbh = $self->_dbh or return 0;
 
   local $dbh->{RaiseError} = 1;
+  local $dbh->{PrintError} = 0;
 
-  eval {
-    $dbh->do("select 1 from dual");
+  return try {
+    $dbh->do('select 1 from dual');
+    1;
+  } catch {
+    0;
   };
-
-  return $@ ? 0 : 1;
 }
 
 sub _dbh_execute {
   my $self = shift;
   my ($dbh, $op, $extra_bind, $ident, $bind_attributes, @args) = @_;
 
-  my $wantarray = wantarray;
+  my (@res, $tried);
+  my $wantarray = wantarray();
+  my $next = $self->next::can;
+  do {
+    try {
+      my $exec = sub { $self->$next($dbh, $op, $extra_bind, $ident, $bind_attributes, @args) };
 
-  my (@res, $exception, $retried);
+      if (!defined $wantarray) {
+        $exec->();
+      }
+      elsif (! $wantarray) {
+        $res[0] = $exec->();
+      }
+      else {
+        @res = $exec->();
+      }
 
-  RETRY: {
-    do {
-      eval {
-        if ($wantarray) {
-          @res    = $self->next::method(@_);
-        } else {
-          $res[0] = $self->next::method(@_);
-        }
-      };
-      $exception = $@;
-      if ($exception =~ /ORA-01003/) {
+      $tried++;
+    }
+    catch {
+      if (! $tried and $_ =~ /ORA-01003/) {
         # ORA-01003: no statement parsed (someone changed the table somehow,
         # invalidating your cursor.)
         my ($sql, $bind) = $self->_prep_for_execute($op, $extra_bind, $ident, \@args);
         delete $dbh->{CachedKids}{$sql};
-      } else {
-        last RETRY;
       }
-    } while (not $retried++);
-  }
+      else {
+        $self->throw_exception($_);
+      }
+    };
+  } while (! $tried++);
 
-  $self->throw_exception($exception) if $exception;
-
-  $wantarray ? @res : $res[0]
+  return $wantarray ? @res : $res[0];
 }
 
 =head2 get_autoinc_seq
@@ -164,19 +224,6 @@
   $self->dbh_do('_dbh_get_autoinc_seq', $source, $col);
 }
 
-=head2 columns_info_for
-
-This wraps the superclass version of this method to force table
-names to uppercase
-
-=cut
-
-sub columns_info_for {
-  my ($self, $table) = @_;
-
-  $self->next::method($table);
-}
-
 =head2 datetime_parser_type
 
 This sets the proper DateTime::Format module for use with
@@ -192,10 +239,10 @@
 
     on_connect_call => 'datetime_setup'
 
-In L<DBIx::Class::Storage::DBI/connect_info> to set the session nls date, and
-timestamp values for use with L<DBIx::Class::InflateColumn::DateTime> and the
-necessary environment variables for L<DateTime::Format::Oracle>, which is used
-by it.
+In L<connect_info|DBIx::Class::Storage::DBI/connect_info> to set the session nls
+date, and timestamp values for use with L<DBIx::Class::InflateColumn::DateTime>
+and the necessary environment variables for L<DateTime::Format::Oracle>, which
+is used by it.
 
 Maximum allowable precision is used, unless the environment variables have
 already been set.
@@ -360,15 +407,99 @@
   my $txn_scope_guard = $self->txn_scope_guard;
 
   $self->_do_query('alter session set constraints = deferred');
-  
+
   my $sg = Scope::Guard->new(sub {
     $self->_do_query('alter session set constraints = immediate');
   });
 
-  return Context::Preserve::preserve_context(sub { $sub->() },
-    after => sub { $txn_scope_guard->commit });
+  return
+    preserve_context { $sub->() } after => sub { $txn_scope_guard->commit };
 }
 
+=head1 ATTRIBUTES
+
+Following additional attributes can be used in resultsets.
+
+=head2 connect_by or connect_by_nocycle
+
+=over 4
+
+=item Value: \%connect_by
+
+=back
+
+A hashref of conditions used to specify the relationship between parent rows
+and child rows of the hierarchy.
+
+
+  connect_by => { parentid => 'prior persionid' }
+
+  # adds a connect by statement to the query:
+  # SELECT
+  #     me.persionid me.firstname, me.lastname, me.parentid
+  # FROM
+  #     person me
+  # CONNECT BY
+  #     parentid = prior persionid
+  
+
+  connect_by_nocycle => { parentid => 'prior persionid' }
+
+  # adds a connect by statement to the query:
+  # SELECT
+  #     me.persionid me.firstname, me.lastname, me.parentid
+  # FROM
+  #     person me
+  # CONNECT BY NOCYCLE
+  #     parentid = prior persionid
+
+
+=head2 start_with
+
+=over 4
+
+=item Value: \%condition
+
+=back
+
+A hashref of conditions which specify the root row(s) of the hierarchy.
+
+It uses the same syntax as L<DBIx::Class::ResultSet/search>
+
+  start_with => { firstname => 'Foo', lastname => 'Bar' }
+
+  # SELECT
+  #     me.persionid me.firstname, me.lastname, me.parentid
+  # FROM
+  #     person me
+  # START WITH
+  #     firstname = 'foo' and lastname = 'bar'
+  # CONNECT BY
+  #     parentid = prior persionid
+
+=head2 order_siblings_by
+
+=over 4
+
+=item Value: ($order_siblings_by | \@order_siblings_by)
+
+=back
+
+Which column(s) to order the siblings by.
+
+It uses the same syntax as L<DBIx::Class::ResultSet/order_by>
+
+  'order_siblings_by' => 'firstname ASC'
+
+  # SELECT
+  #     me.persionid me.firstname, me.lastname, me.parentid
+  # FROM
+  #     person me
+  # CONNECT BY
+  #     parentid = prior persionid
+  # ORDER SIBLINGS BY
+  #     firstname ASC
+
 =head1 AUTHOR
 
 See L<DBIx::Class/CONTRIBUTORS>.

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Oracle.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Oracle.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Oracle.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -5,23 +5,25 @@
 
 use base qw/DBIx::Class::Storage::DBI/;
 use mro 'c3';
+use Try::Tiny;
+use namespace::clean;
 
 sub _rebless {
     my ($self) = @_;
 
-    my $version = eval { $self->_get_dbh->get_info(18); };
+    try {
+      my $version = $self->_get_dbh->get_info(18);
 
-    if ( !$@ ) {
-        my ($major, $minor, $patchlevel) = split(/\./, $version);
+      my ($major, $minor, $patchlevel) = split(/\./, $version);
 
-        # Default driver
-        my $class = $major <= 8
-          ? 'DBIx::Class::Storage::DBI::Oracle::WhereJoins'
-          : 'DBIx::Class::Storage::DBI::Oracle::Generic';
+      # Default driver
+      my $class = $major <= 8
+        ? 'DBIx::Class::Storage::DBI::Oracle::WhereJoins'
+        : 'DBIx::Class::Storage::DBI::Oracle::Generic';
 
-        $self->ensure_class_loaded ($class);
-        bless $self, $class;
-    }
+      $self->ensure_class_loaded ($class);
+      bless $self, $class;
+    };
 }
 
 1;

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Pg.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Pg.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Pg.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -3,22 +3,44 @@
 use strict;
 use warnings;
 
-use base qw/DBIx::Class::Storage::DBI::MultiColumnIn/;
+use base qw/
+    DBIx::Class::Storage::DBI::MultiColumnIn
+/;
 use mro 'c3';
 
 use DBD::Pg qw(:pg_types);
+use Scope::Guard ();
+use Context::Preserve 'preserve_context';
+use namespace::clean;
 
 # Ask for a DBD::Pg with array support
 warn __PACKAGE__.": DBD::Pg 2.9.2 or greater is strongly recommended\n"
   if ($DBD::Pg::VERSION < 2.009002);  # pg uses (used?) version::qv()
 
+sub _supports_insert_returning {
+  my $self = shift;
+
+  return 1
+    if $self->_server_info->{normalized_dbms_version} >= 8.002;
+
+  return 0;
+}
+
 sub with_deferred_fk_checks {
   my ($self, $sub) = @_;
 
-  $self->_get_dbh->do('SET CONSTRAINTS ALL DEFERRED');
-  $sub->();
+  my $txn_scope_guard = $self->txn_scope_guard;
+
+  $self->_do_query('SET CONSTRAINTS ALL DEFERRED');
+
+  my $sg = Scope::Guard->new(sub {
+    $self->_do_query('SET CONSTRAINTS ALL IMMEDIATE');
+  });
+
+  return preserve_context { $sub->() } after => sub { $txn_scope_guard->commit };
 }
 
+# only used when INSERT ... RETURNING is disabled
 sub last_insert_id {
 my ($self,$source,@cols) = @_;
 
@@ -32,20 +54,24 @@
           $col,
       ));
 
-    push @values, $self->_dbh_last_insert_id ($self->_dbh, $seq);
+    push @values, $self->_dbh->last_insert_id(undef, undef, undef, undef, {sequence => $seq});
   }
 
   return @values;
 }
 
-# there seems to be absolutely no reason to have this as a separate method,
-# but leaving intact in case someone is already overriding it
-sub _dbh_last_insert_id {
-  my ($self, $dbh, $seq) = @_;
-  $dbh->last_insert_id(undef, undef, undef, undef, {sequence => $seq});
+sub _sequence_fetch {
+  my ($self, $function, $sequence) = @_;
+
+  $self->throw_exception('No sequence to fetch') unless $sequence;
+
+  my ($val) = $self->_get_dbh->selectrow_array(
+    sprintf ("select %s('%s')", $function, $sequence)
+  );
+
+  return $val;
 }
 
-
 sub _dbh_get_autoinc_seq {
   my ($self, $dbh, $source, $col) = @_;
 
@@ -155,12 +181,6 @@
   }
 }
 
-sub _sequence_fetch {
-  my ( $self, $type, $seq ) = @_;
-  my ($id) = $self->_get_dbh->selectrow_array("SELECT nextval('${seq}')");
-  return $id;
-}
-
 sub _svp_begin {
     my ($self, $name) = @_;
 
@@ -179,6 +199,23 @@
     $self->_get_dbh->pg_rollback_to($name);
 }
 
+sub deployment_statements {
+  my $self = shift;;
+  my ($schema, $type, $version, $dir, $sqltargs, @rest) = @_;
+
+  $sqltargs ||= {};
+
+  if (
+    ! exists $sqltargs->{producer_args}{postgres_version}
+      and
+    my $dver = $self->_server_info->{normalized_dbms_version}
+  ) {
+    $sqltargs->{producer_args}{postgres_version} = $dver;
+  }
+
+  $self->next::method($schema, $type, $version, $dir, $sqltargs, @rest);
+}
+
 1;
 
 __END__

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Replicated/Pool.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Replicated/Pool.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Replicated/Pool.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -8,6 +8,7 @@
 use Carp::Clan qw/^DBIx::Class/;
 use MooseX::Types::Moose qw/Num Int ClassName HashRef/;
 use DBIx::Class::Storage::DBI::Replicated::Types 'DBICStorageDBI';
+use Try::Tiny;
 
 use namespace::clean -except => 'meta';
 
@@ -293,18 +294,16 @@
 sub _safely {
   my ($self, $replicant, $name, $code) = @_;
 
-  eval {
-    $code->()
-  };
-  if ($@) {
+  return try {
+    $code->();
+    1;
+  } catch {
     $replicant->debugobj->print(sprintf(
       "Exception trying to $name for replicant %s, error is %s",
-      $replicant->_dbi_connect_info->[0], $@)
+      $replicant->_dbi_connect_info->[0], $_)
     );
-    return undef;
-  }
-
-  return 1;
+    undef;
+  };
 }
 
 =head2 connected_replicants

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Replicated/WithDSN.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Replicated/WithDSN.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Replicated/WithDSN.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -4,6 +4,7 @@
 use Scalar::Util 'reftype';
 requires qw/_query_start/;
 
+use Try::Tiny;
 use namespace::clean -except => 'meta';
 
 =head1 NAME
@@ -32,7 +33,7 @@
 around '_query_start' => sub {
   my ($method, $self, $sql, @bind) = @_;
 
-  my $dsn = eval { $self->dsn } || $self->_dbi_connect_info->[0];
+  my $dsn = (try { $self->dsn }) || $self->_dbi_connect_info->[0];
 
   my($op, $rest) = (($sql=~m/^(\w+)(.+)$/),'NOP', 'NO SQL');
   my $storage_type = $self->can('active') ? 'REPLICANT' : 'MASTER';
@@ -41,7 +42,7 @@
     if ((reftype($dsn)||'') ne 'CODE') {
       "$op [DSN_$storage_type=$dsn]$rest";
     }
-    elsif (my $id = eval { $self->id }) {
+    elsif (my $id = try { $self->id }) {
       "$op [$storage_type=$id]$rest";
     }
     else {

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Replicated.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Replicated.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Replicated.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -15,7 +15,9 @@
 use MooseX::Types::Moose qw/ClassName HashRef Object/;
 use Scalar::Util 'reftype';
 use Hash::Merge;
-use List::Util qw/min max/;
+use List::Util qw/min max reduce/;
+use Try::Tiny;
+use namespace::clean;
 
 use namespace::clean -except => 'meta';
 
@@ -37,7 +39,7 @@
   $schema->storage_type( ['::DBI::Replicated', {balancer=>'::Random'}] );
   $schema->connection(...);
 
-Next, you need to add in the Replicants.  Basically this is an array of 
+Next, you need to add in the Replicants.  Basically this is an array of
 arrayrefs, where each arrayref is database connect information.  Think of these
 arguments as what you'd pass to the 'normal' $schema->connect method.
 
@@ -58,7 +60,7 @@
   my $RS = $schema->resultset('Source')->search(undef, {force_pool=>'master'});
 
 Now $RS will force everything (both reads and writes) to use whatever was setup
-as the master storage.  'master' is hardcoded to always point to the Master, 
+as the master storage.  'master' is hardcoded to always point to the Master,
 but you can also use any Replicant name.  Please see:
 L<DBIx::Class::Storage::DBI::Replicated::Pool> and the replicants attribute for more.
 
@@ -123,7 +125,7 @@
 
 =head2 pool_type
 
-Contains the classname which will instantiate the L</pool> object.  Defaults 
+Contains the classname which will instantiate the L</pool> object.  Defaults
 to: L<DBIx::Class::Storage::DBI::Replicated::Pool>.
 
 =cut
@@ -205,7 +207,7 @@
 
 =head2 balancer
 
-Is a <DBIx::Class::Storage::DBI::Replicated::Balancer> or derived class.  This 
+Is a <DBIx::Class::Storage::DBI::Replicated::Balancer> or derived class.  This
 is a class that takes a pool (<DBIx::Class::Storage::DBI::Replicated::Pool>)
 
 =cut
@@ -235,7 +237,7 @@
 
 =head1 ATTRIBUTES IMPLEMENTING THE DBIx::Storage::DBI INTERFACE
 
-The following methods are delegated all the methods required for the 
+The following methods are delegated all the methods required for the
 L<DBIx::Class::Storage::DBI> interface.
 
 =head2 read_handler
@@ -252,7 +254,7 @@
     select
     select_single
     columns_info_for
-    _dbh_columns_info_for 
+    _dbh_columns_info_for
     _select
   /],
 );
@@ -306,8 +308,8 @@
 
     backup
     is_datatype_numeric
+    _supports_insert_returning
     _count_select
-    _subq_count_select
     _subq_update_delete
     svp_rollback
     svp_begin
@@ -318,6 +320,7 @@
     _fix_bind_params
     _default_dbi_connect_attributes
     _dbi_connect_info
+    _dbic_connect_attributes
     auto_savepoint
     _sqlt_version_ok
     _query_end
@@ -342,7 +345,6 @@
     _dbh_commit
     _execute_array
     _placeholders_supported
-    _verify_pid
     savepoints
     _sqlt_minimum_version
     _sql_maker_opts
@@ -365,9 +367,24 @@
     _do_query
     _dbh_sth
     _dbh_execute
+    _prefetch_insert_auto_nextvals
+    _server_info_hash
   /],
 );
 
+my @unimplemented = qw(
+  _arm_global_destructor
+  _preserve_foreign_dbh
+  _verify_pid
+  _verify_tid
+);
+
+for my $method (@unimplemented) {
+  __PACKAGE__->meta->add_method($method, sub {
+    croak "$method must not be called on ".(blessed shift).' objects';
+  });
+}
+
 has _master_connect_info_opts =>
   (is => 'rw', isa => HashRef, default => sub { {} });
 
@@ -452,7 +469,7 @@
 =cut
 
 sub BUILDARGS {
-  my ($class, $schema, $storage_type_args, @args) = @_;  
+  my ($class, $schema, $storage_type_args, @args) = @_;
 
   return {
     schema=>$schema,
@@ -605,7 +622,7 @@
   my $reliably = sub {
     my $name = shift @_;
     $schema->resultset('User')->create({name=>$name});
-    my $user_rs = $schema->resultset('User')->find({name=>$name}); 
+    my $user_rs = $schema->resultset('User')->find({name=>$name});
     return $user_rs;
   };
 
@@ -636,7 +653,7 @@
   my @result;
   my $want_array = wantarray;
 
-  eval {
+  try {
     if($want_array) {
       @result = $coderef->(@args);
     } elsif(defined $want_array) {
@@ -644,19 +661,14 @@
     } else {
       $coderef->(@args);
     }
+  } catch {
+    $self->throw_exception("coderef returned an error: $_");
+  } finally {
+    ##Reset to the original state
+    $self->read_handler($current);
   };
 
-  ##Reset to the original state
-  $self->read_handler($current);
-
-  ##Exception testing has to come last, otherwise you might leave the 
-  ##read_handler set to master.
-
-  if($@) {
-    $self->throw_exception("coderef returned an error: $@");
-  } else {
-    return $want_array ? @result : $result[0];
-  }
+  return $want_array ? @result : $result[0];
 }
 
 =head2 set_reliable_storage
@@ -908,7 +920,7 @@
   my $self = shift;
 
   return max map $_->lag_behind_master, $self->replicants;
-} 
+}
 
 =head2 is_replicating
 
@@ -956,7 +968,7 @@
 
 sub _driver_determined {
   my $self = shift;
-  
+
   if (@_) {
     $_->_driver_determined(@_) for $self->all_storages;
   }
@@ -966,19 +978,19 @@
 
 sub _init {
   my $self = shift;
-  
+
   $_->_init for $self->all_storages;
 }
 
 sub _run_connection_actions {
   my $self = shift;
-  
+
   $_->_run_connection_actions for $self->all_storages;
 }
 
 sub _do_connection_actions {
   my $self = shift;
-  
+
   if (@_) {
     $_->_do_connection_actions(@_) for $self->all_storages;
   }
@@ -1006,6 +1018,36 @@
   return min map $_->_ping, $self->all_storages;
 }
 
+my $numify_ver = sub {
+  my $ver = shift;
+  my @numparts = split /\D+/, $ver;
+  my $format = '%d.' . (join '', ('%05d') x (@numparts - 1));
+
+  return sprintf $format, @numparts;
+};
+
+sub _server_info {
+  my $self = shift;
+
+  if (not $self->_server_info_hash) {
+    my $min_version_info = (
+      reduce { $a->[0] < $b->[0] ? $a : $b }
+      map [ $numify_ver->($_->{dbms_version}), $_ ],
+      map $_->_server_info, $self->all_storages
+    )->[1];
+
+    $self->_server_info_hash($min_version_info); # on master
+  }
+
+  return $self->_server_info_hash;
+}
+
+sub _get_server_version {
+  my $self = shift;
+
+  return $self->_server_info->{dbms_version};
+}
+
 =head1 GOTCHAS
 
 Due to the fact that replicants can lag behind a master, you must take care to

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/SQLAnywhere.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/SQLAnywhere.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/SQLAnywhere.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -2,9 +2,11 @@
 
 use strict;
 use warnings;
-use base qw/DBIx::Class::Storage::DBI/;
+use base qw/DBIx::Class::Storage::DBI::UniqueIdentifier/;
 use mro 'c3';
-use List::Util ();
+use List::Util 'first';
+use Try::Tiny;
+use namespace::clean;
 
 __PACKAGE__->mk_group_accessors(simple => qw/
   _identity
@@ -35,18 +37,21 @@
 
 sub last_insert_id { shift->_identity }
 
+sub _new_uuid { 'UUIDTOSTR(NEWID())' }
+
 sub insert {
   my $self = shift;
   my ($source, $to_insert) = @_;
 
-  my $identity_col = List::Util::first {
-      $source->column_info($_)->{is_auto_increment} 
-  } $source->columns;
+  my $identity_col =
+    first { $source->column_info($_)->{is_auto_increment} } $source->columns;
 
 # user might have an identity PK without is_auto_increment
   if (not $identity_col) {
     foreach my $pk_col ($source->primary_columns) {
-      if (not exists $to_insert->{$pk_col}) {
+      if (not exists $to_insert->{$pk_col} &&
+          $source->column_info($pk_col)->{data_type} !~ /^uniqueidentifier/i)
+      {
         $identity_col = $pk_col;
         last;
       }
@@ -58,11 +63,36 @@
     my $table_name = $source->from;
     $table_name    = $$table_name if ref $table_name;
 
-    my ($identity) = $dbh->selectrow_array("SELECT GET_IDENTITY('$table_name')");
+    my ($identity) = try {
+      $dbh->selectrow_array("SELECT GET_IDENTITY('$table_name')")
+    };
 
-    $to_insert->{$identity_col} = $identity;
+    if (defined $identity) {
+      $to_insert->{$identity_col} = $identity;
+      $self->_identity($identity);
+    }
+  }
 
-    $self->_identity($identity);
+  return $self->next::method(@_);
+}
+
+# convert UUIDs to strings in selects
+sub _select_args {
+  my $self = shift;
+  my ($ident, $select) = @_;
+
+  my $col_info = $self->_resolve_column_info($ident);
+
+  for my $select_idx (0..$#$select) {
+    my $selected = $select->[$select_idx];
+
+    next if ref $selected;
+
+    my $data_type = $col_info->{$selected}{data_type};
+
+    if ($data_type && lc($data_type) eq 'uniqueidentifier') {
+      $select->[$select_idx] = { UUIDTOSTR => $selected };
+    }
   }
 
   return $self->next::method(@_);
@@ -85,8 +115,13 @@
 sub build_datetime_parser {
   my $self = shift;
   my $type = "DateTime::Format::Strptime";
-  eval "use ${type}";
-  $self->throw_exception("Couldn't load ${type}: $@") if $@;
+  try {
+    eval "require ${type}"
+  }
+  catch {
+    $self->throw_exception("Couldn't load ${type}: $_");
+  };
+
   return $type->new( pattern => '%Y-%m-%d %H:%M:%S.%6N' );
 }
 

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/SQLite.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/SQLite.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/SQLite.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -45,23 +45,49 @@
 }
 
 sub deployment_statements {
-  my $self = shift;;
+  my $self = shift;
   my ($schema, $type, $version, $dir, $sqltargs, @rest) = @_;
 
   $sqltargs ||= {};
 
-  my $sqlite_version = $self->_get_dbh->{sqlite_version};
+  if (
+    ! exists $sqltargs->{producer_args}{sqlite_version}
+      and
+    my $dver = $self->_server_info->{normalized_dbms_version}
+  ) {
+    $sqltargs->{producer_args}{sqlite_version} = $dver;
+  }
 
-  # numify, SQLT does a numeric comparison
-  $sqlite_version =~ s/^(\d+) \. (\d+) (?: \. (\d+))? .*/${1}.${2}/x;
-
-  $sqltargs->{producer_args}{sqlite_version} = $sqlite_version;
-
   $self->next::method($schema, $type, $version, $dir, $sqltargs, @rest);
 }
 
 sub datetime_parser_type { return "DateTime::Format::SQLite"; } 
 
+=head2 connect_call_use_foreign_keys
+
+Used as:
+
+    on_connect_call => 'use_foreign_keys'
+
+In L<connect_info|DBIx::Class::Storage::DBI/connect_info> to turn on foreign key
+(including cascading) support for recent versions of SQLite and L<DBD::SQLite>.
+
+Executes:
+
+  PRAGMA foreign_keys = ON 
+
+See L<http://www.sqlite.org/foreignkeys.html> for more information.
+
+=cut
+
+sub connect_call_use_foreign_keys {
+  my $self = shift;
+
+  $self->_do_query(
+    'PRAGMA foreign_keys = ON'
+  );
+}
+
 1;
 
 =head1 NAME

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Sybase/ASE/NoBindVars.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Sybase/ASE/NoBindVars.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Sybase/ASE/NoBindVars.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -5,8 +5,9 @@
   DBIx::Class::Storage::DBI::Sybase::ASE
 /;
 use mro 'c3';
-use List::Util ();
-use Scalar::Util ();
+use List::Util 'first';
+use Scalar::Util 'looks_like_number';
+use namespace::clean;
 
 sub _init {
   my $self = shift;
@@ -17,7 +18,7 @@
 
 sub _fetch_identity_sql { 'SELECT ' . $_[0]->_identity_method }
 
-my $number = sub { Scalar::Util::looks_like_number($_[0]) };
+my $number = sub { looks_like_number $_[0] };
 
 my $decimal = sub { $_[0] =~ /^ [-+]? \d+ (?:\.\d*)? \z/x };
 
@@ -38,7 +39,7 @@
 
   return $self->next::method(@_) if not defined $value or not defined $type;
 
-  if (my $key = List::Util::first { $type =~ /$_/i } keys %noquote) {
+  if (my $key = first { $type =~ /$_/i } keys %noquote) {
     return 1 if $noquote{$key}->($value);
   }
   elsif ($self->is_datatype_numeric($type) && $number->($value)) {

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Sybase/ASE.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Sybase/ASE.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Sybase/ASE.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -9,10 +9,12 @@
 /;
 use mro 'c3';
 use Carp::Clan qw/^DBIx::Class/;
-use Scalar::Util();
-use List::Util();
+use Scalar::Util 'blessed';
+use List::Util 'first';
 use Sub::Name();
-use Data::Dumper::Concise();
+use Data::Dumper::Concise 'Dumper';
+use Try::Tiny;
+use namespace::clean;
 
 __PACKAGE__->mk_group_accessors('simple' =>
     qw/_identity _blob_log_on_update _writer_storage _is_extra_storage
@@ -49,7 +51,7 @@
 without doing a C<SELECT MAX(col)>. This is done safely in a transaction
 (locking the table.) See L</INSERTS WITH PLACEHOLDERS>.
 
-A recommended L<DBIx::Class::Storage::DBI/connect_info> setting:
+A recommended L<connect_info|DBIx::Class::Storage::DBI/connect_info> setting:
 
   on_connect_call => [['datetime_setup'], ['blob_setup', log_on_update => 0]]
 
@@ -150,6 +152,16 @@
   };
 }
 
+sub _sql_maker_opts {
+  my ( $self, $opts ) = @_;
+
+  if ( $opts ) {
+    $self->{_sql_maker_opts} = { %$opts };
+  }
+
+  return { limit_dialect => 'RowCountOrGenericSubQ', %{$self->{_sql_maker_opts}||{}} };
+}
+
 sub disconnect {
   my $self = shift;
 
@@ -247,19 +259,18 @@
 
   my ($sql, $bind) = $self->next::method (@_);
 
-  my $table = Scalar::Util::blessed($ident) ? $ident->from : $ident;
+  my $table = blessed $ident ? $ident->from : $ident;
 
   my $bind_info = $self->_resolve_column_info(
     $ident, [map $_->[0], @{$bind}]
   );
-  my $bound_identity_col = List::Util::first
-    { $bind_info->{$_}{is_auto_increment} }
-    (keys %$bind_info)
+  my $bound_identity_col =
+    first { $bind_info->{$_}{is_auto_increment} }
+    keys %$bind_info
   ;
-  my $identity_col = Scalar::Util::blessed($ident) &&
-    List::Util::first
-    { $ident->column_info($_)->{is_auto_increment} }
-    $ident->columns
+  my $identity_col =
+    blessed $ident &&
+    first { $ident->column_info($_)->{is_auto_increment} } $ident->columns
   ;
 
   if (($op eq 'insert' && $bound_identity_col) ||
@@ -347,9 +358,9 @@
   my $self = shift;
   my ($source, $to_insert) = @_;
 
-  my $identity_col = (List::Util::first
-    { $source->column_info($_)->{is_auto_increment} }
-    $source->columns) || '';
+  my $identity_col =
+    (first { $source->column_info($_)->{is_auto_increment} } $source->columns)
+    || '';
 
   # check for empty insert
   # INSERT INTO foo DEFAULT VALUES -- does not work with Sybase
@@ -432,9 +443,8 @@
 
   my $table = $source->name;
 
-  my $identity_col = List::Util::first
-    { $source->column_info($_)->{is_auto_increment} }
-    $source->columns;
+  my $identity_col =
+    first { $source->column_info($_)->{is_auto_increment} } $source->columns;
 
   my $is_identity_update = $identity_col && defined $fields->{$identity_col};
 
@@ -483,14 +493,10 @@
   my $self = shift;
   my ($source, $cols, $data) = @_;
 
-  my $identity_col = List::Util::first
-    { $source->column_info($_)->{is_auto_increment} }
-    $source->columns;
+  my $identity_col =
+    first { $source->column_info($_)->{is_auto_increment} } $source->columns;
 
-  my $is_identity_insert = (List::Util::first
-    { $_ eq $identity_col }
-    @{$cols}
-  ) ? 1 : 0;
+  my $is_identity_insert = (first { $_ eq $identity_col } @{$cols}) ? 1 : 0;
 
   my @source_columns = $source->columns;
 
@@ -596,7 +602,8 @@
       return 0;
   });
 
-  eval {
+  my $exception = '';
+  try {
     my $bulk = $self->_bulk_storage;
 
     my $guard = $bulk->txn_scope_guard;
@@ -640,9 +647,10 @@
     );
 
     $bulk->_query_end($sql);
+  } catch {
+    $exception = shift;
   };
 
-  my $exception = $@;
   DBD::Sybase::set_cslib_cb($orig_cslib_cb);
 
   if ($exception =~ /-Y option/) {
@@ -728,9 +736,11 @@
 sub _update_blobs {
   my ($self, $source, $blob_cols, $where) = @_;
 
-  my @primary_cols = eval { $source->_pri_cols };
-  $self->throw_exception("Cannot update TEXT/IMAGE column(s): $@")
-    if $@;
+  my @primary_cols = try
+    { $source->_pri_cols }
+    catch {
+      $self->throw_exception("Cannot update TEXT/IMAGE column(s): $_")
+    };
 
 # check if we're updating a single row by PK
   my $pk_cols_in_where = 0;
@@ -762,9 +772,11 @@
   my $table = $source->name;
 
   my %row = %$row;
-  my @primary_cols = eval { $source->_pri_cols} ;
-  $self->throw_exception("Cannot update TEXT/IMAGE column(s): $@")
-    if $@;
+  my @primary_cols = try
+    { $source->_pri_cols }
+    catch {
+      $self->throw_exception("Cannot update TEXT/IMAGE column(s): $_")
+    };
 
   $self->throw_exception('Cannot update TEXT/IMAGE column(s) without primary key values')
     if ((grep { defined $row{$_} } @primary_cols) != @primary_cols);
@@ -779,14 +791,13 @@
     my $sth = $cursor->sth;
 
     if (not $sth) {
-
       $self->throw_exception(
           "Could not find row in table '$table' for blob update:\n"
-        . Data::Dumper::Concise::Dumper (\%where)
+        . (Dumper \%where)
       );
     }
 
-    eval {
+    try {
       do {
         $sth->func('CS_GET', 1, 'ct_data_info') or die $sth->errstr;
       } while $sth->fetch;
@@ -804,19 +815,20 @@
       $sth->func($blob, length($blob), 'ct_send_data') or die $sth->errstr;
 
       $sth->func('ct_finish_send') or die $sth->errstr;
-    };
-    my $exception = $@;
-    $sth->finish if $sth;
-    if ($exception) {
+    }
+    catch {
       if ($self->using_freetds) {
         $self->throw_exception (
-          'TEXT/IMAGE operation failed, probably because you are using FreeTDS: '
-          . $exception
+          "TEXT/IMAGE operation failed, probably because you are using FreeTDS: $_"
         );
-      } else {
-        $self->throw_exception($exception);
       }
+      else {
+        $self->throw_exception($_);
+      }
     }
+    finally {
+      $sth->finish if $sth;
+    };
   }
 }
 
@@ -846,7 +858,7 @@
 
   on_connect_call => 'datetime_setup'
 
-In L<DBIx::Class::Storage::DBI/connect_info> to set:
+In L<connect_info|DBIx::Class::Storage::DBI/connect_info> to set:
 
   $dbh->syb_date_fmt('ISO_strict'); # output fmt: 2004-08-21T14:36:48.080Z
   $dbh->do('set dateformat mdy');   # input fmt:  08/13/1979 18:08:55.080
@@ -1069,11 +1081,12 @@
 instead.
 
 However, the C<LongReadLen> you pass in
-L<DBIx::Class::Storage::DBI/connect_info> is used to execute the equivalent
-C<SET TEXTSIZE> command on connection.
+L<connect_info|DBIx::Class::Storage::DBI/connect_info> is used to execute the
+equivalent C<SET TEXTSIZE> command on connection.
 
-See L</connect_call_blob_setup> for a L<DBIx::Class::Storage::DBI/connect_info>
-setting you need to work with C<IMAGE> columns.
+See L</connect_call_blob_setup> for a
+L<connect_info|DBIx::Class::Storage::DBI/connect_info> setting you need to work
+with C<IMAGE> columns.
 
 =head1 BULK API
 

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Sybase/Microsoft_SQL_Server.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Sybase/Microsoft_SQL_Server.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Sybase/Microsoft_SQL_Server.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -8,6 +8,7 @@
   DBIx::Class::Storage::DBI::MSSQL
 /;
 use mro 'c3';
+use Carp::Clan qw/^DBIx::Class/;
 
 sub _rebless {
   my $self = shift;
@@ -55,6 +56,88 @@
   $dbh->do('ROLLBACK');
 }
 
+sub _get_server_version {
+  my $self = shift;
+
+  my $product_version = $self->_get_dbh->selectrow_hashref('master.dbo.xp_msver ProductVersion');
+
+  if ((my $version = $product_version->{Character_Value}) =~ /^(\d+)\./) {
+    return $version;
+  }
+  else {
+    $self->throw_exception(
+      "MSSQL Version Retrieval Failed, Your ProductVersion's Character_Value is missing or malformed!"
+    );
+  }
+}
+
+=head2 connect_call_datetime_setup
+
+Used as:
+
+  on_connect_call => 'datetime_setup'
+
+In L<connect_info|DBIx::Class::Storage::DBI/connect_info> to set:
+
+  $dbh->syb_date_fmt('ISO_strict'); # output fmt: 2004-08-21T14:36:48.080Z
+
+On connection for use with L<DBIx::Class::InflateColumn::DateTime>
+
+This works for both C<DATETIME> and C<SMALLDATETIME> columns, although
+C<SMALLDATETIME> columns only have minute precision.
+
+=cut
+
+{
+  my $old_dbd_warned = 0;
+
+  sub connect_call_datetime_setup {
+    my $self = shift;
+    my $dbh = $self->_get_dbh;
+
+    if ($dbh->can('syb_date_fmt')) {
+      # amazingly, this works with FreeTDS
+      $dbh->syb_date_fmt('ISO_strict');
+    } elsif (not $old_dbd_warned) {
+      carp "Your DBD::Sybase is too old to support ".
+      "DBIx::Class::InflateColumn::DateTime, please upgrade!";
+      $old_dbd_warned = 1;
+    }
+  }
+}
+
+sub datetime_parser_type {
+  'DBIx::Class::Storage::DBI::Sybase::Microsoft_SQL_Server::DateTime::Format'
+} 
+
+package # hide from PAUSE
+  DBIx::Class::Storage::DBI::Sybase::Microsoft_SQL_Server::DateTime::Format;
+
+my $datetime_parse_format  = '%Y-%m-%dT%H:%M:%S.%3NZ';
+my $datetime_format_format = '%Y-%m-%d %H:%M:%S.%3N'; # %F %T 
+
+my ($datetime_parser, $datetime_formatter);
+
+sub parse_datetime {
+  shift;
+  require DateTime::Format::Strptime;
+  $datetime_parser ||= DateTime::Format::Strptime->new(
+    pattern  => $datetime_parse_format,
+    on_error => 'croak',
+  );
+  return $datetime_parser->parse_datetime(shift);
+}
+
+sub format_datetime {
+  shift;
+  require DateTime::Format::Strptime;
+  $datetime_formatter ||= DateTime::Format::Strptime->new(
+    pattern  => $datetime_format_format,
+    on_error => 'croak',
+  );
+  return $datetime_formatter->format_datetime(shift);
+}
+
 1;
 
 =head1 NAME

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Sybase.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Sybase.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/Sybase.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -2,6 +2,8 @@
 
 use strict;
 use warnings;
+use Try::Tiny;
+use namespace::clean;
 
 use base qw/DBIx::Class::Storage::DBI/;
 
@@ -22,13 +24,13 @@
 sub _rebless {
   my $self = shift;
 
-  my $dbtype = eval {
-    @{$self->_get_dbh->selectrow_arrayref(qq{sp_server_info \@attribute_id=1})}[2]
+  my $dbtype;
+  try {
+    $dbtype = @{$self->_get_dbh->selectrow_arrayref(qq{sp_server_info \@attribute_id=1})}[2]
+  } catch {
+    $self->throw_exception("Unable to establish connection to determine database type: $_")
   };
 
-  $self->throw_exception("Unable to estable connection to determine database type: $@")
-    if $@;
-
   if ($dbtype) {
     $dbtype =~ s/\W/_/gi;
 
@@ -53,17 +55,17 @@
 
   if ($dbh->{syb_no_child_con}) {
 # if extra connections are not allowed, then ->ping is reliable
-    my $ping = eval { $dbh->ping };
-    return $@ ? 0 : $ping;
+    return try { $dbh->ping } catch { 0; };
   }
 
-  eval {
+  return try {
 # XXX if the main connection goes stale, does opening another for this statement
 # really determine anything?
     $dbh->do('select 1');
+    1;
+  } catch {
+    0;
   };
-
-  return $@ ? 0 : 1;
 }
 
 sub _set_max_connect {
@@ -103,15 +105,18 @@
   $dbh->do("SET TEXTSIZE $bytes");
 
 Takes the number of bytes, or uses the C<LongReadLen> value from your
-L<DBIx::Class/connect_info> if omitted, lastly falls back to the C<32768> which
-is the L<DBD::Sybase> default.
+L<connect_info|DBIx::Class::Storage::DBI/connect_info> if omitted, lastly falls
+back to the C<32768> which is the L<DBD::Sybase> default.
 
 =cut
 
 sub set_textsize {
   my $self = shift;
-  my $text_size = shift ||
-    eval { $self->_dbi_connect_info->[-1]->{LongReadLen} } ||
+  my $text_size =
+    shift
+      ||
+    try { $self->_dbi_connect_info->[-1]->{LongReadLen} }
+      ||
     32768; # the DBD::Sybase default
 
   return unless defined $text_size;

Added: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/UniqueIdentifier.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/UniqueIdentifier.pm	                        (rev 0)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/UniqueIdentifier.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -0,0 +1,83 @@
+package DBIx::Class::Storage::DBI::UniqueIdentifier;
+
+use strict;
+use warnings;
+use base 'DBIx::Class::Storage::DBI';
+use mro 'c3';
+
+=head1 NAME
+
+DBIx::Class::Storage::DBI::UniqueIdentifier - Storage component for RDBMSes
+supporting the 'uniqueidentifier' type
+
+=head1 DESCRIPTION
+
+This is a storage component for databases that support the C<uniqueidentifier>
+type and the C<NEWID()> function for generating UUIDs.
+
+UUIDs are generated automatically for PK columns with the C<uniqueidentifier>
+L<data_type|DBIx::Class::ResultSource/data_type>, as well as non-PK with this
+L<data_type|DBIx::Class::ResultSource/data_type> and
+L<auto_nextval|DBIx::Class::ResultSource/auto_nextval>.
+
+Currently used by L<DBIx::Class::Storage::DBI::MSSQL> and
+L<DBIx::Class::Storage::DBI::SQLAnywhere>.
+
+The composing class can define a C<_new_uuid> method to override the function
+used to generate a new UUID.
+
+=cut
+
+sub _new_uuid { 'NEWID()' }
+
+sub insert {
+  my $self = shift;
+  my ($source, $to_insert) = @_;
+
+  my $supplied_col_info = $self->_resolve_column_info($source, [keys %$to_insert] );
+
+  my %guid_cols;
+  my @pk_cols = $source->primary_columns;
+  my %pk_cols;
+  @pk_cols{@pk_cols} = ();
+
+  my @pk_guids = grep {
+    $source->column_info($_)->{data_type}
+    &&
+    $source->column_info($_)->{data_type} =~ /^uniqueidentifier/i
+  } @pk_cols;
+
+  my @auto_guids = grep {
+    $source->column_info($_)->{data_type}
+    &&
+    $source->column_info($_)->{data_type} =~ /^uniqueidentifier/i
+    &&
+    $source->column_info($_)->{auto_nextval}
+  } grep { not exists $pk_cols{$_} } $source->columns;
+
+  my @get_guids_for =
+    grep { not exists $to_insert->{$_} } (@pk_guids, @auto_guids);
+
+  my $updated_cols = {};
+
+  for my $guid_col (@get_guids_for) {
+    my ($new_guid) = $self->_get_dbh->selectrow_array('SELECT '.$self->_new_uuid);
+    $updated_cols->{$guid_col} = $to_insert->{$guid_col} = $new_guid;
+  }
+
+  $updated_cols = { %$updated_cols, %{ $self->next::method(@_) } };
+
+  return $updated_cols;
+}
+
+=head1 AUTHOR
+
+See L<DBIx::Class/AUTHOR> and L<DBIx::Class/CONTRIBUTORS>.
+
+=head1 LICENSE
+
+You may distribute this code under the same terms as Perl itself.
+
+=cut
+
+1;

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/mysql.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/mysql.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI/mysql.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -51,6 +51,23 @@
   return 'MySQL';
 }
 
+sub deployment_statements {
+  my $self = shift;
+  my ($schema, $type, $version, $dir, $sqltargs, @rest) = @_;
+
+  $sqltargs ||= {};
+
+  if (
+    ! exists $sqltargs->{producer_args}{mysql_version}
+      and 
+    my $dver = $self->_server_info->{normalized_dbms_version}
+  ) {
+    $sqltargs->{producer_args}{mysql_version} = $dver;
+  }
+
+  $self->next::method($schema, $type, $version, $dir, $sqltargs, @rest);
+}
+
 sub _svp_begin {
     my ($self, $name) = @_;
 
@@ -99,8 +116,13 @@
 
 =head1 DESCRIPTION
 
-This class implements MySQL specific bits of L<DBIx::Class::Storage::DBI>.
+This class implements MySQL specific bits of L<DBIx::Class::Storage::DBI>,
+like AutoIncrement column support and savepoints. Also it augments the
+SQL maker to support the MySQL-specific C<STRAIGHT_JOIN> join type, which
+you can use by specifying C<< join_type => 'straight' >> in the
+L<relationship attributes|DBIx::Class::Relationship::Base/join_type>.
 
+
 It also provides a one-stop on-connect macro C<set_strict_mode> which sets
 session variables such that MySQL behaves more predictably as far as the
 SQL standard is concerned.

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBI.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -11,15 +11,18 @@
 use DBI;
 use DBIx::Class::Storage::DBI::Cursor;
 use DBIx::Class::Storage::Statistics;
-use Scalar::Util();
-use List::Util();
-use Data::Dumper::Concise();
-use Sub::Name ();
+use Scalar::Util qw/refaddr weaken reftype blessed/;
+use Data::Dumper::Concise 'Dumper';
+use Sub::Name 'subname';
+use Try::Tiny;
+use File::Path 'make_path';
+use namespace::clean;
 
-__PACKAGE__->mk_group_accessors('simple' =>
-  qw/_connect_info _dbi_connect_info _dbh _sql_maker _sql_maker_opts _conn_pid
-     _conn_tid transaction_depth _dbh_autocommit _driver_determined savepoints/
-);
+__PACKAGE__->mk_group_accessors('simple' => qw/
+  _connect_info _dbi_connect_info _dbic_connect_attributes _driver_determined
+  _dbh _server_info_hash _conn_pid _conn_tid _sql_maker _sql_maker_opts
+  transaction_depth _dbh_autocommit  savepoints
+/);
 
 # the values for these accessors are picked out (and deleted) from
 # the attribute hashref passed to connect_info
@@ -33,15 +36,18 @@
 # default cursor class, overridable in connect_info attributes
 __PACKAGE__->cursor_class('DBIx::Class::Storage::DBI::Cursor');
 
-__PACKAGE__->mk_group_accessors('inherited' => qw/sql_maker_class/);
+__PACKAGE__->mk_group_accessors('inherited' => qw/
+  sql_maker_class
+  _supports_insert_returning
+/);
 __PACKAGE__->sql_maker_class('DBIx::Class::SQLAHacks');
 
-
 # Each of these methods need _determine_driver called before itself
 # in order to function reliably. This is a purely DRY optimization
 my @rdbms_specific_methods = qw/
   deployment_statements
   sqlt_type
+  sql_maker
   build_datetime_parser
   datetime_parser_type
 
@@ -60,7 +66,7 @@
 
   no strict qw/refs/;
   no warnings qw/redefine/;
-  *{__PACKAGE__ ."::$meth"} = Sub::Name::subname $meth => sub {
+  *{__PACKAGE__ ."::$meth"} = subname $meth => sub {
     if (not $_[0]->_driver_determined) {
       $_[0]->_determine_driver;
       goto $_[0]->can($meth);
@@ -89,7 +95,7 @@
   );
 
   $schema->resultset('Book')->search({
-     written_on => $schema->storage->datetime_parser(DateTime->now)
+     written_on => $schema->storage->datetime_parser->format_datetime(DateTime->now)
   });
 
 =head1 DESCRIPTION
@@ -111,9 +117,101 @@
   $new->{_in_dbh_do} = 0;
   $new->{_dbh_gen} = 0;
 
+  # read below to see what this does
+  $new->_arm_global_destructor;
+
   $new;
 }
 
+# This is hack to work around perl shooting stuff in random
+# order on exit(). If we do not walk the remaining storage
+# objects in an END block, there is a *small but real* chance
+# of a fork()ed child to kill the parent's shared DBI handle,
+# *before perl reaches the DESTROY in this package*
+# Yes, it is ugly and effective.
+{
+  my %seek_and_destroy;
+
+  sub _arm_global_destructor {
+    my $self = shift;
+    my $key = Scalar::Util::refaddr ($self);
+    $seek_and_destroy{$key} = $self;
+    Scalar::Util::weaken ($seek_and_destroy{$key});
+  }
+
+  END {
+    local $?; # just in case the DBI destructor changes it somehow
+
+    # destroy just the object if not native to this process/thread
+    $_->_preserve_foreign_dbh for (grep
+      { defined $_ }
+      values %seek_and_destroy
+    );
+  }
+}
+
+sub DESTROY {
+  my $self = shift;
+
+  # destroy just the object if not native to this process/thread
+  $self->_preserve_foreign_dbh;
+
+  # some databases need this to stop spewing warnings
+  if (my $dbh = $self->_dbh) {
+    try {
+      %{ $dbh->{CachedKids} } = ();
+      $dbh->disconnect;
+    };
+  }
+
+  $self->_dbh(undef);
+}
+
+sub _preserve_foreign_dbh {
+  my $self = shift;
+
+  return unless $self->_dbh;
+
+  $self->_verify_tid;
+
+  return unless $self->_dbh;
+
+  $self->_verify_pid;
+
+}
+
+# handle pid changes correctly - do not destroy parent's connection
+sub _verify_pid {
+  my $self = shift;
+
+  return if ( defined $self->_conn_pid and $self->_conn_pid == $$ );
+
+  $self->_dbh->{InactiveDestroy} = 1;
+  $self->_dbh(undef);
+  $self->{_dbh_gen}++;
+
+  return;
+}
+
+# very similar to above, but seems to FAIL if I set InactiveDestroy
+sub _verify_tid {
+  my $self = shift;
+
+  if ( ! defined $self->_conn_tid ) {
+    return; # no threads
+  }
+  elsif ( $self->_conn_tid == threads->tid ) {
+    return; # same thread
+  }
+
+  #$self->_dbh->{InactiveDestroy} = 1;  # why does t/51threads.t fail...?
+  $self->_dbh(undef);
+  $self->{_dbh_gen}++;
+
+  return;
+}
+
+
 =head2 connect_info
 
 This method is normally called by L<DBIx::Class::Schema/connection>, which
@@ -483,6 +581,11 @@
   $self->_dbi_connect_info([@args,
     %attrs && !(ref $args[0] eq 'CODE') ? \%attrs : ()]);
 
+  # FIXME - dirty:
+  # save the attributes in a separate accessor so they are always
+  # introspectable, even in case of a CODE $dbhmaker
+  $self->_dbic_connect_attributes (\%attrs);
+
   return $self->_connect_info;
 }
 
@@ -624,39 +727,26 @@
 
   my $dbh = $self->_get_dbh;
 
-  return $self->$code($dbh, @_) if $self->{_in_dbh_do}
-      || $self->{transaction_depth};
+  return $self->$code($dbh, @_)
+    if ( $self->{_in_dbh_do} || $self->{transaction_depth} );
 
   local $self->{_in_dbh_do} = 1;
 
-  my @result;
-  my $want_array = wantarray;
+  # take a ref instead of a copy, to preserve coderef @_ aliasing semantics
+  my $args = \@_;
+  return try {
+    $self->$code ($dbh, @$args);
+  } catch {
+    $self->throw_exception($_) if $self->connected;
 
-  eval {
+    # We were not connected - reconnect and retry, but let any
+    #  exception fall right through this time
+    carp "Retrying $code after catching disconnected exception: $_"
+      if $ENV{DBIC_DBIRETRY_DEBUG};
 
-    if($want_array) {
-        @result = $self->$code($dbh, @_);
-    }
-    elsif(defined $want_array) {
-        $result[0] = $self->$code($dbh, @_);
-    }
-    else {
-        $self->$code($dbh, @_);
-    }
+    $self->_populate_dbh;
+    $self->$code($self->_dbh, @$args);
   };
-
-  # ->connected might unset $@ - copy
-  my $exception = $@;
-  if(!$exception) { return $want_array ? @result : $result[0] }
-
-  $self->throw_exception($exception) if $self->connected;
-
-  # We were not connected - reconnect and retry, but let any
-  #  exception fall right through this time
-  carp "Retrying $code after catching disconnected exception: $exception"
-    if $ENV{DBIC_DBIRETRY_DEBUG};
-  $self->_populate_dbh;
-  $self->$code($self->_dbh, @_);
 }
 
 # This is basically a blend of dbh_do above and DBIx::Class::Storage::txn_do.
@@ -678,30 +768,35 @@
 
   my $tried = 0;
   while(1) {
-    eval {
+    my $exception;
+
+    # take a ref instead of a copy, to preserve coderef @_ aliasing semantics
+    my $args = \@_;
+
+    try {
       $self->_get_dbh;
 
       $self->txn_begin;
       if($want_array) {
-          @result = $coderef->(@_);
+          @result = $coderef->(@$args);
       }
       elsif(defined $want_array) {
-          $result[0] = $coderef->(@_);
+          $result[0] = $coderef->(@$args);
       }
       else {
-          $coderef->(@_);
+          $coderef->(@$args);
       }
       $self->txn_commit;
+    } catch {
+      $exception = $_;
     };
 
-    # ->connected might unset $@ - copy
-    my $exception = $@;
-    if(!$exception) { return $want_array ? @result : $result[0] }
+    if(! defined $exception) { return $want_array ? @result : $result[0] }
 
     if($tried++ || $self->connected) {
-      eval { $self->txn_rollback };
-      my $rollback_exception = $@;
-      if($rollback_exception) {
+      my $rollback_exception;
+      try { $self->txn_rollback } catch { $rollback_exception = shift };
+      if(defined $rollback_exception) {
         my $exception_class = "DBIx::Class::Storage::NESTED_ROLLBACK_EXCEPTION";
         $self->throw_exception($exception)  # propagate nested rollback
           if $rollback_exception =~ /$exception_class/;
@@ -799,19 +894,11 @@
 sub _seems_connected {
   my $self = shift;
 
+  $self->_preserve_foreign_dbh;
+
   my $dbh = $self->_dbh
     or return 0;
 
-  if(defined $self->_conn_tid && $self->_conn_tid != threads->tid) {
-    $self->_dbh(undef);
-    $self->{_dbh_gen}++;
-    return 0;
-  }
-  else {
-    $self->_verify_pid;
-    return 0 if !$self->_dbh;
-  }
-
   return $dbh->FETCH('Active');
 }
 
@@ -823,20 +910,6 @@
   return $dbh->ping;
 }
 
-# handle pid changes correctly
-#  NOTE: assumes $self->_dbh is a valid $dbh
-sub _verify_pid {
-  my ($self) = @_;
-
-  return if defined $self->_conn_pid && $self->_conn_pid == $$;
-
-  $self->_dbh->{InactiveDestroy} = 1;
-  $self->_dbh(undef);
-  $self->{_dbh_gen}++;
-
-  return;
-}
-
 sub ensure_connected {
   my ($self) = @_;
 
@@ -869,7 +942,7 @@
 # this is the internal "get dbh or connect (don't check)" method
 sub _get_dbh {
   my $self = shift;
-  $self->_verify_pid if $self->_dbh;
+  $self->_preserve_foreign_dbh;
   $self->_populate_dbh unless $self->_dbh;
   return $self->_dbh;
 }
@@ -904,6 +977,7 @@
 
   my @info = @{$self->_dbi_connect_info || []};
   $self->_dbh(undef); # in case ->connected failed we might get sent here
+  $self->_server_info_hash (undef);
   $self->_dbh($self->_connect(@info));
 
   $self->_conn_pid($$);
@@ -928,6 +1002,48 @@
   $self->_do_connection_actions(connect_call_ => $_) for @actions;
 }
 
+sub _server_info {
+  my $self = shift;
+
+  unless ($self->_server_info_hash) {
+
+    my %info;
+
+    my $server_version = try { $self->_get_server_version };
+
+    if (defined $server_version) {
+      $info{dbms_version} = $server_version;
+
+      my ($numeric_version) = $server_version =~ /^([\d\.]+)/;
+      my @verparts = split (/\./, $numeric_version);
+      if (
+        @verparts
+          &&
+        $verparts[0] <= 999
+      ) {
+        # consider only up to 3 version parts, iff not more than 3 digits
+        my @use_parts;
+        while (@verparts && @use_parts < 3) {
+          my $p = shift @verparts;
+          last if $p > 999;
+          push @use_parts, $p;
+        }
+        push @use_parts, 0 while @use_parts < 3;
+
+        $info{normalized_dbms_version} = sprintf "%d.%03d%03d", @use_parts;
+      }
+    }
+
+    $self->_server_info_hash(\%info);
+  }
+
+  return $self->_server_info_hash
+}
+
+sub _get_server_version {
+  shift->_get_dbh->get_info(18);
+}
+
 sub _determine_driver {
   my ($self) = @_;
 
@@ -943,7 +1059,7 @@
       } else {
         # if connect_info is a CODEREF, we have no choice but to connect
         if (ref $self->_dbi_connect_info->[0] &&
-            Scalar::Util::reftype($self->_dbi_connect_info->[0]) eq 'CODE') {
+            reftype $self->_dbi_connect_info->[0] eq 'CODE') {
           $self->_populate_dbh;
           $driver = $self->_dbh->{Driver}{Name};
         }
@@ -951,8 +1067,9 @@
           # try to use dsn to not require being connected, the driver may still
           # force a connection in _rebless to determine version
           # (dsn may not be supplied at all if all we do is make a mock-schema)
-          my $dsn = $self->_dbi_connect_info->[0] || '';
+          my $dsn = $self->_dbi_connect_info->[0] || $ENV{DBI_DSN} || '';
           ($driver) = $dsn =~ /dbi:([^:]+):/i;
+          $driver ||= $ENV{DBI_DRIVER};
         }
       }
 
@@ -1049,7 +1166,7 @@
     $DBI::connect_via = 'connect';
   }
 
-  eval {
+  try {
     if(ref $info[0] eq 'CODE') {
        $dbh = $info[0]->();
     }
@@ -1057,9 +1174,13 @@
        $dbh = DBI->connect(@info);
     }
 
-    if($dbh && !$self->unsafe) {
+    if (!$dbh) {
+      die $DBI::errstr;
+    }
+
+    unless ($self->unsafe) {
       my $weak_self = $self;
-      Scalar::Util::weaken($weak_self);
+      weaken $weak_self;
       $dbh->{HandleError} = sub {
           if ($weak_self) {
             $weak_self->throw_exception("DBI Exception: $_[0]");
@@ -1074,15 +1195,15 @@
       $dbh->{RaiseError} = 1;
       $dbh->{PrintError} = 0;
     }
+  }
+  catch {
+    $self->throw_exception("DBI Connection failed: $_")
+  }
+  finally {
+    $DBI::connect_via = $old_connect_via if $old_connect_via;
   };
 
-  $DBI::connect_via = $old_connect_via if $old_connect_via;
-
-  $self->throw_exception("DBI Connection failed: " . ($@||$DBI::errstr))
-    if !$dbh || $@;
-
   $self->_dbh_autocommit($dbh->{AutoCommit});
-
   $dbh;
 }
 
@@ -1230,7 +1351,7 @@
 sub txn_rollback {
   my $self = shift;
   my $dbh = $self->_dbh;
-  eval {
+  try {
     if ($self->{transaction_depth} == 1) {
       $self->debugobj->txn_rollback()
         if ($self->debug);
@@ -1248,15 +1369,17 @@
     else {
       die DBIx::Class::Storage::NESTED_ROLLBACK_EXCEPTION->new;
     }
+  }
+  catch {
+    my $exception_class = "DBIx::Class::Storage::NESTED_ROLLBACK_EXCEPTION";
+
+    if ($_ !~ /$exception_class/) {
+      # ensure that a failed rollback resets the transaction depth
+      $self->{transaction_depth} = $self->_dbh_autocommit ? 0 : 1;
+    }
+
+    $self->throw_exception($_)
   };
-  if ($@) {
-    my $error = $@;
-    my $exception_class = "DBIx::Class::Storage::NESTED_ROLLBACK_EXCEPTION";
-    $error =~ /$exception_class/ and $self->throw_exception($error);
-    # ensure that a failed rollback resets the transaction depth
-    $self->{transaction_depth} = $self->_dbh_autocommit ? 0 : 1;
-    $self->throw_exception($error);
-  }
 }
 
 sub _dbh_rollback {
@@ -1272,7 +1395,7 @@
 sub _prep_for_execute {
   my ($self, $op, $extra_bind, $ident, $args) = @_;
 
-  if( Scalar::Util::blessed($ident) && $ident->isa("DBIx::Class::ResultSource") ) {
+  if( blessed $ident && $ident->isa("DBIx::Class::ResultSource") ) {
     $ident = $ident->from();
   }
 
@@ -1351,7 +1474,9 @@
 
   # Can this fail without throwing an exception anyways???
   my $rv = $sth->execute();
-  $self->throw_exception($sth->errstr) if !$rv;
+  $self->throw_exception(
+    $sth->errstr || $sth->err || 'Unknown error: execute() returned false, but error flags were not set...'
+  ) if !$rv;
 
   $self->_query_end( $sql, @$bind );
 
@@ -1363,20 +1488,17 @@
     $self->dbh_do('_dbh_execute', @_);  # retry over disconnects
 }
 
-sub insert {
+sub _prefetch_insert_auto_nextvals {
   my ($self, $source, $to_insert) = @_;
 
-  my $ident = $source->from;
-  my $bind_attributes = $self->source_bind_attributes($source);
+  my $upd = {};
 
-  my $updated_cols = {};
-
   foreach my $col ( $source->columns ) {
     if ( !defined $to_insert->{$col} ) {
       my $col_info = $source->column_info($col);
 
       if ( $col_info->{auto_nextval} ) {
-        $updated_cols->{$col} = $to_insert->{$col} = $self->_sequence_fetch(
+        $upd->{$col} = $to_insert->{$col} = $self->_sequence_fetch(
           'nextval',
           $col_info->{sequence} ||=
             $self->_dbh_get_autoinc_seq($self->_get_dbh, $source, $col)
@@ -1385,8 +1507,38 @@
     }
   }
 
-  $self->_execute('insert' => [], $source, $bind_attributes, $to_insert);
+  return $upd;
+}
 
+sub insert {
+  my $self = shift;
+  my ($source, $to_insert, $opts) = @_;
+
+  my $updated_cols = $self->_prefetch_insert_auto_nextvals (@_);
+
+  my $bind_attributes = $self->source_bind_attributes($source);
+
+  my ($rv, $sth) = $self->_execute('insert' => [], $source, $bind_attributes, $to_insert, $opts);
+
+  if ($opts->{returning}) {
+    my @ret_cols = @{$opts->{returning}};
+
+    my @ret_vals = try {
+      local $SIG{__WARN__} = sub {};
+      my @r = $sth->fetchrow_array;
+      $sth->finish;
+      @r;
+    };
+
+    my %ret;
+    @ret{@ret_cols} = @ret_vals if (@ret_vals);
+
+    $updated_cols = {
+      %$updated_cols,
+      %ret,
+    };
+  }
+
   return $updated_cols;
 }
 
@@ -1414,9 +1566,9 @@
       $cols->[$col_idx],
       do {
         local $Data::Dumper::Maxdepth = 1; # don't dump objects, if any
-        Data::Dumper::Concise::Dumper({
+        Dumper {
           map { $cols->[$_] => $data->[$slice_idx][$_] } (0 .. $#$cols)
-        }),
+        },
       }
     );
   };
@@ -1526,16 +1678,27 @@
     $placeholder_index++;
   }
 
-  my $rv = eval {
-    $self->_dbh_execute_array($sth, $tuple_status, @extra);
+  my ($rv, $err);
+  try {
+    $rv = $self->_dbh_execute_array($sth, $tuple_status, @extra);
+  }
+  catch {
+    $err = shift;
+  }
+  finally {
+    # Statement must finish even if there was an exception.
+    try {
+      $sth->finish
+    }
+    catch {
+      $err = shift unless defined $err
+    };
   };
-  my $err = $@ || $sth->errstr;
 
-# Statement must finish even if there was an exception.
-  eval { $sth->finish };
-  $err = $@ unless $err;
+  $err = $sth->errstr
+    if (! defined $err and $sth->err);
 
-  if ($err) {
+  if (defined $err) {
     my $i = 0;
     ++$i while $i <= $#$tuple_status && !ref $tuple_status->[$i];
 
@@ -1544,11 +1707,10 @@
 
     $self->throw_exception(sprintf "%s for populate slice:\n%s",
       ($tuple_status->[$i][1] || $err),
-      Data::Dumper::Concise::Dumper({
-        map { $cols->[$_] => $data->[$i][$_] } (0 .. $#$cols)
-      }),
+      Dumper { map { $cols->[$_] => $data->[$i][$_] } (0 .. $#$cols) },
     );
   }
+
   return $rv;
 }
 
@@ -1561,21 +1723,29 @@
 sub _dbh_execute_inserts_with_no_binds {
   my ($self, $sth, $count) = @_;
 
-  eval {
+  my $err;
+  try {
     my $dbh = $self->_get_dbh;
     local $dbh->{RaiseError} = 1;
     local $dbh->{PrintError} = 0;
 
     $sth->execute foreach 1..$count;
+  }
+  catch {
+    $err = shift;
+  }
+  finally {
+    # Make sure statement is finished even if there was an exception.
+    try {
+      $sth->finish
+    }
+    catch {
+      $err = shift unless defined $err;
+    };
   };
-  my $exception = $@;
 
-# Make sure statement is finished even if there was an exception.
-  eval { $sth->finish };
-  $exception = $@ unless $exception;
+  $self->throw_exception($err) if defined $err;
 
-  $self->throw_exception($exception) if $exception;
-
   return $count;
 }
 
@@ -1695,31 +1865,18 @@
 
 sub _select {
   my $self = shift;
-
-  # localization is neccessary as
-  # 1) there is no infrastructure to pass this around before SQLA2
-  # 2) _select_args sets it and _prep_for_execute consumes it
-  my $sql_maker = $self->sql_maker;
-  local $sql_maker->{_dbic_rs_attrs};
-
-  return $self->_execute($self->_select_args(@_));
+  $self->_execute($self->_select_args(@_));
 }
 
 sub _select_args_to_query {
   my $self = shift;
 
-  # localization is neccessary as
-  # 1) there is no infrastructure to pass this around before SQLA2
-  # 2) _select_args sets it and _prep_for_execute consumes it
-  my $sql_maker = $self->sql_maker;
-  local $sql_maker->{_dbic_rs_attrs};
-
-  # my ($op, $bind, $ident, $bind_attrs, $select, $cond, $order, $rows, $offset)
+  # my ($op, $bind, $ident, $bind_attrs, $select, $cond, $rs_attrs, $rows, $offset)
   #  = $self->_select_args($ident, $select, $cond, $attrs);
   my ($op, $bind, $ident, $bind_attrs, @args) =
     $self->_select_args(@_);
 
-  # my ($sql, $prepared_bind) = $self->_prep_for_execute($op, $bind, $ident, [ $select, $cond, $order, $rows, $offset ]);
+  # my ($sql, $prepared_bind) = $self->_prep_for_execute($op, $bind, $ident, [ $select, $cond, $rs_attrs, $rows, $offset ]);
   my ($sql, $prepared_bind) = $self->_prep_for_execute($op, $bind, $ident, \@args);
   $prepared_bind ||= [];
 
@@ -1732,16 +1889,16 @@
 sub _select_args {
   my ($self, $ident, $select, $where, $attrs) = @_;
 
+  my $sql_maker = $self->sql_maker;
   my ($alias2source, $rs_alias) = $self->_resolve_ident_sources ($ident);
 
-  my $sql_maker = $self->sql_maker;
-  $sql_maker->{_dbic_rs_attrs} = {
+  $attrs = {
     %$attrs,
     select => $select,
     from => $ident,
     where => $where,
     $rs_alias && $alias2source->{$rs_alias}
-      ? ( _source_handle => $alias2source->{$rs_alias}->handle )
+      ? ( _rsroot_source_handle => $alias2source->{$rs_alias}->handle )
       : ()
     ,
   };
@@ -1774,19 +1931,13 @@
   }
 
   # adjust limits
-  if (
-    $attrs->{software_limit}
-      ||
-    $sql_maker->_default_limit_syntax eq "GenericSubQ"
-  ) {
-    $attrs->{software_limit} = 1;
+  if (defined $attrs->{rows}) {
+    $self->throw_exception("rows attribute must be positive if present")
+      unless $attrs->{rows} > 0;
   }
-  else {
-    $self->throw_exception("rows attribute must be positive if present")
-      if (defined($attrs->{rows}) && !($attrs->{rows} > 0));
-
+  elsif (defined $attrs->{offset}) {
     # MySQL actually recommends this approach.  I cringe.
-    $attrs->{rows} = 2**48 if not defined $attrs->{rows} and defined $attrs->{offset};
+    $attrs->{rows} = $sql_maker->__max_int;
   }
 
   my @limit;
@@ -1797,18 +1948,7 @@
     #limited has_many
     ( $attrs->{rows} && keys %{$attrs->{collapse}} )
        ||
-    # limited prefetch with RNO subqueries
-    (
-      $attrs->{rows}
-        &&
-      $sql_maker->limit_dialect eq 'RowNumberOver'
-        &&
-      $attrs->{_prefetch_select}
-        &&
-      @{$attrs->{_prefetch_select}}
-    )
-      ||
-    # grouped prefetch
+    # grouped prefetch (to satisfy group_by == select)
     ( $attrs->{group_by}
         &&
       @{$attrs->{group_by}}
@@ -1821,39 +1961,6 @@
     ($ident, $select, $where, $attrs)
       = $self->_adjust_select_args_for_complex_prefetch ($ident, $select, $where, $attrs);
   }
-
-  elsif (
-    ($attrs->{rows} || $attrs->{offset})
-      &&
-    $sql_maker->limit_dialect eq 'RowNumberOver'
-      &&
-    (ref $ident eq 'ARRAY' && @$ident > 1)  # indicates a join
-      &&
-    scalar $self->_parse_order_by ($attrs->{order_by})
-  ) {
-    # the RNO limit dialect above mangles the SQL such that the join gets lost
-    # wrap a subquery here
-
-    push @limit, delete @{$attrs}{qw/rows offset/};
-
-    my $subq = $self->_select_args_to_query (
-      $ident,
-      $select,
-      $where,
-      $attrs,
-    );
-
-    $ident = {
-      -alias => $attrs->{alias},
-      -source_handle => $ident->[0]{-source_handle},
-      $attrs->{alias} => $subq,
-    };
-
-    # all part of the subquery now
-    delete @{$attrs}{qw/order_by group_by having/};
-    $where = undef;
-  }
-
   elsif (! $attrs->{software_limit} ) {
     push @limit, $attrs->{rows}, $attrs->{offset};
   }
@@ -1871,12 +1978,7 @@
   # invoked, and that's just bad...
 ###
 
-  my $order = { map
-    { $attrs->{$_} ? ( $_ => $attrs->{$_} ) : ()  }
-    (qw/order_by group_by having/ )
-  };
-
-  return ('select', $attrs->{bind}, $ident, $bind_attrs, $select, $where, $order, @limit);
+  return ('select', $attrs->{bind}, $ident, $bind_attrs, $select, $where, $attrs, @limit);
 }
 
 # Returns a counting SELECT for a simple count
@@ -1888,47 +1990,7 @@
   return { count => '*' };
 }
 
-# Returns a SELECT which will end up in the subselect
-# There may or may not be a group_by, as the subquery
-# might have been called to accomodate a limit
-#
-# Most databases would be happy with whatever ends up
-# here, but some choke in various ways.
-#
-sub _subq_count_select {
-  my ($self, $source, $rs_attrs) = @_;
 
-  if (my $groupby = $rs_attrs->{group_by}) {
-
-    my $avail_columns = $self->_resolve_column_info ($rs_attrs->{from});
-
-    my $sel_index;
-    for my $sel (@{$rs_attrs->{select}}) {
-      if (ref $sel eq 'HASH' and $sel->{-as}) {
-        $sel_index->{$sel->{-as}} = $sel;
-      }
-    }
-
-    my @selection;
-    for my $g_part (@$groupby) {
-      if (ref $g_part or $avail_columns->{$g_part}) {
-        push @selection, $g_part;
-      }
-      elsif ($sel_index->{$g_part}) {
-        push @selection, $sel_index->{$g_part};
-      }
-      else {
-        $self->throw_exception ("group_by criteria '$g_part' not contained within current resultset source(s)");
-      }
-    }
-
-    return \@selection;
-  }
-
-  my @pcols = map { join '.', $rs_attrs->{alias}, $_ } ($source->primary_columns);
-  return @pcols ? \@pcols : [ 1 ];
-}
-
 sub source_bind_attributes {
   my ($self, $source) = @_;
 
@@ -2011,7 +2073,8 @@
 
   if ($dbh->can('column_info')) {
     my %result;
-    eval {
+    my $caught;
+    try {
       my ($schema,$tab) = $table =~ /^(.+?)\.(.+)$/ ? ($1,$2) : (undef,$table);
       my $sth = $dbh->column_info( undef,$schema, $tab, '%' );
       $sth->execute();
@@ -2026,8 +2089,10 @@
 
         $result{$col_name} = \%column_info;
       }
+    } catch {
+      $caught = 1;
     };
-    return \%result if !$@ && scalar keys %result;
+    return \%result if !$caught && scalar keys %result;
   }
 
   my %result;
@@ -2077,7 +2142,7 @@
 sub _dbh_last_insert_id {
     my ($self, $dbh, $source, $col) = @_;
 
-    my $id = eval { $dbh->last_insert_id (undef, undef, $source->name, $col) };
+    my $id = try { $dbh->last_insert_id (undef, undef, $source->name, $col) };
 
     return $id if defined $id;
 
@@ -2128,12 +2193,15 @@
 
   # some drivers provide a $dbh attribute (e.g. Sybase and $dbh->{syb_dynamic_supported})
   # but it is inaccurate more often than not
-  eval {
+  return try {
     local $dbh->{PrintError} = 0;
     local $dbh->{RaiseError} = 1;
     $dbh->do('select ?', {}, 1);
+    1;
+  }
+  catch {
+    0;
   };
-  return $@ ? 0 : 1;
 }
 
 # Check if placeholders bound to non-string types throw exceptions
@@ -2142,13 +2210,16 @@
   my $self = shift;
   my $dbh  = $self->_get_dbh;
 
-  eval {
+  return try {
     local $dbh->{PrintError} = 0;
     local $dbh->{RaiseError} = 1;
     # this specifically tests a bind that is NOT a string
     $dbh->do('select 1 where 1 = ?', {}, 1);
+    1;
+  }
+  catch {
+    0;
   };
-  return $@ ? 0 : 1;
 }
 
 =head2 sqlt_type
@@ -2259,6 +2330,14 @@
   unless ($dir) {
     carp "No directory given, using ./\n";
     $dir = './';
+  } else {
+      -d $dir
+        or
+      make_path ("$dir")  # make_path does not like objects (i.e. Path::Class::Dir)
+        or
+      $self->throw_exception(
+        "Failed to create '$dir': " . ($! || $@ || 'error unknow')
+      );
   }
 
   $self->throw_exception ("Directory '$dir' does not exist\n") unless(-d $dir);
@@ -2462,14 +2541,13 @@
     return if($line =~ /^COMMIT/m);
     return if $line =~ /^\s+$/; # skip whitespace only
     $self->_query_start($line);
-    eval {
+    try {
       # do a dbh_do cycle here, as we need some error checking in
       # place (even though we will ignore errors)
       $self->dbh_do (sub { $_[1]->do($line) });
+    } catch {
+      carp qq{$_ (running "${line}")};
     };
-    if ($@) {
-      carp qq{$@ (running "${line}")};
-    }
     $self->_query_end($line);
   };
   my @statements = $schema->deployment_statements($type, undef, $dir, { %{ $sqltargs || {} }, no_comments => 1 } );
@@ -2574,23 +2652,6 @@
   return $alias;
 }
 
-sub DESTROY {
-  my $self = shift;
-
-  $self->_verify_pid if $self->_dbh;
-
-  # some databases need this to stop spewing warnings
-  if (my $dbh = $self->_dbh) {
-    local $@;
-    eval {
-      %{ $dbh->{CachedKids} } = ();
-      $dbh->disconnect;
-    };
-  }
-
-  $self->_dbh(undef);
-}
-
 1;
 
 =head1 USAGE NOTES

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBIHacks.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBIHacks.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/DBIHacks.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -14,6 +14,9 @@
 use mro 'c3';
 
 use Carp::Clan qw/^DBIx::Class/;
+use List::Util 'first';
+use Scalar::Util 'blessed';
+use namespace::clean;
 
 #
 # This code will remove non-selecting/non-restricting joins from
@@ -94,6 +97,8 @@
     }
 
     push @$inner_select, $sel;
+
+    push @{$inner_attrs->{as}}, $attrs->{as}[$i];
   }
 
   # construct the inner $from for the subquery
@@ -103,10 +108,7 @@
   # if a multi-type join was needed in the subquery - add a group_by to simulate the
   # collapse in the subq
   $inner_attrs->{group_by} ||= $inner_select
-    if List::Util::first
-      { ! $_->[0]{-is_single} }
-      (@{$inner_from}[1 .. $#$inner_from])
-  ;
+    if first { ! $_->[0]{-is_single} } (@{$inner_from}[1 .. $#$inner_from]);
 
   # generate the subquery
   my $subq = $self->_select_args_to_query (
@@ -164,10 +166,10 @@
   while (my $j = shift @$from) {
     my $alias = $j->[0]{-alias};
 
-    if ($outer_aliastypes->{select}{$alias}) {
+    if ($outer_aliastypes->{selecting}{$alias}) {
       push @outer_from, $j;
     }
-    elsif ($outer_aliastypes->{restrict}{$alias}) {
+    elsif ($outer_aliastypes->{restricting}{$alias}) {
       push @outer_from, $j;
       $outer_attrs->{group_by} ||= $outer_select unless $j->[0]{-is_single};
     }
@@ -186,15 +188,17 @@
   return (\@outer_from, $outer_select, $where, $outer_attrs);
 }
 
+#
+# I KNOW THIS SUCKS! GET SQLA2 OUT THE DOOR SO THIS CAN DIE!
+#
 # Due to a lack of SQLA2 we fall back to crude scans of all the
 # select/where/order/group attributes, in order to determine what
 # aliases are neded to fulfill the query. This information is used
 # throughout the code to prune unnecessary JOINs from the queries
 # in an attempt to reduce the execution time.
 # Although the method is pretty horrific, the worst thing that can
-# happen is for it to fail due to an unqualified column, which in
-# turn will result in a vocal exception. Qualifying the column will
-# invariably solve the problem.
+# happen is for it to fail due to some scalar SQL, which in turn will
+# result in a vocal exception.
 sub _resolve_aliastypes_from_select_args {
   my ( $self, $from, $select, $where, $attrs ) = @_;
 
@@ -217,36 +221,84 @@
       unless $j->{-is_single};
   }
 
+  # get a column to source/alias map (including unqualified ones)
+  my $colinfo = $self->_resolve_column_info ($from);
+
   # set up a botched SQLA
   my $sql_maker = $self->sql_maker;
   my $sep = quotemeta ($self->_sql_maker_opts->{name_sep} || '.');
-  local $sql_maker->{quote_char}; # so that we can regex away
 
-
-  my $select_sql = $sql_maker->_recurse_fields ($select);
-  my $where_sql = $sql_maker->where ($where);
-  my $group_by_sql = $sql_maker->_order_by({
-    map { $_ => $attrs->{$_} } qw/group_by having/
+  my ($orig_lquote, $orig_rquote) = map { quotemeta $_ } (do {
+    if (ref $sql_maker->{quote_char} eq 'ARRAY') {
+      @{$sql_maker->{quote_char}}
+    }
+    else {
+      ($sql_maker->{quote_char} || '') x 2;
+    }
   });
-  my @order_by_chunks = ($self->_parse_order_by ($attrs->{order_by}) );
 
-  # match every alias to the sql chunks above
+  local $sql_maker->{quote_char} = "\x00"; # so that we can regex away
+
+  # generate sql chunks
+  local $sql_maker->{having_bind};  # these are throw away results
+  my $to_scan = {
+    restricting => [
+      $sql_maker->_recurse_where ($where),
+      $sql_maker->_parse_rs_attrs ({
+        map { $_ => $attrs->{$_} } (qw/group_by having/)
+      }),
+    ],
+    selecting => [
+      $self->_parse_order_by ($attrs->{order_by}, $sql_maker),
+      $sql_maker->_recurse_fields ($select),
+    ],
+  };
+
+  # throw away empty chunks
+  $_ = [ map { $_ || () } @$_ ] for values %$to_scan;
+
+  # first loop through all fully qualified columns and get the corresponding
+  # alias (should work even if they are in scalarrefs)
   for my $alias (keys %$alias_list) {
-    my $al_re = qr/\b $alias $sep/x;
+    my $al_re = qr/
+      \x00 $alias \x00 $sep
+        |
+      \b $alias $sep
+    /x;
 
-    for my $piece ($where_sql, $group_by_sql) {
-      $aliases_by_type->{restrict}{$alias} = 1 if ($piece =~ $al_re);
+    # add matching for possible quoted literal sql
+    $al_re = qr/ $al_re | $orig_lquote $alias $orig_rquote /x
+      if ($orig_lquote && $orig_rquote);
+
+
+    for my $type (keys %$to_scan) {
+      for my $piece (@{$to_scan->{$type}}) {
+        $aliases_by_type->{$type}{$alias} = 1 if ($piece =~ $al_re);
+      }
     }
+  }
 
-    for my $piece ($select_sql, @order_by_chunks ) {
-      $aliases_by_type->{select}{$alias} = 1 if ($piece =~ $al_re);
+  # now loop through unqualified column names, and try to locate them within
+  # the chunks
+  for my $col (keys %$colinfo) {
+    next if $col =~ $sep;   # if column is qualified it was caught by the above
+
+    my $col_re = qr/ \x00 $col \x00 /x;
+
+    $col_re = qr/ $col_re | $orig_lquote $col $orig_rquote /x
+      if ($orig_lquote && $orig_rquote);
+
+    for my $type (keys %$to_scan) {
+      for my $piece (@{$to_scan->{$type}}) {
+        $aliases_by_type->{$type}{$colinfo->{$col}{-source_alias}} = 1 if ($piece =~ $col_re);
+      }
     }
   }
 
   # Add any non-left joins to the restriction list (such joins are indeed restrictions)
   for my $j (values %$alias_list) {
     my $alias = $j->{-alias} or next;
-    $aliases_by_type->{restrict}{$alias} = 1 if (
+    $aliases_by_type->{restricting}{$alias} = 1 if (
       (not $j->{-join_type})
         or
       ($j->{-join_type} !~ /^left (?: \s+ outer)? $/xi)
@@ -258,7 +310,7 @@
   for my $type (keys %$aliases_by_type) {
     for my $alias (keys %{$aliases_by_type->{$type}}) {
       $aliases_by_type->{$type}{$_} = 1
-        for (map { keys %$_ } @{ $alias_list->{$alias}{-join_path} || [] });
+        for (map { values %$_ } @{ $alias_list->{$alias}{-join_path} || [] });
     }
   }
 
@@ -273,7 +325,7 @@
 
   # the reason this is so contrived is that $ident may be a {from}
   # structure, specifying multiple tables to join
-  if ( Scalar::Util::blessed($ident) && $ident->isa("DBIx::Class::ResultSource") ) {
+  if ( blessed $ident && $ident->isa("DBIx::Class::ResultSource") ) {
     # this is compat mode for insert/update/delete which do not deal with aliases
     $alias2source->{me} = $ident;
     $rs_alias = 'me';
@@ -403,7 +455,7 @@
   # anyway, and deep cloning is just too fucking expensive
   # So replace the first hashref in the node arrayref manually 
   my @new_from = ($from->[0]);
-  my $sw_idx = { map { values %$_ => 1 } @$switch_branch };
+  my $sw_idx = { map { (values %$_), 1 } @$switch_branch }; #there's one k/v per join-path
 
   for my $j (@{$from}[1 .. $#$from]) {
     my $jalias = $j->[0]{-alias};
@@ -485,20 +537,31 @@
 }
 
 sub _parse_order_by {
-  my ($self, $order_by) = @_;
+  my ($self, $order_by, $sql_maker) = @_;
 
-  return scalar $self->sql_maker->_order_by_chunks ($order_by)
-    unless wantarray;
+  my $parser = sub {
+    my ($sql_maker, $order_by) = @_;
 
-  my $sql_maker = $self->sql_maker;
-  local $sql_maker->{quote_char}; #disable quoting
-  my @chunks;
-  for my $chunk (map { ref $_ ? @$_ : $_ } ($sql_maker->_order_by_chunks ($order_by) ) ) {
-    $chunk =~ s/\s+ (?: ASC|DESC ) \s* $//ix;
-    push @chunks, $chunk;
+    return scalar $sql_maker->_order_by_chunks ($order_by)
+      unless wantarray;
+
+    my @chunks;
+    for my $chunk (map { ref $_ ? @$_ : $_ } ($sql_maker->_order_by_chunks ($order_by) ) ) {
+      $chunk =~ s/\s+ (?: ASC|DESC ) \s* $//ix;
+      push @chunks, $chunk;
+    }
+
+    return @chunks;
+  };
+
+  if ($sql_maker) {
+    return $parser->($sql_maker, $order_by);
   }
-
-  return @chunks;
+  else {
+    $sql_maker = $self->sql_maker;
+    local $sql_maker->{quote_char};
+    return $parser->($sql_maker, $order_by);
+  }
 }
 
 1;

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/TxnScopeGuard.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/TxnScopeGuard.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage/TxnScopeGuard.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -3,6 +3,8 @@
 use strict;
 use warnings;
 use Carp::Clan qw/^DBIx::Class/;
+use Try::Tiny;
+use namespace::clean;
 
 sub new {
   my ($class, $storage) = @_;
@@ -31,10 +33,11 @@
     carp 'A DBIx::Class::Storage::TxnScopeGuard went out of scope without explicit commit or error. Rolling back.'
       unless $exception;
 
-    eval { $storage->txn_rollback };
-    my $rollback_exception = $@;
+    my $rollback_exception;
+    try { $storage->txn_rollback }
+    catch { $rollback_exception = shift };
 
-    if ($rollback_exception && $rollback_exception !~ /DBIx::Class::Storage::NESTED_ROLLBACK_EXCEPTION/) {
+    if (defined $rollback_exception && $rollback_exception !~ /DBIx::Class::Storage::NESTED_ROLLBACK_EXCEPTION/) {
       if ($exception) {
         $exception = "Transaction aborted: ${exception} "
           ."Rollback failed: ${rollback_exception}";
@@ -76,7 +79,7 @@
 =head1 DESCRIPTION
 
 An object that behaves much like L<Scope::Guard>, but hardcoded to do the
-right thing with transactions in DBIx::Class. 
+right thing with transactions in DBIx::Class.
 
 =head1 METHODS
 

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/Storage.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -7,9 +7,11 @@
 use mro 'c3';
 
 use DBIx::Class::Exception;
-use Scalar::Util();
+use Scalar::Util 'weaken';
 use IO::File;
 use DBIx::Class::Storage::TxnScopeGuard;
+use Try::Tiny;
+use namespace::clean;
 
 __PACKAGE__->mk_group_accessors('simple' => qw/debug debugobj schema/);
 __PACKAGE__->mk_group_accessors('inherited' => 'cursor_class');
@@ -83,7 +85,7 @@
 sub set_schema {
   my ($self, $schema) = @_;
   $self->schema($schema);
-  Scalar::Util::weaken($self->{schema}) if ref $self->{schema};
+  weaken $self->{schema} if ref $self->{schema};
 }
 
 =head2 connected
@@ -158,16 +160,16 @@
   };
 
   my $rs;
-  eval {
+  try {
     $rs = $schema->txn_do($coderef);
-  };
-
-  if ($@) {                                  # Transaction failed
+  } catch {
+    my $error = shift;
+    # Transaction failed
     die "something terrible has happened!"   #
-      if ($@ =~ /Rollback failed/);          # Rollback failed
+      if ($error =~ /Rollback failed/);          # Rollback failed
 
     deal_with_failed_transaction();
-  }
+  };
 
 In a nested transaction (calling txn_do() from within a txn_do() coderef) only
 the outermost transaction will issue a L</txn_commit>, and txn_do() can be
@@ -185,7 +187,8 @@
 =cut
 
 sub txn_do {
-  my ($self, $coderef, @args) = @_;
+  my $self = shift;
+  my $coderef = shift;
 
   ref $coderef eq 'CODE' or $self->throw_exception
     ('$coderef must be a CODE reference');
@@ -195,45 +198,42 @@
   $self->txn_begin; # If this throws an exception, no rollback is needed
 
   my $wantarray = wantarray; # Need to save this since the context
-                             # inside the eval{} block is independent
+                             # inside the try{} block is independent
                              # of the context that called txn_do()
-  eval {
+  my $args = \@_;
 
+  try {
+
     # Need to differentiate between scalar/list context to allow for
     # returning a list in scalar context to get the size of the list
     if ($wantarray) {
       # list context
-      @return_values = $coderef->(@args);
+      @return_values = $coderef->(@$args);
     } elsif (defined $wantarray) {
       # scalar context
-      $return_value = $coderef->(@args);
+      $return_value = $coderef->(@$args);
     } else {
       # void context
-      $coderef->(@args);
+      $coderef->(@$args);
     }
     $self->txn_commit;
-  };
+  }
+  catch {
+    my $error = shift;
 
-  if ($@) {
-    my $error = $@;
-
-    eval {
+    try {
       $self->txn_rollback;
-    };
-
-    if ($@) {
-      my $rollback_error = $@;
+    } catch {
       my $exception_class = "DBIx::Class::Storage::NESTED_ROLLBACK_EXCEPTION";
       $self->throw_exception($error)  # propagate nested rollback
-        if $rollback_error =~ /$exception_class/;
+        if $_ =~ /$exception_class/;
 
       $self->throw_exception(
-        "Transaction aborted: $error. Rollback failed: ${rollback_error}"
+        "Transaction aborted: $error. Rollback failed: $_"
       );
-    } else {
-      $self->throw_exception($error); # txn failed but rollback succeeded
     }
-  }
+    $self->throw_exception($error); # txn failed but rollback succeeded
+  };
 
   return $wantarray ? @return_values : $return_value;
 }
@@ -483,8 +483,8 @@
 written to the file C</path/name>.
 
 This environment variable is checked when the storage object is first
-created (when you call connect on your schema).  So, run-time changes 
-to this environment variable will not take effect unless you also 
+created (when you call connect on your schema).  So, run-time changes
+to this environment variable will not take effect unless you also
 re-connect on your schema.
 
 =head2 DBIX_CLASS_STORAGE_DBI_DEBUG

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/UTF8Columns.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/UTF8Columns.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class/UTF8Columns.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -7,7 +7,7 @@
 
 =head1 NAME
 
-DBIx::Class::UTF8Columns - Force UTF8 (Unicode) flag on columns
+DBIx::Class::UTF8Columns - Force UTF8 (Unicode) flag on columns (DEPRECATED)
 
 =head1 SYNOPSIS
 
@@ -23,10 +23,55 @@
 
 =head1 DESCRIPTION
 
-This module allows you to get columns data that have utf8 (Unicode) flag.
+This module allows you to get and store utf8 (unicode) column data
+in a database that does not natively support unicode. It ensures
+that column data is correctly serialised as a byte stream when
+stored and de-serialised to unicode strings on retrieval.
 
-=head2 Warning
+  THE USE OF THIS MODULE (AND ITS COUSIN DBIx::Class::ForceUTF8) IS VERY
+  STRONGLY DISCOURAGED, PLEASE READ THE WARNINGS BELOW FOR AN EXPLANATION.
 
+If you want to continue using this module and do not want to receive
+further warnings, set the environment variable C<DBIC_UTF8COLUMNS_OK>
+to a true value.
+
+=head2 Warning - Module does not function properly on create/insert
+
+Recently (April 2010) a bug was found deep in the core of L<DBIx::Class>
+which affects any component attempting to perform encoding/decoding by
+overloading L<store_column|DBIx::Class::Row/store_column> and
+L<get_columns|DBIx::Class::Row/get_columns>. As a result of this problem
+L<create|DBIx::Class::ResultSet/create> sends the original column values
+to the database, while L<update|DBIx::Class::ResultSet/update> sends the
+encoded values. L<DBIx::Class::UTF8Columns> and L<DBIx::Class::ForceUTF8>
+are both affected by this bug.
+
+It is unclear how this bug went undetected for so long (it was
+introduced in March 2006). No attempts to fix it will be made while the
+implications of changing such a fundamental behavior of DBIx::Class are
+being evaluated. However in this day and age you should not be using
+this module anyway as Unicode is properly supported by all major
+database engines, as explained below.
+
+If you have specific questions about the integrity of your data in light
+of this development - please 
+L<join us on IRC or the mailing list|DBIx::Class/GETTING HELP/SUPPORT>
+to further discuss your concerns with the team.
+
+=head2 Warning - Native Database Unicode Support
+
+If your database natively supports Unicode (as does SQLite with the
+C<sqlite_unicode> connect flag, MySQL with C<mysql_enable_utf8>
+connect flag or Postgres with the C<pg_enable_utf8> connect flag),
+then this component should B<not> be used, and will corrupt unicode
+data in a subtle and unexpected manner.
+
+It is far better to do Unicode support within the database if
+possible rather than converting data to and from raw bytes on every
+database round trip.
+
+=head2 Warning - Component Overloading
+
 Note that this module overloads L<DBIx::Class::Row/store_column> in a way
 that may prevent other components overloading the same method from working
 correctly. This component must be the last one before L<DBIx::Class::Row>

Modified: DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/DBIx/Class.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -27,7 +27,7 @@
 # Always remember to do all digits for the version even if they're 0
 # i.e. first release of 0.XX *must* be 0.XX000. This avoids fBSD ports
 # brain damage and presumably various other packaging systems too
-$VERSION = '0.08120_1';
+$VERSION = '0.08121_01';
 
 $VERSION = eval $VERSION if $VERSION =~ /_/; # numify for warning-free dev releases
 
@@ -42,8 +42,11 @@
 sub _attr_cache {
   my $self = shift;
   my $cache = $self->can('__attr_cache') ? $self->__attr_cache : {};
-  my $rest = eval { $self->next::method };
-  return $@ ? $cache : { %$cache, %$rest };
+
+  return {
+    %$cache,
+    %{ $self->maybe::next::method || {} },
+  };
 }
 
 1;
@@ -218,10 +221,12 @@
 
 =head1 CONTRIBUTORS
 
-abraxxa: Alexander Hartmaier <alex_hartmaier at hotmail.com>
+abraxxa: Alexander Hartmaier <abraxxa at cpan.org>
 
 aherzog: Adam Herzog <adam at herzogdesigns.com>
 
+Alexander Keusch <cpan at keusch.at>
+
 amoore: Andrew Moore <amoore at cpan.org>
 
 andyg: Andy Grundman <andy at hybridized.org>
@@ -340,6 +345,8 @@
 
 rafl: Florian Ragwitz <rafl at debian.org>
 
+rbo: Robert Bohne <rbo at cpan.org>
+
 rbuels: Robert Buels <rmb32 at cornell.edu>
 
 rdj: Ryan D Johnson <ryan at innerfence.com>
@@ -370,6 +377,8 @@
 
 Tom Hukins
 
+tonvoon: Ton Voon <tonvoon at cpan.org>
+
 triode: Pete Gamache <gamache at cpan.org>
 
 typester: Daisuke Murase <typester at cpan.org>
@@ -384,6 +393,8 @@
 
 zamolxes: Bogdan Lucaciu <bogdan at wiz.ro>
 
+Possum: Daniel LeWarne <possum at cpan.org>
+
 =head1 COPYRIGHT
 
 Copyright (c) 2005 - 2010 the DBIx::Class L</AUTHOR> and L</CONTRIBUTORS>

Modified: DBIx-Class/0.08/branches/extended_rels/lib/SQL/Translator/Parser/DBIx/Class.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/lib/SQL/Translator/Parser/DBIx/Class.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/lib/SQL/Translator/Parser/DBIx/Class.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -14,8 +14,10 @@
 
 use Exporter;
 use SQL::Translator::Utils qw(debug normalize_name);
-use Carp::Clan qw/^SQL::Translator|^DBIx::Class/;
-use Scalar::Util ();
+use Carp::Clan qw/^SQL::Translator|^DBIx::Class|^Try::Tiny/;
+use Scalar::Util 'weaken';
+use Try::Tiny;
+use namespace::clean;
 
 use base qw(Exporter);
 
@@ -33,7 +35,7 @@
 sub parse {
     # this is a hack to prevent schema leaks due to a retarded SQLT implementation
     # DO NOT REMOVE (until SQLT2 is out, the all of this will be rewritten anyway)
-    Scalar::Util::weaken ($_[1]) if ref ($_[1]);
+    weaken $_[1] if ref ($_[1]);
 
     my ($tr, $data)   = @_;
     my $args          = $tr->parser_args;
@@ -43,8 +45,12 @@
 
     croak 'No DBIx::Class::Schema' unless ($dbicschema);
     if (!ref $dbicschema) {
-      eval "use $dbicschema;";
-      croak "Can't load $dbicschema ($@)" if($@);
+      try {
+        eval "require $dbicschema;"
+      }
+      catch {
+        croak "Can't load $dbicschema ($_)";
+      }
     }
 
     my $schema      = $tr->schema;
@@ -59,7 +65,7 @@
         $dbicschema->throw_exception ("'sources' parameter must be an array or hash ref")
           unless( $ref eq 'ARRAY' || ref eq 'HASH' );
 
-        # limit monikers to those specified in 
+        # limit monikers to those specified in
         my $sources;
         if ($ref eq 'ARRAY') {
             $sources->{$_} = 1 for (@$limit_sources);
@@ -165,7 +171,7 @@
             # Force the order of @cond to match the order of ->add_columns
             my $idx;
             my %other_columns_idx = map {'foreign.'.$_ => ++$idx } $relsource->columns;
-            my @cond = sort { $other_columns_idx{$a} cmp $other_columns_idx{$b} } keys(%{$rel_info->{cond}}); 
+            my @cond = sort { $other_columns_idx{$a} cmp $other_columns_idx{$b} } keys(%{$rel_info->{cond}});
 
             # Get the key information, mapping off the foreign/self markers
             my @refkeys = map {/^\w+\.(\w+)$/} @cond;

Added: DBIx-Class/0.08/branches/extended_rels/maint/benchmark_datafetch.pl
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/maint/benchmark_datafetch.pl	                        (rev 0)
+++ DBIx-Class/0.08/branches/extended_rels/maint/benchmark_datafetch.pl	2010-06-02 17:41:37 UTC (rev 9557)
@@ -0,0 +1,38 @@
+#!/usr/bin/perl
+
+use strict;
+use warnings;
+
+use Benchmark qw/cmpthese/;
+use FindBin;
+use lib "$FindBin::Bin/../t/lib";
+use lib "$FindBin::Bin/../lib";
+use DBICTest::Schema;
+use DBIx::Class::ResultClass::HashRefInflator;  # older dbic didn't load it
+
+printf "Benchmarking DBIC version %s\n", DBIx::Class->VERSION;
+
+my $schema = DBICTest::Schema->connect ('dbi:SQLite::memory:');
+$schema->deploy;
+
+my $rs = $schema->resultset ('Artist');
+$rs->populate ([ map { { name => "Art_$_"} } (1 .. 10000) ]);
+
+my $dbh = $schema->storage->dbh;
+my $sql = sprintf ('SELECT %s FROM %s %s',
+  join (',', @{$rs->_resolved_attrs->{select}} ),
+  $rs->result_source->name,
+  $rs->_resolved_attrs->{alias},
+);
+
+my $compdbi = sub {
+  my @r = $schema->storage->dbh->selectall_arrayref ('SELECT * FROM ' . ${$rs->as_query}->[0] )
+} if $rs->can ('as_query');
+
+cmpthese(-3, {
+  Cursor => sub { $rs->reset; my @r = $rs->cursor->all },
+  HRI => sub { $rs->reset; my @r = $rs->search ({}, { result_class => 'DBIx::Class::ResultClass::HashRefInflator' } )->all },
+  RowObj => sub { $rs->reset; my @r = $rs->all },
+  RawDBI => sub { my @r = $dbh->selectall_arrayref ($sql) },
+  $compdbi ? (CompDBI => $compdbi) : (),
+});


Property changes on: DBIx-Class/0.08/branches/extended_rels/maint/benchmark_datafetch.pl
___________________________________________________________________
Added: svn:executable
   + *

Modified: DBIx-Class/0.08/branches/extended_rels/maint/joint_deps.pl
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/maint/joint_deps.pl	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/maint/joint_deps.pl	2010-06-02 17:41:37 UTC (rev 9557)
@@ -19,17 +19,29 @@
 my $s = CPANDB::Schema->connect (sub { CPANDB->dbh } );
 
 # reference names are unstable - just create rels manually
-# is there a saner way to do that?
-my $distclass = $s->class('Distribution');
-$distclass->has_many (
+my $distrsrc = $s->source('Distribution');
+
+# the has_many helper is a class-only method (why?), thus
+# manual add_rel
+$distrsrc->add_relationship (
   'deps',
   $s->class('Dependency'),
-  'distribution',
+  { 'foreign.distribution' => 'self.' . ($distrsrc->primary_columns)[0] },
+  { accessor => 'multi', join_type => 'left' },
 );
-$s->unregister_source ('Distribution');
-$s->register_class ('Distribution', $distclass);
 
+# here is how one could use the helper currently:
+#
+#my $distresult = $s->class('Distribution');
+#$distresult->has_many (
+#  'deps',
+#  $s->class('Dependency'),
+#  'distribution',
+#);
+#$s->unregister_source ('Distribution');
+#$s->register_class ('Distribution', $distresult);
 
+
 # a proof of concept how to find out who uses us *AND* SQLT
 my $us_and_sqlt = $s->resultset('Distribution')->search (
   {


Property changes on: DBIx-Class/0.08/branches/extended_rels/maint/svn-log.perl
___________________________________________________________________
Deleted: svn:mime-type
   - text/script

Modified: DBIx-Class/0.08/branches/extended_rels/script/dbicadmin
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/script/dbicadmin	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/script/dbicadmin	2010-06-02 17:41:37 UTC (rev 9557)
@@ -39,7 +39,7 @@
       ['deploy' => 'Deploy the schema to the database',],
       ['select'   => 'Select data from the schema', ],
       ['insert'   => 'Insert data into the schema', ],
-      ['update'   => 'Update data in the schema', ], 
+      ['update'   => 'Update data in the schema', ],
       ['delete'   => 'Delete data from the schema',],
       ['op:s' => 'compatiblity option all of the above can be suppied as --op=<action>'],
       ['help' => 'display this help', { implies => { schema_class => '__dummy__' } } ],
@@ -62,6 +62,7 @@
     ['force' => 'Be forceful with some operations'],
     ['trace' => 'Turn on DBIx::Class trace output'],
     ['quiet' => 'Be less verbose'],
+    ['I:s@' => 'Same as perl\'s -I, prepended to current @INC'],
   )
 );
 
@@ -86,23 +87,27 @@
     );
 }
 
+# FIXME - lowercasing will eventually go away when Getopt::Long::Descriptive is fixed
+if($opts->{i}) {
+  require lib;
+  lib->import( @{delete $opts->{i}} );
+}
+
 if($opts->{help}) {
-    $usage->die();
+  $usage->die();
 }
 
 # option compatability mangle
 if($opts->{connect}) {
   $opts->{connect_info} = delete $opts->{connect};
 }
-
 my $admin = DBIx::Class::Admin->new( %$opts );
 
-
 my $action = $opts->{action};
 
 $action = $opts->{op} if ($action eq 'op');
 
-print "Performig action $action...\n";
+print "Performing action $action...\n";
 
 my $res = $admin->$action();
 if ($action eq 'select') {


Property changes on: DBIx-Class/0.08/branches/extended_rels/t
___________________________________________________________________
Deleted: svn:ignore
   - var


Modified: DBIx-Class/0.08/branches/extended_rels/t/03podcoverage.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/03podcoverage.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/03podcoverage.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -2,9 +2,10 @@
 use strict;
 
 use Test::More;
-use List::Util ();
+use List::Util 'first';
 use lib qw(t/lib);
 use DBICTest;
+use namespace::clean;
 
 # Don't run tests for installs
 unless ( DBICTest::AuthorCheck->is_author || $ENV{AUTOMATED_TESTING} || $ENV{RELEASE_TESTING} ) {
@@ -23,7 +24,7 @@
 # of what this is doing might be in order.
 # The exceptions structure below is a hash keyed by the module
 # name. Any * in a name is treated like a wildcard and will behave
-# as expected. Modules are matched by longest string first, so 
+# as expected. Modules are matched by longest string first, so
 # A::B::C will match even if there is A::B*
 
 # The value for each is a hash, which contains one or more
@@ -46,6 +47,15 @@
             MULTICREATE_DEBUG
         /],
     },
+    'DBIx::Class::FilterColumn' => {
+        ignore => [qw/
+            new
+            update
+            store_column
+            get_column
+            get_columns
+        /],
+    },
     'DBIx::Class::ResultSource' => {
         ignore => [qw/
             compare_relationship_keys
@@ -79,6 +89,12 @@
         /]
     },
 
+    'DBIx::Class::Admin'        => {
+        ignore => [ qw/
+            BUILD
+        /]
+     },
+
     'DBIx::Class::Storage::DBI::Replicated*'        => {
         ignore => [ qw/
             connect_call_do_sql
@@ -125,8 +141,8 @@
 foreach my $module (@modules) {
   SKIP: {
 
-    my ($match) = List::Util::first
-      { $module =~ $_ }
+    my ($match) =
+      first { $module =~ $_ }
       (sort { length $b <=> length $a || $b cmp $a } (keys %$ex_lookup) )
     ;
 

Modified: DBIx-Class/0.08/branches/extended_rels/t/39load_namespaces_1.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/39load_namespaces_1.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/39load_namespaces_1.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -7,8 +7,6 @@
 use lib qw(t/lib);
 use DBICTest; # do not remove even though it is not used
 
-plan tests => 8;
-
 my $warnings;
 eval {
     local $SIG{__WARN__} = sub { $warnings .= shift };
@@ -16,9 +14,11 @@
     use base qw/DBIx::Class::Schema/;
     __PACKAGE__->load_namespaces;
 };
-ok(!$@) or diag $@;
-like($warnings, qr/load_namespaces found ResultSet class C with no corresponding Result class/);
+ok(!$@, 'load_namespaces does not die') or diag $@;
+like($warnings, qr/load_namespaces found ResultSet class C with no corresponding Result class/, 'Found warning about extra ResultSet classes');
 
+like($warnings, qr/load_namespaces found ResultSet class DBICNSTest::ResultSet::D that does not subclass DBIx::Class::ResultSet/, 'Found warning about ResultSets with incorrect subclass');
+
 my $source_a = DBICNSTest->source('A');
 isa_ok($source_a, 'DBIx::Class::ResultSource::Table');
 my $rset_a   = DBICNSTest->resultset('A');
@@ -31,5 +31,7 @@
 
 for my $moniker (qw/A B/) {
   my $class = "DBICNSTest::Result::$moniker";
-  ok(!defined($class->result_source_instance->source_name));
+  ok(!defined($class->result_source_instance->source_name), "Source name of $moniker not defined");
 }
+
+done_testing;

Modified: DBIx-Class/0.08/branches/extended_rels/t/39load_namespaces_3.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/39load_namespaces_3.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/39load_namespaces_3.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -3,25 +3,25 @@
 use strict;
 use warnings;
 use Test::More;
+use Test::Exception;
+use Test::Warn;
 
 use lib qw(t/lib);
 use DBICTest; # do not remove even though it is not used
 
-plan tests => 7;
+lives_ok (sub {
+  warnings_exist ( sub {
+      package DBICNSTestOther;
+      use base qw/DBIx::Class::Schema/;
+      __PACKAGE__->load_namespaces(
+          result_namespace => [ '+DBICNSTest::Rslt', '+DBICNSTest::OtherRslt' ],
+          resultset_namespace => '+DBICNSTest::RSet',
+      );
+    },
+    qr/load_namespaces found ResultSet class C with no corresponding Result class/,
+  );
+});
 
-my $warnings;
-eval {
-    local $SIG{__WARN__} = sub { $warnings .= shift };
-    package DBICNSTestOther;
-    use base qw/DBIx::Class::Schema/;
-    __PACKAGE__->load_namespaces(
-        result_namespace => [ '+DBICNSTest::Rslt', '+DBICNSTest::OtherRslt' ],
-        resultset_namespace => '+DBICNSTest::RSet',
-    );
-};
-ok(!$@) or diag $@;
-like($warnings, qr/load_namespaces found ResultSet class C with no corresponding Result class/);
-
 my $source_a = DBICNSTestOther->source('A');
 isa_ok($source_a, 'DBIx::Class::ResultSource::Table');
 my $rset_a   = DBICNSTestOther->resultset('A');
@@ -34,3 +34,5 @@
 
 my $source_d = DBICNSTestOther->source('D');
 isa_ok($source_d, 'DBIx::Class::ResultSource::Table');
+
+done_testing;

Deleted: DBIx-Class/0.08/branches/extended_rels/t/41orrible.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/41orrible.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/41orrible.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -1,89 +0,0 @@
-use strict;
-use warnings;
-
-use Test::More;
-use DBIx::Class::SQLAHacks::OracleJoins;
-
-use lib qw(t/lib);
-use DBICTest; # do not remove even though it is not used
-use DBIC::SqlMakerTest;
-
-plan tests => 4;
-
-my $sa = new DBIx::Class::SQLAHacks::OracleJoins;
-
-$sa->limit_dialect('RowNum');
-
-is($sa->select('rubbish',
-                  [ 'foo.id', 'bar.id', \'TO_CHAR(foo.womble, "blah")' ],
-                  undef, undef, 1, 3),
-   'SELECT * FROM
-(
-    SELECT A.*, ROWNUM r FROM
-    (
-        SELECT foo.id AS col1, bar.id AS col2, TO_CHAR(foo.womble, "blah") AS col3 FROM rubbish 
-    ) A
-    WHERE ROWNUM < 5
-) B
-WHERE r >= 4
-', 'Munged stuff to make Oracle not explode');
-
-# test WhereJoins
-# search with undefined or empty $cond
-
-#  my ($self, $table, $fields, $where, $order, @rest) = @_;
-my ($sql, @bind) = $sa->select(
-    [
-        { me => "cd" },
-        [
-            { "-join_type" => "LEFT", artist => "artist" },
-            { "artist.artistid" => "me.artist" },
-        ],
-    ],
-    [ 'cd.cdid', 'cd.artist', 'cd.title', 'cd.year', 'artist.artistid', 'artist.name' ],
-    undef,
-    undef
-);
-is_same_sql_bind(
-  $sql, \@bind,
-  'SELECT cd.cdid, cd.artist, cd.title, cd.year, artist.artistid, artist.name FROM cd me, artist artist WHERE ( artist.artistid(+) = me.artist )', [],
-  'WhereJoins search with empty where clause'
-);
-
-($sql, @bind) = $sa->select(
-    [
-        { me => "cd" },
-        [
-            { "-join_type" => "", artist => "artist" },
-            { "artist.artistid" => "me.artist" },
-        ],
-    ],
-    [ 'cd.cdid', 'cd.artist', 'cd.title', 'cd.year', 'artist.artistid', 'artist.name' ],
-    { 'artist.artistid' => 3 },
-    undef
-);
-is_same_sql_bind(
-  $sql, \@bind,
-  'SELECT cd.cdid, cd.artist, cd.title, cd.year, artist.artistid, artist.name FROM cd me, artist artist WHERE ( ( ( artist.artistid = me.artist ) AND ( artist.artistid = ? ) ) )', [3],
-  'WhereJoins search with where clause'
-);
-
-($sql, @bind) = $sa->select(
-    [
-        { me => "cd" },
-        [
-            { "-join_type" => "LEFT", artist => "artist" },
-            { "artist.artistid" => "me.artist" },
-        ],
-    ],
-    [ 'cd.cdid', 'cd.artist', 'cd.title', 'cd.year', 'artist.artistid', 'artist.name' ],
-    [{ 'artist.artistid' => 3 }, { 'me.cdid' => 5 }],
-    undef
-);
-is_same_sql_bind(
-  $sql, \@bind,
-  'SELECT cd.cdid, cd.artist, cd.title, cd.year, artist.artistid, artist.name FROM cd me, artist artist WHERE ( ( ( artist.artistid(+) = me.artist ) AND ( ( ( artist.artistid = ? ) OR ( me.cdid = ? ) ) ) ) )', [3, 5],
-  'WhereJoins search with or in where clause'
-);
-
-

Modified: DBIx-Class/0.08/branches/extended_rels/t/51threads.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/51threads.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/51threads.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -1,6 +1,9 @@
 use strict;
 use warnings;
+
 use Test::More;
+use Test::Exception;
+
 use Config;
 
 # README: If you set the env var to a number greater than 10,
@@ -38,7 +41,7 @@
 
 my $parent_rs;
 
-eval {
+lives_ok (sub {
     my $dbh = $schema->storage->dbh;
 
     {
@@ -52,8 +55,7 @@
 
     $parent_rs = $schema->resultset('CD')->search({ year => 1901 });
     $parent_rs->next;
-};
-ok(!$@) or diag "Creation eval failed: $@";
+}, 'populate successful');
 
 my @children;
 while(@children < $num_children) {

Modified: DBIx-Class/0.08/branches/extended_rels/t/52cycle.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/52cycle.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/52cycle.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -12,7 +12,8 @@
 
 use DBICTest;
 use DBICTest::Schema;
-use Scalar::Util ();
+use Scalar::Util 'weaken';
+use namespace::clean;
 
 import Test::Memory::Cycle;
 
@@ -31,7 +32,7 @@
   my $row = $weak->{row} = $rs->first;
   memory_cycle_ok($row, 'No cycles in row');
 
-  Scalar::Util::weaken ($_) for values %$weak;
+  weaken $_ for values %$weak;
   memory_cycle_ok($weak, 'No cycles in weak object collection');
 }
 

Modified: DBIx-Class/0.08/branches/extended_rels/t/60core.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/60core.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/60core.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -45,6 +45,8 @@
 is(scalar(keys(%fake_dirty)), 1, '1 fake dirty column');
 ok(grep($_ eq 'name', keys(%fake_dirty)), 'name is fake dirty');
 
+ok($art->update, 'Update run');
+
 my $record_jp = $schema->resultset("Artist")->search(undef, { join => 'cds' })->search(undef, { prefetch => 'cds' })->next;
 
 ok($record_jp, "prefetch on same rel okay");
@@ -67,6 +69,8 @@
 
 is($art->in_storage, 0, "It knows it's dead");
 
+lives_ok { $art->update } 'No changes so update should be OK';
+
 dies_ok ( sub { $art->delete }, "Can't delete twice");
 
 is($art->name, 'We Are In Rehab', 'But the object is still live');

Modified: DBIx-Class/0.08/branches/extended_rels/t/71mysql.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/71mysql.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/71mysql.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -194,6 +194,29 @@
   );
 }
 
+{
+  # Test support for straight joins
+  my $cdsrc = $schema->source('CD');
+  my $artrel_info = $cdsrc->relationship_info ('artist');
+  $cdsrc->add_relationship(
+    'straight_artist',
+    $artrel_info->{class},
+    $artrel_info->{cond},
+    { %{$artrel_info->{attrs}}, join_type => 'straight' },
+  );
+  is_same_sql_bind (
+    $cdsrc->resultset->search({}, { prefetch => 'straight_artist' })->as_query,
+    '(
+      SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track,
+             straight_artist.artistid, straight_artist.name, straight_artist.rank, straight_artist.charfield
+        FROM cd me
+        STRAIGHT_JOIN artist straight_artist ON straight_artist.artistid = me.artist
+    )',
+    [],
+    'straight joins correctly supported for mysql'
+  );
+}
+
 ## Can we properly deal with the null search problem?
 ##
 ## Only way is to do a SET SQL_AUTO_IS_NULL = 0; on connect

Modified: DBIx-Class/0.08/branches/extended_rels/t/72pg.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/72pg.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/72pg.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -11,12 +11,11 @@
 
 plan skip_all => <<EOM unless $dsn && $user;
 Set \$ENV{DBICTEST_PG_DSN}, _USER and _PASS to run this test
-( NOTE: This test drops and creates tables called 'artist', 'casecheck',
-  'array_test' and 'sequence_test' as well as following sequences:
-  'pkid1_seq', 'pkid2_seq' and 'nonpkid_seq''.  as well as following
-  schemas: 'dbic_t_schema', 'dbic_t_schema_2', 'dbic_t_schema_3',
-  'dbic_t_schema_4', and 'dbic_t_schema_5'
-)
+( NOTE: This test drops and creates tables called 'artist', 'cd',
+'timestamp_primary_key_test', 'track', 'casecheck', 'array_test' and
+'sequence_test' as well as following sequences: 'pkid1_seq', 'pkid2_seq' and
+'nonpkid_seq''. as well as following schemas: 'dbic_t_schema',
+'dbic_t_schema_2', 'dbic_t_schema_3', 'dbic_t_schema_4', and 'dbic_t_schema_5')
 EOM
 
 ### load any test classes that are defined further down in the file via BEGIN blocks
@@ -24,250 +23,306 @@
 our @test_classes; #< array that will be pushed into by test classes defined in this file
 DBICTest::Schema->load_classes( map {s/.+:://;$_} @test_classes ) if @test_classes;
 
+my $test_server_supports_insert_returning = do {
+  my $s = DBICTest::Schema->connect($dsn, $user, $pass);
+  $s->storage->_determine_driver;
+  $s->storage->_supports_insert_returning;
+};
 
+my $schema;
+
+for my $use_insert_returning ($test_server_supports_insert_returning
+  ? (0,1)
+  : (0)
+) {
+  no warnings qw/redefine once/;
+  local *DBIx::Class::Storage::DBI::Pg::_supports_insert_returning = sub {
+    $use_insert_returning
+  };
+
 ###  pre-connect tests (keep each test separate as to make sure rebless() runs)
-{
-  my $s = DBICTest::Schema->connect($dsn, $user, $pass);
+  {
+    my $s = DBICTest::Schema->connect($dsn, $user, $pass);
 
-  ok (!$s->storage->_dbh, 'definitely not connected');
+    ok (!$s->storage->_dbh, 'definitely not connected');
 
-  # Check that datetime_parser returns correctly before we explicitly connect.
-  SKIP: {
-      eval { require DateTime::Format::Pg };
-      skip "DateTime::Format::Pg required", 2 if $@;
+    # Check that datetime_parser returns correctly before we explicitly connect.
+    SKIP: {
+        eval { require DateTime::Format::Pg };
+        skip "DateTime::Format::Pg required", 2 if $@;
 
-      my $store = ref $s->storage;
-      is($store, 'DBIx::Class::Storage::DBI', 'Started with generic storage');
+        my $store = ref $s->storage;
+        is($store, 'DBIx::Class::Storage::DBI', 'Started with generic storage');
 
-      my $parser = $s->storage->datetime_parser;
-      is( $parser, 'DateTime::Format::Pg', 'datetime_parser is as expected');
+        my $parser = $s->storage->datetime_parser;
+        is( $parser, 'DateTime::Format::Pg', 'datetime_parser is as expected');
+    }
+
+    ok (!$s->storage->_dbh, 'still not connected');
   }
+  {
+    my $s = DBICTest::Schema->connect($dsn, $user, $pass);
+    # make sure sqlt_type overrides work (::Storage::DBI::Pg does this)
+    ok (!$s->storage->_dbh, 'definitely not connected');
+    is ($s->storage->sqlt_type, 'PostgreSQL', 'sqlt_type correct pre-connection');
+    ok (!$s->storage->_dbh, 'still not connected');
+  }
 
-  ok (!$s->storage->_dbh, 'still not connected');
-}
-{
-  my $s = DBICTest::Schema->connect($dsn, $user, $pass);
-  # make sure sqlt_type overrides work (::Storage::DBI::Pg does this)
-  ok (!$s->storage->_dbh, 'definitely not connected');
-  is ($s->storage->sqlt_type, 'PostgreSQL', 'sqlt_type correct pre-connection');
-  ok (!$s->storage->_dbh, 'still not connected');
-}
-
 ### connect, create postgres-specific test schema
 
-my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
+  $schema = DBICTest::Schema->connect($dsn, $user, $pass);
+  $schema->storage->ensure_connected;
 
-drop_test_schema($schema);
-create_test_schema($schema);
+  drop_test_schema($schema);
+  create_test_schema($schema);
 
 ### begin main tests
 
-
 # run a BIG bunch of tests for last-insert-id / Auto-PK / sequence
 # discovery
-run_apk_tests($schema); #< older set of auto-pk tests
-run_extended_apk_tests($schema); #< new extended set of auto-pk tests
+  run_apk_tests($schema); #< older set of auto-pk tests
+  run_extended_apk_tests($schema); #< new extended set of auto-pk tests
 
+### type_info tests
 
+  my $test_type_info = {
+      'artistid' => {
+          'data_type' => 'integer',
+          'is_nullable' => 0,
+          'size' => 4,
+      },
+      'name' => {
+          'data_type' => 'character varying',
+          'is_nullable' => 1,
+          'size' => 100,
+          'default_value' => undef,
+      },
+      'rank' => {
+          'data_type' => 'integer',
+          'is_nullable' => 0,
+          'size' => 4,
+          'default_value' => 13,
 
+      },
+      'charfield' => {
+          'data_type' => 'character',
+          'is_nullable' => 1,
+          'size' => 10,
+          'default_value' => undef,
+      },
+      'arrayfield' => {
+          'data_type' => 'integer[]',
+          'is_nullable' => 1,
+          'size' => undef,
+          'default_value' => undef,
+      },
+  };
 
+  my $type_info = $schema->storage->columns_info_for('dbic_t_schema.artist');
+  my $artistid_defval = delete $type_info->{artistid}->{default_value};
+  like($artistid_defval,
+       qr/^nextval\('([^\.]*\.){0,1}artist_artistid_seq'::(?:text|regclass)\)/,
+       'columns_info_for - sequence matches Pg get_autoinc_seq expectations');
+  is_deeply($type_info, $test_type_info,
+            'columns_info_for - column data types');
 
-### type_info tests
 
-my $test_type_info = {
-    'artistid' => {
-        'data_type' => 'integer',
-        'is_nullable' => 0,
-        'size' => 4,
-    },
-    'name' => {
-        'data_type' => 'character varying',
-        'is_nullable' => 1,
-        'size' => 100,
-        'default_value' => undef,
-    },
-    'rank' => {
-        'data_type' => 'integer',
-        'is_nullable' => 0,
-        'size' => 4,
-        'default_value' => 13,
 
-    },
-    'charfield' => {
-        'data_type' => 'character',
-        'is_nullable' => 1,
-        'size' => 10,
-        'default_value' => undef,
-    },
-    'arrayfield' => {
-        'data_type' => 'integer[]',
-        'is_nullable' => 1,
-        'size' => undef,
-        'default_value' => undef,
-    },
-};
 
-my $type_info = $schema->storage->columns_info_for('dbic_t_schema.artist');
-my $artistid_defval = delete $type_info->{artistid}->{default_value};
-like($artistid_defval,
-     qr/^nextval\('([^\.]*\.){0,1}artist_artistid_seq'::(?:text|regclass)\)/,
-     'columns_info_for - sequence matches Pg get_autoinc_seq expectations');
-is_deeply($type_info, $test_type_info,
-          'columns_info_for - column data types');
+####### Array tests
 
+  BEGIN {
+    package DBICTest::Schema::ArrayTest;
+    push @main::test_classes, __PACKAGE__;
 
+    use strict;
+    use warnings;
+    use base 'DBIx::Class::Core';
 
+    __PACKAGE__->table('dbic_t_schema.array_test');
+    __PACKAGE__->add_columns(qw/id arrayfield/);
+    __PACKAGE__->column_info_from_storage(1);
+    __PACKAGE__->set_primary_key('id');
 
-####### Array tests
+  }
+  SKIP: {
+    skip "Need DBD::Pg 2.9.2 or newer for array tests", 4 if $DBD::Pg::VERSION < 2.009002;
 
-BEGIN {
-  package DBICTest::Schema::ArrayTest;
-  push @main::test_classes, __PACKAGE__;
+    lives_ok {
+      $schema->resultset('ArrayTest')->create({
+        arrayfield => [1, 2],
+      });
+    } 'inserting arrayref as pg array data';
 
-  use strict;
-  use warnings;
-  use base 'DBIx::Class::Core';
+    lives_ok {
+      $schema->resultset('ArrayTest')->update({
+        arrayfield => [3, 4],
+      });
+    } 'updating arrayref as pg array data';
 
-  __PACKAGE__->table('dbic_t_schema.array_test');
-  __PACKAGE__->add_columns(qw/id arrayfield/);
-  __PACKAGE__->column_info_from_storage(1);
-  __PACKAGE__->set_primary_key('id');
-
-}
-SKIP: {
-  skip "Need DBD::Pg 2.9.2 or newer for array tests", 4 if $DBD::Pg::VERSION < 2.009002;
-
-  lives_ok {
     $schema->resultset('ArrayTest')->create({
-      arrayfield => [1, 2],
+      arrayfield => [5, 6],
     });
-  } 'inserting arrayref as pg array data';
 
-  lives_ok {
-    $schema->resultset('ArrayTest')->update({
-      arrayfield => [3, 4],
-    });
-  } 'updating arrayref as pg array data';
+    my $count;
+    lives_ok {
+      $count = $schema->resultset('ArrayTest')->search({
+        arrayfield => \[ '= ?' => [arrayfield => [3, 4]] ],   #Todo anything less ugly than this?
+      })->count;
+    } 'comparing arrayref to pg array data does not blow up';
+    is($count, 1, 'comparing arrayref to pg array data gives correct result');
+  }
 
-  $schema->resultset('ArrayTest')->create({
-    arrayfield => [5, 6],
-  });
 
-  my $count;
-  lives_ok {
-    $count = $schema->resultset('ArrayTest')->search({
-      arrayfield => \[ '= ?' => [arrayfield => [3, 4]] ],   #Todo anything less ugly than this?
-    })->count;
-  } 'comparing arrayref to pg array data does not blow up';
-  is($count, 1, 'comparing arrayref to pg array data gives correct result');
-}
 
-
-
 ########## Case check
 
-BEGIN {
-  package DBICTest::Schema::Casecheck;
-  push @main::test_classes, __PACKAGE__;
+  BEGIN {
+    package DBICTest::Schema::Casecheck;
+    push @main::test_classes, __PACKAGE__;
 
-  use strict;
-  use warnings;
-  use base 'DBIx::Class::Core';
+    use strict;
+    use warnings;
+    use base 'DBIx::Class::Core';
 
-  __PACKAGE__->table('dbic_t_schema.casecheck');
-  __PACKAGE__->add_columns(qw/id name NAME uc_name/);
-  __PACKAGE__->column_info_from_storage(1);
-  __PACKAGE__->set_primary_key('id');
-}
+    __PACKAGE__->table('dbic_t_schema.casecheck');
+    __PACKAGE__->add_columns(qw/id name NAME uc_name/);
+    __PACKAGE__->column_info_from_storage(1);
+    __PACKAGE__->set_primary_key('id');
+  }
 
-my $name_info = $schema->source('Casecheck')->column_info( 'name' );
-is( $name_info->{size}, 1, "Case sensitive matching info for 'name'" );
+  my $name_info = $schema->source('Casecheck')->column_info( 'name' );
+  is( $name_info->{size}, 1, "Case sensitive matching info for 'name'" );
 
-my $NAME_info = $schema->source('Casecheck')->column_info( 'NAME' );
-is( $NAME_info->{size}, 2, "Case sensitive matching info for 'NAME'" );
+  my $NAME_info = $schema->source('Casecheck')->column_info( 'NAME' );
+  is( $NAME_info->{size}, 2, "Case sensitive matching info for 'NAME'" );
 
-my $uc_name_info = $schema->source('Casecheck')->column_info( 'uc_name' );
-is( $uc_name_info->{size}, 3, "Case insensitive matching info for 'uc_name'" );
+  my $uc_name_info = $schema->source('Casecheck')->column_info( 'uc_name' );
+  is( $uc_name_info->{size}, 3, "Case insensitive matching info for 'uc_name'" );
 
 
+## Test ResultSet->update
+my $artist = $schema->resultset('Artist')->first;
+my $cds = $artist->cds_unordered->search({
+    year => { '!=' => 2010 }
+}, { prefetch => 'liner_notes' });
+TODO: {
+    todo_skip 'update resultset with a prefetch over a might_have rel', 1;
+    $cds->update({ year => '2010' });
+}
 
 
 ## Test SELECT ... FOR UPDATE
 
-SKIP: {
-    if(eval "require Sys::SigAction" && !$@) {
-        Sys::SigAction->import( 'set_sig_handler' );
-    }
-    else {
-      skip "Sys::SigAction is not available", 6;
-    }
+  SKIP: {
+      if(eval "require Sys::SigAction" && !$@) {
+          Sys::SigAction->import( 'set_sig_handler' );
+      }
+      else {
+        skip "Sys::SigAction is not available", 6;
+      }
 
-    my ($timed_out, $artist2);
+      my ($timed_out, $artist2);
 
-    for my $t (
-      {
-        # Make sure that an error was raised, and that the update failed
-        update_lock => 1,
-        test_sub => sub {
-          ok($timed_out, "update from second schema times out");
-          ok($artist2->is_column_changed('name'), "'name' column is still dirty from second schema");
+      for my $t (
+        {
+          # Make sure that an error was raised, and that the update failed
+          update_lock => 1,
+          test_sub => sub {
+            ok($timed_out, "update from second schema times out");
+            ok($artist2->is_column_changed('name'), "'name' column is still dirty from second schema");
+          },
         },
-      },
-      {
-        # Make sure that an error was NOT raised, and that the update succeeded
-        update_lock => 0,
-        test_sub => sub {
-          ok(! $timed_out, "update from second schema DOES NOT timeout");
-          ok(! $artist2->is_column_changed('name'), "'name' column is NOT dirty from second schema");
+        {
+          # Make sure that an error was NOT raised, and that the update succeeded
+          update_lock => 0,
+          test_sub => sub {
+            ok(! $timed_out, "update from second schema DOES NOT timeout");
+            ok(! $artist2->is_column_changed('name'), "'name' column is NOT dirty from second schema");
+          },
         },
-      },
-    ) {
-      # create a new schema
-      my $schema2 = DBICTest::Schema->connect($dsn, $user, $pass);
-      $schema2->source("Artist")->name("dbic_t_schema.artist");
+      ) {
+        # create a new schema
+        my $schema2 = DBICTest::Schema->connect($dsn, $user, $pass);
+        $schema2->source("Artist")->name("dbic_t_schema.artist");
 
-      $schema->txn_do( sub {
-        my $artist = $schema->resultset('Artist')->search(
-            {
-                artistid => 1
-            },
-            $t->{update_lock} ? { for => 'update' } : {}
-        )->first;
-        is($artist->artistid, 1, "select returns artistid = 1");
+        $schema->txn_do( sub {
+          my $rs = $schema->resultset('Artist')->search(
+              {
+                  artistid => 1
+              },
+              $t->{update_lock} ? { for => 'update' } : {}
+          );
+          ok ($rs->count, 'Count works');
 
-        $timed_out = 0;
-        eval {
-            my $h = set_sig_handler( 'ALRM', sub { die "DBICTestTimeout" } );
-            alarm(2);
-            $artist2 = $schema2->resultset('Artist')->find(1);
-            $artist2->name('fooey');
-            $artist2->update;
-            alarm(0);
-        };
-        $timed_out = $@ =~ /DBICTestTimeout/;
-      });
+          my $artist = $rs->next;
+          is($artist->artistid, 1, "select returns artistid = 1");
 
-      $t->{test_sub}->();
-    }
-}
+          $timed_out = 0;
+          eval {
+              my $h = set_sig_handler( 'ALRM', sub { die "DBICTestTimeout" } );
+              alarm(2);
+              $artist2 = $schema2->resultset('Artist')->find(1);
+              $artist2->name('fooey');
+              $artist2->update;
+              alarm(0);
+          };
+          $timed_out = $@ =~ /DBICTestTimeout/;
+        });
 
+        $t->{test_sub}->();
+      }
+  }
 
+
 ######## other older Auto-pk tests
 
-$schema->source("SequenceTest")->name("dbic_t_schema.sequence_test");
-for (1..5) {
-    my $st = $schema->resultset('SequenceTest')->create({ name => 'foo' });
-    is($st->pkid1, $_, "Oracle Auto-PK without trigger: First primary key");
-    is($st->pkid2, $_ + 9, "Oracle Auto-PK without trigger: Second primary key");
-    is($st->nonpkid, $_ + 19, "Oracle Auto-PK without trigger: Non-primary key");
+  $schema->source("SequenceTest")->name("dbic_t_schema.sequence_test");
+  for (1..5) {
+      my $st = $schema->resultset('SequenceTest')->create({ name => 'foo' });
+      is($st->pkid1, $_, "Auto-PK for sequence without default: First primary key");
+      is($st->pkid2, $_ + 9, "Auto-PK for sequence without default: Second primary key");
+      is($st->nonpkid, $_ + 19, "Auto-PK for sequence without default: Non-primary key");
+  }
+  my $st = $schema->resultset('SequenceTest')->create({ name => 'foo', pkid1 => 55 });
+  is($st->pkid1, 55, "Auto-PK for sequence without default: First primary key set manually");
+
+
+######## test non-serial auto-pk
+
+  if ($schema->storage->_supports_insert_returning) {
+    $schema->source('TimestampPrimaryKey')->name('dbic_t_schema.timestamp_primary_key_test');
+    my $row = $schema->resultset('TimestampPrimaryKey')->create({});
+    ok $row->id;
+  }
+
+######## test with_deferred_fk_checks
+
+  $schema->source('CD')->name('dbic_t_schema.cd');
+  $schema->source('Track')->name('dbic_t_schema.track');
+  lives_ok {
+    $schema->storage->with_deferred_fk_checks(sub {
+      $schema->resultset('Track')->create({
+        trackid => 999, cd => 999, position => 1, title => 'deferred FK track'
+      });
+      $schema->resultset('CD')->create({
+        artist => 1, cdid => 999, year => '2003', title => 'deferred FK cd'
+      });
+    });
+  } 'with_deferred_fk_checks code survived';
+
+  is eval { $schema->resultset('Track')->find(999)->title }, 'deferred FK track',
+     'code in with_deferred_fk_checks worked';
+
+  throws_ok {
+    $schema->resultset('Track')->create({
+      trackid => 1, cd => 9999, position => 1, title => 'Track1'
+    });
+  } qr/constraint/i, 'with_deferred_fk_checks is off';
 }
-my $st = $schema->resultset('SequenceTest')->create({ name => 'foo', pkid1 => 55 });
-is($st->pkid1, 55, "Oracle Auto-PK without trigger: First primary key set manually");
 
 done_testing;
 
-exit;
-
 END {
     return unless $schema;
     drop_test_schema($schema);
@@ -296,7 +351,35 @@
 
       $dbh->do("CREATE SCHEMA dbic_t_schema");
       $dbh->do("CREATE TABLE dbic_t_schema.artist $std_artist_table");
+
       $dbh->do(<<EOS);
+CREATE TABLE dbic_t_schema.timestamp_primary_key_test (
+  id timestamp default current_timestamp
+)
+EOS
+      $dbh->do(<<EOS);
+CREATE TABLE dbic_t_schema.cd (
+  cdid int PRIMARY KEY,
+  artist int,
+  title varchar(255),
+  year varchar(4),
+  genreid int,
+  single_track int
+)
+EOS
+      $dbh->do(<<EOS);
+CREATE TABLE dbic_t_schema.track (
+  trackid int,
+  cd int REFERENCES dbic_t_schema.cd(cdid) DEFERRABLE,
+  position int,
+  title varchar(255),
+  last_updated_on date,
+  last_updated_at date,
+  small_dt date
+)
+EOS
+
+      $dbh->do(<<EOS);
 CREATE TABLE dbic_t_schema.sequence_test (
     pkid1 integer
     , pkid2 integer
@@ -478,6 +561,7 @@
   my $search_path_save = eapk_get_search_path($schema);
 
   eapk_drop_all($schema);
+  %seqs = ();
 
   # make the test schemas and sequences
   $schema->storage->dbh_do(sub {

Modified: DBIx-Class/0.08/branches/extended_rels/t/73oracle.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/73oracle.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/73oracle.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -30,8 +30,10 @@
 
 use Test::Exception;
 use Test::More;
+
 use lib qw(t/lib);
 use DBICTest;
+use DBIC::SqlMakerTest;
 
 my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_ORA_${_}" } qw/DSN USER PASS/};
 
@@ -48,6 +50,7 @@
 eval {
   $dbh->do("DROP SEQUENCE artist_seq");
   $dbh->do("DROP SEQUENCE cd_seq");
+  $dbh->do("DROP SEQUENCE track_seq");
   $dbh->do("DROP SEQUENCE pkid1_seq");
   $dbh->do("DROP SEQUENCE pkid2_seq");
   $dbh->do("DROP SEQUENCE nonpkid_seq");
@@ -58,11 +61,12 @@
 };
 $dbh->do("CREATE SEQUENCE artist_seq START WITH 1 MAXVALUE 999999 MINVALUE 0");
 $dbh->do("CREATE SEQUENCE cd_seq START WITH 1 MAXVALUE 999999 MINVALUE 0");
+$dbh->do("CREATE SEQUENCE track_seq START WITH 1 MAXVALUE 999999 MINVALUE 0");
 $dbh->do("CREATE SEQUENCE pkid1_seq START WITH 1 MAXVALUE 999999 MINVALUE 0");
 $dbh->do("CREATE SEQUENCE pkid2_seq START WITH 10 MAXVALUE 999999 MINVALUE 0");
 $dbh->do("CREATE SEQUENCE nonpkid_seq START WITH 20 MAXVALUE 999999 MINVALUE 0");
 
-$dbh->do("CREATE TABLE artist (artistid NUMBER(12), name VARCHAR(255), rank NUMBER(38), charfield VARCHAR2(10))");
+$dbh->do("CREATE TABLE artist (artistid NUMBER(12), parentid NUMBER(12), name VARCHAR(255), rank NUMBER(38), charfield VARCHAR2(10))");
 $dbh->do("ALTER TABLE artist ADD (CONSTRAINT artist_pk PRIMARY KEY (artistid))");
 
 $dbh->do("CREATE TABLE sequence_test (pkid1 NUMBER(12), pkid2 NUMBER(12), nonpkid NUMBER(12), name VARCHAR(255))");
@@ -72,6 +76,7 @@
 $dbh->do("ALTER TABLE cd ADD (CONSTRAINT cd_pk PRIMARY KEY (cdid))");
 
 $dbh->do("CREATE TABLE track (trackid NUMBER(12), cd NUMBER(12) REFERENCES cd(cdid) DEFERRABLE, position NUMBER(12), title VARCHAR(255), last_updated_on DATE, last_updated_at DATE, small_dt DATE)");
+$dbh->do("ALTER TABLE track ADD (CONSTRAINT track_pk PRIMARY KEY (trackid))");
 
 $dbh->do(qq{
   CREATE OR REPLACE TRIGGER artist_insert_trg
@@ -87,6 +92,18 @@
 });
 $dbh->do(qq{
   CREATE OR REPLACE TRIGGER cd_insert_trg
+  BEFORE INSERT OR UPDATE ON cd
+  FOR EACH ROW
+  BEGIN
+    IF :new.cdid IS NULL THEN
+      SELECT cd_seq.nextval
+      INTO :new.cdid
+      FROM DUAL;
+    END IF;
+  END;
+});
+$dbh->do(qq{
+  CREATE OR REPLACE TRIGGER cd_insert_trg
   BEFORE INSERT ON cd
   FOR EACH ROW
   BEGIN
@@ -97,6 +114,18 @@
     END IF;
   END;
 });
+$dbh->do(qq{
+  CREATE OR REPLACE TRIGGER track_insert_trg
+  BEFORE INSERT ON track
+  FOR EACH ROW
+  BEGIN
+    IF :new.trackid IS NULL THEN
+      SELECT track_seq.nextval
+      INTO :new.trackid
+      FROM DUAL;
+    END IF;
+  END;
+});
 
 {
     # Swiped from t/bindtype_columns.t to avoid creating my own Resultset.
@@ -161,7 +190,7 @@
 
 # test join with row count ambiguity
 
-my $track = $schema->resultset('Track')->create({ trackid => 1, cd => 1,
+my $track = $schema->resultset('Track')->create({ cd => $cd->cdid,
     position => 1, title => 'Track1' });
 my $tjoin = $schema->resultset('Track')->search({ 'me.title' => 'Track1'},
         { join => 'cd',
@@ -173,7 +202,7 @@
 is($row->title, 'Track1', "ambiguous column ok");
 
 # check count distinct with multiple columns
-my $other_track = $schema->resultset('Track')->create({ trackid => 2, cd => 1, position => 1, title => 'Track2' });
+my $other_track = $schema->resultset('Track')->create({ cd => $cd->cdid, position => 1, title => 'Track2' });
 
 my $tcount = $schema->resultset('Track')->search(
   {},
@@ -284,6 +313,410 @@
   }
 }
 
+
+### test hierarchical queries
+if ( $schema->storage->isa('DBIx::Class::Storage::DBI::Oracle::Generic') ) {
+    my $source = $schema->source('Artist');
+
+    $source->add_column( 'parentid' );
+
+    $source->add_relationship('children', 'DBICTest::Schema::Artist',
+        { 'foreign.parentid' => 'self.artistid' },
+        {
+            accessor => 'multi',
+            join_type => 'LEFT',
+            cascade_delete => 1,
+            cascade_copy => 1,
+        } );
+    $source->add_relationship('parent', 'DBICTest::Schema::Artist',
+        { 'foreign.artistid' => 'self.parentid' },
+        { accessor => 'single' } );
+    DBICTest::Schema::Artist->add_column( 'parentid' );
+    DBICTest::Schema::Artist->has_many(
+        children => 'DBICTest::Schema::Artist',
+        { 'foreign.parentid' => 'self.artistid' }
+    );
+    DBICTest::Schema::Artist->belongs_to(
+        parent => 'DBICTest::Schema::Artist',
+        { 'foreign.artistid' => 'self.parentid' }
+    );
+
+    $schema->resultset('Artist')->create ({
+        name => 'root',
+        rank => 1,
+        cds => [],
+        children => [
+            {
+                name => 'child1',
+                rank => 2,
+                children => [
+                    {
+                        name => 'grandchild',
+                        rank => 3,
+                        cds => [
+                            {
+                                title => "grandchilds's cd" ,
+                                year => '2008',
+                                tracks => [
+                                    {
+                                        position => 1,
+                                        title => 'Track 1 grandchild',
+                                    }
+                                ],
+                            }
+                        ],
+                        children => [
+                            {
+                                name => 'greatgrandchild',
+                                rank => 3,
+                            }
+                        ],
+                    }
+                ],
+            },
+            {
+                name => 'child2',
+                rank => 3,
+            },
+        ],
+    });
+
+    $schema->resultset('Artist')->create(
+        {
+            name     => 'cycle-root',
+            children => [
+                {
+                    name     => 'cycle-child1',
+                    children => [ { name => 'cycle-grandchild' } ],
+                },
+                { name => 'cycle-child2' },
+            ],
+        }
+    );
+
+    $schema->resultset('Artist')->find({ name => 'cycle-root' })
+      ->update({ parentid => \'artistid' });
+
+    # select the whole tree
+    {
+      my $rs = $schema->resultset('Artist')->search({}, {
+        start_with => { name => 'root' },
+        connect_by => { parentid => { -prior => \ 'artistid' } },
+      });
+
+      is_same_sql_bind (
+        $rs->as_query,
+        '(
+          SELECT me.artistid, me.name, me.rank, me.charfield, me.parentid
+            FROM artist me
+          START WITH name = ?
+          CONNECT BY parentid = PRIOR artistid 
+        )',
+        [ [ name => 'root'] ],
+      );
+      is_deeply (
+        [ $rs->get_column ('name')->all ],
+        [ qw/root child1 grandchild greatgrandchild child2/ ],
+        'got artist tree',
+      );
+
+
+      is_same_sql_bind (
+        $rs->count_rs->as_query,
+        '(
+          SELECT COUNT( * )
+            FROM artist me
+          START WITH name = ?
+          CONNECT BY parentid = PRIOR artistid 
+        )',
+        [ [ name => 'root'] ],
+      );
+
+      is( $rs->count, 5, 'Connect By count ok' );
+    }
+
+    # use order siblings by statement
+    {
+      my $rs = $schema->resultset('Artist')->search({}, {
+        start_with => { name => 'root' },
+        connect_by => { parentid => { -prior => \ 'artistid' } },
+        order_siblings_by => { -desc => 'name' },
+      });
+
+      is_same_sql_bind (
+        $rs->as_query,
+        '(
+          SELECT me.artistid, me.name, me.rank, me.charfield, me.parentid
+            FROM artist me
+          START WITH name = ?
+          CONNECT BY parentid = PRIOR artistid 
+          ORDER SIBLINGS BY name DESC
+        )',
+        [ [ name => 'root'] ],
+      );
+
+      is_deeply (
+        [ $rs->get_column ('name')->all ],
+        [ qw/root child2 child1 grandchild greatgrandchild/ ],
+        'Order Siblings By ok',
+      );
+    }
+
+    # get the root node
+    {
+      my $rs = $schema->resultset('Artist')->search({ parentid => undef }, {
+        start_with => { name => 'root' },
+        connect_by => { parentid => { -prior => \ 'artistid' } },
+      });
+
+      is_same_sql_bind (
+        $rs->as_query,
+        '(
+          SELECT me.artistid, me.name, me.rank, me.charfield, me.parentid
+            FROM artist me
+          WHERE ( parentid IS NULL )
+          START WITH name = ?
+          CONNECT BY parentid = PRIOR artistid 
+        )',
+        [ [ name => 'root'] ],
+      );
+
+      is_deeply(
+        [ $rs->get_column('name')->all ],
+        [ 'root' ],
+        'found root node',
+      );
+    }
+
+    # combine a connect by with a join
+    {
+      my $rs = $schema->resultset('Artist')->search(
+        {'cds.title' => { -like => '%cd'} },
+        {
+          join => 'cds',
+          start_with => { 'me.name' => 'root' },
+          connect_by => { parentid => { -prior => \ 'artistid' } },
+        }
+      );
+
+      is_same_sql_bind (
+        $rs->as_query,
+        '(
+          SELECT me.artistid, me.name, me.rank, me.charfield, me.parentid
+            FROM artist me
+            LEFT JOIN cd cds ON cds.artist = me.artistid
+          WHERE ( cds.title LIKE ? )
+          START WITH me.name = ?
+          CONNECT BY parentid = PRIOR artistid 
+        )',
+        [ [ 'cds.title' => '%cd' ], [ 'me.name' => 'root' ] ],
+      );
+
+      is_deeply(
+        [ $rs->get_column('name')->all ],
+        [ 'grandchild' ],
+        'Connect By with a join result name ok'
+      );
+
+
+      is_same_sql_bind (
+        $rs->count_rs->as_query,
+        '(
+          SELECT COUNT( * )
+            FROM artist me
+            LEFT JOIN cd cds ON cds.artist = me.artistid
+          WHERE ( cds.title LIKE ? )
+          START WITH me.name = ?
+          CONNECT BY parentid = PRIOR artistid 
+        )',
+        [ [ 'cds.title' => '%cd' ], [ 'me.name' => 'root' ] ],
+      );
+
+      is( $rs->count, 1, 'Connect By with a join; count ok' );
+    }
+
+    # combine a connect by with order_by
+    {
+      my $rs = $schema->resultset('Artist')->search({}, {
+        start_with => { name => 'root' },
+        connect_by => { parentid => { -prior => \ 'artistid' } },
+        order_by => { -asc => [ 'LEVEL', 'name' ] },
+      });
+
+      is_same_sql_bind (
+        $rs->as_query,
+        '(
+          SELECT me.artistid, me.name, me.rank, me.charfield, me.parentid
+            FROM artist me
+          START WITH name = ?
+          CONNECT BY parentid = PRIOR artistid 
+          ORDER BY LEVEL ASC, name ASC
+        )',
+        [ [ name => 'root' ] ],
+      );
+
+      is_deeply (
+        [ $rs->get_column ('name')->all ],
+        [ qw/root child1 child2 grandchild greatgrandchild/ ],
+        'Connect By with a order_by - result name ok'
+      );
+    }
+
+
+    # limit a connect by
+    {
+      my $rs = $schema->resultset('Artist')->search({}, {
+        start_with => { name => 'root' },
+        connect_by => { parentid => { -prior => \ 'artistid' } },
+        order_by => { -asc => 'name' },
+        rows => 2,
+      });
+
+      is_same_sql_bind (
+        $rs->as_query,
+        '( 
+            SELECT artistid, name, rank, charfield, parentid FROM (
+                  SELECT artistid, name, rank, charfield, parentid, ROWNUM rownum__index FROM (
+                      SELECT 
+                          me.artistid,
+                          me.name,
+                          me.rank,
+                          me.charfield,
+                          me.parentid 
+                      FROM artist me 
+                      START WITH name = ? 
+                      CONNECT BY parentid = PRIOR artistid
+                      ORDER BY name ASC 
+                  ) me 
+            ) me
+            WHERE rownum__index BETWEEN 1 AND 2
+        )',
+        [ [ name => 'root' ] ],
+      );
+
+      is_deeply (
+        [ $rs->get_column ('name')->all ],
+        [qw/child1 child2/],
+        'LIMIT a Connect By query - correct names'
+      );
+
+      # TODO: 
+      # prints "START WITH name = ? 
+      # CONNECT BY artistid = PRIOR parentid "
+      # after count_subq, 
+      # I will fix this later...
+      # 
+      is_same_sql_bind (
+        $rs->count_rs->as_query,
+        '( 
+            SELECT COUNT( * ) FROM (
+                SELECT artistid FROM (
+                    SELECT artistid, ROWNUM rownum__index FROM (
+                        SELECT 
+                            me.artistid
+                        FROM artist me 
+                        START WITH name = ? 
+                        CONNECT BY parentid = PRIOR artistid
+                    ) me
+                ) me 
+                WHERE rownum__index BETWEEN 1 AND 2
+            ) me
+        )',
+        [ [ name => 'root' ] ],
+      );
+
+      is( $rs->count, 2, 'Connect By; LIMIT count ok' );
+    }
+
+    # combine a connect_by with group_by and having
+    {
+      my $rs = $schema->resultset('Artist')->search({}, {
+        select => ['count(rank)'],
+        start_with => { name => 'root' },
+        connect_by => { parentid => { -prior => \ 'artistid' } },
+        group_by => ['rank'],
+        having => { 'count(rank)' => { '<', 2 } },
+      });
+
+      is_same_sql_bind (
+        $rs->as_query,
+        '(
+            SELECT count(rank)
+            FROM artist me
+            START WITH name = ?
+            CONNECT BY parentid = PRIOR artistid
+            GROUP BY rank HAVING count(rank) < ?
+        )',
+        [ [ name => 'root' ], [ 'count(rank)' => 2 ] ],
+      );
+
+      is_deeply (
+        [ $rs->get_column ('count(rank)')->all ],
+        [1, 1],
+        'Group By a Connect By query - correct values'
+      );
+    }
+
+
+    # select the whole cycle tree without nocycle
+    {
+      my $rs = $schema->resultset('Artist')->search({}, {
+        start_with => { name => 'cycle-root' },
+        connect_by => { parentid => { -prior => \ 'artistid' } },
+      });
+      eval { $rs->get_column ('name')->all };
+      if ( $@ =~ /ORA-01436/ ){ # ORA-01436:  CONNECT BY loop in user data
+        pass "connect by infinite loop detection without nocycle";
+      }else{
+        fail "connect by infinite loop detection without nocycle, not detected by oracle";
+      }
+    }
+
+    # select the whole cycle tree with nocycle
+    {
+      my $rs = $schema->resultset('Artist')->search({}, {
+        start_with => { name => 'cycle-root' },
+        '+select'  => [ \ 'CONNECT_BY_ISCYCLE' ],
+        connect_by_nocycle => { parentid => { -prior => \ 'artistid' } },
+      });
+
+      is_same_sql_bind (
+        $rs->as_query,
+        '(
+          SELECT me.artistid, me.name, me.rank, me.charfield, me.parentid, CONNECT_BY_ISCYCLE
+            FROM artist me
+          START WITH name = ?
+          CONNECT BY NOCYCLE parentid = PRIOR artistid 
+        )',
+        [ [ name => 'cycle-root'] ],
+      );
+      is_deeply (
+        [ $rs->get_column ('name')->all ],
+        [ qw/cycle-root cycle-child1 cycle-grandchild cycle-child2/ ],
+        'got artist tree with nocycle (name)',
+      );
+      is_deeply (
+        [ $rs->get_column ('CONNECT_BY_ISCYCLE')->all ],
+        [ qw/1 0 0 0/ ],
+        'got artist tree with nocycle (CONNECT_BY_ISCYCLE)',
+      );
+
+
+      is_same_sql_bind (
+        $rs->count_rs->as_query,
+        '(
+          SELECT COUNT( * )
+            FROM artist me
+          START WITH name = ?
+          CONNECT BY NOCYCLE parentid = PRIOR artistid 
+        )',
+        [ [ name => 'cycle-root'] ],
+      );
+
+      is( $rs->count, 4, 'Connect By Nocycle count ok' );
+    }
+}
+
 done_testing;
 
 # clean up our mess
@@ -291,6 +724,7 @@
     if($schema && ($dbh = $schema->storage->dbh)) {
         $dbh->do("DROP SEQUENCE artist_seq");
         $dbh->do("DROP SEQUENCE cd_seq");
+        $dbh->do("DROP SEQUENCE track_seq");
         $dbh->do("DROP SEQUENCE pkid1_seq");
         $dbh->do("DROP SEQUENCE pkid2_seq");
         $dbh->do("DROP SEQUENCE nonpkid_seq");

Modified: DBIx-Class/0.08/branches/extended_rels/t/746mssql.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/746mssql.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/746mssql.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -33,6 +33,12 @@
   ok (! $schema2->storage->connected, 'a re-connected cloned schema starts unconnected');
 }
 
+$schema->storage->_dbh->disconnect;
+
+lives_ok {
+  $schema->storage->dbh_do(sub { $_[1]->do('select 1') })
+} '_ping works';
+
 $schema->storage->dbh_do (sub {
     my ($storage, $dbh) = @_;
     eval { $dbh->do("DROP TABLE artist") };
@@ -53,8 +59,6 @@
   { on_connect_call => 'use_dynamic_cursors' },
   {},
 );
-my $new;
-
 # test Auto-PK with different options
 for my $opts (@opts) {
   SKIP: {
@@ -71,384 +75,412 @@
 
     $schema->resultset('Artist')->search({ name => 'foo' })->delete;
 
-    $new = $schema->resultset('Artist')->create({ name => 'foo' });
+    my $new = $schema->resultset('Artist')->create({ name => 'foo' });
 
     ok($new->artistid > 0, "Auto-PK worked");
   }
 }
 
-$seen_id{$new->artistid}++;
 
-# test LIMIT support
-for (1..6) {
-    $new = $schema->resultset('Artist')->create({ name => 'Artist ' . $_ });
-    is ( $seen_id{$new->artistid}, undef, "id for Artist $_ is unique" );
-    $seen_id{$new->artistid}++;
-}
 
-my $it = $schema->resultset('Artist')->search( {}, {
-    rows => 3,
-    order_by => 'artistid',
-});
+# Test populate
 
-is( $it->count, 3, "LIMIT count ok" );
-is( $it->next->name, "foo", "iterator->next ok" );
-$it->next;
-is( $it->next->name, "Artist 2", "iterator->next ok" );
-is( $it->next, undef, "next past end of resultset ok" );
-
-# test GUID columns
-
-$schema->storage->dbh_do (sub {
+{
+  $schema->storage->dbh_do (sub {
     my ($storage, $dbh) = @_;
-    eval { $dbh->do("DROP TABLE artist") };
+    eval { $dbh->do("DROP TABLE owners") };
+    eval { $dbh->do("DROP TABLE books") };
     $dbh->do(<<'SQL');
-CREATE TABLE artist (
-   artistid UNIQUEIDENTIFIER NOT NULL,
+CREATE TABLE books (
+   id INT IDENTITY (1, 1) NOT NULL,
+   source VARCHAR(100),
+   owner INT,
+   title VARCHAR(10),
+   price INT NULL
+)
+
+CREATE TABLE owners (
+   id INT IDENTITY (1, 1) NOT NULL,
    name VARCHAR(100),
-   rank INT NOT NULL DEFAULT '13',
-   charfield CHAR(10) NULL,
-   a_guid UNIQUEIDENTIFIER,
-   primary key(artistid)
 )
 SQL
-});
 
-# start disconnected to make sure insert works on an un-reblessed storage
-$schema = DBICTest::Schema->connect($dsn, $user, $pass);
+  });
 
-my $row;
-lives_ok {
-  $row = $schema->resultset('ArtistGUID')->create({ name => 'mtfnpy' })
-} 'created a row with a GUID';
+  lives_ok ( sub {
+    # start a new connection, make sure rebless works
+    my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
+    $schema->populate ('Owners', [
+      [qw/id  name  /],
+      [qw/1   wiggle/],
+      [qw/2   woggle/],
+      [qw/3   boggle/],
+      [qw/4   fRIOUX/],
+      [qw/5   fRUE/],
+      [qw/6   fREW/],
+      [qw/7   fROOH/],
+      [qw/8   fISMBoC/],
+      [qw/9   station/],
+      [qw/10   mirror/],
+      [qw/11   dimly/],
+      [qw/12   face_to_face/],
+      [qw/13   icarus/],
+      [qw/14   dream/],
+      [qw/15   dyrstyggyr/],
+    ]);
+  }, 'populate with PKs supplied ok' );
 
-ok(
-  eval { $row->artistid },
-  'row has GUID PK col populated',
-);
-diag $@ if $@;
 
-ok(
-  eval { $row->a_guid },
-  'row has a GUID col with auto_nextval populated',
-);
-diag $@ if $@;
+  lives_ok (sub {
+    # start a new connection, make sure rebless works
+    # test an insert with a supplied identity, followed by one without
+    my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
+    for (2, 1) {
+      my $id = $_ * 20 ;
+      $schema->resultset ('Owners')->create ({ id => $id, name => "troglodoogle $id" });
+      $schema->resultset ('Owners')->create ({ name => "troglodoogle " . ($id + 1) });
+    }
+  }, 'create with/without PKs ok' );
 
-my $row_from_db = $schema->resultset('ArtistGUID')
-  ->search({ name => 'mtfnpy' })->first;
+  is ($schema->resultset ('Owners')->count, 19, 'owner rows really in db' );
 
-is $row_from_db->artistid, $row->artistid,
-  'PK GUID round trip';
+  lives_ok ( sub {
+    # start a new connection, make sure rebless works
+    my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
+    $schema->populate ('BooksInLibrary', [
+      [qw/source  owner title   /],
+      [qw/Library 1     secrets0/],
+      [qw/Library 1     secrets1/],
+      [qw/Eatery  1     secrets2/],
+      [qw/Library 2     secrets3/],
+      [qw/Library 3     secrets4/],
+      [qw/Eatery  3     secrets5/],
+      [qw/Library 4     secrets6/],
+      [qw/Library 5     secrets7/],
+      [qw/Eatery  5     secrets8/],
+      [qw/Library 6     secrets9/],
+      [qw/Library 7     secrets10/],
+      [qw/Eatery  7     secrets11/],
+      [qw/Library 8     secrets12/],
+    ]);
+  }, 'populate without PKs supplied ok' );
+}
 
-is $row_from_db->a_guid, $row->a_guid,
-  'NON-PK GUID round trip';
+# test simple, complex LIMIT and limited prefetch support, with both dialects and quote combinations (if possible)
+for my $dialect (
+  'Top',
+  ($schema->storage->_server_info->{normalized_dbms_version} || 0 ) >= 9
+    ? ('RowNumberOver')
+    : ()
+  ,
+) {
+  for my $quoted (0, 1) {
 
-# test MONEY type
-$schema->storage->dbh_do (sub {
-    my ($storage, $dbh) = @_;
-    eval { $dbh->do("DROP TABLE money_test") };
-    $dbh->do(<<'SQL');
-CREATE TABLE money_test (
-   id INT IDENTITY PRIMARY KEY,
-   amount MONEY NULL
-)
-SQL
-});
+    $schema = DBICTest::Schema->connect($dsn, $user, $pass, {
+        limit_dialect => $dialect,
+        $quoted
+          ? ( quote_char => [ qw/ [ ] / ], name_sep => '.' )
+          : ()
+        ,
+      });
 
-my $rs = $schema->resultset('Money');
+    my $test_type = "Dialect:$dialect Quoted:$quoted";
 
-lives_ok {
-  $row = $rs->create({ amount => 100 });
-} 'inserted a money value';
+    # basic limit support
+    TODO: {
+      my $art_rs = $schema->resultset ('Artist');
+      $art_rs->delete;
+      $art_rs->create({ name => 'Artist ' . $_ }) for (1..6);
 
-cmp_ok $rs->find($row->id)->amount, '==', 100, 'money value round-trip';
+      my $it = $schema->resultset('Artist')->search( {}, {
+        rows => 4,
+        offset => 3,
+        order_by => 'artistid',
+      });
 
-lives_ok {
-  $row->update({ amount => 200 });
-} 'updated a money value';
+      is( $it->count, 3, "$test_type: LIMIT count ok" );
 
-cmp_ok $rs->find($row->id)->amount, '==', 200,
-  'updated money value round-trip';
+      local $TODO = "Top-limit does not work when your limit ends up past the resultset"
+        if $dialect eq 'Top';
 
-lives_ok {
-  $row->update({ amount => undef });
-} 'updated a money value to NULL';
+      is( $it->next->name, 'Artist 4', "$test_type: iterator->next ok" );
+      $it->next;
+      is( $it->next->name, 'Artist 6', "$test_type: iterator->next ok" );
+      is( $it->next, undef, "$test_type: next past end of resultset ok" );
+    }
 
-is $rs->find($row->id)->amount, undef,'updated money value to NULL round-trip';
+    # plain ordered subqueries throw
+    throws_ok (sub {
+      $schema->resultset('Owners')->search ({}, { order_by => 'name' })->as_query
+    }, qr/ordered subselect encountered/, "$test_type: Ordered Subselect detection throws ok");
 
-$schema->storage->dbh_do (sub {
-    my ($storage, $dbh) = @_;
-    eval { $dbh->do("DROP TABLE owners") };
-    eval { $dbh->do("DROP TABLE books") };
-    $dbh->do(<<'SQL');
-CREATE TABLE books (
-   id INT IDENTITY (1, 1) NOT NULL,
-   source VARCHAR(100),
-   owner INT,
-   title VARCHAR(10),
-   price INT NULL
-)
+    # make sure ordered subselects *somewhat* work
+    {
+      my $owners = $schema->resultset ('Owners')->search ({}, { order_by => 'name', offset => 2, rows => 3, unsafe_subselect_ok => 1 });
+      my $sealed_owners = $owners->as_subselect_rs;
 
-CREATE TABLE owners (
-   id INT IDENTITY (1, 1) NOT NULL,
-   name VARCHAR(100),
-)
-SQL
+      is_deeply (
+        [ map { $_->name } ($sealed_owners->all) ],
+        [ map { $_->name } ($owners->all) ],
+        "$test_type: Sort preserved from within a subquery",
+      );
+    }
 
-});
+    {
+      my $book_owner_ids = $schema->resultset ('BooksInLibrary')->search ({}, {
+        rows => 6,
+        offset => 2,
+        join => 'owner',
+        distinct => 1,
+        order_by => 'owner.name',
+        unsafe_subselect_ok => 1
+      })->get_column ('owner');
 
-lives_ok ( sub {
-  # start a new connection, make sure rebless works
-  my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
-  $schema->populate ('Owners', [
-    [qw/id  name  /],
-    [qw/1   wiggle/],
-    [qw/2   woggle/],
-    [qw/3   boggle/],
-    [qw/4   fRIOUX/],
-    [qw/5   fRUE/],
-    [qw/6   fREW/],
-    [qw/7   fROOH/],
-    [qw/8   fISMBoC/],
-    [qw/9   station/],
-    [qw/10   mirror/],
-    [qw/11   dimly/],
-    [qw/12   face_to_face/],
-    [qw/13   icarus/],
-    [qw/14   dream/],
-    [qw/15   dyrstyggyr/],
-  ]);
-}, 'populate with PKs supplied ok' );
+      my @ids = $book_owner_ids->all;
 
+      is (@ids, 6, 'Limit works');
 
-lives_ok (sub {
-  # start a new connection, make sure rebless works
-  # test an insert with a supplied identity, followed by one without
-  my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
-  for (2, 1) {
-    my $id = $_ * 20 ;
-    $schema->resultset ('Owners')->create ({ id => $id, name => "troglodoogle $id" });
-    $schema->resultset ('Owners')->create ({ name => "troglodoogle " . ($id + 1) });
-  }
-}, 'create with/without PKs ok' );
+      my $book_owners = $schema->resultset ('Owners')->search ({
+        id => { -in => $book_owner_ids->as_query }
+      });
 
-is ($schema->resultset ('Owners')->count, 19, 'owner rows really in db' );
+      TODO: {
+        local $TODO = "Correlated limited IN subqueries will probably never preserve order";
 
-lives_ok ( sub {
-  # start a new connection, make sure rebless works
-  my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
-  $schema->populate ('BooksInLibrary', [
-    [qw/source  owner title   /],
-    [qw/Library 1     secrets0/],
-    [qw/Library 1     secrets1/],
-    [qw/Eatery  1     secrets2/],
-    [qw/Library 2     secrets3/],
-    [qw/Library 3     secrets4/],
-    [qw/Eatery  3     secrets5/],
-    [qw/Library 4     secrets6/],
-    [qw/Library 5     secrets7/],
-    [qw/Eatery  5     secrets8/],
-    [qw/Library 6     secrets9/],
-    [qw/Library 7     secrets10/],
-    [qw/Eatery  7     secrets11/],
-    [qw/Library 8     secrets12/],
-  ]);
-}, 'populate without PKs supplied ok' );
+        is_deeply (
+          [ map { $_->id } ($book_owners->all) ],
+          [ $book_owner_ids->all ],
+          "$test_type: Sort is preserved across IN subqueries",
+        );
+      }
+    }
 
-# plain ordered subqueries throw
-throws_ok (sub {
-  $schema->resultset('Owners')->search ({}, { order_by => 'name' })->as_query
-}, qr/ordered subselect encountered/, 'Ordered Subselect detection throws ok');
+    # still even with lost order of IN, we should be getting correct
+    # sets
+    {
+      my $owners = $schema->resultset ('Owners')->search ({}, { order_by => 'name', offset => 2, rows => 3, unsafe_subselect_ok => 1 });
+      my $corelated_owners = $owners->result_source->resultset->search (
+        {
+          id => { -in => $owners->get_column('id')->as_query },
+        },
+        {
+          order_by => 'name' #reorder because of what is shown above
+        },
+      );
 
-# make sure ordered subselects *somewhat* work
-{
-  my $owners = $schema->resultset ('Owners')->search ({}, { order_by => 'name', offset => 2, rows => 3, unsafe_subselect_ok => 1 });
+      is (
+        join ("\x00", map { $_->name } ($corelated_owners->all) ),
+        join ("\x00", map { $_->name } ($owners->all) ),
+        "$test_type: With an outer order_by, everything still matches",
+      );
+    }
 
-  my $al = $owners->current_source_alias;
-  my $sealed_owners = $owners->result_source->resultset->search (
-    {},
+    # make sure right-join-side single-prefetch ordering limit works
     {
-      alias => $al,
-      from => [{
-        -alias => $al,
-        -source_handle => $owners->result_source->handle,
-        $al => $owners->as_query,
-      }],
-    },
-  );
+      my $rs = $schema->resultset ('BooksInLibrary')->search (
+        {
+          'owner.name' => { '!=', 'woggle' },
+        },
+        {
+          prefetch => 'owner',
+          order_by => 'owner.name',
+        }
+      );
+      # this is the order in which they should come from the above query
+      my @owner_names = qw/boggle fISMBoC fREW fRIOUX fROOH fRUE wiggle wiggle/;
 
-  is_deeply (
-    [ map { $_->name } ($sealed_owners->all) ],
-    [ map { $_->name } ($owners->all) ],
-    'Sort preserved from within a subquery',
-  );
-}
+      is ($rs->all, 8, "$test_type: Correct amount of objects from right-sorted joined resultset");
+      is_deeply (
+        [map { $_->owner->name } ($rs->all) ],
+        \@owner_names,
+        "$test_type: Prefetched rows were properly ordered"
+      );
 
-TODO: {
-  local $TODO = "This porbably will never work, but it isn't critical either afaik";
+      my $limited_rs = $rs->search ({}, {rows => 6, offset => 2, unsafe_subselect_ok => 1});
+      is ($limited_rs->count, 6, "$test_type: Correct count of limited right-sorted joined resultset");
+      is ($limited_rs->count_rs->next, 6, "$test_type: Correct count_rs of limited right-sorted joined resultset");
 
-  my $book_owner_ids = $schema->resultset ('BooksInLibrary')
-                               ->search ({}, { join => 'owner', distinct => 1, order_by => 'owner.name', unsafe_subselect_ok => 1 })
-                                ->get_column ('owner');
+      my $queries;
+      my $orig_debug = $schema->storage->debug;
+      $schema->storage->debugcb(sub { $queries++; });
+      $schema->storage->debug(1);
 
-  my $book_owners = $schema->resultset ('Owners')->search ({
-    id => { -in => $book_owner_ids->as_query }
-  });
+      is_deeply (
+        [map { $_->owner->name } ($limited_rs->all) ],
+        [@owner_names[2 .. 7]],
+        "$test_type: Prefetch-limited rows were properly ordered"
+      );
+      is ($queries, 1, "$test_type: Only one query with prefetch");
 
-  is_deeply (
-    [ map { $_->id } ($book_owners->all) ],
-    [ $book_owner_ids->all ],
-    'Sort is preserved across IN subqueries',
-  );
-}
+      $schema->storage->debugcb(undef);
+      $schema->storage->debug($orig_debug);
 
-# This is known not to work - thus the negative test
-{
-  my $owners = $schema->resultset ('Owners')->search ({}, { order_by => 'name', offset => 2, rows => 3, unsafe_subselect_ok => 1 });
-  my $corelated_owners = $owners->result_source->resultset->search (
-    {
-      id => { -in => $owners->get_column('id')->as_query },
-    },
-    {
-      order_by => 'name' #reorder because of what is shown above
-    },
-  );
+      is_deeply (
+        [map { $_->name } ($limited_rs->search_related ('owner')->all) ],
+        [@owner_names[2 .. 7]],
+        "$test_type: Rows are still properly ordered after search_related",
+      );
+    }
 
-  cmp_ok (
-    join ("\x00", map { $_->name } ($corelated_owners->all) ),
-      'ne',
-    join ("\x00", map { $_->name } ($owners->all) ),
-    'Sadly sort not preserved from within a corelated subquery',
-  );
+    # try a ->has_many direction with duplicates
+    my $owners = $schema->resultset ('Owners')->search (
+      {
+        'books.id' => { '!=', undef },
+        'me.name' => { '!=', 'somebogusstring' },
+      },
+      {
+        prefetch => 'books',
+        order_by => { -asc => \['name + ?', [ test => 'xxx' ]] }, # test bindvar propagation
+        rows     => 3,  # 8 results total
+        unsafe_subselect_ok => 1,
+      },
+    );
 
-  cmp_ok (
-    join ("\x00", sort map { $_->name } ($corelated_owners->all) ),
-      'ne',
-    join ("\x00", sort map { $_->name } ($owners->all) ),
-    'Which in fact gives a completely wrong dataset',
-  );
-}
+    my ($sql, @bind) = @${$owners->page(3)->as_query};
+    is_deeply (
+      \@bind,
+      [
+        $dialect eq 'Top' ? [ test => 'xxx' ] : (),                 # the extra re-order bind
+        ([ 'me.name' => 'somebogusstring' ], [ test => 'xxx' ]) x 2 # double because of the prefetch subq
+      ],
+    );
 
+    is ($owners->page(1)->all, 3, "$test_type: has_many prefetch returns correct number of rows");
+    is ($owners->page(1)->count, 3, "$test_type: has-many prefetch returns correct count");
 
-# make sure right-join-side single-prefetch ordering limit works
-{
-  my $rs = $schema->resultset ('BooksInLibrary')->search (
-    {
-      'owner.name' => { '!=', 'woggle' },
-    },
-    {
-      prefetch => 'owner',
-      order_by => 'owner.name',
+    is ($owners->page(3)->count, 2, "$test_type: has-many prefetch returns correct count");
+    TODO: {
+      local $TODO = "Top-limit does not work when your limit ends up past the resultset"
+        if $dialect eq 'Top';
+      is ($owners->page(3)->all, 2, "$test_type: has_many prefetch returns correct number of rows");
+      is ($owners->page(3)->count_rs->next, 2, "$test_type: has-many prefetch returns correct count_rs");
     }
-  );
-  # this is the order in which they should come from the above query
-  my @owner_names = qw/boggle fISMBoC fREW fRIOUX fROOH fRUE wiggle wiggle/;
 
-  is ($rs->all, 8, 'Correct amount of objects from right-sorted joined resultset');
-  is_deeply (
-    [map { $_->owner->name } ($rs->all) ],
-    \@owner_names,
-    'Rows were properly ordered'
-  );
 
-  my $limited_rs = $rs->search ({}, {rows => 7, offset => 2, unsafe_subselect_ok => 1});
-  is ($limited_rs->count, 6, 'Correct count of limited right-sorted joined resultset');
-  is ($limited_rs->count_rs->next, 6, 'Correct count_rs of limited right-sorted joined resultset');
+    # try a ->belongs_to direction (no select collapse, group_by should work)
+    my $books = $schema->resultset ('BooksInLibrary')->search (
+      {
+        'owner.name' => [qw/wiggle woggle/],
+      },
+      {
+        distinct => 1,
+        having => \['1 = ?', [ test => 1 ] ], #test having propagation
+        prefetch => 'owner',
+        rows     => 2,  # 3 results total
+        order_by => { -desc => 'me.owner' },
+        unsafe_subselect_ok => 1,
+      },
+    );
 
-  my $queries;
-  $schema->storage->debugcb(sub { $queries++; });
-  $schema->storage->debug(1);
+    ($sql, @bind) = @${$books->page(3)->as_query};
+    is_deeply (
+      \@bind,
+      [
+        # inner
+        [ 'owner.name' => 'wiggle' ], [ 'owner.name' => 'woggle' ], [ source => 'Library' ], [ test => '1' ],
+        # outer
+        [ 'owner.name' => 'wiggle' ], [ 'owner.name' => 'woggle' ], [ source => 'Library' ],
+      ],
+    );
 
-  is_deeply (
-    [map { $_->owner->name } ($limited_rs->all) ],
-    [@owner_names[2 .. 7]],
-    'Limited rows were properly ordered'
-  );
-  is ($queries, 1, 'Only one query with prefetch');
+    is ($books->page(1)->all, 2, "$test_type: Prefetched grouped search returns correct number of rows");
+    is ($books->page(1)->count, 2, "$test_type: Prefetched grouped search returns correct count");
 
-  $schema->storage->debugcb(undef);
-  $schema->storage->debug(0);
+    is ($books->page(2)->count, 1, "$test_type: Prefetched grouped search returns correct count");
+    TODO: {
+      local $TODO = "Top-limit does not work when your limit ends up past the resultset"
+        if $dialect eq 'Top';
+      is ($books->page(2)->all, 1, "$test_type: Prefetched grouped search returns correct number of rows");
+      is ($books->page(2)->count_rs->next, 1, "$test_type: Prefetched grouped search returns correct count_rs");
+    }
+  }
+}
 
 
-  is_deeply (
-    [map { $_->name } ($limited_rs->search_related ('owner')->all) ],
-    [@owner_names[2 .. 7]],
-    'Rows are still properly ordered after search_related'
+# test GUID columns
+{
+  $schema->storage->dbh_do (sub {
+    my ($storage, $dbh) = @_;
+    eval { $dbh->do("DROP TABLE artist") };
+    $dbh->do(<<'SQL');
+CREATE TABLE artist (
+   artistid UNIQUEIDENTIFIER NOT NULL,
+   name VARCHAR(100),
+   rank INT NOT NULL DEFAULT '13',
+   charfield CHAR(10) NULL,
+   a_guid UNIQUEIDENTIFIER,
+   primary key(artistid)
+)
+SQL
+  });
+
+  # start disconnected to make sure insert works on an un-reblessed storage
+  $schema = DBICTest::Schema->connect($dsn, $user, $pass);
+
+  my $row;
+  lives_ok {
+    $row = $schema->resultset('ArtistGUID')->create({ name => 'mtfnpy' })
+  } 'created a row with a GUID';
+
+  ok(
+    eval { $row->artistid },
+    'row has GUID PK col populated',
   );
-}
+  diag $@ if $@;
 
+  ok(
+    eval { $row->a_guid },
+    'row has a GUID col with auto_nextval populated',
+  );
+  diag $@ if $@;
 
-#
-# try a prefetch on tables with identically named columns
-#
+  my $row_from_db = $schema->resultset('ArtistGUID')
+    ->search({ name => 'mtfnpy' })->first;
 
-# set quote char - make sure things work while quoted
-$schema->storage->_sql_maker->{quote_char} = [qw/[ ]/];
-$schema->storage->_sql_maker->{name_sep} = '.';
+  is $row_from_db->artistid, $row->artistid,
+    'PK GUID round trip';
 
+  is $row_from_db->a_guid, $row->a_guid,
+    'NON-PK GUID round trip';
+}
+
+# test MONEY type
 {
-  # try a ->has_many direction
-  my $owners = $schema->resultset ('Owners')->search (
-    {
-      'books.id' => { '!=', undef },
-      'me.name' => { '!=', 'somebogusstring' },
-    },
-    {
-      prefetch => 'books',
-      order_by => { -asc => \['name + ?', [ test => 'xxx' ]] }, # test bindvar propagation
-      rows     => 3,  # 8 results total
-      unsafe_subselect_ok => 1,
-    },
-  );
+  $schema->storage->dbh_do (sub {
+    my ($storage, $dbh) = @_;
+    eval { $dbh->do("DROP TABLE money_test") };
+    $dbh->do(<<'SQL');
+CREATE TABLE money_test (
+   id INT IDENTITY PRIMARY KEY,
+   amount MONEY NULL
+)
+SQL
+  });
 
-  my ($sql, @bind) = @${$owners->page(3)->as_query};
-  is_deeply (
-    \@bind,
-    [ ([ 'me.name' => 'somebogusstring' ], [ test => 'xxx' ]) x 2 ],  # double because of the prefetch subq
-  );
+  my $rs = $schema->resultset('Money');
+  my $row;
 
-  is ($owners->page(1)->all, 3, 'has_many prefetch returns correct number of rows');
-  is ($owners->page(1)->count, 3, 'has-many prefetch returns correct count');
+  lives_ok {
+    $row = $rs->create({ amount => 100 });
+  } 'inserted a money value';
 
-  is ($owners->page(3)->all, 2, 'has_many prefetch returns correct number of rows');
-  is ($owners->page(3)->count, 2, 'has-many prefetch returns correct count');
-  is ($owners->page(3)->count_rs->next, 2, 'has-many prefetch returns correct count_rs');
+  cmp_ok $rs->find($row->id)->amount, '==', 100, 'money value round-trip';
 
+  lives_ok {
+    $row->update({ amount => 200 });
+  } 'updated a money value';
 
-  # try a ->belongs_to direction (no select collapse, group_by should work)
-  my $books = $schema->resultset ('BooksInLibrary')->search (
-    {
-      'owner.name' => [qw/wiggle woggle/],
-    },
-    {
-      distinct => 1,
-      having => \['1 = ?', [ test => 1 ] ], #test having propagation
-      prefetch => 'owner',
-      rows     => 2,  # 3 results total
-      order_by => { -desc => 'me.owner' },
-      unsafe_subselect_ok => 1,
-    },
-  );
+  cmp_ok $rs->find($row->id)->amount, '==', 200,
+    'updated money value round-trip';
 
-  ($sql, @bind) = @${$books->page(3)->as_query};
-  is_deeply (
-    \@bind,
-    [
-      # inner
-      [ 'owner.name' => 'wiggle' ], [ 'owner.name' => 'woggle' ], [ source => 'Library' ], [ test => '1' ],
-      # outer
-      [ 'owner.name' => 'wiggle' ], [ 'owner.name' => 'woggle' ], [ source => 'Library' ],
-    ],
-  );
+  lives_ok {
+    $row->update({ amount => undef });
+  } 'updated a money value to NULL';
 
-  is ($books->page(1)->all, 2, 'Prefetched grouped search returns correct number of rows');
-  is ($books->page(1)->count, 2, 'Prefetched grouped search returns correct count');
-
-  is ($books->page(2)->all, 1, 'Prefetched grouped search returns correct number of rows');
-  is ($books->page(2)->count, 1, 'Prefetched grouped search returns correct count');
-  is ($books->page(2)->count_rs->next, 1, 'Prefetched grouped search returns correct count_rs');
+  is $rs->find($row->id)->amount, undef,'updated money value to NULL round-trip';
 }
 
+
 done_testing;
 
 # clean up our mess

Modified: DBIx-Class/0.08/branches/extended_rels/t/746sybase.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/746sybase.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/746sybase.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -569,12 +569,10 @@
     $row->update({ amount => undef });
   } 'updated a money value to NULL';
 
-  my $null_amount = eval { $rs->find($row->id)->amount };
-  ok(
-    (($null_amount == undef) && (not $@)),
-    'updated money value to NULL round-trip'
-  );
-  diag $@ if $@;
+  lives_and {
+    my $null_amount = $rs->find($row->id)->amount;
+    is $null_amount, undef;
+  } 'updated money value to NULL round-trip';
 
 # Test computed columns and timestamps
   $schema->storage->dbh_do (sub {

Modified: DBIx-Class/0.08/branches/extended_rels/t/748informix.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/748informix.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/748informix.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -11,15 +11,39 @@
 #warn "$dsn $user $pass";
 
 plan skip_all => 'Set $ENV{DBICTEST_INFORMIX_DSN}, _USER and _PASS to run this test'
-  unless ($dsn && $user);
+  unless $dsn;
 
-my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
+my $schema = DBICTest::Schema->connect($dsn, $user, $pass, {
+  auto_savepoint => 1
+});
 
 my $dbh = $schema->storage->dbh;
 
 eval { $dbh->do("DROP TABLE artist") };
-
 $dbh->do("CREATE TABLE artist (artistid SERIAL, name VARCHAR(255), charfield CHAR(10), rank INTEGER DEFAULT 13);");
+eval { $dbh->do("DROP TABLE cd") };
+$dbh->do(<<EOS);
+CREATE TABLE cd (
+  cdid int PRIMARY KEY,
+  artist int,
+  title varchar(255),
+  year varchar(4),
+  genreid int,
+  single_track int
+)
+EOS
+eval { $dbh->do("DROP TABLE track") };
+$dbh->do(<<EOS);
+CREATE TABLE track (
+  trackid int,
+  cd int REFERENCES cd(cdid),
+  position int,
+  title varchar(255),
+  last_updated_on date,
+  last_updated_at date,
+  small_dt date
+)
+EOS
 
 my $ars = $schema->resultset('Artist');
 is ( $ars->count, 0, 'No rows at first' );
@@ -72,7 +96,47 @@
 is( $lim->next->artistid, 102, "iterator->next ok" );
 is( $lim->next, undef, "next past end of resultset ok" );
 
+# test savepoints
+throws_ok {
+  $schema->txn_do(sub {
+    eval {
+      $schema->txn_do(sub {
+        $ars->create({ name => 'in_savepoint' });
+        die "rolling back savepoint";
+      });
+    };
+    ok ((not $ars->search({ name => 'in_savepoint' })->first),
+      'savepoint rolled back');
+    $ars->create({ name => 'in_outer_txn' });
+    die "rolling back outer txn";
+  });
+} qr/rolling back outer txn/,
+  'correct exception for rollback';
 
+ok ((not $ars->search({ name => 'in_outer_txn' })->first),
+  'outer txn rolled back');
+
+######## test with_deferred_fk_checks
+lives_ok {
+  $schema->storage->with_deferred_fk_checks(sub {
+    $schema->resultset('Track')->create({
+      trackid => 999, cd => 999, position => 1, title => 'deferred FK track'
+    });
+    $schema->resultset('CD')->create({
+      artist => 1, cdid => 999, year => '2003', title => 'deferred FK cd'
+    });
+  });
+} 'with_deferred_fk_checks code survived';
+
+is eval { $schema->resultset('Track')->find(999)->title }, 'deferred FK track',
+ 'code in with_deferred_fk_checks worked'; 
+
+throws_ok {
+  $schema->resultset('Track')->create({
+    trackid => 1, cd => 9999, position => 1, title => 'Track1'
+  });
+} qr/constraint/i, 'with_deferred_fk_checks is off';
+
 done_testing;
 
 # clean up our mess

Modified: DBIx-Class/0.08/branches/extended_rels/t/749sybase_asa.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/749sybase_asa.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/749sybase_asa.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -3,9 +3,12 @@
 
 use Test::More;
 use Test::Exception;
+use Scope::Guard ();
 use lib qw(t/lib);
 use DBICTest;
 
+DBICTest::Schema->load_classes('ArtistGUID');
+
 # tests stolen from 748informix.t
 
 my ($dsn, $user, $pass)    = @ENV{map { "DBICTEST_SYBASE_ASA_${_}" }      qw/DSN USER PASS/};
@@ -21,21 +24,21 @@
   [ $dsn2, $user2, $pass2 ],
 );
 
-my @handles_to_clean;
+my $schema;
 
 foreach my $info (@info) {
   my ($dsn, $user, $pass) = @$info;
 
   next unless $dsn;
 
-  my $schema = DBICTest::Schema->connect($dsn, $user, $pass, {
+  $schema = DBICTest::Schema->connect($dsn, $user, $pass, {
     auto_savepoint => 1
   });
 
+  my $guard = Scope::Guard->new(\&cleanup);
+
   my $dbh = $schema->storage->dbh;
 
-  push @handles_to_clean, $dbh;
-
   eval { $dbh->do("DROP TABLE artist") };
 
   $dbh->do(<<EOF);
@@ -61,7 +64,7 @@
   is($new->artistid, 66, 'Explicit PK assigned');
 
 # test savepoints
-  eval {
+  throws_ok {
     $schema->txn_do(sub {
       eval {
         $schema->txn_do(sub {
@@ -74,9 +77,7 @@
       $ars->create({ name => 'in_outer_txn' });
       die "rolling back outer txn";
     });
-  };
-
-  like $@, qr/rolling back outer txn/,
+  } qr/rolling back outer txn/,
     'correct exception for rollback';
 
   ok ((not $ars->search({ name => 'in_outer_txn' })->first),
@@ -162,13 +163,62 @@
       ok($rs->find($id)->$type eq $binstr{$size}, "verified inserted $size $type" );
     }
   }
+ 
+  my @uuid_types = qw/uniqueidentifier uniqueidentifierstr/;
+
+# test uniqueidentifiers
+  for my $uuid_type (@uuid_types) {
+    local $schema->source('ArtistGUID')->column_info('artistid')->{data_type}
+      = $uuid_type;
+
+    local $schema->source('ArtistGUID')->column_info('a_guid')->{data_type}
+      = $uuid_type;
+
+    $schema->storage->dbh_do (sub {
+      my ($storage, $dbh) = @_;
+      eval { $dbh->do("DROP TABLE artist") };
+      $dbh->do(<<"SQL");
+CREATE TABLE artist (
+   artistid $uuid_type NOT NULL,
+   name VARCHAR(100),
+   rank INT NOT NULL DEFAULT '13',
+   charfield CHAR(10) NULL,
+   a_guid $uuid_type,
+   primary key(artistid)
+)
+SQL
+    });
+
+    my $row;
+    lives_ok {
+      $row = $schema->resultset('ArtistGUID')->create({ name => 'mtfnpy' })
+    } 'created a row with a GUID';
+
+    ok(
+      eval { $row->artistid },
+      'row has GUID PK col populated',
+    );
+    diag $@ if $@;
+
+    ok(
+      eval { $row->a_guid },
+      'row has a GUID col with auto_nextval populated',
+    );
+    diag $@ if $@;
+
+    my $row_from_db = $schema->resultset('ArtistGUID')
+      ->search({ name => 'mtfnpy' })->first;
+
+    is $row_from_db->artistid, $row->artistid,
+      'PK GUID round trip';
+
+    is $row_from_db->a_guid, $row->a_guid,
+      'NON-PK GUID round trip';
+  }
 }
 
 done_testing;
 
-# clean up our mess
-END {
-  foreach my $dbh (@handles_to_clean) {
-    eval { $dbh->do("DROP TABLE $_") } for qw/artist bindtype_test/;
-  }
+sub cleanup {
+  eval { $schema->storage->dbh->do("DROP TABLE $_") } for qw/artist bindtype_test/;
 }

Modified: DBIx-Class/0.08/branches/extended_rels/t/74mssql.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/74mssql.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/74mssql.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -52,14 +52,19 @@
 
   isa_ok($schema->storage, "DBIx::Class::Storage::$storage_type");
 
-# start disconnected to test reconnection
-  $schema->storage->_dbh->disconnect;
+  SKIP: {
+    skip 'This version of DBD::Sybase segfaults on disconnect', 1 if DBD::Sybase->VERSION < 1.08;
 
-  my $dbh;
-  lives_ok (sub {
-    $dbh = $schema->storage->dbh;
-  }, 'reconnect works');
+    # start disconnected to test _ping
+    $schema->storage->_dbh->disconnect;
 
+    lives_ok {
+      $schema->storage->dbh_do(sub { $_[1]->do('select 1') })
+    } '_ping works';
+  }
+
+  my $dbh = $schema->storage->dbh;
+
   $dbh->do("IF OBJECT_ID('artist', 'U') IS NOT NULL
       DROP TABLE artist");
   $dbh->do("IF OBJECT_ID('cd', 'U') IS NOT NULL
@@ -171,6 +176,39 @@
 
   is $rs->first, undef, 'rolled back';
   $rs->reset;
+
+  # test RNO detection when version detection fails
+  SKIP: {
+    my $storage = $schema->storage;
+    my $version = $storage->_server_info->{normalized_dbms_version};
+
+    skip 'could not detect SQL Server version', 1 if not defined $version;
+
+    my $have_rno = $version >= 9 ? 1 : 0;
+
+    local $storage->{_sql_maker}        = undef;
+    local $storage->{_sql_maker_opts}   = undef;
+
+    local $storage->{_server_info_hash} = { %{ $storage->_server_info_hash } }; # clone
+    delete @{$storage->{_server_info_hash}}{qw/dbms_version normalized_dbms_version/};
+
+    $storage->sql_maker;
+
+    my $rno_detected =
+      ($storage->{_sql_maker_opts}{limit_dialect} eq 'RowNumberOver') ? 1 : 0;
+
+    ok (($have_rno == $rno_detected),
+      'row_number() over support detected correctly');
+  }
+
+  {
+    my $schema = DBICTest::Schema->clone;
+    $schema->connection($dsn, $user, $pass);
+
+    like $schema->storage->sql_maker->{limit_dialect},
+      qr/^(?:Top|RowNumberOver)\z/,
+      'sql_maker is correct on unconnected schema';
+  }
 }
 
 # test op-induced autoconnect

Modified: DBIx-Class/0.08/branches/extended_rels/t/750firebird.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/750firebird.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/750firebird.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -101,7 +101,7 @@
   is($st->pkid1, 55, "Firebird Auto-PK without trigger: First primary key set manually");
 
 # test savepoints
-  eval {
+  throws_ok {
     $schema->txn_do(sub {
       eval {
         $schema->txn_do(sub {
@@ -114,9 +114,7 @@
       $ars->create({ name => 'in_outer_txn' });
       die "rolling back outer txn";
     });
-  };
-
-  like $@, qr/rolling back outer txn/,
+  } qr/rolling back outer txn/,
     'correct exception for rollback';
 
   ok ((not $ars->search({ name => 'in_outer_txn' })->first),
@@ -184,13 +182,15 @@
   is( eval { $lim->next->artistid }, 102, "iterator->next ok" );
   is( $lim->next, undef, "next past end of resultset ok" );
 
-# test multiple executing cursors
+# test nested cursors
   {
     my $rs1 = $ars->search({}, { order_by => { -asc  => 'artistid' }});
-    my $rs2 = $ars->search({}, { order_by => { -desc => 'artistid' }});
 
-    is $rs1->next->artistid, 1,   'multiple cursors';
-    is $rs2->next->artistid, 102, 'multiple cursors';
+    my $rs2 = $ars->search({ artistid => $rs1->next->artistid }, {
+      order_by => { -desc => 'artistid' }
+    });
+
+    is $rs2->next->artistid, 1, 'nested cursors';
   }
 
 # test empty insert

Deleted: DBIx-Class/0.08/branches/extended_rels/t/81transactions.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/81transactions.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/81transactions.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -1,396 +0,0 @@
-use strict;
-use warnings;
-
-use Test::More;
-use Test::Warn;
-use Test::Exception;
-use lib qw(t/lib);
-use DBICTest;
-
-my $schema = DBICTest->init_schema();
-
-my $code = sub {
-  my ($artist, @cd_titles) = @_;
-
-  $artist->create_related('cds', {
-    title => $_,
-    year => 2006,
-  }) foreach (@cd_titles);
-
-  return $artist->cds->all;
-};
-
-# Test checking of parameters
-{
-  throws_ok (sub {
-    (ref $schema)->txn_do(sub{});
-  }, qr/storage/, "can't call txn_do without storage");
-
-  throws_ok ( sub {
-    $schema->txn_do('');
-  }, qr/must be a CODE reference/, '$coderef parameter check ok');
-}
-
-# Test successful txn_do() - scalar context
-{
-  is( $schema->storage->{transaction_depth}, 0, 'txn depth starts at 0');
-
-  my @titles = map {'txn_do test CD ' . $_} (1..5);
-  my $artist = $schema->resultset('Artist')->find(1);
-  my $count_before = $artist->cds->count;
-  my $count_after = $schema->txn_do($code, $artist, @titles);
-  is($count_after, $count_before+5, 'successful txn added 5 cds');
-  is($artist->cds({
-    title => "txn_do test CD $_",
-  })->first->year, 2006, "new CD $_ year correct") for (1..5);
-
-  is( $schema->storage->{transaction_depth}, 0, 'txn depth has been reset');
-}
-
-# Test successful txn_do() - list context
-{
-  is( $schema->storage->{transaction_depth}, 0, 'txn depth starts at 0');
-
-  my @titles = map {'txn_do test CD ' . $_} (6..10);
-  my $artist = $schema->resultset('Artist')->find(1);
-  my $count_before = $artist->cds->count;
-  my @cds = $schema->txn_do($code, $artist, @titles);
-  is(scalar @cds, $count_before+5, 'added 5 CDs and returned in list context');
-  is($artist->cds({
-    title => "txn_do test CD $_",
-  })->first->year, 2006, "new CD $_ year correct") for (6..10);
-
-  is( $schema->storage->{transaction_depth}, 0, 'txn depth has been reset');
-}
-
-# Test nested successful txn_do()
-{
-  is( $schema->storage->{transaction_depth}, 0, 'txn depth starts at 0');
-
-  my $nested_code = sub {
-    my ($schema, $artist, $code) = @_;
-
-    my @titles1 = map {'nested txn_do test CD ' . $_} (1..5);
-    my @titles2 = map {'nested txn_do test CD ' . $_} (6..10);
-
-    $schema->txn_do($code, $artist, @titles1);
-    $schema->txn_do($code, $artist, @titles2);
-  };
-
-  my $artist = $schema->resultset('Artist')->find(2);
-  my $count_before = $artist->cds->count;
-
-  lives_ok (sub {
-    $schema->txn_do($nested_code, $schema, $artist, $code);
-  }, 'nested txn_do succeeded');
-
-  is($artist->cds({
-    title => 'nested txn_do test CD '.$_,
-  })->first->year, 2006, qq{nested txn_do CD$_ year ok}) for (1..10);
-  is($artist->cds->count, $count_before+10, 'nested txn_do added all CDs');
-
-  is( $schema->storage->{transaction_depth}, 0, 'txn depth has been reset');
-}
-
-my $fail_code = sub {
-  my ($artist) = @_;
-  $artist->create_related('cds', {
-    title => 'this should not exist',
-    year => 2005,
-  });
-  die "the sky is falling";
-};
-
-# Test failed txn_do()
-{
-
-  is( $schema->storage->{transaction_depth}, 0, 'txn depth starts at 0');
-
-  my $artist = $schema->resultset('Artist')->find(3);
-
-  throws_ok (sub {
-    $schema->txn_do($fail_code, $artist);
-  }, qr/the sky is falling/, 'failed txn_do threw an exception');
-
-  my $cd = $artist->cds({
-    title => 'this should not exist',
-    year => 2005,
-  })->first;
-  ok(!defined($cd), q{failed txn_do didn't change the cds table});
-
-  is( $schema->storage->{transaction_depth}, 0, 'txn depth has been reset');
-}
-
-# do the same transaction again
-{
-  is( $schema->storage->{transaction_depth}, 0, 'txn depth starts at 0');
-
-  my $artist = $schema->resultset('Artist')->find(3);
-
-  throws_ok (sub {
-    $schema->txn_do($fail_code, $artist);
-  }, qr/the sky is falling/, 'failed txn_do threw an exception');
-
-  my $cd = $artist->cds({
-    title => 'this should not exist',
-    year => 2005,
-  })->first;
-  ok(!defined($cd), q{failed txn_do didn't change the cds table});
-
-  is( $schema->storage->{transaction_depth}, 0, 'txn depth has been reset');
-}
-
-# Test failed txn_do() with failed rollback
-{
-  is( $schema->storage->{transaction_depth}, 0, 'txn depth starts at 0');
-
-  my $artist = $schema->resultset('Artist')->find(3);
-
-  # Force txn_rollback() to throw an exception
-  no warnings 'redefine';
-  no strict 'refs';
-
-  # die in rollback
-  local *{"DBIx::Class::Storage::DBI::SQLite::txn_rollback"} = sub{
-    my $storage = shift;
-    die 'FAILED';
-  };
-
-  throws_ok (
-    sub {
-      $schema->txn_do($fail_code, $artist);
-    },
-    qr/the sky is falling.+Rollback failed/s,
-    'txn_rollback threw a rollback exception (and included the original exception'
-  );
-
-  my $cd = $artist->cds({
-    title => 'this should not exist',
-    year => 2005,
-  })->first;
-  isa_ok($cd, 'DBICTest::CD', q{failed txn_do with a failed txn_rollback }.
-         q{changed the cds table});
-  $cd->delete; # Rollback failed
-  $cd = $artist->cds({
-    title => 'this should not exist',
-    year => 2005,
-  })->first;
-  ok(!defined($cd), q{deleted the failed txn's cd});
-  $schema->storage->_dbh->rollback;
-}
-
-# reset schema object (the txn_rollback meddling screws it up)
-$schema = DBICTest->init_schema();
-
-# Test nested failed txn_do()
-{
-  is( $schema->storage->{transaction_depth}, 0, 'txn depth starts at 0');
-
-  my $nested_fail_code = sub {
-    my ($schema, $artist, $code1, $code2) = @_;
-
-    my @titles = map {'nested txn_do test CD ' . $_} (1..5);
-
-    $schema->txn_do($code1, $artist, @titles); # successful txn
-    $schema->txn_do($code2, $artist);          # failed txn
-  };
-
-  my $artist = $schema->resultset('Artist')->find(3);
-
-  throws_ok ( sub {
-    $schema->txn_do($nested_fail_code, $schema, $artist, $code, $fail_code);
-  }, qr/the sky is falling/, 'nested failed txn_do threw exception');
-
-  ok(!defined($artist->cds({
-    title => 'nested txn_do test CD '.$_,
-    year => 2006,
-  })->first), qq{failed txn_do didn't add first txn's cd $_}) for (1..5);
-  my $cd = $artist->cds({
-    title => 'this should not exist',
-    year => 2005,
-  })->first;
-  ok(!defined($cd), q{failed txn_do didn't add failed txn's cd});
-}
-
-# Grab a new schema to test txn before connect
-{
-    my $schema2 = DBICTest->init_schema(no_deploy => 1);
-    lives_ok (sub {
-        $schema2->txn_begin();
-        $schema2->txn_begin();
-    }, 'Pre-connection nested transactions.');
-
-    # although not connected DBI would still warn about rolling back at disconnect
-    $schema2->txn_rollback;
-    $schema2->txn_rollback;
-    $schema2->storage->disconnect;
-}
-$schema->storage->disconnect;
-
-# Test txn_scope_guard
-{
-  my $schema = DBICTest->init_schema();
-
-  is($schema->storage->transaction_depth, 0, "Correct transaction depth");
-  my $artist_rs = $schema->resultset('Artist');
-  throws_ok {
-   my $guard = $schema->txn_scope_guard;
-
-
-    $artist_rs->create({
-      name => 'Death Cab for Cutie',
-      made_up_column => 1,
-    });
-
-   $guard->commit;
-  } qr/No such column made_up_column .*? at .*?81transactions.t line \d+/s, "Error propogated okay";
-
-  ok(!$artist_rs->find({name => 'Death Cab for Cutie'}), "Artist not created");
-
-  my $inner_exception = '';  # set in inner() below
-  throws_ok (sub {
-    outer($schema, 1);
-  }, qr/$inner_exception/, "Nested exceptions propogated");
-
-  ok(!$artist_rs->find({name => 'Death Cab for Cutie'}), "Artist not created");
-
-  lives_ok (sub {
-    warnings_exist ( sub {
-      # The 0 arg says don't die, just let the scope guard go out of scope
-      # forcing a txn_rollback to happen
-      outer($schema, 0);
-    }, qr/A DBIx::Class::Storage::TxnScopeGuard went out of scope without explicit commit or error. Rolling back./, 'Out of scope warning detected');
-    ok(!$artist_rs->find({name => 'Death Cab for Cutie'}), "Artist not created");
-  }, 'rollback successful withot exception');
-
-  sub outer {
-    my ($schema) = @_;
-
-    my $guard = $schema->txn_scope_guard;
-    $schema->resultset('Artist')->create({
-      name => 'Death Cab for Cutie',
-    });
-    inner(@_);
-  }
-
-  sub inner {
-    my ($schema, $fatal) = @_;
-
-    my $inner_guard = $schema->txn_scope_guard;
-    is($schema->storage->transaction_depth, 2, "Correct transaction depth");
-
-    my $artist = $artist_rs->find({ name => 'Death Cab for Cutie' });
-
-    eval {
-      $artist->cds->create({
-        title => 'Plans',
-        year => 2005,
-        $fatal ? ( foo => 'bar' ) : ()
-      });
-    };
-    if ($@) {
-      # Record what got thrown so we can test it propgates out properly.
-      $inner_exception = $@;
-      die $@;
-    }
-
-    # inner guard should commit without consequences
-    $inner_guard->commit;
-  }
-}
-
-# make sure the guard does not eat exceptions
-{
-  my $schema = DBICTest->init_schema();
-  throws_ok (sub {
-    my $guard = $schema->txn_scope_guard;
-    $schema->resultset ('Artist')->create ({ name => 'bohhoo'});
-
-    $schema->storage->disconnect;  # this should freak out the guard rollback
-
-    die 'Deliberate exception';
-  }, qr/Deliberate exception.+Rollback failed/s);
-}
-
-# make sure it warns *big* on failed rollbacks
-{
-  my $schema = DBICTest->init_schema();
-
-  # something is really confusing Test::Warn here, no time to debug
-=begin
-  warnings_exist (
-    sub {
-      my $guard = $schema->txn_scope_guard;
-      $schema->resultset ('Artist')->create ({ name => 'bohhoo'});
-
-      $schema->storage->disconnect;  # this should freak out the guard rollback
-    },
-    [
-      qr/A DBIx::Class::Storage::TxnScopeGuard went out of scope without explicit commit or error. Rolling back./,
-      qr/\*+ ROLLBACK FAILED\!\!\! \*+/,
-    ],
-    'proper warnings generated on out-of-scope+rollback failure'
-  );
-=cut
-
-  my @want = (
-    qr/A DBIx::Class::Storage::TxnScopeGuard went out of scope without explicit commit or error. Rolling back./,
-    qr/\*+ ROLLBACK FAILED\!\!\! \*+/,
-  );
-
-  my @w;
-  local $SIG{__WARN__} = sub {
-    if (grep {$_[0] =~ $_} (@want)) {
-      push @w, $_[0];
-    }
-    else {
-      warn $_[0];
-    }
-  };
-  {
-      my $guard = $schema->txn_scope_guard;
-      $schema->resultset ('Artist')->create ({ name => 'bohhoo'});
-
-      $schema->storage->disconnect;  # this should freak out the guard rollback
-  }
-
-  is (@w, 2, 'Both expected warnings found');
-}
-
-# make sure AutoCommit => 0 on external handles behaves correctly with scope_guard
-{
-  my $factory = DBICTest->init_schema (AutoCommit => 0);
-  cmp_ok ($factory->resultset('CD')->count, '>', 0, 'Something to delete');
-  my $dbh = $factory->storage->dbh;
-
-  ok (!$dbh->{AutoCommit}, 'AutoCommit is off on $dbh');
-  my $schema = DBICTest::Schema->connect (sub { $dbh });
-
-
-  lives_ok ( sub {
-    my $guard = $schema->txn_scope_guard;
-    $schema->resultset('CD')->delete;
-    $guard->commit;
-  }, 'No attempt to start a transaction with scope guard');
-
-  is ($schema->resultset('CD')->count, 0, 'Deletion successful');
-}
-
-# make sure AutoCommit => 0 on external handles behaves correctly with txn_do
-{
-  my $factory = DBICTest->init_schema (AutoCommit => 0);
-  cmp_ok ($factory->resultset('CD')->count, '>', 0, 'Something to delete');
-  my $dbh = $factory->storage->dbh;
-
-  ok (!$dbh->{AutoCommit}, 'AutoCommit is off on $dbh');
-  my $schema = DBICTest::Schema->connect (sub { $dbh });
-
-
-  lives_ok ( sub {
-    $schema->txn_do (sub { $schema->resultset ('CD')->delete });
-  }, 'No attempt to start a atransaction with txn_do');
-
-  is ($schema->resultset('CD')->count, 0, 'Deletion successful');
-}
-
-done_testing;

Modified: DBIx-Class/0.08/branches/extended_rels/t/85utf8.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/85utf8.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/85utf8.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -5,54 +5,164 @@
 use Test::Warn;
 use lib qw(t/lib);
 use DBICTest;
+use DBIC::DebugObj;
 
-warning_like (
+{
+  package A::Comp;
+  use base 'DBIx::Class';
+  sub store_column { shift->next::method (@_) };
+  1;
+}
+
+{
+  package A::SubComp;
+  use base 'A::Comp';
+
+  1;
+}
+
+warnings_are (
   sub {
-    package A::Comp;
-    use base 'DBIx::Class';
+    local $ENV{DBIC_UTF8COLUMNS_OK} = 1;
+    package A::Test1;
+    use base 'DBIx::Class::Core';
+    __PACKAGE__->load_components(qw(Core +A::Comp Ordered UTF8Columns));
+    __PACKAGE__->load_components(qw(Ordered +A::SubComp Row UTF8Columns Core));
     sub store_column { shift->next::method (@_) };
     1;
+  },
+  [],
+  'no spurious warnings issued',
+);
 
-    package A::Test;
+my $test1_mro;
+my $idx = 0;
+for (@{mro::get_linear_isa ('A::Test1')} ) {
+  $test1_mro->{$_} = $idx++;
+}
+
+cmp_ok ($test1_mro->{'A::SubComp'}, '<', $test1_mro->{'A::Comp'}, 'mro of Test1 correct (A::SubComp before A::Comp)' );
+cmp_ok ($test1_mro->{'A::Comp'}, '<', $test1_mro->{'DBIx::Class::UTF8Columns'}, 'mro of Test1 correct (A::Comp before UTF8Col)' );
+cmp_ok ($test1_mro->{'DBIx::Class::UTF8Columns'}, '<', $test1_mro->{'DBIx::Class::Core'}, 'mro of Test1 correct (UTF8Col before Core)' );
+cmp_ok ($test1_mro->{'DBIx::Class::Core'}, '<', $test1_mro->{'DBIx::Class::Row'}, 'mro of Test1 correct (Core before Row)' );
+
+warnings_like (
+  sub {
+    package A::Test2;
     use base 'DBIx::Class::Core';
     __PACKAGE__->load_components(qw(UTF8Columns +A::Comp));
+    sub store_column { shift->next::method (@_) };
     1;
   },
-  qr/Incorrect loading order of DBIx::Class::UTF8Columns.+affect other components overriding store_column \(A::Comp\)/,
-  'incorrect order warning issued',
+  [qr/Incorrect loading order of DBIx::Class::UTF8Columns.+affect other components overriding 'store_column' \(A::Comp\)/],
+  'incorrect order warning issued (violator defines)',
 );
 
+warnings_like (
+  sub {
+    package A::Test3;
+    use base 'DBIx::Class::Core';
+    __PACKAGE__->load_components(qw(UTF8Columns +A::SubComp));
+    sub store_column { shift->next::method (@_) };
+    1;
+  },
+  [qr/Incorrect loading order of DBIx::Class::UTF8Columns.+affect other components overriding 'store_column' \(A::SubComp \(via A::Comp\)\)/],
+  'incorrect order warning issued (violator inherits)',
+);
+
 my $schema = DBICTest->init_schema();
 DBICTest::Schema::CD->load_components('UTF8Columns');
 DBICTest::Schema::CD->utf8_columns('title');
 Class::C3->reinitialize();
 
-my $cd = $schema->resultset('CD')->create( { artist => 1, title => "weird\x{466}stuff", year => '2048' } );
+# as per http://search.cpan.org/dist/Test-Simple/lib/Test/More.pm#utf8
+binmode (Test::More->builder->$_, ':utf8') for qw/output failure_output todo_output/;
 
-ok( utf8::is_utf8( $cd->title ), 'got title with utf8 flag' );
-ok(! utf8::is_utf8( $cd->{_column_data}{title} ), 'store title without utf8' );
+my $bytestream_title = my $utf8_title = "weird \x{466} stuff";
+utf8::encode($bytestream_title);
+cmp_ok ($bytestream_title, 'ne', $utf8_title, 'unicode/raw differ (sanity check)');
 
-ok(! utf8::is_utf8( $cd->year ), 'got year without utf8 flag' );
-ok(! utf8::is_utf8( $cd->{_column_data}{year} ), 'store year without utf8' );
+my $storage = $schema->storage;
+my ($sql, @bind);
+my $debugobj = DBIC::DebugObj->new (\$sql, \@bind);
+my ($orig_debug, $orig_debugobj) = ($storage->debug, $storage->debugobj);
+$storage->debugobj ($debugobj);
+$storage->debug (1);
 
+my $cd = $schema->resultset('CD')->create( { artist => 1, title => $utf8_title, year => '2048' } );
+
+$storage->debugobj ($orig_debugobj);
+$storage->debug ($orig_debug);
+
+# bind values are always alphabetically ordered by column, thus [1]
+# the single quotes are an artefact of the debug-system
+TODO: {
+  local $TODO = "This has been broken since rev 1191, Mar 2006";
+  is ($bind[1], "'$bytestream_title'", 'INSERT: raw bytes sent to the database');
+}
+
+# this should be using the cursor directly, no inflation/processing of any sort
+my ($raw_db_title) = $schema->resultset('CD')
+                             ->search ($cd->ident_condition)
+                               ->get_column('title')
+                                ->_resultset
+                                 ->cursor
+                                  ->next;
+
+is ($raw_db_title, $bytestream_title, 'INSERT: raw bytes retrieved from database');
+
+for my $reloaded (0, 1) {
+  my $test = $reloaded ? 'reloaded' : 'stored';
+  $cd->discard_changes if $reloaded;
+
+  ok( utf8::is_utf8( $cd->title ), "got $test title with utf8 flag" );
+  ok(! utf8::is_utf8( $cd->{_column_data}{title} ), "in-object $test title without utf8" );
+
+  ok(! utf8::is_utf8( $cd->year ), "got $test year without utf8 flag" );
+  ok(! utf8::is_utf8( $cd->{_column_data}{year} ), "in-object $test year without utf8" );
+}
+
 $cd->title('nonunicode');
-ok(! utf8::is_utf8( $cd->title ), 'got title without utf8 flag' );
-ok(! utf8::is_utf8( $cd->{_column_data}{title} ), 'store utf8-less chars' );
+ok(! utf8::is_utf8( $cd->title ), 'update title without utf8 flag' );
+ok(! utf8::is_utf8( $cd->{_column_data}{title} ), 'store utf8-less title' );
 
+$cd->update;
+$cd->discard_changes;
+ok(! utf8::is_utf8( $cd->title ), 'reloaded title without utf8 flag' );
+ok(! utf8::is_utf8( $cd->{_column_data}{title} ), 'reloaded utf8-less title' );
 
-my $v_utf8 = "\x{219}";
+$bytestream_title = $utf8_title = "something \x{219} else";
+utf8::encode($bytestream_title);
 
-$cd->update ({ title => $v_utf8 });
-$cd->title($v_utf8);
+
+$storage->debugobj ($debugobj);
+$storage->debug (1);
+
+$cd->update ({ title => $utf8_title });
+
+$storage->debugobj ($orig_debugobj);
+$storage->debug ($orig_debug);
+
+is ($bind[0], "'$bytestream_title'", 'UPDATE: raw bytes sent to the database');
+($raw_db_title) = $schema->resultset('CD')
+                             ->search ($cd->ident_condition)
+                               ->get_column('title')
+                                ->_resultset
+                                 ->cursor
+                                  ->next;
+is ($raw_db_title, $bytestream_title, 'UPDATE: raw bytes retrieved from database');
+
+$cd->discard_changes;
+$cd->title($utf8_title);
 ok( !$cd->is_column_changed('title'), 'column is not dirty after setting the same unicode value' );
 
-$cd->update ({ title => $v_utf8 });
+$cd->update ({ title => $utf8_title });
 $cd->title('something_else');
 ok( $cd->is_column_changed('title'), 'column is dirty after setting to something completely different');
 
 TODO: {
   local $TODO = 'There is currently no way to propagate aliases to inflate_result()';
-  $cd = $schema->resultset('CD')->find ({ title => $v_utf8 }, { select => 'title', as => 'name' });
+  $cd = $schema->resultset('CD')->find ({ title => $utf8_title }, { select => 'title', as => 'name' });
   ok (utf8::is_utf8( $cd->get_column ('name') ), 'utf8 flag propagates via as');
 }
 

Modified: DBIx-Class/0.08/branches/extended_rels/t/90join_torture.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/90join_torture.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/90join_torture.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -1,13 +1,13 @@
 use strict;
-use warnings;  
+use warnings;
 
 use Test::More;
+use Test::Exception;
 use lib qw(t/lib);
 use DBICTest;
+use DBIC::SqlMakerTest;
 my $schema = DBICTest->init_schema();
 
-plan tests => 22;
-
  {
    my $rs = $schema->resultset( 'CD' )->search(
      {
@@ -25,11 +25,10 @@
        ],
      }
    );
-  
-   eval {
+
+   lives_ok {
      my @rows = $rs->all();
    };
-   is( $@, '' );
  }
 
 
@@ -106,7 +105,7 @@
 is(scalar(@{$merge_rs_2->{attrs}->{join}}), 1, 'only one join kept when inherited');
 my $merge_rs_2_cd = $merge_rs_2->next;
 
-eval {
+lives_ok (sub {
 
   my @rs_with_prefetch = $schema->resultset('TreeLike')
                                 ->search(
@@ -115,14 +114,44 @@
     prefetch => [ 'parent', { 'children' => 'parent' } ],
     });
 
-};
+}, 'pathological prefetch ok');
 
-ok(!$@, "pathological prefetch ok");
-
 my $rs = $schema->resultset("Artist")->search({}, { join => 'twokeys' });
 my $second_search_rs = $rs->search({ 'cds_2.cdid' => '2' }, { join =>
 ['cds', 'cds'] });
 is(scalar(@{$second_search_rs->{attrs}->{join}}), 3, 'both joins kept');
 ok($second_search_rs->next, 'query on double joined rel runs okay');
 
-1;
+# test joinmap pruner
+lives_ok ( sub {
+  my $rs = $schema->resultset('Artwork')->search (
+    {
+    },
+    {
+      distinct => 1,
+      join => [
+        { artwork_to_artist => 'artist' },
+        { cd => 'artist' },
+      ],
+    },
+  );
+
+  is_same_sql_bind (
+    $rs->count_rs->as_query,
+    '(
+      SELECT COUNT( * )
+        FROM (
+          SELECT me.cd_id
+            FROM cd_artwork me
+            JOIN cd cd ON cd.cdid = me.cd_id
+            JOIN artist artist_2 ON artist_2.artistid = cd.artist
+          GROUP BY me.cd_id
+        ) me
+    )',
+    [],
+  );
+
+  ok (defined $rs->count);
+});
+
+done_testing;

Modified: DBIx-Class/0.08/branches/extended_rels/t/93single_accessor_object.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/93single_accessor_object.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/93single_accessor_object.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -45,20 +45,20 @@
 $schema = DBICTest->init_schema();
 
 {
-	my $artist = $schema->resultset('Artist')->create({ artistid => 666, name => 'bad religion' });
-	my $cd = $schema->resultset('CD')->create({ cdid => 187, artist => 1, title => 'how could hell be any worse?', year => 1982, genreid => undef });
+  my $artist = $schema->resultset('Artist')->create({ artistid => 666, name => 'bad religion' });
+  my $cd = $schema->resultset('CD')->create({ cdid => 187, artist => 1, title => 'how could hell be any worse?', year => 1982, genreid => undef });
 
-	ok(!defined($cd->get_column('genreid')), 'genreid is NULL');  #no accessor was defined for this column
-	ok(!defined($cd->genre), 'genre accessor returns undef');
+  ok(!defined($cd->get_column('genreid')), 'genreid is NULL');  #no accessor was defined for this column
+  ok(!defined($cd->genre), 'genre accessor returns undef');
 }
 
 $schema = DBICTest->init_schema();
 
 {
-	my $artist = $schema->resultset('Artist')->create({ artistid => 666, name => 'bad religion' });
-	my $genre = $schema->resultset('Genre')->create({ genreid => 88, name => 'disco' });
-	my $cd = $schema->resultset('CD')->create({ cdid => 187, artist => 1, title => 'how could hell be any worse?', year => 1982 });
+  my $artist = $schema->resultset('Artist')->create({ artistid => 666, name => 'bad religion' });
+  my $genre = $schema->resultset('Genre')->create({ genreid => 88, name => 'disco' });
+  my $cd = $schema->resultset('CD')->create({ cdid => 187, artist => 1, title => 'how could hell be any worse?', year => 1982 });
 
-	dies_ok { $cd->genre } 'genre accessor throws without column';
+  dies_ok { $cd->genre } 'genre accessor throws without column';
 }
 

Modified: DBIx-Class/0.08/branches/extended_rels/t/94versioning.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/94versioning.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/94versioning.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -245,6 +245,33 @@
   is($schema_v2->get_db_version(), '3.0', 'Fast deploy/upgrade');
 };
 
+# Check that it Schema::Versioned deals with new/all forms of connect arguments.
+{
+  my $get_db_version_run = 0;
+
+  no warnings qw/once redefine/;
+  local *DBIx::Class::Schema::Versioned::get_db_version = sub {
+    $get_db_version_run = 1;
+    return $_[0]->schema_version;
+  };
+
+  # Make sure the env var isn't whats triggering it
+  local $ENV{DBIC_NO_VERSION_CHECK} = 0;
+
+  DBICVersion::Schema->connect({
+    dsn => $dsn,
+    user => $user, 
+    pass => $pass,
+    ignore_version => 1
+  });
+  
+  ok($get_db_version_run == 0, "attributes pulled from hashref connect_info");
+  $get_db_version_run = 0;
+
+  DBICVersion::Schema->connect( $dsn, $user, $pass, { ignore_version => 1 } );
+  ok($get_db_version_run == 0, "attributes pulled from list connect_info");
+}
+
 unless ($ENV{DBICTEST_KEEP_VERSIONING_DDL}) {
     unlink $_ for (values %$fn);
 }

Modified: DBIx-Class/0.08/branches/extended_rels/t/admin/02ddl.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/admin/02ddl.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/admin/02ddl.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -28,6 +28,7 @@
   no_populate=>1,
   sqlite_use_file  => 1,
 );
+
 { # create the schema
 
 #  make sure we are  clean
@@ -37,26 +38,24 @@
 my $admin = DBIx::Class::Admin->new(
   schema_class=> "DBICTest::Schema",
   sql_dir=> $sql_dir,
-  connect_info => \@connect_info, 
+  connect_info => \@connect_info,
 );
 isa_ok ($admin, 'DBIx::Class::Admin', 'create the admin object');
 lives_ok { $admin->create('MySQL'); } 'Can create MySQL sql';
 lives_ok { $admin->create('SQLite'); } 'Can Create SQLite sql';
+lives_ok {
+  $SIG{__WARN__} = sub { warn @_ unless $_[0] =~ /no such table.+DROP TABLE/s };
+  $admin->deploy()
+} 'Can Deploy schema';
 }
 
 { # upgrade schema
 
-#my $schema = DBICTest->init_schema(
-#  no_deploy    => 1,
-#  no_populat    => 1,
-#  sqlite_use_file  => 1,
-#);
-
 clean_dir($sql_dir);
 require DBICVersion_v1;
 
 my $admin = DBIx::Class::Admin->new(
-  schema_class => 'DBICVersion::Schema', 
+  schema_class => 'DBICVersion::Schema',
   sql_dir =>  $sql_dir,
   connect_info => \@connect_info,
 );
@@ -73,9 +72,10 @@
 
 
 require DBICVersion_v2;
+DBICVersion::Schema->upgrade_directory (undef);  # so that we can test use of $sql_dir
 
 $admin = DBIx::Class::Admin->new(
-  schema_class => 'DBICVersion::Schema', 
+  schema_class => 'DBICVersion::Schema',
   sql_dir =>  $sql_dir,
   connect_info => \@connect_info
 );
@@ -84,6 +84,7 @@
 {
   local $SIG{__WARN__} = sub { warn $_[0] unless $_[0] =~ /DB version .+? is lower than the schema version/ };
   lives_ok {$admin->upgrade();} 'upgrade the schema';
+  dies_ok {$admin->deploy} 'cannot deploy installed schema, should upgrade instead';
 }
 
 is($schema->get_db_version, $DBICVersion::Schema::VERSION, 'Schema and db versions match');
@@ -95,7 +96,7 @@
 clean_dir($sql_dir);
 
 my $admin = DBIx::Class::Admin->new(
-  schema_class  => 'DBICVersion::Schema', 
+  schema_class  => 'DBICVersion::Schema',
   sql_dir      => $sql_dir,
   _confirm    => 1,
   connect_info  => \@connect_info,
@@ -122,7 +123,7 @@
   }
   foreach my $file ($dir->children) {
     # skip any hidden files
-    next if ($file =~ /^\./); 
+    next if ($file =~ /^\./);
     unlink $file;
   }
 }

Modified: DBIx-Class/0.08/branches/extended_rels/t/admin/10script.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/admin/10script.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/admin/10script.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -17,9 +17,13 @@
 
 my @json_backends = qw/XS JSON DWIW/;
 my $tests_per_run = 5;
+plan tests => ($tests_per_run * @json_backends) + 1;
 
-plan tests => $tests_per_run * @json_backends;
 
+# test the script is setting @INC properly
+test_exec (qw| -It/lib/testinclude --schema=DBICTestAdminInc --insert --connect=[] |);
+cmp_ok ( $? >> 8, '==', 70, 'Correct exit code from connecting a custom INC schema' );
+
 for my $js (@json_backends) {
 
     eval {JSON::Any->import ($js) };
@@ -37,22 +41,22 @@
 
     my $employees = $schema->resultset('Employee');
 
-    system( _prepare_system_args( qw|--op=insert --set={"name":"Matt"}| ) );
+    test_exec( default_args(), qw|--op=insert --set={"name":"Matt"}| );
     ok( ($employees->count()==1), "$ENV{JSON_ANY_ORDER}: insert count" );
 
     my $employee = $employees->find(1);
     ok( ($employee->name() eq 'Matt'), "$ENV{JSON_ANY_ORDER}: insert valid" );
 
-    system( _prepare_system_args( qw|--op=update --set={"name":"Trout"}| ) );
+    test_exec( default_args(), qw|--op=update --set={"name":"Trout"}| );
     $employee = $employees->find(1);
     ok( ($employee->name() eq 'Trout'), "$ENV{JSON_ANY_ORDER}: update" );
 
-    system( _prepare_system_args( qw|--op=insert --set={"name":"Aran"}| ) );
+    test_exec( default_args(), qw|--op=insert --set={"name":"Aran"}| );
 
     SKIP: {
         skip ("MSWin32 doesn't support -| either", 1) if $^O eq 'MSWin32';
 
-        open(my $fh, "-|",  _prepare_system_args( qw|--op=select --attrs={"order_by":"name"}| ) ) or die $!;
+        open(my $fh, "-|",  ( 'script/dbicadmin', default_args(), qw|--op=select --attrs={"order_by":"name"}| ) ) or die $!;
         my $data = do { local $/; <$fh> };
         close($fh);
         if (!ok( ($data=~/Aran.*Trout/s), "$ENV{JSON_ANY_ORDER}: select with attrs" )) {
@@ -60,32 +64,35 @@
         };
     }
 
-    system( _prepare_system_args( qw|--op=delete --where={"name":"Trout"}| ) );
+    test_exec( default_args(), qw|--op=delete --where={"name":"Trout"}| );
     ok( ($employees->count()==1), "$ENV{JSON_ANY_ORDER}: delete" );
 }
 
+sub default_args {
+  return (
+    qw|--quiet --schema=DBICTest::Schema --class=Employee|,
+    q|--connect=["dbi:SQLite:dbname=t/var/DBIxClass.db","","",{"AutoCommit":1}]|,
+    qw|--force -I testincludenoniterference|,
+  );
+}
+
 # Why do we need this crap? Apparently MSWin32 can not pass through quotes properly
 # (sometimes it will and sometimes not, depending on what compiler was used to build
 # perl). So we go the extra mile to escape all the quotes. We can't also use ' instead
 # of ", because JSON::XS (proudly) does not support "malformed JSON" as the author
 # calls it. Bleh.
 #
-sub _prepare_system_args {
-    my $perl = $^X;
+sub test_exec {
+  my $perl = $^X;
 
-    my @args = (
-        qw|script/dbicadmin --quiet --schema=DBICTest::Schema --class=Employee|,
-        q|--connect=["dbi:SQLite:dbname=t/var/DBIxClass.db","","",{"AutoCommit":1}]|,
-        qw|--force|,
-        @_,
-    );
+  my @args = ('script/dbicadmin', @_);
 
-    if ( $^O eq 'MSWin32' ) {
-        $perl = qq|"$perl"|;    # execution will fail if $^X contains paths
-        for (@args) {
-            $_ =~ s/"/\\"/g;
-        }
+  if ( $^O eq 'MSWin32' ) {
+    $perl = qq|"$perl"|;    # execution will fail if $^X contains paths
+    for (@args) {
+      $_ =~ s/"/\\"/g;
     }
+  }
 
-    return ($perl, @args);
+  system ($perl, @args);
 }

Modified: DBIx-Class/0.08/branches/extended_rels/t/bind/order_by.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/bind/order_by.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/bind/order_by.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -3,6 +3,7 @@
 
 use Test::More;
 use Test::Exception;
+use Data::Dumper::Concise;
 use lib qw(t/lib);
 use DBICTest;
 use DBIC::SqlMakerTest;
@@ -25,7 +26,7 @@
             {
                 order_by => $args->{order_by},
                 having =>
-                  [ { read_count => { '>' => 5 } }, \[ 'read_count < ?', 8 ] ]
+                  [ { read_count => { '>' => 5 } }, \[ 'read_count < ?', [ read_count => 8  ] ] ]
             }
           )->as_query,
         "(
@@ -38,14 +39,13 @@
         [
             [qw(foo bar)],
             [qw(read_count 5)],
-            8,
+            [qw(read_count 8)],
             $args->{bind}
               ? @{ $args->{bind} }
               : ()
         ],
-      );
+      ) || diag Dumper $args->{order_by};
     };
-    fail('Fail the unfinished is_same_sql_bind') if $@;
   }
 }
 
@@ -61,46 +61,42 @@
         bind      => [],
     },
     {
-        order_by  => { -desc => \[ 'colA LIKE ?', 'test' ] },
+        order_by  => { -desc => \[ 'colA LIKE ?', [ colA => 'test' ] ] },
         order_req => 'colA LIKE ? DESC',
-        bind      => [qw(test)],
+        bind      => [ [ colA => 'test' ] ],
     },
     {
-        order_by  => \[ 'colA LIKE ? DESC', 'test' ],
+        order_by  => \[ 'colA LIKE ? DESC', [ colA => 'test' ] ],
         order_req => 'colA LIKE ? DESC',
-        bind      => [qw(test)],
+        bind      => [ [ colA => 'test' ] ],
     },
     {
         order_by => [
             { -asc  => \['colA'] },
-            { -desc => \[ 'colB LIKE ?', 'test' ] },
-            { -asc  => \[ 'colC LIKE ?', 'tost' ] }
+            { -desc => \[ 'colB LIKE ?', [ colB => 'test' ] ] },
+            { -asc  => \[ 'colC LIKE ?', [ colC => 'tost' ] ] },
         ],
         order_req => 'colA ASC, colB LIKE ? DESC, colC LIKE ? ASC',
-        bind      => [qw(test tost)],
+        bind      => [ [ colB => 'test' ], [ colC => 'tost' ] ],
     },
-
-    # (mo) this would be really really nice!
-    # (ribasushi) I don't think so, not writing it - patches welcome
     {
+        todo => 1,
         order_by => [
             { -asc  => 'colA' },
             { -desc => { colB => { 'LIKE' => 'test' } } },
             { -asc  => { colC => { 'LIKE' => 'tost' } } }
         ],
         order_req => 'colA ASC, colB LIKE ? DESC, colC LIKE ? ASC',
-        bind      => [ [ colB => 'test' ], [ colC => 'tost' ] ],      # ???
-        todo => 1,
+        bind      => [ [ colB => 'test' ], [ colC => 'tost' ] ],
     },
     {
+        todo => 1,
         order_by  => { -desc => { colA  => { LIKE  => 'test' } } },
         order_req => 'colA LIKE ? DESC',
-        bind      => [qw(test)],
-        todo => 1,
+        bind      => [ [ colA => 'test' ] ],
     },
 );
 
-plan( tests => scalar @tests * 2 );
-
 test_order($_) for @tests;
 
+done_testing;

Modified: DBIx-Class/0.08/branches/extended_rels/t/cdbi/02-Film.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/cdbi/02-Film.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/cdbi/02-Film.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -1,5 +1,7 @@
 use strict;
 use Test::More;
+use Scalar::Util 'refaddr';
+use namespace::clean;
 $| = 1;
 
 BEGIN {
@@ -383,21 +385,21 @@
   # my bad taste is your bad taste
   my $btaste  = Film->retrieve('Bad Taste');
   my $btaste2 = Film->retrieve('Bad Taste');
-  is Scalar::Util::refaddr($btaste), Scalar::Util::refaddr($btaste2),
+  is refaddr $btaste, refaddr $btaste2,
     "Retrieving twice gives ref to same object";
 
   my ($btaste5) = Film->search(title=>'Bad Taste');
-  is Scalar::Util::refaddr($btaste), Scalar::Util::refaddr($btaste5),
+  is refaddr $btaste, refaddr $btaste5,
     "Searching also gives ref to same object";
 
   $btaste2->remove_from_object_index;
   my $btaste3 = Film->retrieve('Bad Taste');
-  isnt Scalar::Util::refaddr($btaste2), Scalar::Util::refaddr($btaste3),
+  isnt refaddr $btaste2, refaddr $btaste3,
     "Removing from object_index and retrieving again gives new object";
 
   $btaste3->clear_object_index;
   my $btaste4 = Film->retrieve('Bad Taste');
-  isnt Scalar::Util::refaddr($btaste2), Scalar::Util::refaddr($btaste4),
+  isnt refaddr $btaste2, refaddr $btaste4,
     "Clearing cache and retrieving again gives new object";
  
   $btaste=Film->insert({
@@ -407,7 +409,7 @@
     NumExplodingSheep => 2,
   });
   $btaste2 = Film->retrieve('Bad Taste 2');
-  is Scalar::Util::refaddr($btaste), Scalar::Util::refaddr($btaste2),
+  is refaddr $btaste, refaddr $btaste2,
     "Creating and retrieving gives ref to same object";
  
 }

Modified: DBIx-Class/0.08/branches/extended_rels/t/cdbi/columns_as_hashes.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/cdbi/columns_as_hashes.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/cdbi/columns_as_hashes.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -26,7 +26,7 @@
         my $rating = $waves->{rating};
         $waves->Rating("PG");
         is $rating, "R", 'evaluation of column value is not deferred';
-    } qr{^Column 'rating' of 'Film/$waves' was fetched as a hash at \Q$0};
+    } qr{^Column 'rating' of 'Film/$waves' was fetched as a hash at\b};
 
     warnings_like {
         is $waves->{title}, $waves->Title, "columns can be accessed as hashes";

Modified: DBIx-Class/0.08/branches/extended_rels/t/count/count_rs.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/count/count_rs.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/count/count_rs.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -54,7 +54,7 @@
           JOIN cd disc ON disc.cdid = tracks.cd
         WHERE ( ( position = ? OR position = ? ) )
         LIMIT 3 OFFSET 8
-       ) count_subq
+       ) tracks
     )',
     [ [ position => 1 ], [ position => 2 ] ],
     'count_rs db-side limit applied',
@@ -88,7 +88,7 @@
           JOIN artist artist ON artist.artistid = cds.artist
         WHERE tracks.position = ? OR tracks.position = ?
         GROUP BY cds.cdid
-      ) count_subq
+      ) cds
     ',
     [ qw/'1' '2'/ ],
     'count softlimit applied',
@@ -109,7 +109,7 @@
         WHERE tracks.position = ? OR tracks.position = ?
         GROUP BY cds.cdid
         LIMIT 3 OFFSET 4
-      ) count_subq
+      ) cds
     )',
     [ [ 'tracks.position' => 1 ], [ 'tracks.position' => 2 ] ],
     'count_rs db-side limit applied',

Modified: DBIx-Class/0.08/branches/extended_rels/t/count/prefetch.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/count/prefetch.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/count/prefetch.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -31,7 +31,7 @@
             JOIN artist artist ON artist.artistid = cds.artist
           WHERE tracks.position = ? OR tracks.position = ?
           GROUP BY cds.cdid
-        ) count_subq
+        ) cds
     )',
     [ map { [ 'tracks.position' => $_ ] } (1, 2) ],
   );
@@ -63,7 +63,7 @@
           WHERE ( genre.name = ? )
           GROUP BY genre.genreid
         )
-      count_subq
+      genre
     )',
     [ [ 'genre.name' => 'emo' ] ],
   );

Modified: DBIx-Class/0.08/branches/extended_rels/t/inflate/datetime_firebird.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/inflate/datetime_firebird.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/inflate/datetime_firebird.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -7,8 +7,6 @@
 use DBICTest;
 use Scope::Guard ();
 
-# XXX we're only testing TIMESTAMP here
-
 my ($dsn, $user, $pass)    = @ENV{map { "DBICTEST_FIREBIRD_${_}" }      qw/DSN USER PASS/};
 my ($dsn2, $user2, $pass2) = @ENV{map { "DBICTEST_FIREBIRD_ODBC_${_}" } qw/DSN USER PASS/};
 

Added: DBIx-Class/0.08/branches/extended_rels/t/inflate/datetime_informix.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/inflate/datetime_informix.t	                        (rev 0)
+++ DBIx-Class/0.08/branches/extended_rels/t/inflate/datetime_informix.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -0,0 +1,78 @@
+use strict;
+use warnings;  
+
+use Test::More;
+use Test::Exception;
+use lib qw(t/lib);
+use DBICTest;
+use Scope::Guard ();
+
+my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_INFORMIX_${_}" } qw/DSN USER PASS/};
+
+if (not $dsn) {
+  plan skip_all => <<'EOF';
+Set $ENV{DBICTEST_INFORMIX_DSN} _USER and _PASS to run this test'.
+Warning: This test drops and creates a table called 'event'";
+EOF
+} else {
+  eval "use DateTime; use DateTime::Format::Strptime;";
+  if ($@) {
+    plan skip_all => 'needs DateTime and DateTime::Format::Strptime for testing';
+  }
+}
+
+my $schema;
+
+{
+  $schema = DBICTest::Schema->connect($dsn, $user, $pass, {
+    on_connect_call => [ 'datetime_setup' ],
+  });
+
+  my $sg = Scope::Guard->new(\&cleanup);
+
+  eval { $schema->storage->dbh->do('DROP TABLE event') };
+  $schema->storage->dbh->do(<<'SQL');
+  CREATE TABLE event (
+    id INT PRIMARY KEY,
+    starts_at DATE,
+    created_on DATETIME YEAR TO FRACTION(5)
+  );
+SQL
+  my $rs = $schema->resultset('Event');
+
+  my $dt = DateTime->now;
+  $dt->set_nanosecond(555640000);
+
+  my $date_only = DateTime->new(
+    year => $dt->year, month => $dt->month, day => $dt->day
+  );
+
+  my $row;
+  ok( $row = $rs->create({
+    id => 1,
+    starts_at => $date_only, 
+    created_on => $dt,
+  }));
+  ok( $row = $rs->search({ id => 1 }, { select => [qw/starts_at created_on/] })
+    ->first
+  );
+  is $row->created_on, $dt, 'TIMESTAMP as DateTime roundtrip';
+
+  cmp_ok $row->created_on->nanosecond, '==', $dt->nanosecond,
+    'fractional part of a second survived';
+
+  is $row->starts_at, $date_only, 'DATE as DateTime roundtrip';
+}
+
+done_testing;
+
+# clean up our mess
+sub cleanup {
+  my $dbh; 
+  eval {
+    $dbh = $schema->storage->dbh;
+  };
+  return unless $dbh;
+
+  eval { $dbh->do(qq{DROP TABLE $_}) } for qw/event/;
+}

Modified: DBIx-Class/0.08/branches/extended_rels/t/inflate/datetime_mssql.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/inflate/datetime_mssql.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/inflate/datetime_mssql.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -1,61 +1,91 @@
 use strict;
-use warnings;  
+use warnings;
 
 use Test::More;
 use Test::Exception;
+use Scope::Guard ();
 use lib qw(t/lib);
 use DBICTest;
 
-my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_MSSQL_ODBC_${_}" } qw/DSN USER PASS/};
+# use this if you keep a copy of DBD::Sybase linked to FreeTDS somewhere else
+BEGIN {
+  if (my $lib_dirs = $ENV{DBICTEST_MSSQL_PERL5LIB}) {
+    unshift @INC, $_ for split /:/, $lib_dirs;
+  }
+}
 
-if (not ($dsn && $user)) {
+my ($dsn, $user, $pass)    = @ENV{map { "DBICTEST_MSSQL_ODBC_${_}" } qw/DSN USER PASS/};
+my ($dsn2, $user2, $pass2) = @ENV{map { "DBICTEST_MSSQL_${_}" }      qw/DSN USER PASS/};
+
+if (not ($dsn || $dsn2)) {
   plan skip_all =>
-    'Set $ENV{DBICTEST_MSSQL_ODBC_DSN}, _USER and _PASS to run this test' .
+    'Set $ENV{DBICTEST_MSSQL_ODBC_DSN} and/or $ENV{DBICTEST_MSSQL_DSN} _USER '
+    .'and _PASS to run this test' .
     "\nWarning: This test drops and creates a table called 'track'";
 } else {
   eval "use DateTime; use DateTime::Format::Strptime;";
   if ($@) {
     plan skip_all => 'needs DateTime and DateTime::Format::Strptime for testing';
   }
-  else {
-    plan tests => 4 * 2; # (tests * dt_types)
-  }
 }
 
-my $schema = DBICTest::Schema->clone;
+my @connect_info = (
+  [ $dsn,  $user,  $pass ],
+  [ $dsn2, $user2, $pass2 ],
+);
 
-$schema->connection($dsn, $user, $pass);
-$schema->storage->ensure_connected;
+my $schema;
 
+SKIP:
+for my $connect_info (@connect_info) {
+  my ($dsn, $user, $pass) = @$connect_info;
+
+  next unless $dsn;
+
+  $schema = DBICTest::Schema->connect($dsn, $user, $pass, {
+    on_connect_call => 'datetime_setup'
+  });
+
+  {
+    my $w;
+    local $SIG{__WARN__} = sub { $w = shift };
+    $schema->storage->ensure_connected;
+    if ($w =~ /Your DBD::Sybase is too old to support DBIx::Class::InflateColumn::DateTime/) {
+      skip "Skipping tests on old DBD::Sybase " . DBD::Sybase->VERSION, 1;
+    }
+  }
+
+  my $guard = Scope::Guard->new(\&cleanup);
+
 # coltype, column, datehash
-my @dt_types = (
-  ['DATETIME',
-   'last_updated_at',
-   {
-    year => 2004,
-    month => 8,
-    day => 21,
-    hour => 14,
-    minute => 36,
-    second => 48,
-    nanosecond => 500000000,
-  }],
-  ['SMALLDATETIME', # minute precision
-   'small_dt',
-   {
-    year => 2004,
-    month => 8,
-    day => 21,
-    hour => 14,
-    minute => 36,
-  }],
-);
+  my @dt_types = (
+    ['DATETIME',
+     'last_updated_at',
+     {
+      year => 2004,
+      month => 8,
+      day => 21,
+      hour => 14,
+      minute => 36,
+      second => 48,
+      nanosecond => 500000000,
+    }],
+    ['SMALLDATETIME', # minute precision
+     'small_dt',
+     {
+      year => 2004,
+      month => 8,
+      day => 21,
+      hour => 14,
+      minute => 36,
+    }],
+  );
 
-for my $dt_type (@dt_types) {
-  my ($type, $col, $sample_dt) = @$dt_type;
+  for my $dt_type (@dt_types) {
+    my ($type, $col, $sample_dt) = @$dt_type;
 
-  eval { $schema->storage->dbh->do("DROP TABLE track") };
-  $schema->storage->dbh->do(<<"SQL");
+    eval { $schema->storage->dbh->do("DROP TABLE track") };
+    $schema->storage->dbh->do(<<"SQL");
 CREATE TABLE track (
  trackid INT IDENTITY PRIMARY KEY,
  cd INT,
@@ -63,23 +93,30 @@
  $col $type,
 )
 SQL
-  ok(my $dt = DateTime->new($sample_dt));
+    ok(my $dt = DateTime->new($sample_dt));
 
-  my $row;
-  ok( $row = $schema->resultset('Track')->create({
-        $col => $dt,
-        cd => 1,
-      }));
-  ok( $row = $schema->resultset('Track')
-    ->search({ trackid => $row->trackid }, { select => [$col] })
-    ->first
-  );
-  is( $row->$col, $dt, 'DateTime roundtrip' );
+    my $row;
+    ok( $row = $schema->resultset('Track')->create({
+          $col => $dt,
+          cd => 1,
+        }));
+    ok( $row = $schema->resultset('Track')
+      ->search({ trackid => $row->trackid }, { select => [$col] })
+      ->first
+    );
+    is( $row->$col, $dt, "$type roundtrip" );
+
+    cmp_ok( $row->$col->nanosecond, '==', $sample_dt->{nanosecond},
+      'DateTime fractional portion roundtrip' )
+      if exists $sample_dt->{nanosecond};
+  }
 }
 
+done_testing;
+
 # clean up our mess
-END {
-  if (my $dbh = eval { $schema->storage->_dbh }) {
+sub cleanup {
+  if (my $dbh = eval { $schema->storage->dbh }) {
     $dbh->do('DROP TABLE track');
   }
 }

Modified: DBIx-Class/0.08/branches/extended_rels/t/inflate/datetime_sybase.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/inflate/datetime_sybase.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/inflate/datetime_sybase.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -70,7 +70,11 @@
       ->search({ trackid => $row->trackid }, { select => [$col] })
       ->first
     );
-    is( $row->$col, $dt, 'DateTime roundtrip' );
+    is( $row->$col, $dt, "$type roundtrip" );
+
+    is( $row->$col->nanosecond, $dt->nanosecond,
+      'fractional DateTime portion roundtrip' )
+      if $dt->nanosecond > 0;
   }
 
   # test a computed datetime column

Modified: DBIx-Class/0.08/branches/extended_rels/t/inflate/datetime_sybase_asa.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/inflate/datetime_sybase_asa.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/inflate/datetime_sybase_asa.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -3,6 +3,7 @@
 
 use Test::More;
 use Test::Exception;
+use Scope::Guard ();
 use lib qw(t/lib);
 use DBICTest;
 
@@ -27,20 +28,20 @@
   [ $dsn2, $user2, $pass2 ],
 );
 
-my @handles_to_clean;
+my $schema;
 
 foreach my $info (@info) {
   my ($dsn, $user, $pass) = @$info;
 
   next unless $dsn;
 
-  my $schema = DBICTest::Schema->clone;
+  $schema = DBICTest::Schema->clone;
 
   $schema->connection($dsn, $user, $pass, {
     on_connect_call => [ 'datetime_setup' ],
   });
 
-  push @handles_to_clean, $schema->storage->dbh;
+  my $sg = Scope::Guard->new(\&cleanup); 
 
 # coltype, col, date
   my @dt_types = (
@@ -72,7 +73,7 @@
       ->search({ trackid => $row->trackid }, { select => [$col] })
       ->first
     );
-    is( $row->$col, $dt, 'DateTime roundtrip' );
+    is( $row->$col, $dt, "$type roundtrip" );
 
     is $row->$col->nanosecond, $dt->nanosecond,
         'nanoseconds survived' if 0+$dt->nanosecond;
@@ -82,8 +83,8 @@
 done_testing;
 
 # clean up our mess
-END {
-  foreach my $dbh (@handles_to_clean) {
+sub cleanup {
+  if (my $dbh = $schema->storage->dbh) {
     eval { $dbh->do("DROP TABLE $_") } for qw/track/;
   }
 }

Modified: DBIx-Class/0.08/branches/extended_rels/t/inflate/hri.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/inflate/hri.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/inflate/hri.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -2,6 +2,7 @@
 use warnings;
 
 use Test::More;
+use Test::Exception;
 use lib qw(t/lib);
 use DBICTest;
 my $schema = DBICTest->init_schema();
@@ -9,26 +10,64 @@
 # Under some versions of SQLite if the $rs is left hanging around it will lock
 # So we create a scope here cos I'm lazy
 {
-    my $rs = $schema->resultset('CD')->search ({}, { order_by => 'cdid' });
+    my $rs = $schema->resultset('CD')->search ({}, {
+        order_by => 'cdid',
+    });
 
-    # get the defined columns
-    my @dbic_cols = sort $rs->result_source->columns;
+    my $orig_resclass = $rs->result_class;
+    eval "package DBICTest::CDSubclass; use base '$orig_resclass'";
 
-    # use the hashref inflator class as result class
-    $rs->result_class('DBIx::Class::ResultClass::HashRefInflator');
+# override on a specific $rs object, should not chain
+    $rs->result_class ('DBICTest::CDSubclass');
 
-    # fetch first record
-    my $datahashref1 = $rs->first;
+    my $cd = $rs->find ({cdid => 1});
+    is (ref $cd, 'DBICTest::CDSubclass', 'result_class override propagates to find');
 
-    my @hashref_cols = sort keys %$datahashref1;
+    $cd = $rs->search({ cdid => 1 })->single;
+    is (ref $cd, $orig_resclass, 'result_class override does not propagate over search+single');
 
-    is_deeply( \@dbic_cols, \@hashref_cols, 'returned columns' );
+    $cd = $rs->search()->find ({ cdid => 1 });
+    is (ref $cd, $orig_resclass, 'result_class override does not propagate over search+find');
 
-    my $cd1 = $rs->find ({cdid => 1});
-    is_deeply ( $cd1, $datahashref1, 'first/find return the same thing');
+# set as attr - should propagate
+    my $hri_rs = $rs->search ({}, { result_class => 'DBIx::Class::ResultClass::HashRefInflator' });
+    is ($rs->result_class, 'DBICTest::CDSubclass', 'original class unchanged');
+    is ($hri_rs->result_class, 'DBIx::Class::ResultClass::HashRefInflator', 'result_class accessor pre-set via attribute');
 
-    my $cd2 = $rs->search({ cdid => 1 })->single;
-    is_deeply ( $cd2, $datahashref1, 'first/search+single return the same thing');
+
+    my $datahashref1 = $hri_rs->next;
+    is_deeply(
+      [ sort keys %$datahashref1 ],
+      [ sort $rs->result_source->columns ],
+      'returned correct columns',
+    );
+
+    $cd = $hri_rs->find ({cdid => 1});
+    is_deeply ( $cd, $datahashref1, 'first/find return the same thing (result_class attr propagates)');
+
+    $cd = $hri_rs->search({ cdid => 1 })->single;
+    is_deeply ( $cd, $datahashref1, 'first/search+single return the same thing (result_class attr propagates)');
+
+    $hri_rs->result_class ('DBIx::Class::Row'); # something bogus
+    is(
+        $hri_rs->search->result_class, 'DBIx::Class::ResultClass::HashRefInflator',
+        'result_class set using accessor does not propagate over unused search'
+    );
+
+# test result class auto-loading
+    throws_ok (
+      sub { $rs->result_class ('nonexsitant_bogus_class') },
+      qr/Can't locate nonexsitant_bogus_class.pm/,
+      'Attempt to load on accessor override',
+    );
+    is ($rs->result_class, 'DBICTest::CDSubclass', 'class unchanged');
+
+    throws_ok (
+      sub { $rs->search ({}, { result_class => 'nonexsitant_bogus_class' }) },
+      qr/Can't locate nonexsitant_bogus_class.pm/,
+      'Attempt to load on accessor override',
+    );
+    is ($rs->result_class, 'DBICTest::CDSubclass', 'class unchanged');
 }
 
 sub check_cols_of {

Modified: DBIx-Class/0.08/branches/extended_rels/t/lib/DBIC/DebugObj.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/lib/DBIC/DebugObj.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/lib/DBIC/DebugObj.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -41,7 +41,7 @@
 
 sub query_end { }
 
-sub txn_start { }
+sub txn_begin { }
 
 sub txn_commit { }
 

Added: DBIx-Class/0.08/branches/extended_rels/t/lib/DBICNSTest/Result/D.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/lib/DBICNSTest/Result/D.pm	                        (rev 0)
+++ DBIx-Class/0.08/branches/extended_rels/t/lib/DBICNSTest/Result/D.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -0,0 +1,5 @@
+package DBICNSTest::Result::D;
+use base qw/DBIx::Class::Core/;
+__PACKAGE__->table('d');
+__PACKAGE__->add_columns('d');
+1;

Added: DBIx-Class/0.08/branches/extended_rels/t/lib/DBICNSTest/ResultSet/D.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/lib/DBICNSTest/ResultSet/D.pm	                        (rev 0)
+++ DBIx-Class/0.08/branches/extended_rels/t/lib/DBICNSTest/ResultSet/D.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -0,0 +1,2 @@
+package DBICNSTest::ResultSet::D;
+1;

Modified: DBIx-Class/0.08/branches/extended_rels/t/lib/DBICTest/AuthorCheck.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/lib/DBICTest/AuthorCheck.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/lib/DBICTest/AuthorCheck.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -31,7 +31,7 @@
 
   # not using file->stat as it invokes File::stat which in turn breaks stat(_)
   my ($mf_pl_mtime, $mf_mtime, $optdeps_mtime) = ( map
-    { (stat ($root->file ($_)) )[9] }
+    { (stat ($root->file ($_)) )[9] || undef }  # stat returns () on nonexistent files
     (qw|Makefile.PL  Makefile|, $optdeps)
   );
 
@@ -43,17 +43,18 @@
     push @fail_reasons, "Missing ./inc directory";
   }
 
-  if (not $mf_mtime) {
+  if(not $mf_mtime) {
     push @fail_reasons, "Missing ./Makefile";
   }
-  elsif($mf_mtime < $mf_pl_mtime) {
-    push @fail_reasons, "./Makefile.PL is newer than ./Makefile";
+  else {
+    if($mf_mtime < $mf_pl_mtime) {
+      push @fail_reasons, "./Makefile.PL is newer than ./Makefile";
+    }
+    if($mf_mtime < $optdeps_mtime) {
+      push @fail_reasons, "./$optdeps is newer than ./Makefile";
+    }
   }
 
-  if ($mf_mtime < $optdeps_mtime) {
-    push @fail_reasons, "./$optdeps is newer than ./Makefile";
-  }
-
   if (@fail_reasons) {
     print STDERR <<'EOE';
 
@@ -65,10 +66,16 @@
 We have a number of reasons to believe that this is a development
 checkout and that you, the user, did not run `perl Makefile.PL`
 before using this code. You absolutely _must_ perform this step,
-and ensure you have all required dependencies present. Not doing
+to ensure you have all required dependencies present. Not doing
 so often results in a lot of wasted time for other contributors
 trying to assit you with spurious "its broken!" problems.
 
+By default DBIC's Makefile.PL turns all optional dependencies into
+*HARD REQUIREMENTS*, in order to make sure that the entire test
+suite is executed, and no tests are skipped due to missing modules.
+If you for some reason need to disable this behavior - supply the
+--skip_author_deps option when running perl Makefile.PL
+
 If you are seeing this message unexpectedly (i.e. you are in fact
 attempting a regular installation be it through CPAN or manually),
 please report the situation to either the mailing list or to the

Modified: DBIx-Class/0.08/branches/extended_rels/t/lib/DBICTest/Schema/BooksInLibrary.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/lib/DBICTest/Schema/BooksInLibrary.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/lib/DBICTest/Schema/BooksInLibrary.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -27,6 +27,8 @@
 );
 __PACKAGE__->set_primary_key('id');
 
+__PACKAGE__->add_unique_constraint (['title']);
+
 __PACKAGE__->resultset_attributes({where => { source => "Library" } });
 
 __PACKAGE__->belongs_to ( owner => 'DBICTest::Schema::Owners', 'owner' );


Property changes on: DBIx-Class/0.08/branches/extended_rels/t/lib/DBICTest/Schema/EventTZPg.pm
___________________________________________________________________
Deleted: svn:mergeinfo
   - 

Added: DBIx-Class/0.08/branches/extended_rels/t/lib/DBICTest/Schema/TimestampPrimaryKey.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/lib/DBICTest/Schema/TimestampPrimaryKey.pm	                        (rev 0)
+++ DBIx-Class/0.08/branches/extended_rels/t/lib/DBICTest/Schema/TimestampPrimaryKey.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -0,0 +1,17 @@
+package # hide from PAUSE 
+    DBICTest::Schema::TimestampPrimaryKey;
+
+use base qw/DBICTest::BaseResult/;
+
+__PACKAGE__->table('timestamp_primary_key_test');
+
+__PACKAGE__->add_columns(
+  'id' => {
+    data_type => 'timestamp',
+    default_value => \'current_timestamp',
+  },
+);
+
+__PACKAGE__->set_primary_key('id');
+
+1;

Modified: DBIx-Class/0.08/branches/extended_rels/t/lib/DBICTest/Schema.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/lib/DBICTest/Schema.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/lib/DBICTest/Schema.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -22,6 +22,7 @@
   Year1999CDs
   CustomSql
   Money
+  TimestampPrimaryKey
   /,
   { 'DBICTest::Schema' => [qw/
     LinerNotes

Modified: DBIx-Class/0.08/branches/extended_rels/t/lib/DBICVersion_v1.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/lib/DBICVersion_v1.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/lib/DBICVersion_v1.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -36,12 +36,8 @@
 
 __PACKAGE__->register_class('Table', 'DBICVersion::Table');
 __PACKAGE__->load_components('+DBIx::Class::Schema::Versioned');
+__PACKAGE__->upgrade_directory('t/var/');
 
-sub upgrade_directory
-{
-    return 't/var/';
-}
-
 sub ordered_schema_versions {
   return('1.0','2.0','3.0');
 }

Modified: DBIx-Class/0.08/branches/extended_rels/t/lib/DBICVersion_v2.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/lib/DBICVersion_v2.pm	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/lib/DBICVersion_v2.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -47,9 +47,4 @@
 __PACKAGE__->upgrade_directory('t/var/');
 __PACKAGE__->backup_directory('t/var/backup/');
 
-#sub upgrade_directory
-#{
-#    return 't/var/';
-#}
-
 1;


Property changes on: DBIx-Class/0.08/branches/extended_rels/t/lib/DBICVersion_v3.pm
___________________________________________________________________
Deleted: svn:keywords
   - "Author Date Id Revision Url"

Modified: DBIx-Class/0.08/branches/extended_rels/t/lib/sqlite.sql
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/lib/sqlite.sql	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/lib/sqlite.sql	2010-06-02 17:41:37 UTC (rev 9557)
@@ -1,6 +1,6 @@
 -- 
 -- Created by SQL::Translator::Producer::SQLite
--- Created on Sat Mar  6 18:04:27 2010
+-- Created on Mon Mar 22 11:08:33 2010
 -- 
 ;
 
@@ -169,6 +169,14 @@
 );
 
 --
+-- Table: timestamp_primary_key_test
+--
+CREATE TABLE timestamp_primary_key_test (
+  id timestamp NOT NULL DEFAULT current_timestamp,
+  PRIMARY KEY (id)
+);
+
+--
 -- Table: treelike
 --
 CREATE TABLE treelike (
@@ -448,4 +456,4 @@
 -- View: year2000cds
 --
 CREATE VIEW year2000cds AS
-    SELECT cdid, artist, title, year, genreid, single_track FROM cd WHERE year = "2000"
+    SELECT cdid, artist, title, year, genreid, single_track FROM cd WHERE year = "2000"
\ No newline at end of file

Added: DBIx-Class/0.08/branches/extended_rels/t/lib/testinclude/DBICTestAdminInc.pm
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/lib/testinclude/DBICTestAdminInc.pm	                        (rev 0)
+++ DBIx-Class/0.08/branches/extended_rels/t/lib/testinclude/DBICTestAdminInc.pm	2010-06-02 17:41:37 UTC (rev 9557)
@@ -0,0 +1,6 @@
+package DBICTestAdminInc;
+use base 'DBIx::Class::Schema';
+
+sub connect { exit 70 } # this is what the test will expect to see
+
+1;


Property changes on: DBIx-Class/0.08/branches/extended_rels/t/ordered/cascade_delete.t
___________________________________________________________________
Deleted: svn:mergeinfo
   - 

Modified: DBIx-Class/0.08/branches/extended_rels/t/prefetch/grouped.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/prefetch/grouped.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/prefetch/grouped.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -76,7 +76,7 @@
           WHERE ( me.cd IN ( ?, ?, ?, ?, ? ) )
           GROUP BY me.cd
         )
-      count_subq
+      me
     )',
     [ map { [ 'me.cd' => $_] } ($cd_rs->get_column ('cdid')->all) ],
     'count() query generated expected SQL',
@@ -151,7 +151,7 @@
           WHERE ( me.cdid IS NOT NULL )
           GROUP BY me.cdid
           LIMIT 2
-        ) count_subq
+        ) me
     )',
     [],
     'count() query generated expected SQL',
@@ -262,7 +262,7 @@
           WHERE ( me.cd IN ( ?, ?, ?, ?, ? ) )
           GROUP BY SUBSTR(me.cd, 1, 1)
         )
-      count_subq
+      me
     )',
     [ map { [ 'me.cd' => $_] } ($cd_rs->get_column ('cdid')->all) ],
     'count() query generated expected SQL',

Modified: DBIx-Class/0.08/branches/extended_rels/t/prefetch/via_search_related.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/prefetch/via_search_related.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/prefetch/via_search_related.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -37,7 +37,6 @@
 
 }, 'search_related prefetch with order_by works');
 
-TODO: { local $TODO = 'Unqualified columns in where clauses can not be fixed without an SQLA rewrite' if SQL::Abstract->VERSION < 2;
 lives_ok ( sub {
   my $no_prefetch = $schema->resultset('Track')->search_related(cd =>
     {
@@ -65,9 +64,7 @@
   is($use_prefetch->count, $no_prefetch->count, 'counts with and without prefetch match');
 
 }, 'search_related prefetch with condition referencing unqualified column of a joined table works');
-}
 
-
 lives_ok (sub {
     my $rs = $schema->resultset("Artwork")->search(undef, {distinct => 1})
               ->search_related('artwork_to_artist')->search_related('artist',

Modified: DBIx-Class/0.08/branches/extended_rels/t/resultset/as_subselect_rs.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/resultset/as_subselect_rs.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/resultset/as_subselect_rs.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -22,4 +22,21 @@
    '... and chaining off the virtual view works';
 dies_ok  { $new_rs->as_subselect_rs->search({'artwork_to_artist.artwork_cd_id'=> 1})->count }
    q{... but chaining off of a virtual view using join doesn't work};
+
+my $book_rs = $schema->resultset ('BooksInLibrary')->search ({}, { join => 'owner' });
+
+is_same_sql_bind (
+  $book_rs->as_subselect_rs->as_query,
+  '(SELECT me.id, me.source, me.owner, me.title, me.price 
+      FROM (
+        SELECT me.id, me.source, me.owner, me.title, me.price
+          FROM books me
+          JOIN owners owner ON owner.id = me.owner
+        WHERE ( source = ? )
+      ) me
+  )',
+  [ [ source => 'Library' ] ],
+  'Resultset-class attributes do not seep outside of the subselect',
+);
+
 done_testing;

Modified: DBIx-Class/0.08/branches/extended_rels/t/resultset/update_delete.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/resultset/update_delete.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/resultset/update_delete.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -5,10 +5,9 @@
 use Test::More;
 use Test::Exception;
 use DBICTest;
+use DBIC::DebugObj;
+use DBIC::SqlMakerTest;
 
-#plan tests => 5;
-plan 'no_plan';
-
 my $schema = DBICTest->init_schema();
 
 my $tkfks = $schema->resultset('FourKeys_to_TwoKeys');
@@ -108,5 +107,36 @@
 );
 
 $sub_rs->delete;
+is ($tkfks->count, $tkfk_cnt -= 2, 'Only two rows deleted');
 
-is ($tkfks->count, $tkfk_cnt -= 2, 'Only two rows deleted');
+# make sure limit-only deletion works
+cmp_ok ($tkfk_cnt, '>', 1, 'More than 1 row left');
+$tkfks->search ({}, { rows => 1 })->delete;
+is ($tkfks->count, $tkfk_cnt -= 1, 'Only one row deleted');
+
+
+# Make sure prefetch is properly stripped too
+# check with sql-equality, as sqlite will accept bad sql just fine
+my ($sql, @bind);
+my $orig_debugobj = $schema->storage->debugobj;
+my $orig_debug = $schema->storage->debug;
+
+$schema->storage->debugobj (DBIC::DebugObj->new (\$sql, \@bind) );
+$schema->storage->debug (1);
+$schema->resultset('CD')->search(
+  { year => { '!=' => 2010 } },
+  { prefetch => 'liner_notes' },
+)->delete;
+
+$schema->storage->debugobj ($orig_debugobj);
+$schema->storage->debug ($orig_debug);
+
+is_same_sql_bind (
+  $sql,
+  \@bind,
+  'DELETE FROM cd WHERE ( cdid IN ( SELECT me.cdid FROM cd me WHERE ( year != ? ) GROUP BY me.cdid ) )',
+  ["'2010'"],
+  'Update on prefetching resultset strips prefetch correctly'
+);
+
+done_testing;

Added: DBIx-Class/0.08/branches/extended_rels/t/row/filter_column.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/row/filter_column.t	                        (rev 0)
+++ DBIx-Class/0.08/branches/extended_rels/t/row/filter_column.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -0,0 +1,142 @@
+use strict;
+use warnings;
+
+use Test::More;
+use Test::Exception;
+use lib qw(t/lib);
+use DBICTest;
+
+my $from_storage_ran = 0;
+my $to_storage_ran = 0;
+my $schema = DBICTest->init_schema();
+DBICTest::Schema::Artist->load_components(qw(FilterColumn InflateColumn));
+DBICTest::Schema::Artist->filter_column(rank => {
+  filter_from_storage => sub { $from_storage_ran++; $_[1] * 2 },
+  filter_to_storage   => sub { $to_storage_ran++; $_[1] / 2 },
+});
+Class::C3->reinitialize();
+
+my $artist = $schema->resultset('Artist')->create( { rank => 20 } );
+
+# this should be using the cursor directly, no inflation/processing of any sort
+my ($raw_db_rank) = $schema->resultset('Artist')
+                             ->search ($artist->ident_condition)
+                               ->get_column('rank')
+                                ->_resultset
+                                 ->cursor
+                                  ->next;
+
+is ($raw_db_rank, 10, 'INSERT: correctly unfiltered on insertion');
+
+for my $reloaded (0, 1) {
+  my $test = $reloaded ? 'reloaded' : 'stored';
+  $artist->discard_changes if $reloaded;
+
+  is( $artist->rank , 20, "got $test filtered rank" );
+}
+
+$artist->update;
+$artist->discard_changes;
+is( $artist->rank , 20, "got filtered rank" );
+
+$artist->update ({ rank => 40 });
+($raw_db_rank) = $schema->resultset('Artist')
+                             ->search ($artist->ident_condition)
+                               ->get_column('rank')
+                                ->_resultset
+                                 ->cursor
+                                  ->next;
+is ($raw_db_rank, 20, 'UPDATE: correctly unfiltered on update');
+
+$artist->discard_changes;
+$artist->rank(40);
+ok( !$artist->is_column_changed('rank'), 'column is not dirty after setting the same value' );
+
+MC: {
+   my $cd = $schema->resultset('CD')->create({
+      artist => { rank => 20 },
+      title => 'fun time city!',
+      year => 'forevertime',
+   });
+   ($raw_db_rank) = $schema->resultset('Artist')
+                                ->search ($cd->artist->ident_condition)
+                                  ->get_column('rank')
+                                   ->_resultset
+                                    ->cursor
+                                     ->next;
+
+   is $raw_db_rank, 10, 'artist rank gets correctly unfiltered w/ MC';
+   is $cd->artist->rank, 20, 'artist rank gets correctly filtered w/ MC';
+}
+
+CACHE_TEST: {
+  my $expected_from = $from_storage_ran;
+  my $expected_to   = $to_storage_ran;
+
+  # ensure we are creating a fresh obj
+  $artist = $schema->resultset('Artist')->single($artist->ident_condition);
+
+  is $from_storage_ran, $expected_from, 'from has not run yet';
+  is $to_storage_ran, $expected_to, 'to has not run yet';
+
+  $artist->rank;
+  cmp_ok (
+    $artist->get_filtered_column('rank'),
+      '!=',
+    $artist->get_column('rank'),
+    'filter/unfilter differ'
+  );
+  is $from_storage_ran, ++$expected_from, 'from ran once, therefore caches';
+  is $to_storage_ran, $expected_to,  'to did not run';
+
+  $artist->rank(6);
+  is $from_storage_ran, $expected_from, 'from did not run';
+  is $to_storage_ran, ++$expected_to,  'to ran once';
+
+  ok ($artist->is_column_changed ('rank'), 'Column marked as dirty');
+
+  $artist->rank;
+  is $from_storage_ran, $expected_from, 'from did not run';
+  is $to_storage_ran, $expected_to,  'to did not run';
+
+  $artist->update;
+
+  $artist->set_column(rank => 3);
+  ok (! $artist->is_column_changed ('rank'), 'Column not marked as dirty on same set_column value');
+  is ($artist->rank, '6', 'Column set properly (cache blown)');
+  is $from_storage_ran, ++$expected_from, 'from ran once (set_column blew cache)';
+  is $to_storage_ran, $expected_to,  'to did not run';
+
+  $artist->rank(6);
+  ok (! $artist->is_column_changed ('rank'), 'Column not marked as dirty on same accessor-set value');
+  is ($artist->rank, '6', 'Column set properly');
+  is $from_storage_ran, $expected_from, 'from did not run';
+  is $to_storage_ran, $expected_to,  'to did not run';
+
+  $artist->store_column(rank => 4);
+  ok (! $artist->is_column_changed ('rank'), 'Column not marked as dirty on differing store_column value');
+  is ($artist->rank, '8', 'Cache properly blown');
+  is $from_storage_ran, ++$expected_from, 'from ran once (store_column blew cache)';
+  is $to_storage_ran, $expected_to,  'to did not run';
+}
+
+IC_DIE: {
+  dies_ok {
+     DBICTest::Schema::Artist->inflate_column(rank =>
+        { inflate => sub {}, deflate => sub {} }
+     );
+  } q(Can't inflate column after filter column);
+
+  DBICTest::Schema::Artist->inflate_column(name =>
+     { inflate => sub {}, deflate => sub {} }
+  );
+
+  dies_ok {
+     DBICTest::Schema::Artist->filter_column(name => {
+        filter_to_storage => sub {},
+        filter_from_storage => sub {}
+     });
+  } q(Can't filter column after inflate column);
+}
+
+done_testing;

Added: DBIx-Class/0.08/branches/extended_rels/t/row/inflate_result.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/row/inflate_result.t	                        (rev 0)
+++ DBIx-Class/0.08/branches/extended_rels/t/row/inflate_result.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -0,0 +1,111 @@
+package My::Schema::Result::User;
+
+use strict;
+use warnings;
+use base qw/DBIx::Class::Core/;
+
+### Define what our admin class is, for ensure_class_loaded()
+my $admin_class = __PACKAGE__ . '::Admin';
+
+__PACKAGE__->table('users');
+
+__PACKAGE__->add_columns(
+    qw/user_id   email    password
+      firstname lastname active
+      admin/
+);
+
+__PACKAGE__->set_primary_key('user_id');
+
+sub inflate_result {
+    my $self = shift;
+    my $ret  = $self->next::method(@_);
+    if ( $ret->admin ) {    ### If this is an admin, rebless for extra functions
+        $self->ensure_class_loaded($admin_class);
+        bless $ret, $admin_class;
+    }
+    return $ret;
+}
+
+sub hello {
+    return "I am a regular user.";
+}
+
+package My::Schema::Result::User::Admin;
+
+use strict;
+use warnings;
+use base qw/My::Schema::Result::User/;
+
+# This line is important
+__PACKAGE__->table('users');
+
+sub hello {
+    return "I am an admin.";
+}
+
+sub do_admin_stuff {
+    return "I am doing admin stuff";
+}
+
+package My::Schema;
+
+use base qw/DBIx::Class::Schema/;
+
+My::Schema->register_class( Admin => 'My::Schema::Result::User::Admin' );
+My::Schema->register_class( User  => 'My::Schema::Result::User' );
+
+1;
+
+package main;
+
+use lib qw(t/lib);
+use DBICTest;
+
+use Test::More;
+
+my $user_data = {
+    email    => 'someguy at place.com',
+    password => 'pass1',
+    admin    => 0
+};
+
+my $admin_data = {
+    email    => 'someadmin at adminplace.com',
+    password => 'pass2',
+    admin    => 1
+};
+
+ok( my $schema = My::Schema->connection('dbi:SQLite:dbname=:memory:') );
+
+ok(
+    $schema->storage->dbh->do(
+"create table users (user_id, email, password, firstname, lastname, active,  admin)"
+    )
+);
+
+TODO: {
+    local $TODO = 'New objects should also be inflated';
+    my $user  = $schema->resultset('User')->create($user_data);
+    my $admin = $schema->resultset('User')->create($admin_data);
+
+    is( ref $user,  'My::Schema::Result::User' );
+    is( ref $admin, 'My::Schema::Result::User::Admin' );
+
+}
+
+my $user  = $schema->resultset('User')->single($user_data);
+my $admin = $schema->resultset('User')->single($admin_data);
+
+is( ref $user,  'My::Schema::Result::User' );
+is( ref $admin, 'My::Schema::Result::User::Admin' );
+
+is( $user->password,  'pass1' );
+is( $admin->password, 'pass2' );
+is( $user->hello,     'I am a regular user.' );
+is( $admin->hello,    'I am an admin.' );
+
+ok( !$user->can('do_admin_stuff') );
+ok( $admin->can('do_admin_stuff') );
+
+done_testing;

Modified: DBIx-Class/0.08/branches/extended_rels/t/search/subquery.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/search/subquery.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/search/subquery.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -16,12 +16,12 @@
 my @tests = (
   {
     rs => $cdrs,
-    search => \[ "title = ? AND year LIKE ?", 'buahaha', '20%' ],
+    search => \[ "title = ? AND year LIKE ?", [ title => 'buahaha' ], [ year => '20%' ] ],
     attrs => { rows => 5 },
     sqlbind => \[
       "( SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track FROM cd me WHERE (title = ? AND year LIKE ?) LIMIT 5)",
-      'buahaha',
-      '20%',
+      [ title => 'buahaha' ],
+      [ year => '20%' ],
     ],
   },
 
@@ -157,8 +157,6 @@
 );
 
 
-plan tests => @tests * 2;
-
 for my $i (0 .. $#tests) {
   my $t = $tests[$i];
   for my $p (1, 2) {  # repeat everything twice, make sure we do not clobber search arguments
@@ -169,3 +167,5 @@
     );
   }
 }
+
+done_testing;

Added: DBIx-Class/0.08/branches/extended_rels/t/sqlahacks/limit_dialects/generic_subq.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/sqlahacks/limit_dialects/generic_subq.t	                        (rev 0)
+++ DBIx-Class/0.08/branches/extended_rels/t/sqlahacks/limit_dialects/generic_subq.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -0,0 +1,125 @@
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+use DBIC::SqlMakerTest;
+
+my $schema = DBICTest->init_schema;
+
+$schema->storage->_sql_maker->limit_dialect ('GenericSubQ');
+
+my $rs = $schema->resultset ('BooksInLibrary')->search ({}, {
+  '+columns' => [{ owner_name => 'owner.name' }],
+  join => 'owner',
+  rows => 2,
+  order_by => 'me.title',
+});
+
+is_same_sql_bind(
+  $rs->as_query,
+  '(
+    SELECT  id, source, owner, title, price,
+            owner_name
+      FROM (
+        SELECT  me.id, me.source, me.owner, me.title, me.price,
+                owner.name AS owner_name
+          FROM books me
+          JOIN owners owner ON owner.id = me.owner
+        WHERE ( source = ? )
+      ) me
+    WHERE
+      (
+        SELECT COUNT(*)
+          FROM books rownum__emulation
+        WHERE rownum__emulation.title < me.title
+      ) < 2
+    ORDER BY me.title
+  )',
+  [  [ 'source', 'Library' ] ],
+);
+
+is_deeply (
+  [ $rs->get_column ('title')->all ],
+  ['Best Recipe Cookbook', 'Dynamical Systems'],
+  'Correct columns selected with rows',
+);
+
+$schema->storage->_sql_maker->quote_char ('"');
+$schema->storage->_sql_maker->name_sep ('.');
+
+$rs = $schema->resultset ('BooksInLibrary')->search ({}, {
+  order_by => { -desc => 'title' },
+  '+select' => ['owner.name'],
+  '+as' => ['owner.name'],
+  join => 'owner',
+  rows => 3,
+  offset => 1,
+});
+
+is_same_sql_bind(
+  $rs->as_query,
+  '(
+    SELECT  "id", "source", "owner", "title", "price",
+            "owner__name"
+      FROM (
+        SELECT  "me"."id", "me"."source", "me"."owner", "me"."title", "me"."price",
+                "owner"."name" AS "owner__name"
+          FROM "books" "me"
+          JOIN "owners" "owner" ON "owner"."id" = "me"."owner"
+        WHERE ( "source" = ? )
+      ) "me"
+    WHERE
+      (
+        SELECT COUNT(*)
+          FROM "books" "rownum__emulation"
+        WHERE "rownum__emulation"."title" > "me"."title"
+      ) BETWEEN 1 AND 3
+    ORDER BY "title" DESC
+  )',
+  [ [ 'source', 'Library' ] ],
+);
+
+is_deeply (
+  [ $rs->get_column ('title')->all ],
+  [ 'Dynamical Systems', 'Best Recipe Cookbook' ],
+  'Correct columns selected with rows',
+);
+
+$rs = $schema->resultset ('BooksInLibrary')->search ({}, {
+  order_by => 'title',
+  'select' => ['owner.name'],
+  'as' => ['owner_name'],
+  join => 'owner',
+  offset => 1,
+});
+
+is_same_sql_bind(
+  $rs->as_query,
+  '(
+    SELECT "owner_name"
+      FROM (
+        SELECT "owner"."name" AS "owner_name", "title"
+          FROM "books" "me"
+          JOIN "owners" "owner" ON "owner"."id" = "me"."owner"
+        WHERE ( "source" = ? )
+      ) "me"
+    WHERE
+      (
+        SELECT COUNT(*)
+          FROM "books" "rownum__emulation"
+        WHERE "rownum__emulation"."title" < "me"."title"
+      ) BETWEEN 1 AND 4294967295
+    ORDER BY "title"
+  )',
+  [ [ 'source', 'Library' ] ],
+);
+
+is_deeply (
+  [ $rs->get_column ('owner_name')->all ],
+  [ ('Newton') x 2 ],
+  'Correct columns selected with rows',
+);
+
+done_testing;

Added: DBIx-Class/0.08/branches/extended_rels/t/sqlahacks/limit_dialects/rno.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/sqlahacks/limit_dialects/rno.t	                        (rev 0)
+++ DBIx-Class/0.08/branches/extended_rels/t/sqlahacks/limit_dialects/rno.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -0,0 +1,74 @@
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+use DBIC::SqlMakerTest;
+
+my $schema = DBICTest->init_schema;
+
+$schema->storage->_sql_maker->limit_dialect ('RowNumberOver');
+
+my $rs_selectas_col = $schema->resultset ('BooksInLibrary')->search ({}, {
+  '+select' => ['owner.name'],
+  '+as' => ['owner.name'],
+  join => 'owner',
+  rows => 1,
+});
+
+is_same_sql_bind(
+  $rs_selectas_col->as_query,
+  '(
+    SELECT  id, source, owner, title, price,
+            owner__name
+      FROM (
+        SELECT  id, source, owner, title, price,
+                owner__name,
+                ROW_NUMBER() OVER( ) AS rno__row__index
+          FROM (
+            SELECT  me.id, me.source, me.owner, me.title, me.price,
+                    owner.name AS owner__name
+              FROM books me
+              JOIN owners owner ON owner.id = me.owner
+            WHERE ( source = ? )
+          ) me
+      ) me
+    WHERE rno__row__index BETWEEN 1 AND 1
+  )',
+  [  [ 'source', 'Library' ] ],
+);
+
+$schema->storage->_sql_maker->quote_char ([qw/ [ ] /]);
+$schema->storage->_sql_maker->name_sep ('.');
+
+my $rs_selectas_rel = $schema->resultset ('BooksInLibrary')->search ({}, {
+  '+select' => ['owner.name'],
+  '+as' => ['owner_name'],
+  join => 'owner',
+  rows => 1,
+});
+
+is_same_sql_bind(
+  $rs_selectas_rel->as_query,
+  '(
+    SELECT  [id], [source], [owner], [title], [price],
+            [owner_name]
+      FROM (
+        SELECT  [id], [source], [owner], [title], [price],
+                [owner_name],
+                ROW_NUMBER() OVER( ) AS [rno__row__index]
+          FROM (
+            SELECT  [me].[id], [me].[source], [me].[owner], [me].[title], [me].[price],
+                    [owner].[name] AS [owner_name]
+              FROM [books] [me]
+              JOIN [owners] [owner] ON [owner].[id] = [me].[owner]
+            WHERE ( [source] = ? )
+          ) [me]
+      ) [me]
+    WHERE [rno__row__index] BETWEEN 1 AND 1
+  )',
+  [ [ 'source', 'Library' ] ],
+);
+
+done_testing;

Copied: DBIx-Class/0.08/branches/extended_rels/t/sqlahacks/limit_dialects/rownum.t (from rev 6886, DBIx-Class/0.08/branches/extended_rels/t/41orrible.t)
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/sqlahacks/limit_dialects/rownum.t	                        (rev 0)
+++ DBIx-Class/0.08/branches/extended_rels/t/sqlahacks/limit_dialects/rownum.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -0,0 +1,35 @@
+use strict;
+use warnings;
+
+use Test::More;
+
+use lib qw(t/lib);
+use DBICTest;
+use DBIC::SqlMakerTest;
+
+my $s = DBICTest->init_schema (no_deploy => 1, );
+$s->storage->sql_maker->limit_dialect ('RowNum');
+
+my $rs = $s->resultset ('CD');
+
+is_same_sql_bind (
+  $rs->search ({}, { rows => 1, offset => 3,columns => [
+      { id => 'foo.id' },
+      { 'bar.id' => 'bar.id' },
+      { bleh => \ 'TO_CHAR (foo.womble, "blah")' },
+    ]})->as_query,
+  '(SELECT id, bar__id, bleh
+      FROM (
+        SELECT id, bar__id, bleh, ROWNUM rownum__index
+          FROM (
+            SELECT foo.id AS id, bar.id AS bar__id, TO_CHAR(foo.womble, "blah") AS bleh
+              FROM cd me
+          ) me
+      ) me
+    WHERE rownum__index BETWEEN 4 AND 4
+  )',
+  [],
+  'Rownum subsel aliasing works correctly'
+);
+
+done_testing;

Modified: DBIx-Class/0.08/branches/extended_rels/t/sqlahacks/limit_dialects/toplimit.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/sqlahacks/limit_dialects/toplimit.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/sqlahacks/limit_dialects/toplimit.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -14,139 +14,184 @@
 delete $schema->storage->_sql_maker->{_cached_syntax};
 $schema->storage->_sql_maker->limit_dialect ('Top');
 
-my $rs = $schema->resultset ('BooksInLibrary')->search ({}, { prefetch => 'owner', rows => 1, offset => 3 });
+my $books_45_and_owners = $schema->resultset ('BooksInLibrary')->search ({}, { prefetch => 'owner', rows => 2, offset => 3 });
 
-sub default_test_order {
-   my $order_by = shift;
-   is_same_sql_bind(
-      $rs->search ({}, {order_by => $order_by})->as_query,
-      "(SELECT
-        TOP 1 me__id, source, owner, title, price, owner__id, name FROM
-         (SELECT
-           TOP 4 me.id AS me__id, me.source, me.owner, me.title, me.price, owner.id AS owner__id, owner.name
-           FROM books me
-           JOIN owners owner ON
-           owner.id = me.owner
-           WHERE ( source = ? )
-           ORDER BY me__id ASC
-         ) me ORDER BY me__id DESC
-       )",
+for my $null_order (
+  undef,
+  '',
+  {},
+  [],
+  [{}],
+) {
+  my $rs = $books_45_and_owners->search ({}, {order_by => $null_order });
+  is_same_sql_bind(
+      $rs->as_query,
+      '(SELECT TOP 2
+            id, source, owner, title, price, owner__id, owner__name
+          FROM (
+            SELECT TOP 5
+                me.id, me.source, me.owner, me.title, me.price, owner.id AS owner__id, owner.name AS owner__name
+              FROM books me
+              JOIN owners owner ON owner.id = me.owner
+            WHERE ( source = ? )
+            ORDER BY me.id
+          ) me
+        ORDER BY me.id DESC
+       )',
     [ [ source => 'Library' ] ],
   );
 }
 
-sub test_order {
-  my $args = shift;
 
-  my $req_order = $args->{order_req}
-    ? "ORDER BY $args->{order_req}"
-    : ''
-  ;
-
-  is_same_sql_bind(
-    $rs->search ({}, {order_by => $args->{order_by}})->as_query,
-    "(SELECT
-      me__id, source, owner, title, price, owner__id, name FROM
-      (SELECT
-        TOP 1 me__id, source, owner, title, price, owner__id, name FROM
-         (SELECT
-           TOP 4 me.id AS me__id, me.source, me.owner, me.title, me.price, owner.id AS owner__id, owner.name FROM
-           books me
-           JOIN owners owner ON owner.id = me.owner
-           WHERE ( source = ? )
-           ORDER BY $args->{order_inner}
-         ) me ORDER BY $args->{order_outer}
-      ) me $req_order
-    )",
-    [ [ source => 'Library' ] ],
-  );
-}
-
-my @tests = (
+for my $ord_set (
   {
     order_by => \'foo DESC',
-    order_req => 'foo DESC',
     order_inner => 'foo DESC',
-    order_outer => 'foo ASC'
+    order_outer => 'ORDER__BY__1 ASC',
+    order_req => 'ORDER__BY__1 DESC',
+    exselect_outer => 'ORDER__BY__1',
+    exselect_inner => 'foo AS ORDER__BY__1',
   },
   {
     order_by => { -asc => 'foo'  },
-    order_req => 'foo ASC',
     order_inner => 'foo ASC',
-    order_outer => 'foo DESC',
+    order_outer => 'ORDER__BY__1 DESC',
+    order_req => 'ORDER__BY__1 ASC',
+    exselect_outer => 'ORDER__BY__1',
+    exselect_inner => 'foo AS ORDER__BY__1',
   },
   {
-    order_by => 'foo',
-    order_req => 'foo',
-    order_inner => 'foo ASC',
-    order_outer => 'foo DESC',
+    order_by => { -desc => 'foo' },
+    order_inner => 'foo DESC',
+    order_outer => 'ORDER__BY__1 ASC',
+    order_req => 'ORDER__BY__1 DESC',
+    exselect_outer => 'ORDER__BY__1',
+    exselect_inner => 'foo AS ORDER__BY__1',
   },
   {
-    order_by => [ qw{ foo bar}   ],
-    order_req => 'foo, bar',
-    order_inner => 'foo ASC, bar ASC',
-    order_outer => 'foo DESC, bar DESC',
+    order_by => 'foo',
+    order_inner => 'foo',
+    order_outer => 'ORDER__BY__1 DESC',
+    order_req => 'ORDER__BY__1',
+    exselect_outer => 'ORDER__BY__1',
+    exselect_inner => 'foo AS ORDER__BY__1',
   },
   {
-    order_by => { -desc => 'foo' },
-    order_req => 'foo DESC',
-    order_inner => 'foo DESC',
-    order_outer => 'foo ASC',
+    order_by => [ qw{ foo me.owner}   ],
+    order_inner => 'foo, me.owner',
+    order_outer => 'ORDER__BY__1 DESC, me.owner DESC',
+    order_req => 'ORDER__BY__1, me.owner',
+    exselect_outer => 'ORDER__BY__1',
+    exselect_inner => 'foo AS ORDER__BY__1',
   },
   {
     order_by => ['foo', { -desc => 'bar' } ],
-    order_req => 'foo, bar DESC',
-    order_inner => 'foo ASC, bar DESC',
-    order_outer => 'foo DESC, bar ASC',
+    order_inner => 'foo, bar DESC',
+    order_outer => 'ORDER__BY__1 DESC, ORDER__BY__2 ASC',
+    order_req => 'ORDER__BY__1, ORDER__BY__2 DESC',
+    exselect_outer => 'ORDER__BY__1, ORDER__BY__2',
+    exselect_inner => 'foo AS ORDER__BY__1, bar AS ORDER__BY__2',
   },
   {
     order_by => { -asc => [qw{ foo bar }] },
-    order_req => 'foo ASC, bar ASC',
     order_inner => 'foo ASC, bar ASC',
-    order_outer => 'foo DESC, bar DESC',
+    order_outer => 'ORDER__BY__1 DESC, ORDER__BY__2 DESC',
+    order_req => 'ORDER__BY__1 ASC, ORDER__BY__2 ASC',
+    exselect_outer => 'ORDER__BY__1, ORDER__BY__2',
+    exselect_inner => 'foo AS ORDER__BY__1, bar AS ORDER__BY__2',
   },
   {
     order_by => [
-      { -asc => 'foo' },
+      'foo',
       { -desc => [qw{bar}] },
-      { -asc  => [qw{hello sensors}]},
+      { -asc  => [qw{me.owner sensors}]},
     ],
-    order_req => 'foo ASC, bar DESC, hello ASC, sensors ASC',
-    order_inner => 'foo ASC, bar DESC, hello ASC, sensors ASC',
-    order_outer => 'foo DESC, bar ASC, hello DESC, sensors DESC',
+    order_inner => 'foo, bar DESC, me.owner ASC, sensors ASC',
+    order_outer => 'ORDER__BY__1 DESC, ORDER__BY__2 ASC, me.owner DESC, ORDER__BY__3 DESC',
+    order_req => 'ORDER__BY__1, ORDER__BY__2 DESC, me.owner ASC, ORDER__BY__3 ASC',
+    exselect_outer => 'ORDER__BY__1, ORDER__BY__2, ORDER__BY__3',
+    exselect_inner => 'foo AS ORDER__BY__1, bar AS ORDER__BY__2, sensors AS ORDER__BY__3',
   },
-);
+) {
+  my $o_sel = $ord_set->{exselect_outer}
+    ? ', ' . $ord_set->{exselect_outer}
+    : ''
+  ;
+  my $i_sel = $ord_set->{exselect_inner}
+    ? ', ' . $ord_set->{exselect_inner}
+    : ''
+  ;
 
-my @default_tests = ( undef, '', {}, [] );
+  is_same_sql_bind(
+    $books_45_and_owners->search ({}, {order_by => $ord_set->{order_by}})->as_query,
+    "(SELECT TOP 2
+          id, source, owner, title, price, owner__id, owner__name
+        FROM (
+          SELECT TOP 2
+              id, source, owner, title, price, owner__id, owner__name$o_sel
+            FROM (
+              SELECT TOP 5
+                  me.id, me.source, me.owner, me.title, me.price, owner.id AS owner__id, owner.name AS owner__name$i_sel
+                FROM books me
+                JOIN owners owner ON owner.id = me.owner
+              WHERE ( source = ? )
+              ORDER BY $ord_set->{order_inner}
+            ) me
+          ORDER BY $ord_set->{order_outer}
+        ) me
+      ORDER BY $ord_set->{order_req}
+    )",
+    [ [ source => 'Library' ] ],
+  );
+}
 
-plan (tests => scalar @tests + scalar @default_tests + 1);
-
-test_order ($_) for @tests;
-default_test_order ($_) for @default_tests;
-
-
+# with groupby
 is_same_sql_bind (
-  $rs->search ({}, { group_by => 'title', order_by => 'title' })->as_query,
-'(SELECT
-me.id, me.source, me.owner, me.title, me.price, owner.id, owner.name FROM
-   ( SELECT
-      id, source, owner, title, price FROM
-      ( SELECT
-         TOP 1 id, source, owner, title, price FROM
-         ( SELECT
-            TOP 4 me.id, me.source, me.owner, me.title, me.price FROM
-            books me  JOIN
-            owners owner ON owner.id = me.owner
-            WHERE ( source = ? )
-            GROUP BY title
-            ORDER BY title ASC
-         ) me
-         ORDER BY title DESC
+  $books_45_and_owners->search ({}, { group_by => 'title', order_by => 'title' })->as_query,
+  '(SELECT me.id, me.source, me.owner, me.title, me.price, owner.id, owner.name
+      FROM (
+        SELECT TOP 2 id, source, owner, title, price
+          FROM (
+            SELECT TOP 2
+                id, source, owner, title, price
+              FROM (
+                SELECT TOP 5
+                    me.id, me.source, me.owner, me.title, me.price
+                  FROM books me
+                  JOIN owners owner ON owner.id = me.owner
+                WHERE ( source = ? )
+                GROUP BY title
+                ORDER BY title
+              ) me
+            ORDER BY title DESC
+          ) me
+        ORDER BY title
       ) me
-      ORDER BY title
-   ) me  JOIN
-   owners owner ON owner.id = me.owner WHERE
-   ( source = ? )
-   ORDER BY title)' ,
+      JOIN owners owner ON owner.id = me.owner
+    WHERE ( source = ? )
+    ORDER BY title
+  )',
   [ [ source => 'Library' ], [ source => 'Library' ] ],
 );
+
+# test deprecated column mixing over join boundaries
+my $rs_selectas_top = $schema->resultset ('BooksInLibrary')->search ({}, {
+  '+select' => ['owner.name'],
+  '+as' => ['owner_name'],
+  join => 'owner',
+  rows => 1 
+});
+
+is_same_sql_bind( $rs_selectas_top->search({})->as_query,
+                  '(SELECT
+                      TOP 1 me.id, me.source, me.owner, me.title, me.price,
+                      owner.name AS owner_name
+                    FROM books me
+                    JOIN owners owner ON owner.id = me.owner
+                    WHERE ( source = ? )
+                    ORDER BY me.id
+                   )',
+                   [ [ 'source', 'Library' ] ],
+                );
+
+done_testing;

Added: DBIx-Class/0.08/branches/extended_rels/t/sqlahacks/oracle.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/sqlahacks/oracle.t	                        (rev 0)
+++ DBIx-Class/0.08/branches/extended_rels/t/sqlahacks/oracle.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -0,0 +1,80 @@
+
+use strict;
+use warnings;
+use Test::More;
+use Test::Exception;
+use Data::Dumper::Concise;
+use lib qw(t/lib);
+use DBIC::SqlMakerTest;
+use DBIx::Class::SQLAHacks::Oracle;
+
+# 
+#  Offline test for connect_by 
+#  ( without active database connection)
+# 
+my @handle_tests = (
+    {
+        connect_by  => { 'parentid' => { '-prior' => \'artistid' } },
+        stmt        => '"parentid" = PRIOR artistid',
+        bind        => [],
+        msg         => 'Simple: "parentid" = PRIOR artistid',
+    },
+    {
+        connect_by  => { 'parentid' => { '!=' => { '-prior' => \'artistid' } } },
+        stmt        => '"parentid" != ( PRIOR artistid )',
+        bind        => [],
+        msg         => 'Simple: "parentid" != ( PRIOR artistid )',
+    },
+    # Examples from http://download.oracle.com/docs/cd/B19306_01/server.102/b14200/queries003.htm
+
+    # CONNECT BY last_name != 'King' AND PRIOR employee_id = manager_id ...
+    {
+        connect_by  => [
+            last_name => { '!=' => 'King' },
+            manager_id => { '-prior' => \'employee_id' },
+        ],
+        stmt        => '( "last_name" != ? OR "manager_id" = PRIOR employee_id )',
+        bind        => ['King'],
+        msg         => 'oracle.com example #1',
+    },
+    # CONNECT BY PRIOR employee_id = manager_id and 
+    #            PRIOR account_mgr_id = customer_id ...
+    {
+        connect_by  => {
+            manager_id => { '-prior' => \'employee_id' },
+            customer_id => { '>', { '-prior' => \'account_mgr_id' } },
+        },
+        stmt        => '( "customer_id" > ( PRIOR account_mgr_id ) AND "manager_id" = PRIOR employee_id )',
+        bind        => [],
+        msg         => 'oracle.com example #2',
+    },
+    # CONNECT BY NOCYCLE PRIOR employee_id = manager_id AND LEVEL <= 4;
+    # TODO: NOCYCLE parameter doesn't work
+);
+
+my $sqla_oracle = DBIx::Class::SQLAHacks::Oracle->new( quote_char => '"', name_sep => '.' );
+isa_ok($sqla_oracle, 'DBIx::Class::SQLAHacks::Oracle');
+
+
+my $test_count = ( @handle_tests * 2 ) + 1;
+
+for my $case (@handle_tests) {
+    my ( $stmt, @bind );
+    my $msg = sprintf("Offline: %s",
+        $case->{msg} || substr($case->{stmt},0,25),
+    );
+    lives_ok(
+        sub {
+            ( $stmt, @bind ) = $sqla_oracle->_recurse_where( $case->{connect_by} );
+            is_same_sql_bind( $stmt, \@bind, $case->{stmt}, $case->{bind},$msg )
+              || diag "Search term:\n" . Dumper $case->{connect_by};
+        }
+    ,sprintf("lives is ok from '%s'",$msg));
+}
+
+# 
+#   Online Tests?
+# 
+$test_count += 0;
+
+done_testing( $test_count );

Copied: DBIx-Class/0.08/branches/extended_rels/t/sqlahacks/oraclejoin.t (from rev 6886, DBIx-Class/0.08/branches/extended_rels/t/41orrible.t)
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/sqlahacks/oraclejoin.t	                        (rev 0)
+++ DBIx-Class/0.08/branches/extended_rels/t/sqlahacks/oraclejoin.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -0,0 +1,71 @@
+use strict;
+use warnings;
+
+use Test::More;
+
+use lib qw(t/lib);
+use DBIx::Class::SQLAHacks::OracleJoins;
+use DBICTest;
+use DBIC::SqlMakerTest;
+
+my $sa = new DBIx::Class::SQLAHacks::OracleJoins;
+
+# search with undefined or empty $cond
+
+#  my ($self, $table, $fields, $where, $order, @rest) = @_;
+my ($sql, @bind) = $sa->select(
+    [
+        { me => "cd" },
+        [
+            { "-join_type" => "LEFT", artist => "artist" },
+            { "artist.artistid" => "me.artist" },
+        ],
+    ],
+    [ 'cd.cdid', 'cd.artist', 'cd.title', 'cd.year', 'artist.artistid', 'artist.name' ],
+    undef,
+    undef
+);
+is_same_sql_bind(
+  $sql, \@bind,
+  'SELECT cd.cdid, cd.artist, cd.title, cd.year, artist.artistid, artist.name FROM cd me, artist artist WHERE ( artist.artistid(+) = me.artist )', [],
+  'WhereJoins search with empty where clause'
+);
+
+($sql, @bind) = $sa->select(
+    [
+        { me => "cd" },
+        [
+            { "-join_type" => "", artist => "artist" },
+            { "artist.artistid" => "me.artist" },
+        ],
+    ],
+    [ 'cd.cdid', 'cd.artist', 'cd.title', 'cd.year', 'artist.artistid', 'artist.name' ],
+    { 'artist.artistid' => 3 },
+    undef
+);
+is_same_sql_bind(
+  $sql, \@bind,
+  'SELECT cd.cdid, cd.artist, cd.title, cd.year, artist.artistid, artist.name FROM cd me, artist artist WHERE ( ( ( artist.artistid = me.artist ) AND ( artist.artistid = ? ) ) )', [3],
+  'WhereJoins search with where clause'
+);
+
+($sql, @bind) = $sa->select(
+    [
+        { me => "cd" },
+        [
+            { "-join_type" => "LEFT", artist => "artist" },
+            { "artist.artistid" => "me.artist" },
+        ],
+    ],
+    [ 'cd.cdid', 'cd.artist', 'cd.title', 'cd.year', 'artist.artistid', 'artist.name' ],
+    [{ 'artist.artistid' => 3 }, { 'me.cdid' => 5 }],
+    undef
+);
+is_same_sql_bind(
+  $sql, \@bind,
+  'SELECT cd.cdid, cd.artist, cd.title, cd.year, artist.artistid, artist.name FROM cd me, artist artist WHERE ( ( ( artist.artistid(+) = me.artist ) AND ( ( ( artist.artistid = ? ) OR ( me.cdid = ? ) ) ) ) )', [3, 5],
+  'WhereJoins search with or in where clause'
+);
+
+done_testing;
+

Added: DBIx-Class/0.08/branches/extended_rels/t/sqlahacks/order_by_func.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/sqlahacks/order_by_func.t	                        (rev 0)
+++ DBIx-Class/0.08/branches/extended_rels/t/sqlahacks/order_by_func.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -0,0 +1,35 @@
+use strict;
+use warnings;
+use Test::More;
+
+use lib qw(t/lib);
+use DBICTest;
+use DBIC::SqlMakerTest;
+
+my $schema = DBICTest->init_schema();
+
+my $rs = $schema->resultset('CD')->search({}, {
+    'join' => 'tracks',
+    order_by => {
+        -desc => {
+            count => 'tracks.track_id',
+        },
+    },
+    distinct => 1,
+    rows => 2,
+    page => 1,
+});
+my $match = q{
+    SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track FROM cd me
+    GROUP BY me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track
+    ORDER BY COUNT(tracks.trackid) DESC
+};
+
+TODO: {
+    todo_skip 'order_by using function', 2;
+    is_same_sql($rs->as_query, $match, 'order by with func query');
+
+    ok($rs->count == 2, 'amount of rows return in order by func query');
+}
+
+done_testing;

Modified: DBIx-Class/0.08/branches/extended_rels/t/sqlahacks/sql_maker/sql_maker_quote.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/sqlahacks/sql_maker/sql_maker_quote.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/sqlahacks/sql_maker/sql_maker_quote.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -48,7 +48,7 @@
             'artist.name' => 'Caterwauler McCrae',
             'me.year' => 2001
           },
-          [],
+          {},
           undef,
           undef
 );
@@ -80,7 +80,7 @@
             'me.year'
           ],
           undef,
-          'year DESC',
+          { order_by => 'year DESC' },
           undef,
           undef
 );
@@ -105,10 +105,10 @@
             'me.year'
           ],
           undef,
-          [
+          { order_by => [
             'year DESC',
             'title ASC'
-          ],
+          ]},
           undef,
           undef
 );
@@ -133,7 +133,7 @@
               'me.year'
             ],
             undef,
-            { -desc => 'year' },
+            { order_by => { -desc => 'year' } },
             undef,
             undef
   );
@@ -158,10 +158,10 @@
               'me.year'
             ],
             undef,
-            [
+            { order_by => [
               { -desc => 'year' },
-              { -asc => 'title' }
-            ],
+              { -asc => 'title' },
+            ]},
             undef,
             undef
   );
@@ -188,7 +188,7 @@
             'me.year'
           ],
           undef,
-          \'year DESC',
+          { order_by => \'year DESC' },
           undef,
           undef
 );
@@ -213,10 +213,10 @@
             'me.year'
           ],
           undef,
-          [
+          { order_by => [
             \'year DESC',
             \'title ASC'
-          ],
+          ]},
           undef,
           undef
 );
@@ -283,9 +283,9 @@
           'me.*'
         ],
         undef,
-        [],
         undef,
-        undef    
+        undef,
+        undef,
   );
 
   is_same_sql_bind(
@@ -328,9 +328,9 @@
             'artist.name' => 'Caterwauler McCrae',
             'me.year' => 2001
           },
-          [],
           undef,
-          undef
+          undef,
+          undef,
 );
 
 is_same_sql_bind(

Modified: DBIx-Class/0.08/branches/extended_rels/t/storage/dbh_do.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/storage/dbh_do.t	2010-06-02 17:39:16 UTC (rev 9556)
+++ DBIx-Class/0.08/branches/extended_rels/t/storage/dbh_do.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -1,9 +1,9 @@
 #!/usr/bin/perl
 
 use strict;
-use warnings;  
+use warnings;
 
-use Test::More tests => 8;
+use Test::More;
 use lib qw(t/lib);
 use DBICTest;
 
@@ -11,23 +11,32 @@
 my $schema = DBICTest->init_schema();
 my $storage = $schema->storage;
 
-my $test_func = sub {
-    is $_[0], $storage;
-    is $_[1], $storage->dbh;
-    is $_[2], "foo";
-    is $_[3], "bar";
-};
+my @args;
+my $test_func = sub { @args = @_ };
 
-$storage->dbh_do(
-    $test_func,
-    "foo", "bar"
+$storage->dbh_do($test_func, "foo", "bar");
+is_deeply (
+  \@args,
+  [ $storage, $storage->dbh, "foo", "bar" ],
 );
 
+
 my $storage_class = ref $storage;
 {
-    no strict 'refs';
-    *{$storage_class .'::__test_method'} = $test_func;
+  no strict 'refs';
+  local *{$storage_class .'::__test_method'} = $test_func;
+  $storage->dbh_do("__test_method", "baz", "buz");
 }
-$storage->dbh_do("__test_method", "foo", "bar");
 
-    
\ No newline at end of file
+is_deeply (
+  \@args,
+  [ $storage, $storage->dbh, "baz", "buz" ],
+);
+
+# test aliasing
+my $res = 'original';
+$storage->dbh_do (sub { $_[2] = 'changed' }, $res);
+
+is ($res, 'changed', "Arguments properly aliased for dbh_do");
+
+done_testing;

Added: DBIx-Class/0.08/branches/extended_rels/t/storage/dbi_env.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/storage/dbi_env.t	                        (rev 0)
+++ DBIx-Class/0.08/branches/extended_rels/t/storage/dbi_env.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -0,0 +1,90 @@
+#!/usr/bin/perl
+use strict;
+use warnings;
+use lib qw(t/lib);
+use DBICTest;
+use Test::More;
+use Test::Exception;
+
+BEGIN { delete @ENV{qw(DBI_DSN DBI_DRIVER)} }
+
+my $schema;
+
+DBICTest->init_schema(sqlite_use_file => 1);
+
+my $dbname = DBICTest->_sqlite_dbname(sqlite_use_file => 1);
+
+sub count_sheep {
+    my $schema = shift;
+    scalar $schema->resultset('Artist')->search( { name => "Exploding Sheep" } )
+        ->all;
+}
+
+$schema = DBICTest::Schema->connect("dbi::$dbname");
+throws_ok { count_sheep($schema) } qr{I can't work out what driver to use},
+    'Driver in DSN empty';
+isa_ok $schema->storage, 'DBIx::Class::Storage::DBI';
+
+$schema = DBICTest::Schema->connect("dbi:Test_NonExistant_DBD:$dbname");
+throws_ok { count_sheep($schema) }
+    qr{Can't locate DBD/Test_NonExistant_DBD\.pm in \@INC},
+    "Driver class doesn't exist";
+isa_ok $schema->storage, 'DBIx::Class::Storage::DBI';
+
+$ENV{DBI_DSN} = "dbi::$dbname";
+$schema = DBICTest::Schema->connect;
+throws_ok { count_sheep($schema) } qr{I can't work out what driver to use},
+    "Driver class not defined in DBI_DSN either.";
+isa_ok $schema->storage, 'DBIx::Class::Storage::DBI';
+
+$ENV{DBI_DSN} = "dbi:Test_NonExistant_DBD2:$dbname";
+$schema = DBICTest::Schema->connect;
+throws_ok { count_sheep($schema) }
+    qr{Can't locate DBD/Test_NonExistant_DBD2\.pm in \@INC},
+    "Driver class defined in DBI_DSN doesn't exist";
+isa_ok $schema->storage, 'DBIx::Class::Storage::DBI';
+
+$ENV{DBI_DSN} = "dbi::$dbname";
+$ENV{DBI_DRIVER} = 'Test_NonExistant_DBD3';
+$schema = DBICTest::Schema->connect;
+throws_ok { count_sheep($schema) }
+    qr{Can't locate DBD/Test_NonExistant_DBD3\.pm in \@INC},
+    "Driver class defined in DBI_DRIVER doesn't exist";
+isa_ok $schema->storage, 'DBIx::Class::Storage::DBI';
+
+$ENV{DBI_DSN} = "dbi:Test_NonExistant_DBD4:$dbname";
+$schema = DBICTest::Schema->connect;
+throws_ok { count_sheep($schema) }
+qr{Can't locate DBD/Test_NonExistant_DBD4\.pm in \@INC},
+    "Driver class defined in DBI_DSN doesn't exist";
+isa_ok $schema->storage, 'DBIx::Class::Storage::DBI';
+
+delete @ENV{qw(DBI_DSN DBI_DRIVER)};
+
+$schema = DBICTest::Schema->connect("dbi:SQLite:$dbname");
+lives_ok { count_sheep($schema) } 'SQLite passed to connect_info';
+isa_ok $schema->storage, 'DBIx::Class::Storage::DBI::SQLite';
+
+$ENV{DBI_DRIVER} = 'SQLite';
+$schema = DBICTest::Schema->connect("dbi::$dbname");
+lives_ok { count_sheep($schema) } 'SQLite in DBI_DRIVER';
+isa_ok $schema->storage, 'DBIx::Class::Storage::DBI::SQLite';
+
+delete $ENV{DBI_DRIVER};
+$ENV{DBI_DSN} = "dbi:SQLite:$dbname";
+$schema = DBICTest::Schema->connect;
+lives_ok { count_sheep($schema) } 'SQLite in DBI_DSN';
+isa_ok $schema->storage, 'DBIx::Class::Storage::DBI::SQLite';
+
+$ENV{DBI_DRIVER} = 'SQLite';
+$schema = DBICTest::Schema->connect;
+lives_ok { count_sheep($schema) } 'SQLite in DBI_DSN (and DBI_DRIVER)';
+isa_ok $schema->storage, 'DBIx::Class::Storage::DBI::SQLite';
+
+$ENV{DBI_DSN} = "dbi::$dbname";
+$ENV{DBI_DRIVER} = 'SQLite';
+$schema = DBICTest::Schema->connect;
+lives_ok { count_sheep($schema) } 'SQLite in DBI_DRIVER (not DBI_DSN)';
+isa_ok $schema->storage, 'DBIx::Class::Storage::DBI::SQLite';
+
+done_testing;

Added: DBIx-Class/0.08/branches/extended_rels/t/storage/deploy.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/storage/deploy.t	                        (rev 0)
+++ DBIx-Class/0.08/branches/extended_rels/t/storage/deploy.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -0,0 +1,38 @@
+use strict;
+use warnings;
+
+use Test::More;
+
+use lib qw(t/lib);
+use DBICTest;
+
+BEGIN {
+  require DBIx::Class;
+  plan skip_all =>
+      'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('deploy')
+    unless DBIx::Class::Optional::Dependencies->req_ok_for ('deploy')
+}
+
+use File::Spec;
+use Path::Class qw/dir/;
+use File::Path qw/make_path remove_tree/;
+my $schema = DBICTest->init_schema();
+
+my $var = dir (qw| t var create_ddl_dir |);
+-d $var
+    or make_path( "$var" )
+    or die "can't create $var: $!";
+
+my $test_dir_1 = $var->subdir ('test1', 'foo', 'bar' );
+remove_tree( "$test_dir_1" ) if -d $test_dir_1;
+$schema->create_ddl_dir( undef, undef, $test_dir_1 );
+
+ok( -d $test_dir_1, 'create_ddl_dir did a make_path on its target dir' );
+ok( scalar( glob $test_dir_1.'/*.sql' ), 'there are sql files in there' );
+
+TODO: {
+    local $TODO = 'we should probably add some tests here for actual deployability of the DDL?';
+    ok( 0 );
+}
+
+done_testing;

Added: DBIx-Class/0.08/branches/extended_rels/t/storage/global_destruction.t
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/storage/global_destruction.t	                        (rev 0)
+++ DBIx-Class/0.08/branches/extended_rels/t/storage/global_destruction.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -0,0 +1,57 @@
+use strict;  # t/storage/global_destruction.t - schema cached cross-package to induce out-of-order destruction; connection must survive a forked child's exit
+use warnings;
+
+use Test::More;
+use Test::Exception;
+
+use lib qw(t/lib);
+use DBICTest;
+
+for my $type (qw/PG MYSQL/) {  # run against each RDBMS for which a DSN is configured in the environment
+
+  SKIP: {
+    skip "Skipping $type tests without DBICTEST_${type}_DSN", 1
+      unless $ENV{"DBICTEST_${type}_DSN"};
+
+    my $schema = DBICTest::Schema->connect (@ENV{map { "DBICTEST_${type}_${_}" } qw/DSN USER PASS/});
+
+    # emulate a singleton-factory, just cache the object *somewhere in a different package*
+    # to induce out-of-order destruction
+    $DBICTest::FakeSchemaFactory::schema = $schema;
+
+    # so we can see the retry exceptions (if any)
+    $ENV{DBIC_DBIRETRY_DEBUG} = 1;
+
+    ok (!$schema->storage->connected, "$type: start disconnected");
+
+    lives_ok (sub {
+      $schema->txn_do (sub {
+
+        ok ($schema->storage->connected, "$type: transaction starts connected");
+
+        my $pid = fork();  # fork mid-transaction: child exits immediately, parent waits for it
+        SKIP: {
+          skip "Fork failed: $!", 1 if (! defined $pid);
+
+          if ($pid) {
+            note "Parent $$ sleeping...";
+            wait();
+            note "Parent $$ woken up after child $pid exit";
+          }
+          else {
+            note "Child $$ terminating";
+            exit 0;
+          }
+
+          ok ($schema->storage->connected, "$type: parent still connected (in txn_do)");  # the child's exit must not sever the parent's connection
+        }
+      });
+    });
+
+    ok ($schema->storage->connected, "$type: parent still connected (outside of txn_do)");
+
+    undef $DBICTest::FakeSchemaFactory::schema;  # drop the cross-package reference before the next loop iteration
+  }
+}
+
+done_testing;

Copied: DBIx-Class/0.08/branches/extended_rels/t/storage/txn.t (from rev 8990, DBIx-Class/0.08/branches/extended_rels/t/81transactions.t)
===================================================================
--- DBIx-Class/0.08/branches/extended_rels/t/storage/txn.t	                        (rev 0)
+++ DBIx-Class/0.08/branches/extended_rels/t/storage/txn.t	2010-06-02 17:41:37 UTC (rev 9557)
@@ -0,0 +1,404 @@
+use strict;  # t/storage/txn.t - txn_do / txn_scope_guard behavior tests (copied from t/81transactions.t per the diff header)
+use warnings;
+
+use Test::More;
+use Test::Warn;
+use Test::Exception;
+use lib qw(t/lib);
+use DBICTest;
+
+my $schema = DBICTest->init_schema();
+
+my $code = sub {  # shared success-path coderef: adds one 2006 CD per title, returns the artist's CDs
+  my ($artist, @cd_titles) = @_;
+
+  $artist->create_related('cds', {
+    title => $_,
+    year => 2006,
+  }) foreach (@cd_titles);
+
+  return $artist->cds->all;  # in scalar context the call site ends up with the CD count (see the assertions below)
+};
+
+# Test checking of parameters
+{
+  throws_ok (sub {
+    (ref $schema)->txn_do(sub{});  # class-method invocation - no storage object available, must croak
+  }, qr/storage/, "can't call txn_do without storage");
+
+  throws_ok ( sub {
+    $schema->txn_do('');  # non-coderef first arg must be rejected
+  }, qr/must be a CODE reference/, '$coderef parameter check ok');
+}
+
+# Test successful txn_do() - scalar context
+{
+  is( $schema->storage->{transaction_depth}, 0, 'txn depth starts at 0');
+
+  my @titles = map {'txn_do test CD ' . $_} (1..5);
+  my $artist = $schema->resultset('Artist')->find(1);
+  my $count_before = $artist->cds->count;
+  my $count_after = $schema->txn_do($code, $artist, @titles);  # scalar context propagates into $code's return
+  is($count_after, $count_before+5, 'successful txn added 5 cds');
+  is($artist->cds({
+    title => "txn_do test CD $_",
+  })->first->year, 2006, "new CD $_ year correct") for (1..5);
+
+  is( $schema->storage->{transaction_depth}, 0, 'txn depth has been reset');
+}
+
+# Test successful txn_do() - list context
+{
+  is( $schema->storage->{transaction_depth}, 0, 'txn depth starts at 0');
+
+  my @titles = map {'txn_do test CD ' . $_} (6..10);
+  my $artist = $schema->resultset('Artist')->find(1);
+  my $count_before = $artist->cds->count;
+  my @cds = $schema->txn_do($code, $artist, @titles);  # list context: all CD row objects come back
+  is(scalar @cds, $count_before+5, 'added 5 CDs and returned in list context');
+  is($artist->cds({
+    title => "txn_do test CD $_",
+  })->first->year, 2006, "new CD $_ year correct") for (6..10);
+
+  is( $schema->storage->{transaction_depth}, 0, 'txn depth has been reset');
+}
+
+# Test txn_do() @_ aliasing support
+{
+  my $res = 'original';
+  $schema->storage->txn_do (sub { $_[0] = 'changed' }, $res);  # writes through $_[0] must reach the caller's variable
+  is ($res, 'changed', "Arguments properly aliased for txn_do");
+}
+
+# Test nested successful txn_do()
+{
+  is( $schema->storage->{transaction_depth}, 0, 'txn depth starts at 0');
+
+  my $nested_code = sub {  # issues two txn_do calls from inside an outer txn_do
+    my ($schema, $artist, $code) = @_;
+
+    my @titles1 = map {'nested txn_do test CD ' . $_} (1..5);
+    my @titles2 = map {'nested txn_do test CD ' . $_} (6..10);
+
+    $schema->txn_do($code, $artist, @titles1);
+    $schema->txn_do($code, $artist, @titles2);
+  };
+
+  my $artist = $schema->resultset('Artist')->find(2);
+  my $count_before = $artist->cds->count;
+
+  lives_ok (sub {
+    $schema->txn_do($nested_code, $schema, $artist, $code);  # outer txn wrapping the two inner ones
+  }, 'nested txn_do succeeded');
+
+  is($artist->cds({
+    title => 'nested txn_do test CD '.$_,
+  })->first->year, 2006, qq{nested txn_do CD$_ year ok}) for (1..10);
+  is($artist->cds->count, $count_before+10, 'nested txn_do added all CDs');
+
+  is( $schema->storage->{transaction_depth}, 0, 'txn depth has been reset');
+}
+
+my $fail_code = sub {  # shared failure-path coderef: inserts one CD, then dies - txn_do must roll the insert back
+  my ($artist) = @_;
+  $artist->create_related('cds', {
+    title => 'this should not exist',
+    year => 2005,
+  });
+  die "the sky is falling";
+};
+
+# Test failed txn_do()
+{
+
+  is( $schema->storage->{transaction_depth}, 0, 'txn depth starts at 0');
+
+  my $artist = $schema->resultset('Artist')->find(3);
+
+  throws_ok (sub {
+    $schema->txn_do($fail_code, $artist);  # the original exception must propagate out of txn_do
+  }, qr/the sky is falling/, 'failed txn_do threw an exception');
+
+  my $cd = $artist->cds({
+    title => 'this should not exist',
+    year => 2005,
+  })->first;
+  ok(!defined($cd), q{failed txn_do didn't change the cds table});  # insert was rolled back
+
+  is( $schema->storage->{transaction_depth}, 0, 'txn depth has been reset');
+}
+
+# do the same transaction again (the storage must remain usable after a rollback)
+{
+  is( $schema->storage->{transaction_depth}, 0, 'txn depth starts at 0');
+
+  my $artist = $schema->resultset('Artist')->find(3);
+
+  throws_ok (sub {
+    $schema->txn_do($fail_code, $artist);
+  }, qr/the sky is falling/, 'failed txn_do threw an exception');
+
+  my $cd = $artist->cds({
+    title => 'this should not exist',
+    year => 2005,
+  })->first;
+  ok(!defined($cd), q{failed txn_do didn't change the cds table});
+
+  is( $schema->storage->{transaction_depth}, 0, 'txn depth has been reset');
+}
+
+# Test failed txn_do() with failed rollback
+{
+  is( $schema->storage->{transaction_depth}, 0, 'txn depth starts at 0');
+
+  my $artist = $schema->resultset('Artist')->find(3);
+
+  # Force txn_rollback() to throw an exception
+  no warnings 'redefine';
+  no strict 'refs';
+
+  # die in rollback
+  local *{"DBIx::Class::Storage::DBI::SQLite::txn_rollback"} = sub{  # localized glob override - restored automatically at block exit
+    my $storage = shift;
+    die 'FAILED';
+  };
+
+  throws_ok (
+    sub {
+      $schema->txn_do($fail_code, $artist);
+    },
+    qr/the sky is falling.+Rollback failed/s,  # both the original and the rollback exception must appear
+    'txn_rollback threw a rollback exception (and included the original exception'  # NOTE(review): unbalanced "(" in this description
+  );
+
+  my $cd = $artist->cds({
+    title => 'this should not exist',
+    year => 2005,
+  })->first;
+  isa_ok($cd, 'DBICTest::CD', q{failed txn_do with a failed txn_rollback }.
+         q{changed the cds table});
+  $cd->delete; # Rollback failed
+  $cd = $artist->cds({
+    title => 'this should not exist',
+    year => 2005,
+  })->first;
+  ok(!defined($cd), q{deleted the failed txn's cd});
+  $schema->storage->_dbh->rollback;  # clean up the transaction left open by the sabotaged rollback
+}
+
+# reset schema object (the txn_rollback meddling screws it up)
+$schema = DBICTest->init_schema();
+
+# Test nested failed txn_do()
+{
+  is( $schema->storage->{transaction_depth}, 0, 'txn depth starts at 0');
+
+  my $nested_fail_code = sub {  # inner success then inner failure - the outer txn must roll back both
+    my ($schema, $artist, $code1, $code2) = @_;
+
+    my @titles = map {'nested txn_do test CD ' . $_} (1..5);
+
+    $schema->txn_do($code1, $artist, @titles); # successful txn
+    $schema->txn_do($code2, $artist);          # failed txn
+  };
+
+  my $artist = $schema->resultset('Artist')->find(3);
+
+  throws_ok ( sub {
+    $schema->txn_do($nested_fail_code, $schema, $artist, $code, $fail_code);
+  }, qr/the sky is falling/, 'nested failed txn_do threw exception');
+
+  ok(!defined($artist->cds({
+    title => 'nested txn_do test CD '.$_,
+    year => 2006,
+  })->first), qq{failed txn_do didn't add first txn's cd $_}) for (1..5);  # even the successful inner txn must be undone
+  my $cd = $artist->cds({
+    title => 'this should not exist',
+    year => 2005,
+  })->first;
+  ok(!defined($cd), q{failed txn_do didn't add failed txn's cd});
+}
+
+# Grab a new schema to test txn before connect
+{
+    my $schema2 = DBICTest->init_schema(no_deploy => 1);
+    lives_ok (sub {
+        $schema2->txn_begin();  # two begins before any connection is ever made
+        $schema2->txn_begin();
+    }, 'Pre-connection nested transactions.');
+
+    # although not connected DBI would still warn about rolling back at disconnect
+    $schema2->txn_rollback;
+    $schema2->txn_rollback;
+    $schema2->storage->disconnect;
+}
+$schema->storage->disconnect;  # leave the main schema disconnected before the guard tests below
+
+# Test txn_scope_guard
+{
+  my $schema = DBICTest->init_schema();
+
+  is($schema->storage->transaction_depth, 0, "Correct transaction depth");
+  my $artist_rs = $schema->resultset('Artist');
+  my $fn = __FILE__;
+  throws_ok {
+   my $guard = $schema->txn_scope_guard;
+
+
+    $artist_rs->create({
+      name => 'Death Cab for Cutie',
+      made_up_column => 1,  # bogus column forces the create to die inside the guard's scope
+    });
+
+   $guard->commit;
+  } qr/No such column made_up_column .*? at .*?$fn line \d+/s, "Error propogated okay";  # NOTE(review): "propogated" - typo for "propagated"
+
+  ok(!$artist_rs->find({name => 'Death Cab for Cutie'}), "Artist not created");
+
+  my $inner_exception = '';  # set in inner() below
+  throws_ok (sub {
+    outer($schema, 1);
+  }, qr/$inner_exception/, "Nested exceptions propogated");  # NOTE(review): qr// is compiled *before* outer() runs, so this is an empty, always-matching pattern
+
+  ok(!$artist_rs->find({name => 'Death Cab for Cutie'}), "Artist not created");
+
+  lives_ok (sub {
+    warnings_exist ( sub {
+      # The 0 arg says don't die, just let the scope guard go out of scope
+      # forcing a txn_rollback to happen
+      outer($schema, 0);
+    }, qr/A DBIx::Class::Storage::TxnScopeGuard went out of scope without explicit commit or error. Rolling back./, 'Out of scope warning detected');
+    ok(!$artist_rs->find({name => 'Death Cab for Cutie'}), "Artist not created");
+  }, 'rollback successful withot exception');  # NOTE(review): "withot" - typo for "without"
+
+  sub outer {
+    my ($schema) = @_;
+
+    my $guard = $schema->txn_scope_guard;  # outer guard, deliberately never committed here
+    $schema->resultset('Artist')->create({
+      name => 'Death Cab for Cutie',
+    });
+    inner(@_);  # forwards the full @_ so inner() also receives the $fatal flag
+  }
+
+  sub inner {
+    my ($schema, $fatal) = @_;
+
+    my $inner_guard = $schema->txn_scope_guard;
+    is($schema->storage->transaction_depth, 2, "Correct transaction depth");
+
+    my $artist = $artist_rs->find({ name => 'Death Cab for Cutie' });  # $artist_rs is closed over from the enclosing block
+
+    eval {
+      $artist->cds->create({
+        title => 'Plans',
+        year => 2005,
+        $fatal ? ( foo => 'bar' ) : ()  # bogus column only when a fatal inner txn is requested
+      });
+    };
+    if ($@) {
+      # Record what got thrown so we can test it propgates out properly.
+      $inner_exception = $@;
+      die $@;
+    }
+
+    # inner guard should commit without consequences
+    $inner_guard->commit;
+  }
+}
+
+# make sure the guard does not eat exceptions
+{
+  my $schema = DBICTest->init_schema();
+  throws_ok (sub {
+    my $guard = $schema->txn_scope_guard;
+    $schema->resultset ('Artist')->create ({ name => 'bohhoo'});
+
+    $schema->storage->disconnect;  # this should freak out the guard rollback
+
+    die 'Deliberate exception';
+  }, qr/Deliberate exception.+Rollback failed/s);  # our exception must survive, with the rollback failure appended
+}
+
+# make sure it warns *big* on failed rollbacks
+{
+  my $schema = DBICTest->init_schema();
+
+  # something is really confusing Test::Warn here, no time to debug (the =begin/=cut below keeps the Test::Warn version parked as POD)
+=begin
+  warnings_exist (
+    sub {
+      my $guard = $schema->txn_scope_guard;
+      $schema->resultset ('Artist')->create ({ name => 'bohhoo'});
+
+      $schema->storage->disconnect;  # this should freak out the guard rollback
+    },
+    [
+      qr/A DBIx::Class::Storage::TxnScopeGuard went out of scope without explicit commit or error. Rolling back./,
+      qr/\*+ ROLLBACK FAILED\!\!\! \*+/,
+    ],
+    'proper warnings generated on out-of-scope+rollback failure'
+  );
+=cut
+
+  my @want = (
+    qr/A DBIx::Class::Storage::TxnScopeGuard went out of scope without explicit commit or error. Rolling back./,
+    qr/\*+ ROLLBACK FAILED\!\!\! \*+/,
+  );
+
+  my @w;
+  local $SIG{__WARN__} = sub {  # hand-rolled warning capture in lieu of warnings_exist
+    if (grep {$_[0] =~ $_} (@want)) {  # collect only the two expected warnings
+      push @w, $_[0];
+    }
+    else {
+      warn $_[0];  # pass anything unexpected through untouched
+    }
+  };
+  {
+      my $guard = $schema->txn_scope_guard;
+      $schema->resultset ('Artist')->create ({ name => 'bohhoo'});
+
+      $schema->storage->disconnect;  # this should freak out the guard rollback
+  }
+
+  is (@w, 2, 'Both expected warnings found');  # @w in scalar context yields its element count
+}
+
+# make sure AutoCommit => 0 on external handles behaves correctly with scope_guard
+{
+  my $factory = DBICTest->init_schema (AutoCommit => 0);
+  cmp_ok ($factory->resultset('CD')->count, '>', 0, 'Something to delete');
+  my $dbh = $factory->storage->dbh;
+
+  ok (!$dbh->{AutoCommit}, 'AutoCommit is off on $dbh');
+  my $schema = DBICTest::Schema->connect (sub { $dbh });  # coderef connect_info: reuse the external handle as-is
+
+
+  lives_ok ( sub {
+    my $guard = $schema->txn_scope_guard;
+    $schema->resultset('CD')->delete;
+    $guard->commit;
+  }, 'No attempt to start a transaction with scope guard');
+
+  is ($schema->resultset('CD')->count, 0, 'Deletion successful');
+}
+
+# make sure AutoCommit => 0 on external handles behaves correctly with txn_do
+{
+  my $factory = DBICTest->init_schema (AutoCommit => 0);
+  cmp_ok ($factory->resultset('CD')->count, '>', 0, 'Something to delete');
+  my $dbh = $factory->storage->dbh;
+
+  ok (!$dbh->{AutoCommit}, 'AutoCommit is off on $dbh');
+  my $schema = DBICTest::Schema->connect (sub { $dbh });  # coderef connect_info: reuse the external handle as-is
+
+
+  lives_ok ( sub {
+    $schema->txn_do (sub { $schema->resultset ('CD')->delete });
+  }, 'No attempt to start a atransaction with txn_do');  # NOTE(review): "a atransaction" - typo for "a transaction"
+
+  is ($schema->resultset('CD')->count, 0, 'Deletion successful');
+}
+
+done_testing;


Property changes on: DBIx-Class/0.08/branches/extended_rels/t/var
___________________________________________________________________
Added: svn:ignore
   + *





More information about the Bast-commits mailing list