[Bast-commits] r9468 - in DBIx-Class/0.08/branches/pg_cursors: . lib/DBIx lib/DBIx/Class lib/DBIx/Class/CDBICompat lib/DBIx/Class/InflateColumn lib/DBIx/Class/Manual lib/DBIx/Class/Optional lib/DBIx/Class/Relationship lib/DBIx/Class/SQLAHacks lib/DBIx/Class/Schema lib/DBIx/Class/Storage lib/DBIx/Class/Storage/DBI lib/DBIx/Class/Storage/DBI/ODBC lib/DBIx/Class/Storage/DBI/Oracle lib/DBIx/Class/Storage/DBI/Replicated lib/DBIx/Class/Storage/DBI/Sybase lib/SQL/Translator/Parser/DBIx t t/bind t/cdbi t/count t/inflate t/lib/DBIC t/lib/DBICNSTest/Result t/lib/DBICNSTest/ResultSet t/lib/DBICTest t/lib/DBICTest/Schema t/prefetch t/resultset t/row t/search t/sqlahacks t/sqlahacks/limit_dialects t/sqlahacks/sql_maker t/storage

ribasushi at dev.catalyst.perl.org
Mon May 31 07:55:05 GMT 2010


Author: ribasushi
Date: 2010-05-31 08:55:05 +0100 (Mon, 31 May 2010)
New Revision: 9468

Added:
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/FilterColumn.pm
   DBIx-Class/0.08/branches/pg_cursors/t/lib/DBICNSTest/Result/D.pm
   DBIx-Class/0.08/branches/pg_cursors/t/lib/DBICNSTest/ResultSet/D.pm
   DBIx-Class/0.08/branches/pg_cursors/t/row/filter_column.t
   DBIx-Class/0.08/branches/pg_cursors/t/sqlahacks/limit_dialects/generic_subq.t
   DBIx-Class/0.08/branches/pg_cursors/t/sqlahacks/limit_dialects/rno.t
   DBIx-Class/0.08/branches/pg_cursors/t/sqlahacks/limit_dialects/rownum.t
   DBIx-Class/0.08/branches/pg_cursors/t/sqlahacks/oraclejoin.t
   DBIx-Class/0.08/branches/pg_cursors/t/sqlahacks/order_by_func.t
   DBIx-Class/0.08/branches/pg_cursors/t/storage/deploy.t
   DBIx-Class/0.08/branches/pg_cursors/t/storage/global_destruction.t
Removed:
   DBIx-Class/0.08/branches/pg_cursors/t/41orrible.t
Modified:
   DBIx-Class/0.08/branches/pg_cursors/
   DBIx-Class/0.08/branches/pg_cursors/Changes
   DBIx-Class/0.08/branches/pg_cursors/Makefile.PL
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class.pm
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Admin.pm
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/CDBICompat/ColumnCase.pm
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Componentised.pm
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Exception.pm
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/InflateColumn.pm
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/InflateColumn/DateTime.pm
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Manual/Cookbook.pod
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Optional/Dependencies.pm
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Relationship/Base.pm
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Relationship/BelongsTo.pm
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Relationship/HasMany.pm
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Relationship/HasOne.pm
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/ResultSet.pm
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/ResultSource.pm
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Row.pm
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/SQLAHacks.pm
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/SQLAHacks/OracleJoins.pm
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/SQLAHacks/SQLite.pm
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Schema.pm
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Schema/Versioned.pm
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage.pm
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI.pm
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/ADO.pm
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/Cursor.pm
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/InterBase.pm
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/MSSQL.pm
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/ODBC.pm
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/ODBC/Microsoft_SQL_Server.pm
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/Oracle.pm
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/Oracle/Generic.pm
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/Replicated.pm
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/Replicated/Pool.pm
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/Replicated/WithDSN.pm
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/SQLAnywhere.pm
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/Sybase.pm
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/Sybase/ASE.pm
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBIHacks.pm
   DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/TxnScopeGuard.pm
   DBIx-Class/0.08/branches/pg_cursors/lib/SQL/Translator/Parser/DBIx/Class.pm
   DBIx-Class/0.08/branches/pg_cursors/t/03podcoverage.t
   DBIx-Class/0.08/branches/pg_cursors/t/39load_namespaces_1.t
   DBIx-Class/0.08/branches/pg_cursors/t/60core.t
   DBIx-Class/0.08/branches/pg_cursors/t/72pg.t
   DBIx-Class/0.08/branches/pg_cursors/t/73oracle.t
   DBIx-Class/0.08/branches/pg_cursors/t/746mssql.t
   DBIx-Class/0.08/branches/pg_cursors/t/74mssql.t
   DBIx-Class/0.08/branches/pg_cursors/t/85utf8.t
   DBIx-Class/0.08/branches/pg_cursors/t/90join_torture.t
   DBIx-Class/0.08/branches/pg_cursors/t/93single_accessor_object.t
   DBIx-Class/0.08/branches/pg_cursors/t/94versioning.t
   DBIx-Class/0.08/branches/pg_cursors/t/bind/order_by.t
   DBIx-Class/0.08/branches/pg_cursors/t/cdbi/columns_as_hashes.t
   DBIx-Class/0.08/branches/pg_cursors/t/count/count_rs.t
   DBIx-Class/0.08/branches/pg_cursors/t/count/prefetch.t
   DBIx-Class/0.08/branches/pg_cursors/t/inflate/datetime_mssql.t
   DBIx-Class/0.08/branches/pg_cursors/t/inflate/datetime_sybase_asa.t
   DBIx-Class/0.08/branches/pg_cursors/t/inflate/hri.t
   DBIx-Class/0.08/branches/pg_cursors/t/lib/DBIC/DebugObj.pm
   DBIx-Class/0.08/branches/pg_cursors/t/lib/DBICTest/AuthorCheck.pm
   DBIx-Class/0.08/branches/pg_cursors/t/lib/DBICTest/Schema/BooksInLibrary.pm
   DBIx-Class/0.08/branches/pg_cursors/t/prefetch/grouped.t
   DBIx-Class/0.08/branches/pg_cursors/t/resultset/as_subselect_rs.t
   DBIx-Class/0.08/branches/pg_cursors/t/resultset/update_delete.t
   DBIx-Class/0.08/branches/pg_cursors/t/search/subquery.t
   DBIx-Class/0.08/branches/pg_cursors/t/sqlahacks/limit_dialects/toplimit.t
   DBIx-Class/0.08/branches/pg_cursors/t/sqlahacks/sql_maker/sql_maker_quote.t
   DBIx-Class/0.08/branches/pg_cursors/t/storage/dbi_env.t
Log:
 r9200 at Thesaurus (orig r9187):  ribasushi | 2010-04-18 23:06:29 +0200
 Fix leftover tabs
 r9201 at Thesaurus (orig r9188):  castaway | 2010-04-20 08:06:26 +0200
 Warn if a class found in ResultSet/ is not a subclass of ::ResultSet
 
 r9203 at Thesaurus (orig r9190):  rbuels | 2010-04-20 21:12:22 +0200
 create_ddl_dir now mkpaths its dir if necessary. Also added storage/deploy.t as a place to put deployment tests
 r9204 at Thesaurus (orig r9191):  rbuels | 2010-04-20 21:20:06 +0200
 do not croak, rbuels!  jeez.
 r9205 at Thesaurus (orig r9192):  castaway | 2010-04-21 08:03:08 +0200
 Added missing test file (oops)
 
 r9213 at Thesaurus (orig r9200):  rabbit | 2010-04-24 02:23:05 +0200
 10% speed up on quoted statement generation
 r9215 at Thesaurus (orig r9202):  rabbit | 2010-04-24 02:27:47 +0200
 Revert bogus commit
 r9216 at Thesaurus (orig r9203):  ribasushi | 2010-04-24 02:31:06 +0200
 _quote is now properly handled in SQLA
 r9217 at Thesaurus (orig r9204):  caelum | 2010-04-24 02:32:58 +0200
 add "IMPROVING PERFORMANCE" section to Cookbook
 r9231 at Thesaurus (orig r9218):  ribasushi | 2010-04-26 13:13:13 +0200
 Bump CAG and SQLA dependencies
 r9232 at Thesaurus (orig r9219):  ribasushi | 2010-04-26 15:27:38 +0200
 Bizarre fork failure
 r9233 at Thesaurus (orig r9220):  castaway | 2010-04-26 21:45:32 +0200
 Add tests using select/as to sqlahacks
 
 r9234 at Thesaurus (orig r9221):  castaway | 2010-04-26 21:49:10 +0200
 Add test for fetching related obj/col as well
 
 r9245 at Thesaurus (orig r9232):  abraxxa | 2010-04-27 15:58:56 +0200
 fixed missing ' in update_or_create with key attr example
 
 r9247 at Thesaurus (orig r9234):  ribasushi | 2010-04-27 16:53:06 +0200
 Better concurrency in test (parent blocks)
 r9248 at Thesaurus (orig r9235):  ribasushi | 2010-04-27 16:53:34 +0200
 Reformat tests/comments a bit
 r9249 at Thesaurus (orig r9236):  ribasushi | 2010-04-27 18:40:10 +0200
 Better comment
 r9250 at Thesaurus (orig r9237):  ribasushi | 2010-04-27 18:40:31 +0200
 Rename test
 r9251 at Thesaurus (orig r9238):  ribasushi | 2010-04-27 19:11:45 +0200
 Fix global destruction problems
 r9271 at Thesaurus (orig r9258):  ribasushi | 2010-04-28 11:10:00 +0200
 Refactor SQLA/select interaction (in reality just cleanup)
 r9272 at Thesaurus (orig r9259):  caelum | 2010-04-28 11:20:08 +0200
 update ::DBI::Replicated
 r9273 at Thesaurus (orig r9260):  caelum | 2010-04-28 12:20:01 +0200
 add _verify_pid and _verify_tid to methods that croak in ::Replicated
 r9274 at Thesaurus (orig r9261):  ribasushi | 2010-04-28 14:39:02 +0200
 Fix failing test and some warnings
 r9288 at Thesaurus (orig r9275):  rabbit | 2010-04-29 10:32:10 +0200
 Allow limit syntax change in-flight without digging into internals
 r9292 at Thesaurus (orig r9279):  castaway | 2010-04-30 12:26:52 +0200
 Argh.. committing missing test file for load_namespaces tests
 
 r9295 at Thesaurus (orig r9282):  rabbit | 2010-05-01 11:06:21 +0200
 The final version of the test
 r9309 at Thesaurus (orig r9296):  rabbit | 2010-05-04 09:44:51 +0200
 Test for RT#56257
 r9310 at Thesaurus (orig r9297):  rabbit | 2010-05-04 10:00:11 +0200
 Refactor count handling, make count-resultset attribute lists inclusive rather than exclusive (side effect - solves RT#56257)
 r9318 at Thesaurus (orig r9305):  rabbit | 2010-05-05 11:49:51 +0200
  r9296 at Thesaurus (orig r9283):  ribasushi | 2010-05-01 11:51:15 +0200
  Branch to clean up various limit dialects
  r9297 at Thesaurus (orig r9284):  rabbit | 2010-05-01 11:55:04 +0200
  Preliminary version
  r9301 at Thesaurus (orig r9288):  rabbit | 2010-05-03 18:31:24 +0200
  Fix incorrect comparison
  r9302 at Thesaurus (orig r9289):  rabbit | 2010-05-03 18:32:36 +0200
  Do not add TOP prefixes to queries already containing it
  r9303 at Thesaurus (orig r9290):  rabbit | 2010-05-03 18:33:15 +0200
  Add an as selector to a prefetch subquery to aid the subselecting-limit analyzer
  r9304 at Thesaurus (orig r9291):  rabbit | 2010-05-03 18:34:49 +0200
  Rewrite mssql test to verify both types of limit dialects with and without quoting; rewrite the RNO, Top and RowNum dialects to rely on a factored-out column re-aliaser
  r9305 at Thesaurus (orig r9292):  rabbit | 2010-05-03 21:06:01 +0200
  Fix Top tests, make extra col selector order consistent
  r9307 at Thesaurus (orig r9294):  ribasushi | 2010-05-04 00:50:35 +0200
  Fix test warning
  r9308 at Thesaurus (orig r9295):  ribasushi | 2010-05-04 01:04:32 +0200
  Some databases (db2) do not like leading __s - use a different weird identifier for extra selector names
  r9313 at Thesaurus (orig r9300):  rabbit | 2010-05-05 11:08:33 +0200
  Rename test
  r9314 at Thesaurus (orig r9301):  rabbit | 2010-05-05 11:11:32 +0200
  If there was no offset, there is no sense in reordering
  r9315 at Thesaurus (orig r9302):  rabbit | 2010-05-05 11:12:19 +0200
  Split and fix oracle tests
  r9317 at Thesaurus (orig r9304):  rabbit | 2010-05-05 11:49:33 +0200
  Changes
 
 r9321 at Thesaurus (orig r9308):  rabbit | 2010-05-05 13:01:35 +0200
 Changes
 r9322 at Thesaurus (orig r9309):  rabbit | 2010-05-05 13:02:39 +0200
 Fix obscure bug with as_subselect_rs (gah wrong commit msg)
 r9323 at Thesaurus (orig r9310):  rabbit | 2010-05-05 14:56:38 +0200
 Forgotten pieces
 r9329 at Thesaurus (orig r9316):  rabbit | 2010-05-07 10:15:52 +0200
 Failure to determine dbms version is *not* a fatal error - trap exceptions
 r9330 at Thesaurus (orig r9317):  caelum | 2010-05-07 11:57:24 +0200
 detect row_number() over support in MSSQL if version detection fails
 r9331 at Thesaurus (orig r9318):  caelum | 2010-05-07 14:56:57 +0200
 minor change
 r9332 at Thesaurus (orig r9319):  nigel | 2010-05-07 15:03:00 +0200
 empty update OK even if row is not in database
 r9333 at Thesaurus (orig r9320):  nigel | 2010-05-07 15:28:06 +0200
 Added reference to cascade_* in relationship attributes
 r9334 at Thesaurus (orig r9321):  nigel | 2010-05-07 15:39:37 +0200
 empty update OK even if row is not in database (fixed)
 r9335 at Thesaurus (orig r9322):  nigel | 2010-05-07 15:48:19 +0200
 empty update OK even if row is not in database (fixed2)
 r9336 at Thesaurus (orig r9323):  nigel | 2010-05-07 15:54:36 +0200
 Clarification to cascade_update attribute documentation
 r9337 at Thesaurus (orig r9324):  nigel | 2010-05-07 16:08:17 +0200
 Clarification cascade_* attribute defaults documentation
 r9350 at Thesaurus (orig r9337):  rabbit | 2010-05-08 11:23:56 +0200
 Make sure missing author-deps do not kill makefile creation
 r9358 at Thesaurus (orig r9344):  rabbit | 2010-05-11 16:46:47 +0200
  r9147 at Thesaurus (orig r9134):  frew | 2010-04-13 16:54:24 +0200
  branch for FilterColumn
  r9148 at Thesaurus (orig r9135):  frew | 2010-04-13 18:09:57 +0200
  change names wrap accessors
  r9158 at Thesaurus (orig r9145):  frew | 2010-04-14 17:55:14 +0200
  basic tests and a tiny fix
  r9159 at Thesaurus (orig r9146):  frew | 2010-04-14 19:30:46 +0200
  working filter column impl
  r9160 at Thesaurus (orig r9147):  frew | 2010-04-14 19:31:18 +0200
  useless var
  r9161 at Thesaurus (orig r9148):  frew | 2010-04-14 20:10:57 +0200
  MultiCreate test
  r9163 at Thesaurus (orig r9150):  frew | 2010-04-14 20:22:10 +0200
  test db in MC
  r9178 at Thesaurus (orig r9165):  rabbit | 2010-04-14 23:35:00 +0200
  Not sure how this was never noticed, but it definitely doesn't seem right and all tests pass...
  r9191 at Thesaurus (orig r9178):  frew | 2010-04-15 06:34:16 +0200
  better naming
  r9193 at Thesaurus (orig r9180):  frew | 2010-04-15 16:14:28 +0200
  method and arg rename
  r9194 at Thesaurus (orig r9181):  frew | 2010-04-15 16:35:25 +0200
  use result source for filtering instead of result
  r9195 at Thesaurus (orig r9182):  frew | 2010-04-15 17:04:38 +0200
  initial stab at incomplete docs
  r9278 at Thesaurus (orig r9265):  frew | 2010-04-28 22:05:36 +0200
  doc, removal of source stuff, and Changes
  r9324 at Thesaurus (orig r9311):  frew | 2010-05-06 01:49:25 +0200
  test caching
  r9327 at Thesaurus (orig r9314):  rabbit | 2010-05-06 16:30:36 +0200
  Play nicer with lower-level methods
  r9328 at Thesaurus (orig r9315):  frew | 2010-05-07 04:27:18 +0200
  disallow having both filter and inflate on the same column
  r9352 at Thesaurus (orig r9339):  rabbit | 2010-05-10 13:40:00 +0200
  Maintain full coherence between filtered cache and unfiltered results, including store_column
  r9353 at Thesaurus (orig r9340):  rabbit | 2010-05-10 13:40:48 +0200
  Fix typo
  r9357 at Thesaurus (orig r9343):  rabbit | 2010-05-11 16:45:50 +0200
  Comment weird looking code
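
 For reference, the usage pattern this FilterColumn branch introduces (per the
 new FilterColumn.pm synopsis further down in this commit) looks roughly like
 the sketch below - the class, table and column names are made up:

    package MyApp::Schema::Result::Account;   # hypothetical result class
    use strict;
    use warnings;
    use base 'DBIx::Class';

    __PACKAGE__->load_components(qw/FilterColumn Core/);
    __PACKAGE__->table('account');
    __PACKAGE__->add_columns(qw/id balance/);
    __PACKAGE__->set_primary_key('id');

    # keep dollar amounts in the object, integer pennies in the database
    __PACKAGE__->filter_column( balance => {
        filter_to_storage   => 'to_pennies',
        filter_from_storage => 'from_pennies',
    });

    sub to_pennies   { $_[1] * 100 }
    sub from_pennies { $_[1] / 100 }

    1;

 With this in place $row->balance and get/set_filtered_column deal in dollars,
 while get_column('balance') and the storage layer continue to see pennies.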
 
 r9360 at Thesaurus (orig r9346):  caelum | 2010-05-11 17:44:15 +0200
 clearer logic
 r9364 at Thesaurus (orig r9350):  wreis | 2010-05-12 03:44:39 +0200
 add failing test for order_by using a function
 r9378 at Thesaurus (orig r9364):  rabbit | 2010-05-14 11:57:45 +0200
 cleanup test by wreis
 r9396 at Thesaurus (orig r9382):  rabbit | 2010-05-15 17:50:58 +0200
 Fix stupid typo-bug
 r9397 at Thesaurus (orig r9383):  rabbit | 2010-05-15 18:04:59 +0200
 Revert erroneous commit (belongs in a branch)
 r9402 at Thesaurus (orig r9388):  ash | 2010-05-16 12:28:13 +0200
 Fix how Schema::Versioned gets connection attributes
 r9408 at Thesaurus (orig r9394):  caelum | 2010-05-16 19:29:14 +0200
 add sql_maker to @rdbms_specific_methods
 r9420 at Thesaurus (orig r9406):  caelum | 2010-05-20 16:28:18 +0200
 support INSERT OR UPDATE triggers for Oracle
 r9421 at Thesaurus (orig r9407):  matthewt | 2010-05-20 19:19:14 +0200
 don't try and ensure_class_loaded an object. this doesn't work.
 r9422 at Thesaurus (orig r9408):  matthewt | 2010-05-20 19:36:01 +0200
 fix result_class setter behaviour to not stuff attrs (line commented out to prevent this regression being mistakenly re-introduced)
 r9423 at Thesaurus (orig r9409):  matthewt | 2010-05-20 19:49:32 +0200
 forgot to commit fixes
 r9424 at Thesaurus (orig r9410):  matthewt | 2010-05-20 20:09:52 +0200
 fix find() since that was also broken in r8754
 r9435 at Thesaurus (orig r9421):  rabbit | 2010-05-25 11:14:29 +0200
 Fix undef warning
 r9436 at Thesaurus (orig r9422):  rabbit | 2010-05-25 11:15:01 +0200
 Rewrite test so as not to propagate several ways of doing the same thing
 r9452 at Thesaurus (orig r9438):  caelum | 2010-05-25 21:33:37 +0200
  r24317 at hlagh (orig r9367):  tonvoon | 2010-05-14 12:24:35 -0400
  Branch for converting eval {} to Try::Tiny
  
  r24319 at hlagh (orig r9369):  tonvoon | 2010-05-14 17:25:02 -0400
  Conversion of eval => try (part 1)
  
  r24325 at hlagh (orig r9375):  tonvoon | 2010-05-14 18:03:03 -0400
  Add eval => try
  
  r24326 at hlagh (orig r9376):  tonvoon | 2010-05-14 18:22:57 -0400
  Another eval => try
  
  r24327 at hlagh (orig r9377):  tonvoon | 2010-05-14 18:45:27 -0400
  Corrected usage of $@ in catch block
  
  r24328 at hlagh (orig r9378):  tonvoon | 2010-05-14 19:29:52 -0400
  txn_do's eval => try
  
  r24329 at hlagh (orig r9379):  tonvoon | 2010-05-14 19:46:44 -0400
  eval => try where tests for $@ done
  
  r24330 at hlagh (orig r9380):  tonvoon | 2010-05-14 20:38:43 -0400
  All expected evals converted to try, except where no check of $@ is
  done, where there is runtime evaluation, or where base perl requires
  eval (such as "require"). Only one test failure, due to a string
  difference in output
  
  r24346 at hlagh (orig r9396):  tonvoon | 2010-05-17 08:52:28 -0400
  Fix missing $@ in Try::Tiny conversion
  
  r24347 at hlagh (orig r9397):  tonvoon | 2010-05-17 08:55:13 -0400
  Revert to eval instead of Try::Tiny because there is no check for $@
  
  r24348 at hlagh (orig r9398):  tonvoon | 2010-05-17 08:55:45 -0400
  Added myself to contributors
  
  r24349 at hlagh (orig r9399):  tonvoon | 2010-05-17 10:23:57 -0400
  Fixed exception logic due to not being able to use return with a catch{}
  
  r24350 at hlagh (orig r9400):  tonvoon | 2010-05-17 10:31:32 -0400
  Removed tab
  
  r24430 at hlagh (orig r9424):  ribasushi | 2010-05-25 10:09:39 -0400
  More try::tiny conversions
  r24432 at hlagh (orig r9426):  ribasushi | 2010-05-25 11:40:45 -0400
  Try::Tiny conversion finished
  r24433 at hlagh (orig r9427):  ribasushi | 2010-05-25 11:46:52 -0400
  Missed use
  r24440 at hlagh (orig r9434):  rkitover | 2010-05-25 13:47:25 -0400
  fix Oracle
  r24441 at hlagh (orig r9435):  rkitover | 2010-05-25 14:04:10 -0400
  fix odbc/mssql dynamic cursors
  r24442 at hlagh (orig r9436):  rkitover | 2010-05-25 14:32:41 -0400
  fix hang in SQLAnywhere DateTime tests
 
 r9454 at Thesaurus (orig r9440):  rabbit | 2010-05-26 11:28:37 +0200
 Simplify Oracle retry logic
 r9455 at Thesaurus (orig r9441):  rabbit | 2010-05-26 12:00:20 +0200
 Can not return from within a try block
 r9456 at Thesaurus (orig r9442):  rabbit | 2010-05-26 12:17:55 +0200
 Really fix logic
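
 The eval => Try::Tiny conversion referenced throughout this merge boils down
 to the pattern sketched below (the might_die helper is made up); it also shows
 why 'return' cannot be used inside try/catch - they are plain anonymous subs:

    use strict;
    use warnings;
    use Try::Tiny;

    sub might_die { die "boom\n" if $_[0] }   # hypothetical helper

    # before: plain eval, error inspected via $@
    my $before = eval { might_die(1); 'ok' };
    warn "caught: $@" if $@;

    # after: Try::Tiny - the error shows up as $_ (and $_[0]) inside catch
    my $after = try   { might_die(1); 'ok' }
                catch { warn "caught: $_"; undef };

    # 'return' inside try {} or catch {} only returns from that anonymous
    # sub, not from the enclosing subroutine (hence the "Can not return
    # from within a try block" fixes above) - fall off the end of the
    # block or assign to an outer variable instead
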
 r9464 at Thesaurus (orig r9450):  jester | 2010-05-27 16:06:43 +0200
 Light doc tweaks
 
 r9475 at Thesaurus (orig r9461):  ribasushi | 2010-05-31 00:17:29 +0200
 Rewrite GenericSubQ from SQLA::L to be actually useful
 Since it now works, it is no longer necessary to turn on softlimit when genericsubq is detected
 Switch all sprintf()ed limit/offset specs to unsigned integers
 Lower the default rows-without-offset to 2^32
 r9476 at Thesaurus (orig r9462):  rabbit | 2010-05-31 00:25:01 +0200
 New format of changelog (easier to read)
 r9477 at Thesaurus (orig r9463):  rabbit | 2010-05-31 00:27:18 +0200
 Fix MC double-object creation (important for e.g. IC::FS which otherwise leaves orphaned files)
 r9479 at Thesaurus (orig r9465):  rabbit | 2010-05-31 00:37:23 +0200
 Fix tests to survive the new SQLA bindtype checks



Property changes on: DBIx-Class/0.08/branches/pg_cursors
___________________________________________________________________
Modified: svk:merge
   - 168d5346-440b-0410-b799-f706be625ff1:/DBIx-Class-current:2207
462d4d0c-b505-0410-bf8e-ce8f877b3390:/local/bast/DBIx-Class:3159
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/cookbook_fixes:7657
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/prefetch_bug-unqualified_column_in_search_related_cond:7959
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/resultsetcolumn_custom_columns:5160
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/sqla_1.50_compat:5414
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/void_populate_resultset_cond:7935
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/trunk:7982
9c88509d-e914-0410-b01c-b9530614cbfe:/local/DBIx-Class:32260
9c88509d-e914-0410-b01c-b9530614cbfe:/local/DBIx-Class-CDBICompat:54993
9c88509d-e914-0410-b01c-b9530614cbfe:/vendor/DBIx-Class:31122
ab17426e-7cd3-4704-a2a2-80b7c0a611bb:/local/dbic_column_attr:10946
ab17426e-7cd3-4704-a2a2-80b7c0a611bb:/local/dbic_trunk:11788
bd5ac9a7-f185-4d95-9186-dbb8b392a572:/local/os/bast/DBIx-Class/0.08/trunk:2798
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/_abandoned_but_possibly_useful/table_name_ref:7266
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/ado_mssql:7886
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/autocast:7418
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/belongs_to_null_col_fix:5244
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/cdbicompat_integration:4160
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/chaining_fixes:8626
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/column_attr:5074
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/complex_join_rels:4589
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/connect_info_hash:7435
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/connected_schema_leak:8264
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/cookbook_fixes:7479
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/count_distinct:6218
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/count_rs:6741
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/create_scalarref_rt51559:8027
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/dbicadmin_pod:8786
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/dbicadmin_refactor:8691
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/dephandling:8674
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/diamond_relationships:6310
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/discard_changes_replication_fix:7252
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/file_column:3920
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/fix-update-and-delete-as_query:6162
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/get_inflated_columns_rt46953:7964
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/grouped_has_many_join:7382
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/grouped_prefetch:6885
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/handle_all_storage_methods_in_replicated:8612
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/ic_dt_post_inflate:8517
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/informix:8434
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/is_resultset_paginated:7769
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/joined_count:6323
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mc_fixes:6645
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_limit_regression:8278
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_money_type:7096
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_rno_pagination:8054
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_storage_minor_refactor:7210
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_top_fixes:6971
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/multi_stuff:5565
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/multicreate_fixes:7275
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/multiple_version_upgrade:8429
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mysql_ansi:7175
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mystery_join:6589
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/new_replication_transaction_fixup:7058
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/no_duplicate_indexes_for_pk_cols:8373
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/normalize_connect_info:8274
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/null_column_regression:8314
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/on_connect_call:6854
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/on_disconnect_do:3694
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle-tweaks:6222
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle_quotes:8812
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle_sequence:4173
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle_shorten_aliases:8234
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/order_by_refactor:6475
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/parser_fk_index:4485
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/pg_unqualified_schema:7842
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/pod_fixes:8902
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch:5699
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch-group_by:7917
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_bug-unqualified_column_in_search_related_cond:7900
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_limit:6724
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_pager:8431
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_redux:7206
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/pri_key_refactor:8751
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/reduce_pings:7261
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/replication_dedux:4600
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/rsrc_in_storage:6577
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/rt_bug_41083:5437
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/savepoints:4223
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/search_related_prefetch:6818
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sqla_1.50_compat:5321
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sqlt_parser_view:8145
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/storage-interbase:8929
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/storage-ms-access:4142
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/storage-tweaks:6262
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/subclassed_rsset:5930
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/subquery:5617
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/syb_connected:6919
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase:7682
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_asa:8513
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_bulk_insert:7679
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_bulkinsert_support:7796
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_computed_columns:8496
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_mssql:6125
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_refactor:7940
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_support:7797
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/table_name_ref:7132
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/top_limit_altfix:6429
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/type_aware_update:6619
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/unresolvable_prefetch:6949
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/versioned_enhancements:4125
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/versioning:4578
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/view_rels:7908
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/views:5585
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/void_populate_resultset_cond:7944
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/tags/0.08108_prerelease_please_do_not_pull_into_it:7008
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/tags/pre_0.08109_please_do_not_merge:7336
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-C3:318
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-current:2222
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-joins:173
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-resultset:570
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/datetime:1716
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/find_compat:1855
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/find_unique_query_fixes:2142
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/inflate:1988
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/many_to_many:2025
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/re_refactor_bugfix:1944
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/reorganize_tests:1827
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset-new-refactor:1766
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset_2_electric_boogaloo:2175
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset_cleanup:2102
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/sqlt_tests_refactor:2043
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/trunk/DBIx-Class:3606
fe160bb6-dc1c-0410-9f2b-d64a711b54a5:/local/DBIC-trunk-0.08:10510
   + 168d5346-440b-0410-b799-f706be625ff1:/DBIx-Class-current:2207
462d4d0c-b505-0410-bf8e-ce8f877b3390:/local/bast/DBIx-Class:3159
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/cookbook_fixes:7657
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/prefetch_bug-unqualified_column_in_search_related_cond:7959
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/resultsetcolumn_custom_columns:5160
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/sqla_1.50_compat:5414
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/void_populate_resultset_cond:7935
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/trunk:7982
9c88509d-e914-0410-b01c-b9530614cbfe:/local/DBIx-Class:32260
9c88509d-e914-0410-b01c-b9530614cbfe:/local/DBIx-Class-CDBICompat:54993
9c88509d-e914-0410-b01c-b9530614cbfe:/vendor/DBIx-Class:31122
ab17426e-7cd3-4704-a2a2-80b7c0a611bb:/local/dbic_column_attr:10946
ab17426e-7cd3-4704-a2a2-80b7c0a611bb:/local/dbic_trunk:11788
bd5ac9a7-f185-4d95-9186-dbb8b392a572:/local/os/bast/DBIx-Class/0.08/trunk:2798
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/_abandoned_but_possibly_useful/table_name_ref:7266
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/ado_mssql:7886
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/autocast:7418
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/belongs_to_null_col_fix:5244
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/cdbicompat_integration:4160
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/chaining_fixes:8626
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/column_attr:5074
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/complex_join_rels:4589
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/connect_info_hash:7435
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/connected_schema_leak:8264
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/cookbook_fixes:7479
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/count_distinct:6218
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/count_rs:6741
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/create_scalarref_rt51559:8027
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/dbicadmin_pod:8786
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/dbicadmin_refactor:8691
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/dephandling:8674
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/diamond_relationships:6310
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/discard_changes_replication_fix:7252
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/file_column:3920
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/filter_column:9343
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/fix-update-and-delete-as_query:6162
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/get_inflated_columns_rt46953:7964
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/grouped_has_many_join:7382
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/grouped_prefetch:6885
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/handle_all_storage_methods_in_replicated:8612
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/ic_dt_post_inflate:8517
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/informix:8434
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/is_resultset_paginated:7769
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/joined_count:6323
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mc_fixes:6645
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_limit_regression:8278
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_money_type:7096
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_rno_pagination:8054
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_storage_minor_refactor:7210
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_top_fixes:6971
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/multi_stuff:5565
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/multicreate_fixes:7275
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/multiple_version_upgrade:8429
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mysql_ansi:7175
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mystery_join:6589
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/new_replication_transaction_fixup:7058
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/no_duplicate_indexes_for_pk_cols:8373
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/normalize_connect_info:8274
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/null_column_regression:8314
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/on_connect_call:6854
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/on_disconnect_do:3694
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle-tweaks:6222
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle_quotes:8812
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle_sequence:4173
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle_shorten_aliases:8234
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/order_by_refactor:6475
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/parser_fk_index:4485
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/pg_unqualified_schema:7842
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/pod_fixes:8902
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch:5699
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch-group_by:7917
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_bug-unqualified_column_in_search_related_cond:7900
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_limit:6724
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_pager:8431
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_redux:7206
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/pri_key_refactor:8751
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/reduce_pings:7261
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/replication_dedux:4600
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/rsrc_in_storage:6577
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/rt_bug_41083:5437
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/savepoints:4223
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/search_related_prefetch:6818
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sqla_1.50_compat:5321
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sqlt_parser_view:8145
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/storage-interbase:8929
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/storage-ms-access:4142
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/storage-tweaks:6262
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/subclassed_rsset:5930
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/subqueried_limit_fixes:9304
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/subquery:5617
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/syb_connected:6919
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase:7682
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_asa:8513
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_bulk_insert:7679
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_bulkinsert_support:7796
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_computed_columns:8496
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_mssql:6125
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_refactor:7940
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_support:7797
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/table_name_ref:7132
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/top_limit_altfix:6429
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/try-tiny:9436
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/type_aware_update:6619
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/unresolvable_prefetch:6949
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/versioned_enhancements:4125
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/versioning:4578
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/view_rels:7908
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/views:5585
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/void_populate_resultset_cond:7944
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/tags/0.08108_prerelease_please_do_not_pull_into_it:7008
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/tags/pre_0.08109_please_do_not_merge:7336
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/trunk:9465
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-C3:318
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-current:2222
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-joins:173
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-resultset:570
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/datetime:1716
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/find_compat:1855
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/find_unique_query_fixes:2142
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/inflate:1988
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/many_to_many:2025
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/re_refactor_bugfix:1944
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/reorganize_tests:1827
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset-new-refactor:1766
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset_2_electric_boogaloo:2175
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset_cleanup:2102
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/sqlt_tests_refactor:2043
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/trunk/DBIx-Class:3606
fe160bb6-dc1c-0410-9f2b-d64a711b54a5:/local/DBIC-trunk-0.08:10510

Modified: DBIx-Class/0.08/branches/pg_cursors/Changes
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/Changes	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/Changes	2010-05-31 07:55:05 UTC (rev 9468)
@@ -1,13 +1,48 @@
 Revision history for DBIx::Class
 
+    * New Features
+        - Add DBIx::Class::FilterColumn for non-ref filtering
+        - ::Storage::DBI now correctly preserves a parent $dbh from
+          terminating children, even during interpreter-global
+          out-of-order destruction
         - InflateColumn::DateTime support for MSSQL via DBD::Sybase
-        - millisecond precision support for MSSQL datetimes for
+        - Millisecond precision support for MSSQL datetimes for
           InflateColumn::DateTime
-        - support connecting using $ENV{DBI_DSN} and $ENV{DBI_DRIVER}
+        - Support connecting using $ENV{DBI_DSN} and $ENV{DBI_DRIVER}
         - current_source_alias method on ResultSet objects to
           determine the alias to use in programatically assembled
           search()es (originally added in 0.08100 but unmentioned)
+        - Rewrite/unification of all subselecting limit emulations
+          (RNO, Top, RowNum) to be much more robust wrt complex joined
+          resultsets
+        - MSSQL limits now don't require nearly as many applications of
+          the unsafe_subselect_ok attribute, due to optimized queries
+        - Support for Generic Subquery limit "emulation" - awfully slow
+          and inefficient but works on almost any db, and is preferred
+          to a soft-limit
 
+    * Fixes
+        - Fix nasty potentially data-eating bug when deleting/updating
+          a limited resultset
+        - Fix find() to use result_class set on object
+        - Fix result_class setter behaviour to not mistakenly stuff attrs.
+        - Don't try and ensure_class_loaded an object. This doesn't work.
+        - Fix as_subselect_rs to not inject resultset class-wide where
+          conditions outside of the resulting subquery
+        - update() on row not in_storage no longer throws an exception
+          if there are no dirty columns to update (fixes cascaded update
+          annoyances)
+        - Update Schema::Versioned to respect hashref style of
+          connection_info
+        - Do not recreate the same related object twice during MultiCreate
+          (solves the problem of orphaned IC::FS files)
+
+    * Misc
+        - Add a warning to load_namespaces if a class in ResultSet/
+          is not a subclass of DBIx::Class::ResultSet
+        - All DBIC exception-handling switched to Try::Tiny
+        - Depend on optimized SQL::Abstract (faster SQL generation)
+
 0.08121 2010-04-11 18:43:00 (UTC)
         - Support for Firebird RDBMS with DBD::InterBase and ODBC
         - Add core support for INSERT RETURNING (for storages that

Modified: DBIx-Class/0.08/branches/pg_cursors/Makefile.PL
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/Makefile.PL	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/Makefile.PL	2010-05-31 07:55:05 UTC (rev 9468)
@@ -18,7 +18,6 @@
 ### All of them should go to DBIx::Class::Optional::Dependencies
 ###
 
-
 name     'DBIx-Class';
 perl_version '5.008001';
 all_from 'lib/DBIx/Class.pm';
@@ -37,7 +36,7 @@
 
 my $runtime_requires = {
   'Carp::Clan'               => '6.0',
-  'Class::Accessor::Grouped' => '0.09002',
+  'Class::Accessor::Grouped' => '0.09003',
   'Class::C3::Componentised' => '1.0005',
   'Class::Inspector'         => '1.24',
   'Data::Page'               => '2.00',
@@ -45,12 +44,13 @@
   'MRO::Compat'              => '0.09',
   'Module::Find'             => '0.06',
   'Path::Class'              => '0.18',
-  'SQL::Abstract'            => '1.64',
+  'SQL::Abstract'            => '1.66',
   'SQL::Abstract::Limit'     => '0.13',
   'Sub::Name'                => '0.04',
   'Data::Dumper::Concise'    => '1.000',
   'Scope::Guard'             => '0.03',
   'Context::Preserve'        => '0.01',
+  'Try::Tiny'                => '0.04',
 };
 
 # this is so we can order requires alphabetically

Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Admin.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Admin.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Admin.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -209,8 +209,8 @@
 sub _build_config {
   my ($self) = @_;
 
-  eval { require Config::Any }
-    or die ("Config::Any is required to parse the config file.\n");
+  try { require Config::Any }
+    catch { die ("Config::Any is required to parse the config file.\n") };
 
   my $cfg = Config::Any->load_files ( {files => [$self->config_file], use_ext =>1, flatten_to_hash=>1});
 

Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/CDBICompat/ColumnCase.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/CDBICompat/ColumnCase.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/CDBICompat/ColumnCase.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -25,9 +25,15 @@
 
 sub has_many {
   my ($class, $rel, $f_class, $f_key, @rest) = @_;
-  return $class->next::method($rel, $f_class, ( ref($f_key) ?
-                                                          $f_key :
-                                                          lc($f_key) ), @rest);
+  return $class->next::method(
+    $rel,
+    $f_class,
+    (ref($f_key) ?
+      $f_key :
+      lc($f_key||'')
+    ),
+    @rest
+  );
 }
 
 sub get_inflated_column {

Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Componentised.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Componentised.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Componentised.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -37,7 +37,7 @@
     for (qw/DBIx::Class::UTF8Columns DBIx::Class::ForceUTF8/) {
       if ($comp->isa ($_) ) {
         $keep_checking = 0; # no use to check from this point on
-        carp "Use of $_ is strongly discouraged. See documentationm of DBIx::Class::UTF8Columns for more info\n"
+        carp "Use of $_ is strongly discouraged. See documentation of DBIx::Class::UTF8Columns for more info\n"
           unless ($warned->{UTF8Columns}++ || $ENV{DBIC_UTF8COLUMNS_OK});
         last;
       }

Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Exception.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Exception.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Exception.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -3,8 +3,9 @@
 use strict;
 use warnings;
 
-use Carp::Clan qw/^DBIx::Class/;
+use Carp::Clan qw/^DBIx::Class|^Try::Tiny/;
 use Scalar::Util qw/blessed/;
+use Try::Tiny;
 
 use overload
     '""' => sub { shift->{msg} },
@@ -42,7 +43,7 @@
 L<Carp::Clan/croak>.
 
   DBIx::Class::Exception->throw('Foo');
-  eval { ... }; DBIx::Class::Exception->throw($@) if $@;
+  try { ... } catch { DBIx::Class::Exception->throw(shift) }
 
 =cut
 
@@ -54,9 +55,7 @@
 
     # use Carp::Clan's croak if we're not stack tracing
     if(!$stacktrace) {
-        local $@;
-        eval { croak $msg };
-        $msg = $@
+        try { croak $msg } catch { $msg = shift };
     }
     else {
         $msg = Carp::longmess($msg);

Added: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/FilterColumn.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/FilterColumn.pm	                        (rev 0)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/FilterColumn.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -0,0 +1,205 @@
+package DBIx::Class::FilterColumn;
+use strict;
+use warnings;
+
+use base qw/DBIx::Class::Row/;
+
+sub filter_column {
+  my ($self, $col, $attrs) = @_;
+
+  $self->throw_exception("FilterColumn does not work with InflateColumn")
+    if $self->isa('DBIx::Class::InflateColumn') &&
+      defined $self->column_info($col)->{_inflate_info};
+
+  $self->throw_exception("No such column $col to filter")
+    unless $self->has_column($col);
+
+  $self->throw_exception("filter_column needs attr hashref")
+    unless ref $attrs eq 'HASH';
+
+  $self->column_info($col)->{_filter_info} = $attrs;
+  my $acc = $self->column_info($col)->{accessor};
+  $self->mk_group_accessors(filtered_column => [ (defined $acc ? $acc : $col), $col]);
+  return 1;
+}
+
+sub _column_from_storage {
+  my ($self, $col, $value) = @_;
+
+  return $value unless defined $value;
+
+  my $info = $self->column_info($col)
+    or $self->throw_exception("No column info for $col");
+
+  return $value unless exists $info->{_filter_info};
+
+  my $filter = $info->{_filter_info}{filter_from_storage};
+  $self->throw_exception("No filter for $col") unless defined $filter;
+
+  return $self->$filter($value);
+}
+
+sub _column_to_storage {
+  my ($self, $col, $value) = @_;
+
+  my $info = $self->column_info($col) or
+    $self->throw_exception("No column info for $col");
+
+  return $value unless exists $info->{_filter_info};
+
+  my $unfilter = $info->{_filter_info}{filter_to_storage};
+  $self->throw_exception("No unfilter for $col") unless defined $unfilter;
+  return $self->$unfilter($value);
+}
+
+sub get_filtered_column {
+  my ($self, $col) = @_;
+
+  $self->throw_exception("$col is not a filtered column")
+    unless exists $self->column_info($col)->{_filter_info};
+
+  return $self->{_filtered_column}{$col}
+    if exists $self->{_filtered_column}{$col};
+
+  my $val = $self->get_column($col);
+
+  return $self->{_filtered_column}{$col} = $self->_column_from_storage($col, $val);
+}
+
+sub get_column {
+  my ($self, $col) = @_;
+  if (exists $self->{_filtered_column}{$col}) {
+    return $self->{_column_data}{$col} ||= $self->_column_to_storage ($col, $self->{_filtered_column}{$col});
+  }
+
+  return $self->next::method ($col);
+}
+
+# sadly a separate codepath in Row.pm ( used by insert() )
+sub get_columns {
+  my $self = shift;
+
+  foreach my $col (keys %{$self->{_filtered_column}||{}}) {
+    $self->{_column_data}{$col} ||= $self->_column_to_storage ($col, $self->{_filtered_column}{$col})
+      if exists $self->{_filtered_column}{$col};
+  }
+
+  $self->next::method (@_);
+}
+
+sub store_column {
+  my ($self, $col) = (shift, @_);
+
+  # blow cache
+  delete $self->{_filtered_column}{$col};
+
+  $self->next::method(@_);
+}
+
+sub set_filtered_column {
+  my ($self, $col, $filtered) = @_;
+
+  # do not blow up the cache via set_column unless necessary
+  # (filtering may be expensive!)
+  if (exists $self->{_filtered_column}{$col}) {
+    return $filtered
+      if ($self->_eq_column_values ($col, $filtered, $self->{_filtered_column}{$col} ) );
+
+    $self->make_column_dirty ($col); # so the comparison won't run again
+  }
+
+  $self->set_column($col, $self->_column_to_storage($col, $filtered));
+
+  return $self->{_filtered_column}{$col} = $filtered;
+}
+
+sub update {
+  my ($self, $attrs, @rest) = @_;
+
+  foreach my $key (keys %{$attrs||{}}) {
+    if (
+      $self->has_column($key)
+        &&
+      exists $self->column_info($key)->{_filter_info}
+    ) {
+      $self->set_filtered_column($key, delete $attrs->{$key});
+
+      # FIXME update() reaches directly into the object-hash
+      # and we may *not* have a filtered value there - thus
+      # the void-ctx filter-trigger
+      $self->get_column($key) unless exists $self->{_column_data}{$key};
+    }
+  }
+
+  return $self->next::method($attrs, @rest);
+}
+
+sub new {
+  my ($class, $attrs, @rest) = @_;
+  my $source = $attrs->{-result_source}
+    or $class->throw_exception('Sourceless rows are not supported with DBIx::Class::FilterColumn');
+
+  my $obj = $class->next::method($attrs, @rest);
+  foreach my $key (keys %{$attrs||{}}) {
+    if ($obj->has_column($key) &&
+          exists $obj->column_info($key)->{_filter_info} ) {
+      $obj->set_filtered_column($key, $attrs->{$key});
+    }
+  }
+
+  return $obj;
+}
+
+1;
+
+=head1 NAME
+
+DBIx::Class::FilterColumn - Automatically convert column data
+
+=head1 SYNOPSIS
+
+ # In your result classes
+ __PACKAGE__->filter_column( money => {
+     filter_to_storage => 'to_pennies',
+     filter_from_storage => 'from_pennies',
+ });
+
+ sub to_pennies   { $_[1] * 100 }
+
+ sub from_pennies { $_[1] / 100 }
+
+ 1;
+
+=head1 DESCRIPTION
+
+This component is meant to be a more powerful, but less DWIM-y,
+L<DBIx::Class::InflateColumn>.  One of the major issues with said component is
+that it B<only> works with references.  Generally speaking anything that can
+be done with L<DBIx::Class::InflateColumn> can be done with this component.
+
+=head1 METHODS
+
+=head2 filter_column
+
+ __PACKAGE__->filter_column( colname => {
+     filter_from_storage => 'method',
+     filter_to_storage   => 'method',
+ })
+
+This is the method that you need to call to set up a filtered column.  It takes
+exactly two arguments; the first being the column name the second being a
+C<HashRef> with C<filter_from_storage> and C<filter_to_storage> having
+something that can be called as a method.  The method will be called with
+the value of the column as the first non-C<$self> argument.
+
+=head2 get_filtered_column
+
+ $obj->get_filtered_column('colname')
+
+Returns the filtered value of the column
+
+=head2 set_filtered_column
+
+ $obj->set_filtered_column(colname => 'new_value')
+
+Sets the filtered value of the column

Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/InflateColumn/DateTime.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/InflateColumn/DateTime.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/InflateColumn/DateTime.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -4,6 +4,7 @@
 use warnings;
 use base qw/DBIx::Class/;
 use Carp::Clan qw/^DBIx::Class/;
+use Try::Tiny;
 
 =head1 NAME
 
@@ -167,13 +168,18 @@
           inflate => sub {
             my ($value, $obj) = @_;
 
-            my $dt = eval { $obj->_inflate_to_datetime( $value, \%info ) };
-            if (my $err = $@ ) {
-              return undef if ($undef_if_invalid);
-              $self->throw_exception ("Error while inflating ${value} for ${column} on ${self}: $err");
-            }
+            my $dt = try
+              { $obj->_inflate_to_datetime( $value, \%info ) }
+              catch {
+                $self->throw_exception ("Error while inflating ${value} for ${column} on ${self}: $_")
+                  unless $undef_if_invalid;
+                undef;  # rv
+              };
 
-            return $obj->_post_inflate_datetime( $dt, \%info );
+            return (defined $dt)
+              ? $obj->_post_inflate_datetime( $dt, \%info )
+              : undef
+            ;
           },
           deflate => sub {
             my ($value, $obj) = @_;

Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/InflateColumn.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/InflateColumn.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/InflateColumn.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -37,7 +37,7 @@
 to work.
 
 If you want to filter plain scalar values and replace them with
-something else, contribute a filtering component.
+something else, see L<DBIx::Class::FilterColumn>.
 
 =head1 METHODS
 
@@ -74,6 +74,11 @@
 
 sub inflate_column {
   my ($self, $col, $attrs) = @_;
+
+  $self->throw_exception("InflateColumn does not work with FilterColumn")
+    if $self->isa('DBIx::Class::FilterColumn') &&
+      defined $self->column_info($col)->{_filter_info};
+
   $self->throw_exception("No such column $col to inflate")
     unless $self->has_column($col);
   $self->throw_exception("inflate_column needs attr hashref")
@@ -146,9 +151,9 @@
   $self->set_column($col, $self->_deflated_column($col, $inflated));
 #  if (blessed $inflated) {
   if (ref $inflated && ref($inflated) ne 'SCALAR') {
-    $self->{_inflated_column}{$col} = $inflated; 
+    $self->{_inflated_column}{$col} = $inflated;
   } else {
-    delete $self->{_inflated_column}{$col};      
+    delete $self->{_inflated_column}{$col};
   }
   return $inflated;
 }
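
To illustrate the new guard: once a column has been set up via
C<filter_column>, also setting up C<inflate_column> on it is now a hard error
(the column name and filter methods below are made up, and both components
are assumed to be loaded):

    __PACKAGE__->filter_column( amount => {
        filter_to_storage   => 'to_pennies',
        filter_from_storage => 'from_pennies',
    });

    # this now croaks with "InflateColumn does not work with FilterColumn"
    __PACKAGE__->inflate_column( amount => {
        inflate => sub { $_[0] },
        deflate => sub { $_[0] },
    });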

Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Manual/Cookbook.pod
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Manual/Cookbook.pod	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Manual/Cookbook.pod	2010-05-31 07:55:05 UTC (rev 9468)
@@ -292,7 +292,7 @@
   my $count = $rs->count;
 
   # Equivalent SQL:
-  # SELECT COUNT( * ) FROM (SELECT me.name FROM artist me GROUP BY me.name) count_subq:
+  # SELECT COUNT( * ) FROM (SELECT me.name FROM artist me GROUP BY me.name) me:
 
 =head2 Grouping results
 
@@ -1244,17 +1244,17 @@
     return $genus->species;
   };
 
+  use Try::Tiny;
   my $rs;
-  eval {
+  try {
     $rs = $schema->txn_do($coderef1);
-  };
-
-  if ($@) {                             # Transaction failed
+  } catch {
+    # Transaction failed
     die "the sky is falling!"           #
-      if ($@ =~ /Rollback failed/);     # Rollback failed
+      if ($_ =~ /Rollback failed/);     # Rollback failed
 
     deal_with_failed_transaction();
-  }
+  };
 
 Note: by default C<txn_do> will re-run the coderef one more time if an
 error occurs due to client disconnection (e.g. the server is bounced).
@@ -1281,8 +1281,10 @@
   my $schema = MySchema->connect("dbi:Pg:dbname=my_db");
 
   # Start a transaction. Every database change from here on will only be 
-  # committed into the database if the eval block succeeds.
-  eval {
+  # committed into the database if the try block succeeds.
+  use Try::Tiny;
+  my $exception;
+  try {
     $schema->txn_do(sub {
       # SQL: BEGIN WORK;
 
@@ -1292,7 +1294,7 @@
       for (1..10) {
 
         # Start a nested transaction, which in fact sets a savepoint.
-        eval {
+        try {
           $schema->txn_do(sub {
             # SQL: SAVEPOINT savepoint_0;
 
@@ -1307,8 +1309,7 @@
               #      WHERE ( id = 42 );
             }
           });
-        };
-        if ($@) {
+        } catch {
           # SQL: ROLLBACK TO SAVEPOINT savepoint_0;
 
           # There was an error while creating a $thing. Depending on the error
@@ -1316,14 +1317,14 @@
           # changes related to the creation of this $thing
 
           # Abort the whole job
-          if ($@ =~ /horrible_problem/) {
+          if ($_ =~ /horrible_problem/) {
             print "something horrible happend, aborting job!";
-            die $@;                # rethrow error
+            die $_;                # rethrow error
           }
 
           # Ignore this $thing, report the error, and continue with the
           # next $thing
-          print "Cannot create thing: $@";
+          print "Cannot create thing: $_";
         }
         # There was no error, so save all changes since the last 
         # savepoint.
@@ -1331,8 +1332,11 @@
         # SQL: RELEASE SAVEPOINT savepoint_0;
       }
     });
-  };
-  if ($@) {
+  } catch {
+    $exception = $_;
+  };
+
+  if ($exception) {
     # There was an error while handling the $job. Rollback all changes
     # since the transaction started, including the already committed
     # ('released') savepoints. There will be neither a new $job nor any
@@ -1340,7 +1344,7 @@
 
     # SQL: ROLLBACK;
 
-    print "ERROR: $@\n";
+    print "ERROR: $exception\n";
   }
   else {
     # There was no error while handling the $job. Commit all changes.
@@ -1354,7 +1358,7 @@
 
 In this example it might be hard to see where the rollbacks, releases and
 commits are happening, but it works just the same as for plain L<<txn_do>>: If
-the C<eval>-block around C<txn_do> fails, a rollback is issued. If the C<eval>
+the C<try>-block around C<txn_do> fails, a rollback is issued. If the C<try>
 succeeds, the transaction is committed (or the savepoint released).
 
 While you can get more fine-grained control using C<svp_begin>, C<svp_release>
@@ -2084,6 +2088,47 @@
 statement and dig down to see if certain parameters cause aberrant behavior.
 You might want to check out L<DBIx::Class::QueryLog> as well.
 
+=head1 IMPROVING PERFORMANCE
+
+=over
+
+=item *
+
+Install L<Class::XSAccessor> to speed up L<Class::Accessor::Grouped>.
+
+=item *
+
+On Perl 5.8 install L<Class::C3::XS>.
+
+=item *
+
+L<prefetch|DBIx::Class::ResultSet/prefetch> relationships, where possible. See
+L</Using joins and prefetch>.
+
+=item *
+
+Use L<populate|DBIx::Class::ResultSet/populate> in void context to insert data
+when you don't need the resulting L<DBIx::Class::Row> objects, if possible, but
+see the caveats.
+
+When inserting many rows, for best results, populate a large number of rows at a
+time, but not so large that the table is locked for an unacceptably long time.
+
+If using L<create|DBIx::Class::ResultSet/create> instead, use a transaction and
+commit every C<X> rows, where C<X> gives you the best performance without
+locking the table for too long.
+
+=item *
+
+When selecting many rows, if you don't need full-blown L<DBIx::Class::Row>
+objects, consider using L<DBIx::Class::ResultClass::HashRefInflator>.
+
+=item *
+
+See also L</STARTUP SPEED> and L</MEMORY USAGE> in this document.
+
+=back
+
 =head1 STARTUP SPEED
 
 L<DBIx::Class|DBIx::Class> programs can have a significant startup delay
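
A short sketch of the bulk-insert and row-inflation advice from the new
IMPROVING PERFORMANCE section above (schema and source names are
illustrative):

    # void-context populate skips creation of Row objects entirely
    $schema->resultset('Artist')->populate([
      [ qw/name rank/ ],
      [ 'Caterwauler McCrae', 1 ],
      [ 'Random Boy Band',    2 ],
    ]);

    # read-mostly code can skip Row inflation via HashRefInflator
    my $rs = $schema->resultset('Artist')->search(
      {},
      { result_class => 'DBIx::Class::ResultClass::HashRefInflator' },
    );
    while (my $artist = $rs->next) {
      print "$artist->{name}\n";   # plain hashref, no accessors
    }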

Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Optional/Dependencies.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Optional/Dependencies.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Optional/Dependencies.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -283,12 +283,29 @@
 
 # This is to be called by the author only (automatically in Makefile.PL)
 sub _gen_pod {
+
   my $class = shift;
   my $modfn = __PACKAGE__ . '.pm';
   $modfn =~ s/\:\:/\//g;
 
-  require DBIx::Class;
-  my $distver = DBIx::Class->VERSION;
+  my $podfn = __FILE__;
+  $podfn =~ s/\.pm$/\.pod/;
+
+  my $distver =
+    eval { require DBIx::Class; DBIx::Class->VERSION; }
+      ||
+    do {
+      warn
+"\n\n---------------------------------------------------------------------\n" .
+'Unable to load core DBIx::Class module to determine current version, '.
+'possibly due to missing dependencies. Author-mode autodocumentation ' .
+"halted\n\n" . $@ .
+"\n\n---------------------------------------------------------------------\n"
+      ;
+      '*UNKNOWN*';  # rv
+    }
+  ;
+
   my $sqltver = $class->req_list_for ('deploy')->{'SQL::Translator'}
     or die "Hrmm? No sqlt dep?";
 
@@ -431,10 +448,7 @@
     'You may distribute this code under the same terms as Perl itself',
   );
 
-  my $fn = __FILE__;
-  $fn =~ s/\.pm$/\.pod/;
-
-  open (my $fh, '>', $fn) or croak "Unable to write to $fn: $!";
+  open (my $fh, '>', $podfn) or croak "Unable to write to $podfn: $!";
   print $fh join ("\n\n", @chunks);
   close ($fh);
 }

Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Relationship/Base.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Relationship/Base.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Relationship/Base.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -5,6 +5,7 @@
 
 use Scalar::Util ();
 use base qw/DBIx::Class/;
+use Try::Tiny;
 
 =head1 NAME
 
@@ -122,6 +123,40 @@
 should, set this attribute to a true or false value to override the detection
 of when to create constraints.
 
+=item cascade_copy
+
+If C<cascade_copy> is true on a C<has_many> relationship for an
+object, then when you copy the object all the related objects will
+be copied too. To turn this behaviour off, pass C<< cascade_copy => 0 >> 
+in the C<$attr> hashref. 
+
+The behaviour defaults to C<< cascade_copy => 1 >> for C<has_many>
+relationships.
+
+=item cascade_delete
+
+By default, DBIx::Class cascades deletes across C<has_many>,
+C<has_one> and C<might_have> relationships. You can disable this
+behaviour on a per-relationship basis by supplying 
+C<< cascade_delete => 0 >> in the relationship attributes.
+
+The cascaded operations are performed after the requested delete,
+so if your database has a constraint on the relationship, it will
+have deleted/updated the related records or raised an exception
+before DBIx::Class gets to perform the cascaded operation.
+
+=item cascade_update
+
+By default, DBIx::Class cascades updates across C<has_one> and
+C<might_have> relationships. You can disable this behaviour on a
+per-relationship basis by supplying C<< cascade_update => 0 >> in
+the relationship attributes.
+
+This is not an RDBMS-style cascade update - it purely means that when
+an object has update called on it, all the related objects also
+have update called. It will not change foreign keys automatically -
+you must arrange to do this yourself.
+
 =item on_delete / on_update
 
 If you are using L<SQL::Translator> to create SQL for you, you can use these
@@ -203,16 +238,17 @@
 
     # condition resolution may fail if an incomplete master-object prefetch
     # is encountered - that is ok during prefetch construction (not yet in_storage)
-    my $cond = eval { $source->_resolve_condition( $rel_info->{cond}, $rel, $self ) };
-    if (my $err = $@) {
+    my $cond = try {
+      $source->_resolve_condition( $rel_info->{cond}, $rel, $self )
+    }
+    catch {
       if ($self->in_storage) {
-        $self->throw_exception ($err);
+        $self->throw_exception ($_);
       }
-      else {
-        $cond = $DBIx::Class::ResultSource::UNRESOLVABLE_CONDITION;
-      }
-    }
 
+      $DBIx::Class::ResultSource::UNRESOLVABLE_CONDITION;  # RV
+    };
+
     if ($cond eq $DBIx::Class::ResultSource::UNRESOLVABLE_CONDITION) {
       my $reverse = $source->reverse_relationship_info($rel);
       foreach my $rev_rel (keys %$reverse) {
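
The cascade attributes documented in the hunk above are passed in the
relationship attribute hashref, roughly like this (class and column names
are invented for illustration):

    # in a hypothetical Artist result class
    __PACKAGE__->has_many(
      cds => 'MyApp::Schema::Result::CD', 'artist_id',
      {
        cascade_copy   => 0,   # copying an artist will not copy its CDs
        cascade_delete => 0,   # leave deletes to the database constraints
      },
    );

    __PACKAGE__->might_have(
      liner_notes => 'MyApp::Schema::Result::LinerNotes', undef,
      { cascade_update => 0 }, # update() will not cascade to liner_notes
    );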

Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Relationship/BelongsTo.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Relationship/BelongsTo.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Relationship/BelongsTo.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -6,6 +6,7 @@
 
 use strict;
 use warnings;
+use Try::Tiny;
 
 our %_pod_inherit_config = 
   (
@@ -24,10 +25,10 @@
   # no join condition or just a column name
   if (!ref $cond) {
     $class->ensure_class_loaded($f_class);
-    my %f_primaries = map { $_ => 1 } eval { $f_class->_pri_cols };
-    $class->throw_exception(
-      "Can't infer join condition for ${rel} on ${class}: $@"
-    ) if $@;
+    my %f_primaries = map { $_ => 1 } try { $f_class->_pri_cols }
+      catch {
+        $class->throw_exception( "Can't infer join condition for ${rel} on ${class}: $_");
+      };
 
     my ($pri, $too_many) = keys %f_primaries;
     $class->throw_exception(

Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Relationship/HasMany.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Relationship/HasMany.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Relationship/HasMany.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -3,6 +3,7 @@
 
 use strict;
 use warnings;
+use Try::Tiny;
 
 our %_pod_inherit_config = 
   (
@@ -14,10 +15,10 @@
 
   unless (ref $cond) {
     $class->ensure_class_loaded($f_class);
-    my ($pri, $too_many) = eval { $class->_pri_cols };
-    $class->throw_exception(
-      "Can't infer join condition for ${rel} on ${class}: $@"
-    ) if $@;
+    my ($pri, $too_many) = try { $class->_pri_cols } 
+      catch {
+        $class->throw_exception("Can't infer join condition for ${rel} on ${class}: $_");
+      };
 
     $class->throw_exception(
       "has_many can only infer join for a single primary key; ".
@@ -39,7 +40,7 @@
       $guess = "using our class name '$class' as foreign key";
     }
 
-    my $f_class_loaded = eval { $f_class->columns };
+    my $f_class_loaded = try { $f_class->columns };
     $class->throw_exception(
       "No such column ${f_key} on foreign class ${f_class} ($guess)"
     ) if $f_class_loaded && !$f_class->has_column($f_key);

Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Relationship/HasOne.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Relationship/HasOne.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Relationship/HasOne.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -4,6 +4,7 @@
 use strict;
 use warnings;
 use Carp::Clan qw/^DBIx::Class/;
+use Try::Tiny;
 
 our %_pod_inherit_config = 
   (
@@ -30,7 +31,7 @@
       "${class} has none"
     ) if !defined $pri && (!defined $cond || !length $cond);
 
-    my $f_class_loaded = eval { $f_class->columns };
+    my $f_class_loaded = try { $f_class->columns };
     my ($f_key,$too_many,$guess);
     if (defined $cond && length $cond) {
       $f_key = $cond;
@@ -60,10 +61,10 @@
 sub _get_primary_key {
   my ( $class, $target_class ) = @_;
   $target_class ||= $class;
-  my ($pri, $too_many) = eval { $target_class->_pri_cols };
-  $class->throw_exception(
-    "Can't infer join condition on ${target_class}: $@"
-  ) if $@;
+  my ($pri, $too_many) = try { $target_class->_pri_cols }
+    catch {
+      $class->throw_exception("Can't infer join condition on ${target_class}: $_");
+    };
 
   $class->throw_exception(
     "might_have/has_one can only infer join for a single primary key; ".

Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/ResultSet.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/ResultSet.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/ResultSet.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -57,8 +57,13 @@
 
 The query that the ResultSet represents is B<only> executed against
 the database when these methods are called:
-L</find> L</next> L</all> L</first> L</single> L</count>
+L</find>, L</next>, L</all>, L</first>, L</single>, L</count>.
 
+If a resultset is used in a numeric context it returns the L</count>.
+However, if it is used in a boolean context it is B<always> true.  So if
+you want to check if a resultset has any results, you must use C<if $rs
+!= 0>.
+
 =head1 EXAMPLES
 
 =head2 Chaining resultsets
@@ -101,7 +106,7 @@
 L</join>, L</prefetch>, L</+select>, L</+as> attributes are merged
 into the existing ones from the original resultset.
 
-The L</where>, L</having> attribute, and any search conditions are
+The L</where> and L</having> attributes, and any search conditions, are
 merged with an SQL C<AND> to the existing condition from the original
 resultset.
 
@@ -142,13 +147,6 @@
 
 See: L</search>, L</count>, L</get_column>, L</all>, L</create>.
 
-=head1 OVERLOADING
-
-If a resultset is used in a numeric context it returns the L</count>.
-However, if it is used in a boolean context it is always true.  So if
-you want to check if a resultset has any results use C<if $rs != 0>.
-C<if $rs> will always be true.
-
 =head1 METHODS
 
 =head2 new
@@ -538,8 +536,8 @@
       : $self->_add_alias($input_query, $alias);
   }
 
-  # Run the query
-  my $rs = $self->search ($query, $attrs);
+  # Run the query, passing the result_class since it should propagate for find
+  my $rs = $self->search ($query, {result_class => $self->result_class, %$attrs});
   if (keys %{$rs->_resolved_attrs->{collapse}}) {
     my $row = $rs->next;
     carp "Query returned more than one row" if $rs->next;
@@ -1138,9 +1136,14 @@
 sub result_class {
   my ($self, $result_class) = @_;
   if ($result_class) {
-    $self->ensure_class_loaded($result_class);
+    unless (ref $result_class) { # don't fire this for an object
+      $self->ensure_class_loaded($result_class);
+    }
     $self->_result_class($result_class);
-    $self->{attrs}{result_class} = $result_class if ref $self;
+    # THIS LINE WOULD BE A BUG - this accessor specifically exists to
+    # permit the user to set result class on one result set only; it only
+    # chains if provided to search()
+    #$self->{attrs}{result_class} = $result_class if ref $self;
   }
   $self->_result_class;
 }
@@ -1235,11 +1238,12 @@
   my $rsrc = $self->result_source;
   $attrs ||= $self->_resolved_attrs;
 
-  my $tmp_attrs = { %$attrs };
+  # only take pieces we need for a simple count
+  my $tmp_attrs = { map
+    { $_ => $attrs->{$_} }
+    qw/ alias from where bind join /
+  };
 
-  # take off any limits, record_filter is cdbi, and no point of ordering a count
-  delete $tmp_attrs->{$_} for (qw/select as rows offset order_by record_filter/);
-
   # overwrite the selector (supplied by the storage)
   $tmp_attrs->{select} = $rsrc->storage->_count_select ($rsrc, $tmp_attrs);
   $tmp_attrs->{as} = 'count';
@@ -1256,37 +1260,43 @@
   my ($self, $attrs) = @_;
 
   my $rsrc = $self->result_source;
-  $attrs ||= $self->_resolved_attrs_copy;
+  $attrs ||= $self->_resolved_attrs;
 
-  my $sub_attrs = { %$attrs };
+  my $sub_attrs = { map
+    { $_ => $attrs->{$_} }
+    qw/ alias from where bind join group_by having rows offset /
+  };
 
-  # extra selectors do not go in the subquery and there is no point of ordering it
-  delete $sub_attrs->{$_} for qw/collapse select _prefetch_select as order_by/;
-
   # if we multi-prefetch we group_by primary keys only as this is what we would
   # get out of the rs via ->next/->all. We *DO WANT* to clobber old group_by regardless
   if ( keys %{$attrs->{collapse}}  ) {
     $sub_attrs->{group_by} = [ map { "$attrs->{alias}.$_" } ($rsrc->_pri_cols) ]
   }
 
-  $sub_attrs->{select} = $rsrc->storage->_subq_count_select ($rsrc, $attrs);
+  # Calculate subquery selector
+  if (my $g = $sub_attrs->{group_by}) {
 
-  # this is so that the query can be simplified e.g.
-  # * ordering can be thrown away in things like Top limit
-  $sub_attrs->{-for_count_only} = 1;
+    # necessary as the group_by may refer to aliased functions
+    my $sel_index;
+    for my $sel (@{$attrs->{select}}) {
+      $sel_index->{$sel->{-as}} = $sel
+        if (ref $sel eq 'HASH' and $sel->{-as});
+    }
 
-  my $sub_rs = $rsrc->resultset_class->new ($rsrc, $sub_attrs);
+    for my $g_part (@$g) {
+      push @{$sub_attrs->{select}}, $sel_index->{$g_part} || $g_part;
+    }
+  }
+  else {
+    my @pcols = map { "$attrs->{alias}.$_" } ($rsrc->primary_columns);
+    $sub_attrs->{select} = @pcols ? \@pcols : [ 1 ];
+  }
 
-  $attrs->{from} = [{
-    -alias => 'count_subq',
-    -source_handle => $rsrc->handle,
-    count_subq => $sub_rs->as_query,
-  }];
-
-  # the subquery replaces this
-  delete $attrs->{$_} for qw/where bind collapse group_by having having_bind rows offset/;
-
-  return $self->_count_rs ($attrs);
+  return $rsrc->resultset_class
+               ->new ($rsrc, $sub_attrs)
+                ->as_subselect_rs
+                 ->search ({}, { columns => { count => $rsrc->storage->_count_select ($rsrc, $attrs) } })
+                  -> get_column ('count');
 }
 
 sub _bool {
@@ -1417,7 +1427,7 @@
   my $cond = $rsrc->schema->storage->_strip_cond_qualifiers ($self->{cond});
 
   my $needs_group_by_subq = $self->_has_resolved_attr (qw/collapse group_by -join/);
-  my $needs_subq = $needs_group_by_subq || (not defined $cond) || $self->_has_resolved_attr(qw/row offset/);
+  my $needs_subq = $needs_group_by_subq || (not defined $cond) || $self->_has_resolved_attr(qw/rows offset/);
 
   if ($needs_group_by_subq or $needs_subq) {
 
@@ -2298,7 +2308,7 @@
     producer => $producer,
     name => 'harry',
   }, {
-    key => 'primary,
+    key => 'primary',
   });
 
 
@@ -2668,16 +2678,26 @@
 =cut
 
 sub as_subselect_rs {
-   my $self = shift;
+  my $self = shift;
 
-   return $self->result_source->resultset->search( undef, {
-      alias => $self->current_source_alias,
-      from => [{
-            $self->current_source_alias => $self->as_query,
-            -alias         => $self->current_source_alias,
-            -source_handle => $self->result_source->handle,
-         }]
-   });
+  my $attrs = $self->_resolved_attrs;
+
+  my $fresh_rs = (ref $self)->new (
+    $self->result_source
+  );
+
+  # these pieces will be locked in the subquery
+  delete $fresh_rs->{cond};
+  delete @{$fresh_rs->{attrs}}{qw/where bind/};
+
+  return $fresh_rs->search( {}, {
+    from => [{
+      $attrs->{alias} => $self->as_query,
+      -alias         => $attrs->{alias},
+      -source_handle => $self->result_source->handle,
+    }],
+    alias => $attrs->{alias},
+  });
 }
 
 # This code is called by search_related, and makes sure there
@@ -2702,7 +2722,7 @@
   # ->_resolve_join as otherwise they get lost - captainL
   my $join = $self->_merge_attr( $attrs->{join}, $attrs->{prefetch} );
 
-  delete @{$attrs}{qw/join prefetch collapse distinct select as columns +select +as +columns/};
+  delete @{$attrs}{qw/join prefetch collapse group_by distinct select as columns +select +as +columns/};
 
   my $seen = { %{ (delete $attrs->{seen_join}) || {} } };
 
@@ -2728,7 +2748,7 @@
       -alias => $attrs->{alias},
       $attrs->{alias} => $rs_copy->as_query,
     }];
-    delete @{$attrs}{@force_subq_attrs, 'where'};
+    delete @{$attrs}{@force_subq_attrs, qw/where bind/};
     $seen->{-relation_chain_depth} = 0;
   }
   elsif ($attrs->{from}) {  #shallow copy suffices
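
Two of the ResultSet changes above are easier to see in a small
(illustrative) snippet: the boolean/numeric overloading note that moved into
the main POD, and the fact that C<result_class> set via the accessor applies
to that one resultset only, while the search()-attribute form keeps
propagating (and is now also passed along by find()):

    my $rs = $schema->resultset('Artist');

    print "have artists\n" if $rs != 0;   # numeric context counts;
                                          # plain "if $rs" is always true

    # accessor form - affects only this resultset object
    my $one_off = $rs->search({});
    $one_off->result_class('DBIx::Class::ResultClass::HashRefInflator');

    # attribute form - survives further chaining
    my $chained = $rs->search(
      {},
      { result_class => 'DBIx::Class::ResultClass::HashRefInflator' },
    );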

Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/ResultSource.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/ResultSource.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/ResultSource.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -8,6 +8,7 @@
 
 use DBIx::Class::Exception;
 use Carp::Clan qw/^DBIx::Class/;
+use Try::Tiny;
 
 use base qw/DBIx::Class/;
 
@@ -367,9 +368,10 @@
     $self->{_columns_info_loaded}++;
     my $info = {};
     my $lc_info = {};
-    # eval for the case of storage without table
-    eval { $info = $self->storage->columns_info_for( $self->from ) };
-    unless ($@) {
+
+    # try for the case of storage without table
+    try {
+      $info = $self->storage->columns_info_for( $self->from );
       for my $realcol ( keys %{$info} ) {
         $lc_info->{lc $realcol} = $info->{$realcol};
       }
@@ -379,7 +381,7 @@
           %{ $info->{$col} || $lc_info->{lc $col} || {} }
         };
       }
-    }
+    };
   }
   return $self->_columns->{$column};
 }
@@ -1022,7 +1024,7 @@
 
   return $self;
 
-  # XXX disabled. doesn't work properly currently. skip in tests.
+# XXX disabled. doesn't work properly currently. skip in tests.
 
   my $f_source = $self->schema->source($f_source_name);
   unless ($f_source) {
@@ -1035,13 +1037,14 @@
   }
   return unless $f_source; # Can't test rel without f_source
 
-  eval { $self->_resolve_join($rel, 'me', {}, []) };
+  try { $self->_resolve_join($rel, 'me', {}, []) }
+  catch {
+    # If the resolve failed, back out and re-throw the error
+    delete $rels{$rel};
+    $self->_relationships(\%rels);
+    $self->throw_exception("Error creating relationship $rel: $_");
+  };
 
-  if ($@) { # If the resolve failed, back out and re-throw the error
-    delete $rels{$rel}; #
-    $self->_relationships(\%rels);
-    $self->throw_exception("Error creating relationship $rel: $@");
-  }
   1;
 }
 

Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Row.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Row.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Row.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -7,6 +7,7 @@
 
 use DBIx::Class::Exception;
 use Scalar::Util ();
+use Try::Tiny;
 
 ###
 ### Internal method
@@ -314,7 +315,7 @@
 
       MULTICREATE_DEBUG and warn "MC $self pre-reconstructing $relname $rel_obj\n";
 
-      my $them = { %{$rel_obj->{_relationship_data} || {} }, $rel_obj->get_inflated_columns };
+      my $them = { %{$rel_obj->{_relationship_data} || {} }, $rel_obj->get_columns };
       my $existing;
 
       # if there are no keys - nothing to search for
@@ -404,19 +405,14 @@
       my $reverse = $source->reverse_relationship_info($relname);
       foreach my $obj (@cands) {
         $obj->set_from_related($_, $self) for keys %$reverse;
-        my $them = { %{$obj->{_relationship_data} || {} }, $obj->get_inflated_columns };
         if ($self->__their_pk_needs_us($relname)) {
           if (exists $self->{_ignore_at_insert}{$relname}) {
             MULTICREATE_DEBUG and warn "MC $self skipping post-insert on $relname";
-          } else {
-            MULTICREATE_DEBUG and warn "MC $self re-creating $relname $obj";
-            my $re = $self->result_source
-                          ->related_source($relname)
-                          ->resultset
-                          ->create($them);
-            %{$obj} = %{$re};
-            MULTICREATE_DEBUG and warn "MC $self new $relname $obj";
           }
+          else {
+            MULTICREATE_DEBUG and warn "MC $self inserting $relname $obj";
+            $obj->insert;
+          }
         } else {
           MULTICREATE_DEBUG and warn "MC $self post-inserting $obj";
           $obj->insert();
@@ -518,16 +514,18 @@
 
 sub update {
   my ($self, $upd) = @_;
-  $self->throw_exception( "Not in database" ) unless $self->in_storage;
 
   my $ident_cond = $self->{_orig_ident} || $self->ident_condition;
 
+  $self->set_inflated_columns($upd) if $upd;
+  my %to_update = $self->get_dirty_columns;
+  return $self unless keys %to_update;
+
+  $self->throw_exception( "Not in database" ) unless $self->in_storage;
+
   $self->throw_exception('Unable to update a row with incomplete or no identity')
     if ! keys %$ident_cond;
 
-  $self->set_inflated_columns($upd) if $upd;
-  my %to_update = $self->get_dirty_columns;
-  return $self unless keys %to_update;
   my $rows = $self->result_source->storage->update(
     $self->result_source, \%to_update, $ident_cond
   );
@@ -860,34 +858,20 @@
   my ($self, $column, $new_value) = @_;
 
   # if we can't get an ident condition on first try - mark the object as unidentifiable
-  $self->{_orig_ident} ||= (eval { $self->ident_condition }) || {};
+  $self->{_orig_ident} ||= (try { $self->ident_condition }) || {};
 
   my $old_value = $self->get_column($column);
   $new_value = $self->store_column($column, $new_value);
 
-  my $dirty;
-  if (!$self->in_storage) { # no point tracking dirtyness on uninserted data
-    $dirty = 1;
-  }
-  elsif (defined $old_value xor defined $new_value) {
-    $dirty = 1;
-  }
-  elsif (not defined $old_value) {  # both undef
-    $dirty = 0;
-  }
-  elsif ($old_value eq $new_value) {
-    $dirty = 0;
-  }
-  else {  # do a numeric comparison if datatype allows it
-    if ($self->_is_column_numeric($column)) {
-      $dirty = $old_value != $new_value;
-    }
-    else {
-      $dirty = 1;
-    }
-  }
+  my $dirty =
+    $self->{_dirty_columns}{$column}
+      ||
+    $self->in_storage # no point tracking dirtiness on uninserted data
+      ? ! $self->_eq_column_values ($column, $old_value, $new_value)
+      : 1
+  ;
 
-  # sadly the update code just checks for keys, not for their value
+  # FIXME sadly the update code just checks for keys, not for their value
   $self->{_dirty_columns}{$column} = 1 if $dirty;
 
   # XXX clear out the relation cache for this column
@@ -896,6 +880,26 @@
   return $new_value;
 }
 
+sub _eq_column_values {
+  my ($self, $col, $old, $new) = @_;
+
+  if (defined $old xor defined $new) {
+    return 0;
+  }
+  elsif (not defined $old) {  # both undef
+    return 1;
+  }
+  elsif ($old eq $new) {
+    return 1;
+  }
+  elsif ($self->_is_column_numeric($col)) {  # do a numeric comparison if datatype allows it
+    return $old == $new;
+  }
+  else {
+    return 0;
+  }
+}
+
 =head2 set_columns
 
   $row->set_columns({ $col => $val, ... });
@@ -1375,7 +1379,6 @@
 
 sub discard_changes {
   my ($self, $attrs) = @_;
-  delete $self->{_dirty_columns};
   return unless $self->in_storage; # Don't reload if we aren't real!
 
   # add a replication default to read from the master only

Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/SQLAHacks/OracleJoins.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/SQLAHacks/OracleJoins.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/SQLAHacks/OracleJoins.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -5,13 +5,13 @@
 use Carp::Clan qw/^DBIx::Class|^SQL::Abstract/;
 
 sub select {
-  my ($self, $table, $fields, $where, $order, @rest) = @_;
+  my ($self, $table, $fields, $where, $rs_attrs, @rest) = @_;
 
   if (ref($table) eq 'ARRAY') {
     $where = $self->_oracle_joins($where, @{ $table });
   }
 
-  return $self->SUPER::select($table, $fields, $where, $order, @rest);
+  return $self->SUPER::select($table, $fields, $where, $rs_attrs, @rest);
 }
 
 sub _recurse_from {

Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/SQLAHacks/SQLite.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/SQLAHacks/SQLite.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/SQLAHacks/SQLite.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -6,12 +6,16 @@
 
 #
 # SQLite does not understand SELECT ... FOR UPDATE
-# Adjust SQL here instead
+# Disable it here
 #
-sub select {
-  my $self = shift;
-  local $self->{_dbic_rs_attrs}{for} = undef;
-  return $self->SUPER::select (@_);
+sub _parse_rs_attrs {
+  my ($self, $attrs) = @_;
+
+  return $self->SUPER::_parse_rs_attrs ($attrs)
+    if ref $attrs ne 'HASH';
+
+  local $attrs->{for};
+  return $self->SUPER::_parse_rs_attrs ($attrs);
 }
 
 1;

Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/SQLAHacks.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/SQLAHacks.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/SQLAHacks.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -46,32 +46,165 @@
   $self;
 }
 
+# !!! THIS IS ALSO HORRIFIC !!! /me ashamed
+#
+# Generates inner/outer select lists for various limit dialects
+# which result in one or more subqueries (e.g. RNO, Top, RowNum)
+# Any non-root-table columns need to have their table qualifier
+# turned into a column alias (otherwise names in subqueries clash
+# and/or lose their source table)
+#
+# Returns inner/outer strings of SQL QUOTED selectors with aliases
+# (to be used in whatever select statement), and an alias index hashref
+# of QUOTED SEL => QUOTED ALIAS pairs (to maybe be used for string-subst
+# higher up).
+# If an order_by is supplied, the inner select needs to bring out columns
+# used in implicit (non-selected) orders, and the order condition itself
+# needs to be realiased to the proper names in the outer query. Thus we
+# also return a hashref (order doesn't matter) of QUOTED EXTRA-SEL =>
+# QUOTED ALIAS pairs, which is a list of extra selectors that do *not*
+# exist in the original select list
 
-# ANSI standard Limit/Offset implementation. DB2 and MSSQL use this
+sub _subqueried_limit_attrs {
+  my ($self, $rs_attrs) = @_;
+
+  croak 'Limit dialect implementation usable only in the context of DBIC (missing $rs_attrs)'
+    unless ref ($rs_attrs) eq 'HASH';
+
+  my ($re_sep, $re_alias) = map { quotemeta $_ } (
+    $self->name_sep || '.',
+    $rs_attrs->{alias},
+  );
+
+  # correlate select and as, build selection index
+  my (@sel, $in_sel_index);
+  for my $i (0 .. $#{$rs_attrs->{select}}) {
+
+    my $s = $rs_attrs->{select}[$i];
+    my $sql_sel = $self->_recurse_fields ($s);
+    my $sql_alias = (ref $s) eq 'HASH' ? $s->{-as} : undef;
+
+
+    push @sel, {
+      sql => $sql_sel,
+      unquoted_sql => do { local $self->{quote_char}; $self->_recurse_fields ($s) },
+      as =>
+        $sql_alias
+          ||
+        $rs_attrs->{as}[$i]
+          ||
+        croak "Select argument $i ($s) without corresponding 'as'"
+      ,
+    };
+
+    $in_sel_index->{$sql_sel}++;
+    $in_sel_index->{$self->_quote ($sql_alias)}++ if $sql_alias;
+
+    # record unqualified versions too, so we do not have
+    # to reselect the same column twice (in qualified and
+    # unqualified form)
+    if (! ref $s && $sql_sel =~ / $re_sep (.+) $/x) {
+      $in_sel_index->{$1}++;
+    }
+  }
+
+
+  # re-alias and remove any name separators from aliases,
+  # unless we are dealing with the current source alias
+  # (which will transcend the subqueries as it is necessary
+  # for possible further chaining)
+  my (@in_sel, @out_sel, %renamed);
+  for my $node (@sel) {
+    if (List::Util::first { $_ =~ / (?<! $re_alias ) $re_sep /x } ($node->{as}, $node->{unquoted_sql}) )  {
+      $node->{as} =~ s/ $re_sep /__/xg;
+      my $quoted_as = $self->_quote($node->{as});
+      push @in_sel, sprintf '%s AS %s', $node->{sql}, $quoted_as;
+      push @out_sel, $quoted_as;
+      $renamed{$node->{sql}} = $quoted_as;
+    }
+    else {
+      push @in_sel, $node->{sql};
+      push @out_sel, $self->_quote ($node->{as});
+    }
+  }
+
+  # see if the order gives us anything
+  my %extra_order_sel;
+  for my $chunk ($self->_order_by_chunks ($rs_attrs->{order_by})) {
+    # order with bind
+    $chunk = $chunk->[0] if (ref $chunk) eq 'ARRAY';
+    $chunk =~ s/\s+ (?: ASC|DESC ) \s* $//ix;
+
+    next if $in_sel_index->{$chunk};
+
+    $extra_order_sel{$chunk} ||= $self->_quote (
+      'ORDER__BY__' . scalar keys %extra_order_sel
+    );
+  }
+
+  return (
+    (map { join (', ', @$_ ) } (
+      \@in_sel,
+      \@out_sel)
+    ),
+    \%renamed,
+    keys %extra_order_sel ? \%extra_order_sel : (),
+  );
+}
+
+# ANSI standard Limit/Offset implementation. DB2 and MSSQL >= 2005 use this
 sub _RowNumberOver {
-  my ($self, $sql, $order, $rows, $offset ) = @_;
+  my ($self, $sql, $rs_attrs, $rows, $offset ) = @_;
 
-  # get the select to make the final amount of columns equal the original one
-  my ($select) = $sql =~ /^ \s* SELECT \s+ (.+?) \s+ FROM/ix
+  # mangle the input sql as we will be replacing the selector
+  $sql =~ s/^ \s* SELECT \s+ .+? \s+ (?= \b FROM \b )//ix
     or croak "Unrecognizable SELECT: $sql";
 
-  # get the order_by only (or make up an order if none exists)
-  my $order_by = $self->_order_by(
-    (delete $order->{order_by}) || $self->_rno_default_order
-  );
+  # get selectors, and scan the order_by (if any)
+  my ($in_sel, $out_sel, $alias_map, $extra_order_sel)
+    = $self->_subqueried_limit_attrs ( $rs_attrs );
 
-  # whatever is left of the order_by
-  my $group_having = $self->_order_by($order);
+  # make up an order if none exists
+  my $requested_order = (delete $rs_attrs->{order_by}) || $self->_rno_default_order;
+  my $rno_ord = $self->_order_by ($requested_order);
 
-  my $qalias = $self->_quote ($self->{_dbic_rs_attrs}{alias});
+  # this is the order supplement magic
+  my $mid_sel = $out_sel;
+  if ($extra_order_sel) {
+    for my $extra_col (sort
+      { $extra_order_sel->{$a} cmp $extra_order_sel->{$b} }
+      keys %$extra_order_sel
+    ) {
+      $in_sel .= sprintf (', %s AS %s',
+        $extra_col,
+        $extra_order_sel->{$extra_col},
+      );
 
+      $mid_sel .= ', ' . $extra_order_sel->{$extra_col};
+    }
+  }
+
+  # and this is order re-alias magic
+  for ($extra_order_sel, $alias_map) {
+    for my $col (keys %$_) {
+      my $re_col = quotemeta ($col);
+      $rno_ord =~ s/$re_col/$_->{$col}/;
+    }
+  }
+
+  # whatever is left of the order_by (only where is processed at this point)
+  my $group_having = $self->_parse_rs_attrs($rs_attrs);
+
+  my $qalias = $self->_quote ($rs_attrs->{alias});
+  my $idx_name = $self->_quote ('rno__row__index');
+
   $sql = sprintf (<<EOS, $offset + 1, $offset + $rows, );
 
-SELECT $select FROM (
-  SELECT $qalias.*, ROW_NUMBER() OVER($order_by ) AS rno__row__index FROM (
-    ${sql}${group_having}
+SELECT $out_sel FROM (
+  SELECT $mid_sel, ROW_NUMBER() OVER( $rno_ord ) AS $idx_name FROM (
+    SELECT $in_sel ${sql}${group_having}
   ) $qalias
-) $qalias WHERE rno__row__index BETWEEN %d AND %d
+) $qalias WHERE $idx_name BETWEEN %u AND %u
 
 EOS
 
@@ -86,233 +219,277 @@
 
 # Informix specific limit, almost like LIMIT/OFFSET
 sub _SkipFirst {
-  my ($self, $sql, $order, $rows, $offset) = @_;
+  my ($self, $sql, $rs_attrs, $rows, $offset) = @_;
 
   $sql =~ s/^ \s* SELECT \s+ //ix
     or croak "Unrecognizable SELECT: $sql";
 
   return sprintf ('SELECT %s%s%s%s',
     $offset
-      ? sprintf ('SKIP %d ', $offset)
+      ? sprintf ('SKIP %u ', $offset)
       : ''
     ,
-    sprintf ('FIRST %d ', $rows),
+    sprintf ('FIRST %u ', $rows),
     $sql,
-    $self->_order_by ($order),
+    $self->_parse_rs_attrs ($rs_attrs),
   );
 }
 
 # Firebird specific limit, reverse of _SkipFirst for Informix
 sub _FirstSkip {
-  my ($self, $sql, $order, $rows, $offset) = @_;
+  my ($self, $sql, $rs_attrs, $rows, $offset) = @_;
 
   $sql =~ s/^ \s* SELECT \s+ //ix
     or croak "Unrecognizable SELECT: $sql";
 
   return sprintf ('SELECT %s%s%s%s',
-    sprintf ('FIRST %d ', $rows),
+    sprintf ('FIRST %u ', $rows),
     $offset
-      ? sprintf ('SKIP %d ', $offset)
+      ? sprintf ('SKIP %u ', $offset)
       : ''
     ,
     $sql,
-    $self->_order_by ($order),
+    $self->_parse_rs_attrs ($rs_attrs),
   );
 }
 
-# Crappy Top based Limit/Offset support. Legacy from MSSQL.
-sub _Top {
-  my ( $self, $sql, $order, $rows, $offset ) = @_;
+# WhOracle limits
+sub _RowNum {
+  my ( $self, $sql, $rs_attrs, $rows, $offset ) = @_;
 
-  # mangle the input sql so it can be properly aliased in the outer queries
-  $sql =~ s/^ \s* SELECT \s+ (.+?) \s+ (?=FROM)//ix
+  # mangle the input sql as we will be replacing the selector
+  $sql =~ s/^ \s* SELECT \s+ .+? \s+ (?= \b FROM \b )//ix
     or croak "Unrecognizable SELECT: $sql";
-  my $sql_select = $1;
-  my @sql_select = split (/\s*,\s*/, $sql_select);
 
-  # we can't support subqueries (in fact MSSQL can't) - croak
-  if (@sql_select != @{$self->{_dbic_rs_attrs}{select}}) {
-    croak (sprintf (
-      'SQL SELECT did not parse cleanly - retrieved %d comma separated elements, while '
-    . 'the resultset select attribure contains %d elements: %s',
-      scalar @sql_select,
-      scalar @{$self->{_dbic_rs_attrs}{select}},
-      $sql_select,
-    ));
-  }
+  my ($insel, $outsel) = $self->_subqueried_limit_attrs ($rs_attrs);
 
-  my $name_sep = $self->name_sep || '.';
-  my $esc_name_sep = "\Q$name_sep\E";
-  my $col_re = qr/ ^ (?: (.+) $esc_name_sep )? ([^$esc_name_sep]+) $ /x;
+  my $qalias = $self->_quote ($rs_attrs->{alias});
+  my $idx_name = $self->_quote ('rownum__index');
+  my $order_group_having = $self->_parse_rs_attrs($rs_attrs);
 
-  my $rs_alias = $self->{_dbic_rs_attrs}{alias};
-  my $quoted_rs_alias = $self->_quote ($rs_alias);
+  $sql = sprintf (<<EOS, $offset + 1, $offset + $rows, );
 
-  # construct the new select lists, rename(alias) some columns if necessary
-  my (@outer_select, @inner_select, %seen_names, %col_aliases, %outer_col_aliases);
+SELECT $outsel FROM (
+  SELECT $outsel, ROWNUM $idx_name FROM (
+    SELECT $insel ${sql}${order_group_having}
+  ) $qalias
+) $qalias WHERE $idx_name BETWEEN %u AND %u
 
-  for (@{$self->{_dbic_rs_attrs}{select}}) {
-    next if ref $_;
-    my ($table, $orig_colname) = ( $_ =~ $col_re );
-    next unless $table;
-    $seen_names{$orig_colname}++;
-  }
+EOS
 
-  for my $i (0 .. $#sql_select) {
+  $sql =~ s/\s*\n\s*/ /g;   # easier to read in the debugger
+  return $sql;
+}
 
-    my $colsel_arg = $self->{_dbic_rs_attrs}{select}[$i];
-    my $colsel_sql = $sql_select[$i];
+# Crappy Top based Limit/Offset support. Legacy for MSSQL < 2005
+sub _Top {
+  my ( $self, $sql, $rs_attrs, $rows, $offset ) = @_;
 
-    # this may or may not work (in case of a scalarref or something)
-    my ($table, $orig_colname) = ( $colsel_arg =~ $col_re );
+  # mangle the input sql as we will be replacing the selector
+  $sql =~ s/^ \s* SELECT \s+ .+? \s+ (?= \b FROM \b )//ix
+    or croak "Unrecognizable SELECT: $sql";
 
-    my $quoted_alias;
-    # do not attempt to understand non-scalar selects - alias numerically
-    if (ref $colsel_arg) {
-      $quoted_alias = $self->_quote ('column_' . (@inner_select + 1) );
-    }
-    # column name seen more than once - alias it
-    elsif ($orig_colname &&
-          ($seen_names{$orig_colname} && $seen_names{$orig_colname} > 1) ) {
-      $quoted_alias = $self->_quote ("${table}__${orig_colname}");
-    }
+  # get selectors
+  my ($in_sel, $out_sel, $alias_map, $extra_order_sel)
+    = $self->_subqueried_limit_attrs ($rs_attrs);
 
-    # we did rename - make a record and adjust
-    if ($quoted_alias) {
-      # alias inner
-      push @inner_select, "$colsel_sql AS $quoted_alias";
+  my $requested_order = delete $rs_attrs->{order_by};
 
-      # push alias to outer
-      push @outer_select, $quoted_alias;
+  my $order_by_requested = $self->_order_by ($requested_order);
 
-      # Any aliasing accumulated here will be considered
-      # both for inner and outer adjustments of ORDER BY
-      $self->__record_alias (
-        \%col_aliases,
-        $quoted_alias,
-        $colsel_arg,
-        $table ? $orig_colname : undef,
-      );
+  # make up an order unless supplied
+  my $inner_order = ($order_by_requested
+    ? $requested_order
+    : [ map
+      { join ('', $rs_attrs->{alias}, $self->{name_sep}||'.', $_ ) }
+      ( $rs_attrs->{_rsroot_source_handle}->resolve->_pri_cols )
+    ]
+  );
+
+  my ($order_by_inner, $order_by_reversed);
+
+  # localise as we already have all the bind values we need
+  {
+    local $self->{order_bind};
+    $order_by_inner = $self->_order_by ($inner_order);
+
+    my @out_chunks;
+    for my $ch ($self->_order_by_chunks ($inner_order)) {
+      $ch = $ch->[0] if ref $ch eq 'ARRAY';
+
+      $ch =~ s/\s+ ( ASC|DESC ) \s* $//ix;
+      my $dir = uc ($1||'ASC');
+
+      push @out_chunks, \join (' ', $ch, $dir eq 'ASC' ? 'DESC' : 'ASC' );
     }
 
-    # otherwise just leave things intact inside, and use the abbreviated one outside
-    # (as we do not have table names anymore)
-    else {
-      push @inner_select, $colsel_sql;
+    $order_by_reversed = $self->_order_by (\@out_chunks);
+  }
 
-      my $outer_quoted = $self->_quote ($orig_colname);  # it was not a duplicate so should just work
-      push @outer_select, $outer_quoted;
-      $self->__record_alias (
-        \%outer_col_aliases,
-        $outer_quoted,
-        $colsel_arg,
-        $table ? $orig_colname : undef,
+  # this is the order supplement magic
+  my $mid_sel = $out_sel;
+  if ($extra_order_sel) {
+    for my $extra_col (sort
+      { $extra_order_sel->{$a} cmp $extra_order_sel->{$b} }
+      keys %$extra_order_sel
+    ) {
+      $in_sel .= sprintf (', %s AS %s',
+        $extra_col,
+        $extra_order_sel->{$extra_col},
       );
+
+      $mid_sel .= ', ' . $extra_order_sel->{$extra_col};
     }
   }
 
-  my $outer_select = join (', ', @outer_select );
-  my $inner_select = join (', ', @inner_select );
+  # and this is order re-alias magic
+  for my $map ($extra_order_sel, $alias_map) {
+    for my $col (keys %$map) {
+      my $re_col = quotemeta ($col);
+      $_ =~ s/$re_col/$map->{$col}/
+        for ($order_by_reversed, $order_by_requested);
+    }
+  }
 
-  %outer_col_aliases = (%outer_col_aliases, %col_aliases);
+  # generate the rest of the sql
+  my $grpby_having = $self->_parse_rs_attrs ($rs_attrs);
 
-  # deal with order
-  croak '$order supplied to SQLAHacks limit emulators must be a hash'
-    if (ref $order ne 'HASH');
+  my $quoted_rs_alias = $self->_quote ($rs_attrs->{alias});
 
-  $order = { %$order }; #copy
+  $sql = sprintf ('SELECT TOP %u %s %s %s %s',
+    $rows + ($offset||0),
+    $in_sel,
+    $sql,
+    $grpby_having,
+    $order_by_inner,
+  );
 
-  my $req_order = $order->{order_by};
+  $sql = sprintf ('SELECT TOP %u %s FROM ( %s ) %s %s',
+    $rows,
+    $mid_sel,
+    $sql,
+    $quoted_rs_alias,
+    $order_by_reversed,
+  ) if $offset;
 
-  # examine normalized version, collapses nesting
-  my $limit_order;
-  if (scalar $self->_order_by_chunks ($req_order)) {
-    $limit_order = $req_order;
-  }
-  else {
-    $limit_order = [ map
-      { join ('', $rs_alias, $name_sep, $_ ) }
-      ( $self->{_dbic_rs_attrs}{_source_handle}->resolve->primary_columns )
-    ];
-  }
+  $sql = sprintf ('SELECT TOP %u %s FROM ( %s ) %s %s',
+    $rows,
+    $out_sel,
+    $sql,
+    $quoted_rs_alias,
+    $order_by_requested,
+  ) if ( ($offset && $order_by_requested) || ($mid_sel ne $out_sel) );
 
-  my ( $order_by_inner, $order_by_outer ) = $self->_order_directions($limit_order);
-  my $order_by_requested = $self->_order_by ($req_order);
+  $sql =~ s/\s*\n\s*/ /g;   # easier to read in the debugger
+  return $sql;
+}
 
-  # generate the rest
-  delete $order->{order_by};
-  my $grpby_having = $self->_order_by ($order);
+# This is the most evil limit "dialect" (more of a hack) for *really*
+# stupid databases. It works by ordering the set by some unique column,
+# and calculating the amount of rows that have a lesser value (thus
+# emulating a RowNum-like index). Of course this implies the set can
+# only be ordered by a single unique column.
+sub _GenericSubQ {
+  my ($self, $sql, $rs_attrs, $rows, $offset) = @_;
 
-  # short circuit for counts - the ordering complexity is needless
-  if ($self->{_dbic_rs_attrs}{-for_count_only}) {
-    return "SELECT TOP $rows $inner_select $sql $grpby_having $order_by_outer";
-  }
+  my $root_rsrc = $rs_attrs->{_rsroot_source_handle}->resolve;
+  my $root_tbl_name = $root_rsrc->name;
 
-  # we can't really adjust the order_by columns, as introspection is lacking
-  # resort to simple substitution
-  for my $col (keys %outer_col_aliases) {
-    for ($order_by_requested, $order_by_outer) {
-      $_ =~ s/\s+$col\s+/ $outer_col_aliases{$col} /g;
-    }
+  # mangle the input sql as we will be replacing the selector
+  $sql =~ s/^ \s* SELECT \s+ .+? \s+ (?= \b FROM \b )//ix
+    or croak "Unrecognizable SELECT: $sql";
+
+  my ($order_by, @rest) = do {
+    local $self->{quote_char};
+    $self->_order_by_chunks ($rs_attrs->{order_by})
+  };
+
+  unless (
+    $order_by
+      &&
+    ! @rest
+      &&
+    ( ! ref $order_by
+        ||
+      ( ref $order_by eq 'ARRAY' and @$order_by == 1 )
+    )
+  ) {
+    croak (
+      'Generic Subquery Limit does not work on resultsets without an order, or resultsets '
+    . 'with complex order criteria (multicolumn and/or functions). Provide a single, '
+    . 'unique-column order criteria.'
+    );
   }
-  for my $col (keys %col_aliases) {
-    $order_by_inner =~ s/\s+$col\s+/ $col_aliases{$col} /g;
-  }
 
+  ($order_by) = @$order_by if ref $order_by;
 
-  my $inner_lim = $rows + $offset;
+  $order_by =~ s/\s+ ( ASC|DESC ) \s* $//ix;
+  my $direction = lc ($1 || 'asc');
 
-  $sql = "SELECT TOP $inner_lim $inner_select $sql $grpby_having $order_by_inner";
+  my ($unq_sort_col) = $order_by =~ /(?:^|\.)([^\.]+)$/;
 
-  if ($offset) {
-    $sql = <<"SQL";
+  my $inf = $root_rsrc->storage->_resolve_column_info (
+    $rs_attrs->{from}, [$order_by, $unq_sort_col]
+  );
 
-    SELECT TOP $rows $outer_select FROM
-    (
-      $sql
-    ) $quoted_rs_alias
-    $order_by_outer
-SQL
+  my $ord_colinfo = $inf->{$order_by} || croak "Unable to determine source of order-criteria '$order_by'";
 
+  if ($ord_colinfo->{-result_source}->name ne $root_tbl_name) {
+    croak "Generic Subquery Limit order criteria can be only based on the root-source '"
+        . $root_rsrc->source_name . "' (aliased as '$rs_attrs->{alias}')";
   }
 
-  if ($order_by_requested) {
-    $sql = <<"SQL";
+  # make sure order column is qualified
+  $order_by = "$rs_attrs->{alias}.$order_by"
+    unless $order_by =~ /^$rs_attrs->{alias}\./;
 
-    SELECT $outer_select FROM
-      ( $sql ) $quoted_rs_alias
-    $order_by_requested
-SQL
-
+  my $is_u;
+  my $ucs = { $root_rsrc->unique_constraints };
+  for (values %$ucs ) {
+    if (@$_ == 1 && "$rs_attrs->{alias}.$_->[0]" eq $order_by) {
+      $is_u++;
+      last;
+    }
   }
+  croak "Generic Subquery Limit order criteria column '$order_by' must be unique (no unique constraint found)"
+    unless $is_u;
 
-  $sql =~ s/\s*\n\s*/ /g; # parsing out multiline statements is harder than a single line
-  return $sql;
-}
+  my ($in_sel, $out_sel, $alias_map, $extra_order_sel)
+    = $self->_subqueried_limit_attrs ($rs_attrs);
 
-# action at a distance to shorten Top code above
-sub __record_alias {
-  my ($self, $register, $alias, $fqcol, $col) = @_;
+  my $cmp_op = $direction eq 'desc' ? '>' : '<';
+  my $count_tbl_alias = 'rownum__emulation';
 
-  # record qualified name
-  $register->{$fqcol} = $alias;
-  $register->{$self->_quote($fqcol)} = $alias;
+  my $order_group_having = $self->_parse_rs_attrs($rs_attrs);
 
-  return unless $col;
+  # add the order supplement (if any) as this is what will be used for the outer WHERE
+  $in_sel .= ", $_" for keys %{$extra_order_sel||{}};
 
-  # record unqualified name, undef (no adjustment) if a duplicate is found
-  if (exists $register->{$col}) {
-    $register->{$col} = undef;
-  }
-  else {
-    $register->{$col} = $alias;
-  }
+  $sql = sprintf (<<EOS,
+SELECT $out_sel
+  FROM (
+    SELECT $in_sel ${sql}${order_group_having}
+  ) %s
+WHERE ( SELECT COUNT(*) FROM %s %s WHERE %s $cmp_op %s ) %s
+EOS
+    ( map { $self->_quote ($_) } (
+      $rs_attrs->{alias},
+      $root_tbl_name,
+      $count_tbl_alias,
+      "$count_tbl_alias.$unq_sort_col",
+      $order_by,
+    )),
+    $offset
+      ? sprintf ('BETWEEN %u AND %u', $offset, $offset + $rows - 1)
+      : sprintf ('< %u', $rows )
+    ,
+  );
 
-  $register->{$self->_quote($col)} = $register->{$col};
+  $sql =~ s/\s*\n\s*/ /g;   # easier to read in the debugger
+  return $sql;
 }
 
 
-
 # While we're at it, this should make LIMIT queries more efficient,
 #  without digging into things too deeply
 sub _find_syntax {
@@ -320,14 +497,10 @@
   return $self->{_cached_syntax} ||= $self->SUPER::_find_syntax($syntax);
 }
 
-my $for_syntax = {
-  update => 'FOR UPDATE',
-  shared => 'FOR SHARE',
-};
 # Quotes table names, handles "limit" dialects (e.g. where rownum between x and
-# y), supports SELECT ... FOR UPDATE and SELECT ... FOR SHARE.
+# y)
 sub select {
-  my ($self, $table, $fields, $where, $order, @rest) = @_;
+  my ($self, $table, $fields, $where, $rs_attrs, @rest) = @_;
 
   $self->{"${_}_bind"} = [] for (qw/having from order/);
 
@@ -335,18 +508,13 @@
     $table = $self->_quote($table);
   }
 
-  local $self->{rownum_hack_count} = 1
-    if (defined $rest[0] && $self->{limit_dialect} eq 'RowNum');
   @rest = (-1) unless defined $rest[0];
   croak "LIMIT 0 Does Not Compute" if $rest[0] == 0;
     # and anyway, SQL::Abstract::Limit will cause a barf if we don't first
+
   my ($sql, @where_bind) = $self->SUPER::select(
-    $table, $self->_recurse_fields($fields), $where, $order, @rest
+    $table, $self->_recurse_fields($fields), $where, $rs_attrs, @rest
   );
-  if (my $for = delete $self->{_dbic_rs_attrs}{for}) {
-    $sql .= " $for_syntax->{$for}" if $for_syntax->{$for};
-  }
-
   return wantarray ? ($sql, @{$self->{from_bind}}, @where_bind, @{$self->{having_bind}}, @{$self->{order_bind}} ) : $sql;
 }
 
@@ -390,35 +558,36 @@
 
 sub _emulate_limit {
   my $self = shift;
+  # my ( $syntax, $sql, $order, $rows, $offset ) = @_;
+
   if ($_[3] == -1) {
-    return $_[1].$self->_order_by($_[2]);
+    return $_[1] . $self->_parse_rs_attrs($_[2]);
   } else {
     return $self->SUPER::_emulate_limit(@_);
   }
 }
 
 sub _recurse_fields {
-  my ($self, $fields, $params) = @_;
+  my ($self, $fields) = @_;
   my $ref = ref $fields;
   return $self->_quote($fields) unless $ref;
   return $$fields if $ref eq 'SCALAR';
 
   if ($ref eq 'ARRAY') {
-    return join(', ', map {
-      $self->_recurse_fields($_)
-        .(exists $self->{rownum_hack_count} && !($params && $params->{no_rownum_hack})
-          ? ' AS col'.$self->{rownum_hack_count}++
-          : '')
-      } @$fields);
+    return join(', ', map { $self->_recurse_fields($_) } @$fields);
   }
   elsif ($ref eq 'HASH') {
-    my %hash = %$fields;
+    my %hash = %$fields;  # shallow copy
 
     my $as = delete $hash{-as};   # if supplied
 
-    my ($func, $args) = each %hash;
-    delete $hash{$func};
+    my ($func, $args, @toomany) = %hash;
 
+    # there should be only one pair
+    if (@toomany) {
+      croak "Malformed select argument - too many keys in hash: " . join (',', keys %$fields );
+    }
+
     if (lc ($func) eq 'distinct' && ref $args eq 'ARRAY' && @$args > 1) {
       croak (
         'The select => { distinct => ... } syntax is not supported for multiple columns.'
@@ -435,11 +604,6 @@
         : ''
     );
 
-    # there should be nothing left
-    if (keys %hash) {
-      croak "Malformed select argument - too many keys in hash: " . join (',', keys %$fields );
-    }
-
     return $select;
   }
   # Is the second check absolutely necessary?
@@ -451,34 +615,55 @@
   }
 }
 
-sub _order_by {
+my $for_syntax = {
+  update => 'FOR UPDATE',
+  shared => 'FOR SHARE',
+};
+
+# this used to be a part of _order_by but is broken out for clarity.
+# What we have been doing forever is hijacking the $order arg of
+# SQLA::select to pass in arbitrary pieces of data (first the group_by,
+# then pretty much the entire resultset attr-hash), as more and more
+# things in the SQLA space need to have more info about the $rs they
+# create SQL for. The alternative would be to keep expanding the
+# signature of _select with more and more positional parameters, which
+# is just gross. All hail SQLA2!
+sub _parse_rs_attrs {
   my ($self, $arg) = @_;
 
-  if (ref $arg eq 'HASH' and keys %$arg and not grep { $_ =~ /^-(?:desc|asc)/i } keys %$arg ) {
+  my $sql = '';
 
-    my $ret = '';
+  if (my $g = $self->_recurse_fields($arg->{group_by}) ) {
+    $sql .= $self->_sqlcase(' group by ') . $g;
+  }
 
-    if (my $g = $self->_recurse_fields($arg->{group_by}, { no_rownum_hack => 1 }) ) {
-      $ret = $self->_sqlcase(' group by ') . $g;
-    }
+  if (defined $arg->{having}) {
+    my ($frag, @bind) = $self->_recurse_where($arg->{having});
+    push(@{$self->{having_bind}}, @bind);
+    $sql .= $self->_sqlcase(' having ') . $frag;
+  }
 
-    if (defined $arg->{having}) {
-      my ($frag, @bind) = $self->_recurse_where($arg->{having});
-      push(@{$self->{having_bind}}, @bind);
-      $ret .= $self->_sqlcase(' having ').$frag;
-    }
+  if (defined $arg->{order_by}) {
+    $sql .= $self->_order_by ($arg->{order_by});
+  }
 
-    if (defined $arg->{order_by}) {
-      my ($frag, @bind) = $self->SUPER::_order_by($arg->{order_by});
-      push(@{$self->{order_bind}}, @bind);
-      $ret .= $frag;
-    }
+  if (my $for = $arg->{for}) {
+    $sql .= " $for_syntax->{$for}" if $for_syntax->{$for};
+  }
 
-    return $ret;
+  return $sql;
+}
+
+sub _order_by {
+  my ($self, $arg) = @_;
+
+  # check that we are not called in legacy mode (order_by as 4th argument)
+  if (ref $arg eq 'HASH' and not grep { $_ =~ /^-(?:desc|asc)/i } keys %$arg ) {
+    return $self->_parse_rs_attrs ($arg);
   }
   else {
     my ($sql, @bind) = $self->SUPER::_order_by ($arg);
-    push(@{$self->{order_bind}}, @bind);
+    push @{$self->{order_bind}}, @bind;
     return $sql;
   }
 }
@@ -596,26 +781,12 @@
   }
 }
 
-sub _quote {
-  my ($self, $label) = @_;
-  return '' unless defined $label;
-  return $$label if ref($label) eq 'SCALAR';
-  return "*" if $label eq '*';
-  return $label unless $self->{quote_char};
-  if(ref $self->{quote_char} eq "ARRAY"){
-    return $self->{quote_char}->[0] . $label . $self->{quote_char}->[1]
-      if !defined $self->{name_sep};
-    my $sep = $self->{name_sep};
-    return join($self->{name_sep},
-        map { $self->{quote_char}->[0] . $_ . $self->{quote_char}->[1]  }
-       split(/\Q$sep\E/,$label));
-  }
-  return $self->SUPER::_quote($label);
-}
-
 sub limit_dialect {
     my $self = shift;
-    $self->{limit_dialect} = shift if @_;
+    if (@_) {
+      $self->{limit_dialect} = shift;
+      undef $self->{_cached_syntax};
+    }
     return $self->{limit_dialect};
 }
 

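A note for readers following the SQLAHacks hunks above: the new _parse_rs_attrs renders every trailing resultset attribute (group_by, having, order_by, for) in a single pass, with HAVING binds collected in $self->{having_bind} and ORDER BY binds in $self->{order_bind}. A minimal usage sketch of the internal method, assuming a sql_maker obtained from a connected $schema (illustrative only, not shipped code):

  my $sqla = $schema->storage->sql_maker;   # DBIx::Class::SQLAHacks instance

  my $tail = $sqla->_parse_rs_attrs({
    group_by => [ 'me.artist' ],
    order_by => { -desc => 'me.year' },
    for      => 'update',
  });

  # $tail is now roughly:
  #   " GROUP BY me.artist ORDER BY me.year DESC FOR UPDATE"
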
Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Schema/Versioned.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Schema/Versioned.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Schema/Versioned.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -182,6 +182,7 @@
 
 use Carp::Clan qw/^DBIx::Class/;
 use Time::HiRes qw/gettimeofday/;
+use Try::Tiny;
 
 __PACKAGE__->mk_classdata('_filedata');
 __PACKAGE__->mk_classdata('upgrade_directory');
@@ -503,7 +504,7 @@
     my ($self, $rs) = @_;
 
     my $vtable = $self->{vschema}->resultset('Table');
-    my $version = eval {
+    my $version = try {
       $vtable->search({}, { order_by => { -desc => 'installed' }, rows => 1 } )
               ->get_column ('version')
                ->next;
@@ -558,24 +559,25 @@
 sub connection {
   my $self = shift;
   $self->next::method(@_);
-  $self->_on_connect($_[3]);
+  $self->_on_connect();
   return $self;
 }
 
 sub _on_connect
 {
-  my ($self, $args) = @_;
+  my ($self) = @_;
 
-  $args = {} unless $args;
+  my $info = $self->storage->connect_info;
+  my $args = $info->[-1];
 
-  $self->{vschema} = DBIx::Class::Version->connect(@{$self->storage->connect_info()});
+  $self->{vschema} = DBIx::Class::Version->connect(@$info);
   my $vtable = $self->{vschema}->resultset('Table');
 
   # useful when connecting from scripts etc
   return if ($args->{ignore_version} || ($ENV{DBIC_NO_VERSION_CHECK} && !exists $args->{ignore_version}));
 
   # check for legacy versions table and move to new if exists
-  my $vschema_compat = DBIx::Class::VersionCompat->connect(@{$self->storage->connect_info()});
+  my $vschema_compat = DBIx::Class::VersionCompat->connect(@$info);
   unless ($self->_source_exists($vtable)) {
     my $vtable_compat = $vschema_compat->resultset('TableCompat');
     if ($self->_source_exists($vtable_compat)) {
@@ -723,12 +725,9 @@
 {
     my ($self, $rs) = @_;
 
-    my $c = eval {
-        $rs->search({ 1, 0 })->count;
-    };
-    return 0 if $@ || !defined $c;
+    my $c = try { $rs->search({ 1, 0 })->count };
 
-    return 1;
+    return (defined $c) ? 1 : 0;
 }
 
 1;

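A note on the Try::Tiny conversion in Versioned.pm above: when no catch block is given, try{} swallows the exception and returns the block's value (or undef if it died), which is exactly what the simplified _source_exists relies on. A tiny self-contained illustration, where may_fail() is only a stand-in for any call that can throw:

  use Try::Tiny;

  sub may_fail { die "boom\n" if $_[0]; return 42 }

  my $ok     = try { may_fail(0) };   # 42
  my $failed = try { may_fail(1) };   # undef - exception discarded

  my $exists = defined $failed ? 1 : 0;   # same shape as _source_exists
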
Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Schema.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Schema.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Schema.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -5,6 +5,7 @@
 
 use DBIx::Class::Exception;
 use Carp::Clan qw/^DBIx::Class/;
+use Try::Tiny;
 use Scalar::Util ();
 use File::Spec;
 use Sub::Name ();
@@ -271,6 +272,10 @@
       }
       elsif($rs_class ||= $default_resultset_class) {
         $class->ensure_class_loaded($rs_class);
+        if(!$rs_class->isa("DBIx::Class::ResultSet")) {
+            carp "load_namespaces found ResultSet class $rs_class that does not subclass DBIx::Class::ResultSet";
+        }
+
         $class->_ns_get_rsrc_instance ($result_class)->resultset_class($rs_class);
       }
 
@@ -813,10 +818,14 @@
 
   $storage_class = 'DBIx::Class::Storage'.$storage_class
     if $storage_class =~ m/^::/;
-  eval { $self->ensure_class_loaded ($storage_class) };
-  $self->throw_exception(
-    "No arguments to load_classes and couldn't load ${storage_class} ($@)"
-  ) if $@;
+  try {
+    $self->ensure_class_loaded ($storage_class);
+  }
+  catch {
+    $self->throw_exception(
+      "No arguments to load_classes and couldn't load ${storage_class} ($_)"
+    );
+  };
   my $storage = $storage_class->new($self=>$args);
   $storage->connect_info(\@info);
   $self->storage($storage);
@@ -1396,10 +1405,13 @@
       unless ($INC{"DBIx/Class/CDBICompat.pm"} || $warn++);
 
     my $base = 'DBIx::Class::ResultSetProxy';
-    eval "require ${base};";
-    $self->throw_exception
-      ("No arguments to load_classes and couldn't load ${base} ($@)")
-        if $@;
+    try {
+      eval "require ${base};"
+    }
+    catch {
+      $self->throw_exception
+        ("No arguments to load_classes and couldn't load ${base} ($_)")
+    };
 
     if ($self eq $target) {
       # Pathological case, largely caused by the docs on early C::M::DBIC::Plain

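The load_namespaces check added above warns when a ResultSet class does not inherit from DBIx::Class::ResultSet (exercised by the new t/lib/DBICNSTest/ResultSet/D.pm). For reference, a well-formed custom ResultSet class looks roughly like this - the package name and the 'albums' relationship are hypothetical:

  package MyApp::Schema::ResultSet::Artist;

  use strict;
  use warnings;
  use base 'DBIx::Class::ResultSet';   # what the new isa() check looks for

  sub with_albums {
    my $self = shift;
    return $self->search({}, { prefetch => 'albums' });
  }

  1;
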
Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/ADO.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/ADO.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/ADO.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -2,6 +2,7 @@
     DBIx::Class::Storage::DBI::ADO;
 
 use base 'DBIx::Class::Storage::DBI';
+use Try::Tiny;
 
 sub _rebless {
   my $self = shift;
@@ -10,20 +11,18 @@
 # XXX This should be using an OpenSchema method of some sort, but I don't know
 # how.
 # Current version is stolen from Sybase.pm
-  my $dbtype = eval {
-    @{$self->_get_dbh
+  try {
+    my $dbtype = @{$self->_get_dbh
       ->selectrow_arrayref(qq{sp_server_info \@attribute_id=1})
-    }[2]
-  };
+    }[2];
 
-  unless ($@) {
     $dbtype =~ s/\W/_/gi;
     my $subclass = "DBIx::Class::Storage::DBI::ADO::${dbtype}";
     if ($self->load_optional_class($subclass) && !$self->isa($subclass)) {
       bless $self, $subclass;
       $self->_rebless;
     }
-  }
+  };
 }
 
 # Here I was just experimenting with ADO cursor types, left in as a comment in

Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/Cursor.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/Cursor.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/Cursor.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -5,6 +5,8 @@
 
 use base qw/DBIx::Class::Cursor/;
 
+use Try::Tiny;
+
 __PACKAGE__->mk_group_accessors('simple' =>
     qw/sth/
 );
@@ -150,7 +152,8 @@
   my ($self) = @_;
 
   # No need to care about failures here
-  eval { $self->sth->finish if $self->sth && $self->sth->{Active} };
+  try { $self->sth->finish }
+    if $self->sth && $self->sth->{Active};
   $self->_soft_reset;
   return undef;
 }
@@ -176,8 +179,8 @@
   my ($self) = @_;
 
   # None of the reasons this would die matter if we're in DESTROY anyways
-  local $@;
-  eval { $self->sth->finish if $self->sth && $self->sth->{Active} };
+  try { $self->sth->finish }
+    if $self->sth && $self->sth->{Active};
 }
 
 1;

Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/InterBase.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/InterBase.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/InterBase.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -5,6 +5,7 @@
 use base qw/DBIx::Class::Storage::DBI/;
 use mro 'c3';
 use List::Util();
+use Try::Tiny;
 
 =head1 NAME
 
@@ -125,11 +126,12 @@
   local $dbh->{RaiseError} = 1;
   local $dbh->{PrintError} = 0;
 
-  eval {
+  return try {
     $dbh->do('select 1 from rdb$database');
+    1;
+  } catch {
+    0;
   };
-
-  return $@ ? 0 : 1;
 }
 
 # We want dialect 3 for new features and quoting to work, DBD::InterBase uses

Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/MSSQL.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/MSSQL.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/MSSQL.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -5,6 +5,7 @@
 
 use base qw/DBIx::Class::Storage::DBI::UniqueIdentifier/;
 use mro 'c3';
+use Try::Tiny;
 
 use List::Util();
 
@@ -23,13 +24,13 @@
   );
 
   my $dbh = $self->_get_dbh;
-  eval { $dbh->do ($sql) };
-  if ($@) {
+  try { $dbh->do ($sql) }
+  catch {
     $self->throw_exception (sprintf "Error executing '%s': %s",
       $sql,
       $dbh->errstr,
     );
-  }
+  };
 }
 
 sub _unset_identity_insert {
@@ -128,7 +129,7 @@
 
     # this should bring back the result of SELECT SCOPE_IDENTITY() we tacked
     # on in _prep_for_execute above
-    my ($identity) = eval { $sth->fetchrow_array };
+    my ($identity) = try { $sth->fetchrow_array };
 
     # SCOPE_IDENTITY failed, but we can do something else
     if ( (! $identity) && $self->_identity_method) {
@@ -158,7 +159,11 @@
 
   # see if this is an ordered subquery
   my $attrs = $_[3];
-  if ( scalar $self->_parse_order_by ($attrs->{order_by}) ) {
+  if (
+    $sql !~ /^ \s* SELECT \s+ TOP \s+ \d+ \s+ /xi
+      &&
+    scalar $self->_parse_order_by ($attrs->{order_by}) 
+  ) {
     $self->throw_exception(
       'An ordered subselect encountered - this is not safe! Please see "Ordered Subselects" in DBIx::Class::Storage::DBI::MSSQL
     ') unless $attrs->{unsafe_subselect_ok};
@@ -201,11 +206,24 @@
 
   unless ($self->_sql_maker) {
     unless ($self->{_sql_maker_opts}{limit_dialect}) {
+      my $have_rno = 0;
 
-      my $version = $self->_server_info->{normalized_dbms_version} || 0;
+      if (exists $self->_server_info->{normalized_dbms_version}) {
+        $have_rno = 1 if $self->_server_info->{normalized_dbms_version} >= 9;
+      }
+      else {
+        # User is connecting via DBD::Sybase and has no permission to run
+        # stored procedures like xp_msver, or version detection failed for some
+        # other reason.
+        # So, we use a query to check if RNO is implemented.
+        try {
+          $self->_get_dbh->selectrow_array('SELECT row_number() OVER (ORDER BY rand())');
+          $have_rno = 1;
+        };
+      }
 
       $self->{_sql_maker_opts} = {
-        limit_dialect => ($version >= 9 ? 'RowNumberOver' : 'Top'),
+        limit_dialect => ($have_rno ? 'RowNumberOver' : 'Top'),
         %{$self->{_sql_maker_opts}||{}}
       };
     }
@@ -224,11 +242,12 @@
   local $dbh->{RaiseError} = 1;
   local $dbh->{PrintError} = 0;
 
-  eval {
+  return try {
     $dbh->do('select 1');
+    1;
+  } catch {
+    0;
   };
-
-  return $@ ? 0 : 1;
 }
 
 package # hide from PAUSE

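Regarding the MSSQL limit-dialect selection above: when the server version cannot be determined (e.g. connecting via DBD::Sybase without permission to run xp_msver), the storage now probes for ROW_NUMBER() OVER support directly. The probe, shown stand-alone and assuming an already connected $dbh (a sketch, not the shipped code):

  use Try::Tiny;

  my $have_rno = 0;
  try {
    # expected to throw on pre-2005 servers, succeed on 2005+ where RNO exists
    $dbh->selectrow_array('SELECT row_number() OVER (ORDER BY rand())');
    $have_rno = 1;
  };

  my $limit_dialect = $have_rno ? 'RowNumberOver' : 'Top';
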
Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/ODBC/Microsoft_SQL_Server.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/ODBC/Microsoft_SQL_Server.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/ODBC/Microsoft_SQL_Server.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -7,6 +7,7 @@
 
 use List::Util();
 use Scalar::Util ();
+use Try::Tiny;
 
 __PACKAGE__->mk_group_accessors(simple => qw/
   _using_dynamic_cursors
@@ -84,18 +85,17 @@
   my $self = shift;
   my $dbh  = $self->_get_dbh;
 
-  eval {
+  try {
     local $dbh->{RaiseError} = 1;
     local $dbh->{PrintError} = 0;
     $dbh->do('SELECT @@IDENTITY');
-  };
-  if ($@) {
+  } catch {
     $self->throw_exception (<<'EOF');
 
 Your drivers do not seem to support dynamic cursors (odbc_cursortype => 2),
 if you're using FreeTDS, make sure to set tds_version to 8.0 or greater.
 EOF
-  }
+  };
 
   $self->_using_dynamic_cursors(1);
   $self->_identity_method('@@identity');

Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/ODBC.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/ODBC.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/ODBC.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -4,21 +4,23 @@
 
 use base qw/DBIx::Class::Storage::DBI/;
 use mro 'c3';
+use Try::Tiny;
 
 sub _rebless {
-    my ($self) = @_;
+  my ($self) = @_;
 
-    my $dbtype = eval { $self->_get_dbh->get_info(17) };
+  try {
+    my $dbtype = $self->_get_dbh->get_info(17);
 
-    unless ( $@ ) {
-        # Translate the backend name into a perl identifier
-        $dbtype =~ s/\W/_/gi;
-        my $subclass = "DBIx::Class::Storage::DBI::ODBC::${dbtype}";
-        if ($self->load_optional_class($subclass) && !$self->isa($subclass)) {
-            bless $self, $subclass;
-            $self->_rebless;
-        }
+    # Translate the backend name into a perl identifier
+    $dbtype =~ s/\W/_/gi;
+    my $subclass = "DBIx::Class::Storage::DBI::ODBC::${dbtype}";
+
+    if ($self->load_optional_class($subclass) && !$self->isa($subclass)) {
+      bless $self, $subclass;
+      $self->_rebless;
     }
+  };
 }
 
 1;

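On the ODBC _rebless rewrite above: $dbh->get_info(17) is SQL_DBMS_NAME, which gets mangled into a package name and, if loadable, used to rebless the storage into a driver-specific subclass - the same pattern the ADO and Sybase storages use. A condensed sketch of the idea, with $dbh and $storage standing in for the real objects:

  use Try::Tiny;

  try {
    my $dbtype = $dbh->get_info(17);   # SQL_DBMS_NAME, e.g. 'Microsoft SQL Server'
    $dbtype =~ s/\W/_/g;               # -> 'Microsoft_SQL_Server'
    my $subclass = "DBIx::Class::Storage::DBI::ODBC::$dbtype";

    if ($storage->load_optional_class($subclass) && !$storage->isa($subclass)) {
      bless $storage, $subclass;       # switch to the driver-specific class
      $storage->_rebless;              # let the subclass refine further
    }
  };
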
Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/Oracle/Generic.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/Oracle/Generic.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/Oracle/Generic.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -4,6 +4,7 @@
 use warnings;
 use Scope::Guard ();
 use Context::Preserve ();
+use Try::Tiny;
 
 =head1 NAME
 
@@ -39,7 +40,7 @@
   $sqltargs->{quote_table_names} = $quote_char ? 1 : 0;
   $sqltargs->{quote_field_names} = $quote_char ? 1 : 0;
 
-  my $oracle_version = eval { $self->_get_dbh->get_info(18) };
+  my $oracle_version = try { $self->_get_dbh->get_info(18) };
 
   $sqltargs->{producer_args}{oracle_version} = $oracle_version;
 
@@ -85,7 +86,7 @@
     {
       $schema ? (owner => $schema) : (),
       table_name => $table || $source_name,
-      triggering_event => 'INSERT',
+      triggering_event => { -like => '%INSERT%' },
       status => 'ENABLED',
      },
   );
@@ -112,45 +113,51 @@
   local $dbh->{RaiseError} = 1;
   local $dbh->{PrintError} = 0;
 
-  eval {
+  return try {
     $dbh->do('select 1 from dual');
+    1;
+  } catch {
+    0;
   };
-
-  return $@ ? 0 : 1;
 }
 
 sub _dbh_execute {
   my $self = shift;
   my ($dbh, $op, $extra_bind, $ident, $bind_attributes, @args) = @_;
 
-  my $wantarray = wantarray;
+  my (@res, $tried);
+  my $wantarray = wantarray();
+  my $next = $self->next::can;
+  do {
+    try {
+      my $exec = sub { $self->$next($dbh, $op, $extra_bind, $ident, $bind_attributes, @args) };
 
-  my (@res, $exception, $retried);
+      if (!defined $wantarray) {
+        $exec->();
+      }
+      elsif (! $wantarray) {
+        $res[0] = $exec->();
+      }
+      else {
+        @res = $exec->();
+      }
 
-  RETRY: {
-    do {
-      eval {
-        if ($wantarray) {
-          @res    = $self->next::method(@_);
-        } else {
-          $res[0] = $self->next::method(@_);
-        }
-      };
-      $exception = $@;
-      if ($exception =~ /ORA-01003/) {
+      $tried++;
+    }
+    catch {
+      if (! $tried and $_ =~ /ORA-01003/) {
         # ORA-01003: no statement parsed (someone changed the table somehow,
         # invalidating your cursor.)
         my ($sql, $bind) = $self->_prep_for_execute($op, $extra_bind, $ident, \@args);
         delete $dbh->{CachedKids}{$sql};
-      } else {
-        last RETRY;
       }
-    } while (not $retried++);
-  }
+      else {
+        $self->throw_exception($_);
+      }
+    };
+  } while (! $tried++);
 
-  $self->throw_exception($exception) if $exception;
-
-  $wantarray ? @res : $res[0]
+  return $wantarray ? @res : $res[0];
 }
 
 =head2 get_autoinc_seq
@@ -165,19 +172,6 @@
   $self->dbh_do('_dbh_get_autoinc_seq', $source, $col);
 }
 
-=head2 columns_info_for
-
-This wraps the superclass version of this method to force table
-names to uppercase
-
-=cut
-
-sub columns_info_for {
-  my ($self, $table) = @_;
-
-  $self->next::method($table);
-}
-
 =head2 datetime_parser_type
 
 This sets the proper DateTime::Format module for use with

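The Oracle _dbh_execute rewrite above keeps the old behaviour - retry exactly once when the cached cursor was invalidated (ORA-01003), rethrow everything else - but phrases it with Try::Tiny. The general shape, with $work standing in for the actual statement execution (a sketch, not the shipped code):

  use Try::Tiny;

  my ($tried, @res);
  do {
    try {
      @res = $work->();
      $tried++;                 # success - terminates the loop below
    }
    catch {
      die $_ if ( $tried or $_ !~ /ORA-01003/ );
      # stale cursor on the first attempt: the real code deletes the
      # matching CachedKids entry here, then the loop runs once more
    };
  } while (! $tried++);
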
Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/Oracle.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/Oracle.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/Oracle.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -5,23 +5,24 @@
 
 use base qw/DBIx::Class::Storage::DBI/;
 use mro 'c3';
+use Try::Tiny;
 
 sub _rebless {
     my ($self) = @_;
 
-    my $version = eval { $self->_get_dbh->get_info(18); };
+    try {
+      my $version = $self->_get_dbh->get_info(18);
 
-    if ( !$@ ) {
-        my ($major, $minor, $patchlevel) = split(/\./, $version);
+      my ($major, $minor, $patchlevel) = split(/\./, $version);
 
-        # Default driver
-        my $class = $major <= 8
-          ? 'DBIx::Class::Storage::DBI::Oracle::WhereJoins'
-          : 'DBIx::Class::Storage::DBI::Oracle::Generic';
+      # Default driver
+      my $class = $major <= 8
+        ? 'DBIx::Class::Storage::DBI::Oracle::WhereJoins'
+        : 'DBIx::Class::Storage::DBI::Oracle::Generic';
 
-        $self->ensure_class_loaded ($class);
-        bless $self, $class;
-    }
+      $self->ensure_class_loaded ($class);
+      bless $self, $class;
+    };
 }
 
 1;

Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/Replicated/Pool.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/Replicated/Pool.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/Replicated/Pool.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -8,6 +8,7 @@
 use Carp::Clan qw/^DBIx::Class/;
 use MooseX::Types::Moose qw/Num Int ClassName HashRef/;
 use DBIx::Class::Storage::DBI::Replicated::Types 'DBICStorageDBI';
+use Try::Tiny;
 
 use namespace::clean -except => 'meta';
 
@@ -293,18 +294,16 @@
 sub _safely {
   my ($self, $replicant, $name, $code) = @_;
 
-  eval {
-    $code->()
-  };
-  if ($@) {
+  return try {
+    $code->();
+    1;
+  } catch {
     $replicant->debugobj->print(sprintf(
       "Exception trying to $name for replicant %s, error is %s",
-      $replicant->_dbi_connect_info->[0], $@)
+      $replicant->_dbi_connect_info->[0], $_)
     );
-    return undef;
-  }
-
-  return 1;
+    undef;
+  };
 }
 
 =head2 connected_replicants

Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/Replicated/WithDSN.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/Replicated/WithDSN.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/Replicated/WithDSN.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -5,6 +5,7 @@
 requires qw/_query_start/;
 
 use namespace::clean -except => 'meta';
+use Try::Tiny;
 
 =head1 NAME
 
@@ -32,7 +33,7 @@
 around '_query_start' => sub {
   my ($method, $self, $sql, @bind) = @_;
 
-  my $dsn = eval { $self->dsn } || $self->_dbi_connect_info->[0];
+  my $dsn = (try { $self->dsn }) || $self->_dbi_connect_info->[0];
 
   my($op, $rest) = (($sql=~m/^(\w+)(.+)$/),'NOP', 'NO SQL');
   my $storage_type = $self->can('active') ? 'REPLICANT' : 'MASTER';
@@ -41,7 +42,7 @@
     if ((reftype($dsn)||'') ne 'CODE') {
       "$op [DSN_$storage_type=$dsn]$rest";
     }
-    elsif (my $id = eval { $self->id }) {
+    elsif (my $id = try { $self->id }) {
       "$op [$storage_type=$id]$rest";
     }
     else {

Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/Replicated.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/Replicated.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/Replicated.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -16,6 +16,7 @@
 use Scalar::Util 'reftype';
 use Hash::Merge;
 use List::Util qw/min max reduce/;
+use Try::Tiny;
 
 use namespace::clean -except => 'meta';
 
@@ -308,7 +309,6 @@
     is_datatype_numeric
     _supports_insert_returning
     _count_select
-    _subq_count_select
     _subq_update_delete
     svp_rollback
     svp_begin
@@ -343,7 +343,6 @@
     _dbh_commit
     _execute_array
     _placeholders_supported
-    _verify_pid
     savepoints
     _sqlt_minimum_version
     _sql_maker_opts
@@ -371,7 +370,19 @@
   /],
 );
 
+my @unimplemented = qw(
+  _arm_global_destructor
+  _preserve_foreign_dbh
+  _verify_pid
+  _verify_tid
+);
 
+for my $method (@unimplemented) {
+  __PACKAGE__->meta->add_method($method, sub {
+    croak "$method must not be called on ".(blessed shift).' objects';
+  });
+}
+
 has _master_connect_info_opts =>
   (is => 'rw', isa => HashRef, default => sub { {} });
 
@@ -640,7 +651,7 @@
   my @result;
   my $want_array = wantarray;
 
-  eval {
+  try {
     if($want_array) {
       @result = $coderef->(@args);
     } elsif(defined $want_array) {
@@ -648,19 +659,14 @@
     } else {
       $coderef->(@args);
     }
+  } catch {
+    $self->throw_exception("coderef returned an error: $_");
+  } finally {
+    ##Reset to the original state
+    $self->read_handler($current);
   };
 
-  ##Reset to the original state
-  $self->read_handler($current);
-
-  ##Exception testing has to come last, otherwise you might leave the 
-  ##read_handler set to master.
-
-  if($@) {
-    $self->throw_exception("coderef returned an error: $@");
-  } else {
-    return $want_array ? @result : $result[0];
-  }
+  return $want_array ? @result : $result[0];
 }
 
 =head2 set_reliable_storage

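The execute_reliably change above leans on Try::Tiny's finally block to guarantee the read_handler is restored even when the user coderef throws. Schematically, assuming the surrounding Replicated storage object (a sketch, not the shipped code):

  use Try::Tiny;

  my $saved = $self->read_handler;      # normally the replicant pool
  $self->read_handler($self->master);   # force reads to the master

  try {
    $coderef->(@args);
  }
  catch {
    $self->throw_exception("coderef returned an error: $_");
  }
  finally {
    $self->read_handler($saved);        # runs on success and on failure
  };
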
Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/SQLAnywhere.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/SQLAnywhere.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/SQLAnywhere.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -5,6 +5,7 @@
 use base qw/DBIx::Class::Storage::DBI::UniqueIdentifier/;
 use mro 'c3';
 use List::Util ();
+use Try::Tiny;
 
 __PACKAGE__->mk_group_accessors(simple => qw/
   _identity
@@ -62,8 +63,8 @@
     my $table_name = $source->from;
     $table_name    = $$table_name if ref $table_name;
 
-    my ($identity) = eval {
-      local $@; $dbh->selectrow_array("SELECT GET_IDENTITY('$table_name')")
+    my ($identity) = try {
+      $dbh->selectrow_array("SELECT GET_IDENTITY('$table_name')")
     };
 
     if (defined $identity) {
@@ -114,8 +115,13 @@
 sub build_datetime_parser {
   my $self = shift;
   my $type = "DateTime::Format::Strptime";
-  eval "use ${type}";
-  $self->throw_exception("Couldn't load ${type}: $@") if $@;
+  try {
+    eval "require ${type}"
+  }
+  catch {
+    $self->throw_exception("Couldn't load ${type}: $_");
+  };
+
   return $type->new( pattern => '%Y-%m-%d %H:%M:%S.%6N' );
 }
 

Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/Sybase/ASE.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/Sybase/ASE.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/Sybase/ASE.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -13,6 +13,7 @@
 use List::Util();
 use Sub::Name();
 use Data::Dumper::Concise();
+use Try::Tiny;
 
 __PACKAGE__->mk_group_accessors('simple' =>
     qw/_identity _blob_log_on_update _writer_storage _is_extra_storage
@@ -596,7 +597,8 @@
       return 0;
   });
 
-  eval {
+  my $exception;
+  try {
     my $bulk = $self->_bulk_storage;
 
     my $guard = $bulk->txn_scope_guard;
@@ -640,9 +642,10 @@
     );
 
     $bulk->_query_end($sql);
+  } catch {
+    $exception = shift;
   };
 
-  my $exception = $@;
   DBD::Sybase::set_cslib_cb($orig_cslib_cb);
 
   if ($exception =~ /-Y option/) {
@@ -728,9 +731,11 @@
 sub _update_blobs {
   my ($self, $source, $blob_cols, $where) = @_;
 
-  my @primary_cols = eval { $source->_pri_cols };
-  $self->throw_exception("Cannot update TEXT/IMAGE column(s): $@")
-    if $@;
+  my @primary_cols = try
+    { $source->_pri_cols }
+    catch {
+      $self->throw_exception("Cannot update TEXT/IMAGE column(s): $_")
+    };
 
 # check if we're updating a single row by PK
   my $pk_cols_in_where = 0;
@@ -762,9 +767,11 @@
   my $table = $source->name;
 
   my %row = %$row;
-  my @primary_cols = eval { $source->_pri_cols} ;
-  $self->throw_exception("Cannot update TEXT/IMAGE column(s): $@")
-    if $@;
+  my @primary_cols = try
+    { $source->_pri_cols }
+    catch {
+      $self->throw_exception("Cannot update TEXT/IMAGE column(s): $_")
+    };
 
   $self->throw_exception('Cannot update TEXT/IMAGE column(s) without primary key values')
     if ((grep { defined $row{$_} } @primary_cols) != @primary_cols);
@@ -779,14 +786,13 @@
     my $sth = $cursor->sth;
 
     if (not $sth) {
-
       $self->throw_exception(
           "Could not find row in table '$table' for blob update:\n"
         . Data::Dumper::Concise::Dumper (\%where)
       );
     }
 
-    eval {
+    try {
       do {
         $sth->func('CS_GET', 1, 'ct_data_info') or die $sth->errstr;
       } while $sth->fetch;
@@ -804,19 +810,20 @@
       $sth->func($blob, length($blob), 'ct_send_data') or die $sth->errstr;
 
       $sth->func('ct_finish_send') or die $sth->errstr;
-    };
-    my $exception = $@;
-    $sth->finish if $sth;
-    if ($exception) {
+    }
+    catch {
       if ($self->using_freetds) {
         $self->throw_exception (
-          'TEXT/IMAGE operation failed, probably because you are using FreeTDS: '
-          . $exception
+          "TEXT/IMAGE operation failed, probably because you are using FreeTDS: $_"
         );
-      } else {
-        $self->throw_exception($exception);
       }
+      else {
+        $self->throw_exception($_);
+      }
     }
+    finally {
+      $sth->finish if $sth;
+    };
   }
 }
 

Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/Sybase.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/Sybase.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI/Sybase.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -2,6 +2,7 @@
 
 use strict;
 use warnings;
+use Try::Tiny;
 
 use base qw/DBIx::Class::Storage::DBI/;
 
@@ -22,13 +23,13 @@
 sub _rebless {
   my $self = shift;
 
-  my $dbtype = eval {
-    @{$self->_get_dbh->selectrow_arrayref(qq{sp_server_info \@attribute_id=1})}[2]
+  my $dbtype;
+  try {
+    $dbtype = @{$self->_get_dbh->selectrow_arrayref(qq{sp_server_info \@attribute_id=1})}[2]
+  } catch {
+    $self->throw_exception("Unable to estable connection to determine database type: $_")
   };
 
-  $self->throw_exception("Unable to estable connection to determine database type: $@")
-    if $@;
-
   if ($dbtype) {
     $dbtype =~ s/\W/_/gi;
 
@@ -53,17 +54,17 @@
 
   if ($dbh->{syb_no_child_con}) {
 # if extra connections are not allowed, then ->ping is reliable
-    my $ping = eval { $dbh->ping };
-    return $@ ? 0 : $ping;
+    return try { $dbh->ping } catch { 0; };
   }
 
-  eval {
+  return try {
 # XXX if the main connection goes stale, does opening another for this statement
 # really determine anything?
     $dbh->do('select 1');
+    1;
+  } catch {
+    0;
   };
-
-  return $@ ? 0 : 1;
 }
 
 sub _set_max_connect {
@@ -110,8 +111,11 @@
 
 sub set_textsize {
   my $self = shift;
-  my $text_size = shift ||
-    eval { $self->_dbi_connect_info->[-1]->{LongReadLen} } ||
+  my $text_size =
+    shift
+      ||
+    try { $self->_dbi_connect_info->[-1]->{LongReadLen} }
+      ||
     32768; # the DBD::Sybase default
 
   return unless defined $text_size;

Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBI.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -15,6 +15,8 @@
 use List::Util();
 use Data::Dumper::Concise();
 use Sub::Name ();
+use Try::Tiny;
+use File::Path ();
 
 __PACKAGE__->mk_group_accessors('simple' =>
   qw/_connect_info _dbi_connect_info _dbh _sql_maker _sql_maker_opts _conn_pid
@@ -40,12 +42,12 @@
 /);
 __PACKAGE__->sql_maker_class('DBIx::Class::SQLAHacks');
 
-
 # Each of these methods need _determine_driver called before itself
 # in order to function reliably. This is a purely DRY optimization
 my @rdbms_specific_methods = qw/
   deployment_statements
   sqlt_type
+  sql_maker
   build_datetime_parser
   datetime_parser_type
 
@@ -115,9 +117,101 @@
   $new->{_in_dbh_do} = 0;
   $new->{_dbh_gen} = 0;
 
+  # read below to see what this does
+  $new->_arm_global_destructor;
+
   $new;
 }
 
+# This is a hack to work around perl shooting stuff in random
+# order on exit(). If we do not walk the remaining storage
+# objects in an END block, there is a *small but real* chance
+# of a fork()ed child killing the parent's shared DBI handle
+# *before perl reaches the DESTROY in this package*
+# Yes, it is ugly and effective.
+{
+  my %seek_and_destroy;
+
+  sub _arm_global_destructor {
+    my $self = shift;
+    my $key = Scalar::Util::refaddr ($self);
+    $seek_and_destroy{$key} = $self;
+    Scalar::Util::weaken ($seek_and_destroy{$key});
+  }
+
+  END {
+    local $?; # just in case the DBI destructor changes it somehow
+
+    # destroy just the object if not native to this process/thread
+    $_->_preserve_foreign_dbh for (grep
+      { defined $_ }
+      values %seek_and_destroy
+    );
+  }
+}
+
+sub DESTROY {
+  my $self = shift;
+
+  # destroy just the object if not native to this process/thread
+  $self->_preserve_foreign_dbh;
+
+  # some databases need this to stop spewing warnings
+  if (my $dbh = $self->_dbh) {
+    try {
+      %{ $dbh->{CachedKids} } = ();
+      $dbh->disconnect;
+    };
+  }
+
+  $self->_dbh(undef);
+}
+
+sub _preserve_foreign_dbh {
+  my $self = shift;
+
+  return unless $self->_dbh;
+
+  $self->_verify_tid;
+
+  return unless $self->_dbh;
+
+  $self->_verify_pid;
+
+}
+
+# handle pid changes correctly - do not destroy parent's connection
+sub _verify_pid {
+  my $self = shift;
+
+  return if ( defined $self->_conn_pid and $self->_conn_pid == $$ );
+
+  $self->_dbh->{InactiveDestroy} = 1;
+  $self->_dbh(undef);
+  $self->{_dbh_gen}++;
+
+  return;
+}
+
+# very similar to above, but seems to FAIL if I set InactiveDestroy
+sub _verify_tid {
+  my $self = shift;
+
+  if ( ! defined $self->_conn_tid ) {
+    return; # no threads
+  }
+  elsif ( $self->_conn_tid == threads->tid ) {
+    return; # same thread
+  }
+
+  #$self->_dbh->{InactiveDestroy} = 1;  # why does t/51threads.t fail...?
+  $self->_dbh(undef);
+  $self->{_dbh_gen}++;
+
+  return;
+}
+
+
 =head2 connect_info
 
 This method is normally called by L<DBIx::Class::Schema/connection>, which
@@ -628,39 +722,25 @@
 
   my $dbh = $self->_get_dbh;
 
-  return $self->$code($dbh, @_) if $self->{_in_dbh_do}
-      || $self->{transaction_depth};
+  return $self->$code($dbh, @_)
+    if ( $self->{_in_dbh_do} || $self->{transaction_depth} );
 
   local $self->{_in_dbh_do} = 1;
 
-  my @result;
-  my $want_array = wantarray;
+  my @args = @_;
+  return try {
+    $self->$code ($dbh, @args);
+  } catch {
+    $self->throw_exception($_) if $self->connected;
 
-  eval {
+    # We were not connected - reconnect and retry, but let any
+    #  exception fall right through this time
+    carp "Retrying $code after catching disconnected exception: $_"
+      if $ENV{DBIC_DBIRETRY_DEBUG};
 
-    if($want_array) {
-        @result = $self->$code($dbh, @_);
-    }
-    elsif(defined $want_array) {
-        $result[0] = $self->$code($dbh, @_);
-    }
-    else {
-        $self->$code($dbh, @_);
-    }
+    $self->_populate_dbh;
+    $self->$code($self->_dbh, @args);
   };
-
-  # ->connected might unset $@ - copy
-  my $exception = $@;
-  if(!$exception) { return $want_array ? @result : $result[0] }
-
-  $self->throw_exception($exception) if $self->connected;
-
-  # We were not connected - reconnect and retry, but let any
-  #  exception fall right through this time
-  carp "Retrying $code after catching disconnected exception: $exception"
-    if $ENV{DBIC_DBIRETRY_DEBUG};
-  $self->_populate_dbh;
-  $self->$code($self->_dbh, @_);
 }
 
 # This is basically a blend of dbh_do above and DBIx::Class::Storage::txn_do.
@@ -682,30 +762,32 @@
 
   my $tried = 0;
   while(1) {
-    eval {
+    my $exception;
+    my @args = @_;
+    try {
       $self->_get_dbh;
 
       $self->txn_begin;
       if($want_array) {
-          @result = $coderef->(@_);
+          @result = $coderef->(@args);
       }
       elsif(defined $want_array) {
-          $result[0] = $coderef->(@_);
+          $result[0] = $coderef->(@args);
       }
       else {
-          $coderef->(@_);
+          $coderef->(@args);
       }
       $self->txn_commit;
+    } catch {
+      $exception = $_;
     };
 
-    # ->connected might unset $@ - copy
-    my $exception = $@;
-    if(!$exception) { return $want_array ? @result : $result[0] }
+    if(! defined $exception) { return $want_array ? @result : $result[0] }
 
     if($tried++ || $self->connected) {
-      eval { $self->txn_rollback };
-      my $rollback_exception = $@;
-      if($rollback_exception) {
+      my $rollback_exception;
+      try { $self->txn_rollback } catch { $rollback_exception = shift };
+      if(defined $rollback_exception) {
         my $exception_class = "DBIx::Class::Storage::NESTED_ROLLBACK_EXCEPTION";
         $self->throw_exception($exception)  # propagate nested rollback
           if $rollback_exception =~ /$exception_class/;
@@ -803,19 +885,11 @@
 sub _seems_connected {
   my $self = shift;
 
+  $self->_preserve_foreign_dbh;
+
   my $dbh = $self->_dbh
     or return 0;
 
-  if(defined $self->_conn_tid && $self->_conn_tid != threads->tid) {
-    $self->_dbh(undef);
-    $self->{_dbh_gen}++;
-    return 0;
-  }
-  else {
-    $self->_verify_pid;
-    return 0 if !$self->_dbh;
-  }
-
   return $dbh->FETCH('Active');
 }
 
@@ -827,20 +901,6 @@
   return $dbh->ping;
 }
 
-# handle pid changes correctly
-#  NOTE: assumes $self->_dbh is a valid $dbh
-sub _verify_pid {
-  my ($self) = @_;
-
-  return if defined $self->_conn_pid && $self->_conn_pid == $$;
-
-  $self->_dbh->{InactiveDestroy} = 1;
-  $self->_dbh(undef);
-  $self->{_dbh_gen}++;
-
-  return;
-}
-
 sub ensure_connected {
   my ($self) = @_;
 
@@ -873,7 +933,7 @@
 # this is the internal "get dbh or connect (don't check)" method
 sub _get_dbh {
   my $self = shift;
-  $self->_verify_pid if $self->_dbh;
+  $self->_preserve_foreign_dbh;
   $self->_populate_dbh unless $self->_dbh;
   return $self->_dbh;
 }
@@ -940,7 +1000,7 @@
 
     my %info;
 
-    my $server_version = $self->_get_server_version;
+    my $server_version = try { $self->_get_server_version };
 
     if (defined $server_version) {
       $info{dbms_version} = $server_version;
@@ -972,7 +1032,7 @@
 }
 
 sub _get_server_version {
-  eval { shift->_get_dbh->get_info(18) };
+  shift->_get_dbh->get_info(18);
 }
 
 sub _determine_driver {
@@ -1097,7 +1157,7 @@
     $DBI::connect_via = 'connect';
   }
 
-  eval {
+  try {
     if(ref $info[0] eq 'CODE') {
        $dbh = $info[0]->();
     }
@@ -1105,7 +1165,11 @@
        $dbh = DBI->connect(@info);
     }
 
-    if($dbh && !$self->unsafe) {
+    if (!$dbh) {
+      die $DBI::errstr;
+    }
+
+    unless ($self->unsafe) {
       my $weak_self = $self;
       Scalar::Util::weaken($weak_self);
       $dbh->{HandleError} = sub {
@@ -1122,15 +1186,15 @@
       $dbh->{RaiseError} = 1;
       $dbh->{PrintError} = 0;
     }
+  }
+  catch {
+    $self->throw_exception("DBI Connection failed: $_")
+  }
+  finally {
+    $DBI::connect_via = $old_connect_via if $old_connect_via;
   };
 
-  $DBI::connect_via = $old_connect_via if $old_connect_via;
-
-  $self->throw_exception("DBI Connection failed: " . ($@||$DBI::errstr))
-    if !$dbh || $@;
-
   $self->_dbh_autocommit($dbh->{AutoCommit});
-
   $dbh;
 }
 
@@ -1278,7 +1342,7 @@
 sub txn_rollback {
   my $self = shift;
   my $dbh = $self->_dbh;
-  eval {
+  try {
     if ($self->{transaction_depth} == 1) {
       $self->debugobj->txn_rollback()
         if ($self->debug);
@@ -1296,15 +1360,17 @@
     else {
       die DBIx::Class::Storage::NESTED_ROLLBACK_EXCEPTION->new;
     }
-  };
-  if ($@) {
-    my $error = $@;
-    my $exception_class = "DBIx::Class::Storage::NESTED_ROLLBACK_EXCEPTION";
-    $error =~ /$exception_class/ and $self->throw_exception($error);
-    # ensure that a failed rollback resets the transaction depth
-    $self->{transaction_depth} = $self->_dbh_autocommit ? 0 : 1;
-    $self->throw_exception($error);
   }
+  catch {
+    my $exception_class = "DBIx::Class::Storage::NESTED_ROLLBACK_EXCEPTION";
+
+    if ($_ !~ /$exception_class/) {
+      # ensure that a failed rollback resets the transaction depth
+      $self->{transaction_depth} = $self->_dbh_autocommit ? 0 : 1;
+    }
+
+    $self->throw_exception($_)
+  };
 }
 
 sub _dbh_rollback {
@@ -1446,7 +1512,7 @@
   if ($opts->{returning}) {
     my @ret_cols = @{$opts->{returning}};
 
-    my @ret_vals = eval {
+    my @ret_vals = try {
       local $SIG{__WARN__} = sub {};
       my @r = $sth->fetchrow_array;
       $sth->finish;
@@ -1601,16 +1667,27 @@
     $placeholder_index++;
   }
 
-  my $rv = eval {
-    $self->_dbh_execute_array($sth, $tuple_status, @extra);
+  my ($rv, $err);
+  try {
+    $rv = $self->_dbh_execute_array($sth, $tuple_status, @extra);
+  }
+  catch {
+    $err = shift;
+  }
+  finally {
+    # Statement must finish even if there was an exception.
+    try {
+      $sth->finish
+    }
+    catch {
+      $err = shift unless defined $err 
+    };
   };
-  my $err = $@ || $sth->errstr;
 
-# Statement must finish even if there was an exception.
-  eval { $sth->finish };
-  $err = $@ unless $err;
+  $err = $sth->errstr
+    if (! defined $err and $sth->err);
 
-  if ($err) {
+  if (defined $err) {
     my $i = 0;
     ++$i while $i <= $#$tuple_status && !ref $tuple_status->[$i];
 
@@ -1624,6 +1701,7 @@
       }),
     );
   }
+
   return $rv;
 }
 
@@ -1636,21 +1714,29 @@
 sub _dbh_execute_inserts_with_no_binds {
   my ($self, $sth, $count) = @_;
 
-  eval {
+  my $err;
+  try {
     my $dbh = $self->_get_dbh;
     local $dbh->{RaiseError} = 1;
     local $dbh->{PrintError} = 0;
 
     $sth->execute foreach 1..$count;
+  }
+  catch {
+    $err = shift;
+  }
+  finally {
+    # Make sure statement is finished even if there was an exception.
+    try {
+      $sth->finish
+    }
+    catch {
+      $err = shift unless defined $err;
+    };
   };
-  my $exception = $@;
 
-# Make sure statement is finished even if there was an exception.
-  eval { $sth->finish };
-  $exception = $@ unless $exception;
+  $self->throw_exception($err) if defined $err;
 
-  $self->throw_exception($exception) if $exception;
-
   return $count;
 }
 
@@ -1770,31 +1856,18 @@
 
 sub _select {
   my $self = shift;
-
-  # localization is neccessary as
-  # 1) there is no infrastructure to pass this around before SQLA2
-  # 2) _select_args sets it and _prep_for_execute consumes it
-  my $sql_maker = $self->sql_maker;
-  local $sql_maker->{_dbic_rs_attrs};
-
-  return $self->_execute($self->_select_args(@_));
+  $self->_execute($self->_select_args(@_));
 }
 
 sub _select_args_to_query {
   my $self = shift;
 
-  # localization is neccessary as
-  # 1) there is no infrastructure to pass this around before SQLA2
-  # 2) _select_args sets it and _prep_for_execute consumes it
-  my $sql_maker = $self->sql_maker;
-  local $sql_maker->{_dbic_rs_attrs};
-
-  # my ($op, $bind, $ident, $bind_attrs, $select, $cond, $order, $rows, $offset)
+  # my ($op, $bind, $ident, $bind_attrs, $select, $cond, $rs_attrs, $rows, $offset)
   #  = $self->_select_args($ident, $select, $cond, $attrs);
   my ($op, $bind, $ident, $bind_attrs, @args) =
     $self->_select_args(@_);
 
-  # my ($sql, $prepared_bind) = $self->_prep_for_execute($op, $bind, $ident, [ $select, $cond, $order, $rows, $offset ]);
+  # my ($sql, $prepared_bind) = $self->_prep_for_execute($op, $bind, $ident, [ $select, $cond, $rs_attrs, $rows, $offset ]);
   my ($sql, $prepared_bind) = $self->_prep_for_execute($op, $bind, $ident, \@args);
   $prepared_bind ||= [];
 
@@ -1807,16 +1880,16 @@
 sub _select_args {
   my ($self, $ident, $select, $where, $attrs) = @_;
 
+  my $sql_maker = $self->sql_maker;
   my ($alias2source, $rs_alias) = $self->_resolve_ident_sources ($ident);
 
-  my $sql_maker = $self->sql_maker;
-  $sql_maker->{_dbic_rs_attrs} = {
+  $attrs = {
     %$attrs,
     select => $select,
     from => $ident,
     where => $where,
     $rs_alias && $alias2source->{$rs_alias}
-      ? ( _source_handle => $alias2source->{$rs_alias}->handle )
+      ? ( _rsroot_source_handle => $alias2source->{$rs_alias}->handle )
       : ()
     ,
   };
@@ -1849,19 +1922,13 @@
   }
 
   # adjust limits
-  if (
-    $attrs->{software_limit}
-      ||
-    $sql_maker->_default_limit_syntax eq "GenericSubQ"
-  ) {
-    $attrs->{software_limit} = 1;
-  }
-  else {
+  if (defined $attrs->{rows}) {
     $self->throw_exception("rows attribute must be positive if present")
-      if (defined($attrs->{rows}) && !($attrs->{rows} > 0));
-
+      unless $attrs->{rows} > 0;
+  }
+  elsif (defined $attrs->{offset}) {
     # MySQL actually recommends this approach.  I cringe.
-    $attrs->{rows} = 2**48 if not defined $attrs->{rows} and defined $attrs->{offset};
+    $attrs->{rows} = 2**32;
   }
 
   my @limit;
@@ -1872,18 +1939,7 @@
     #limited has_many
     ( $attrs->{rows} && keys %{$attrs->{collapse}} )
        ||
-    # limited prefetch with RNO subqueries
-    (
-      $attrs->{rows}
-        &&
-      $sql_maker->limit_dialect eq 'RowNumberOver'
-        &&
-      $attrs->{_prefetch_select}
-        &&
-      @{$attrs->{_prefetch_select}}
-    )
-      ||
-    # grouped prefetch
+    # grouped prefetch (to satisfy group_by == select)
     ( $attrs->{group_by}
         &&
       @{$attrs->{group_by}}
@@ -1896,39 +1952,6 @@
     ($ident, $select, $where, $attrs)
       = $self->_adjust_select_args_for_complex_prefetch ($ident, $select, $where, $attrs);
   }
-
-  elsif (
-    ($attrs->{rows} || $attrs->{offset})
-      &&
-    $sql_maker->limit_dialect eq 'RowNumberOver'
-      &&
-    (ref $ident eq 'ARRAY' && @$ident > 1)  # indicates a join
-      &&
-    scalar $self->_parse_order_by ($attrs->{order_by})
-  ) {
-    # the RNO limit dialect above mangles the SQL such that the join gets lost
-    # wrap a subquery here
-
-    push @limit, delete @{$attrs}{qw/rows offset/};
-
-    my $subq = $self->_select_args_to_query (
-      $ident,
-      $select,
-      $where,
-      $attrs,
-    );
-
-    $ident = {
-      -alias => $attrs->{alias},
-      -source_handle => $ident->[0]{-source_handle},
-      $attrs->{alias} => $subq,
-    };
-
-    # all part of the subquery now
-    delete @{$attrs}{qw/order_by group_by having/};
-    $where = undef;
-  }
-
   elsif (! $attrs->{software_limit} ) {
     push @limit, $attrs->{rows}, $attrs->{offset};
   }
@@ -1946,12 +1969,7 @@
   # invoked, and that's just bad...
 ###
 
-  my $order = { map
-    { $attrs->{$_} ? ( $_ => $attrs->{$_} ) : ()  }
-    (qw/order_by group_by having/ )
-  };
-
-  return ('select', $attrs->{bind}, $ident, $bind_attrs, $select, $where, $order, @limit);
+  return ('select', $attrs->{bind}, $ident, $bind_attrs, $select, $where, $attrs, @limit);
 }
 
 # Returns a counting SELECT for a simple count
@@ -1963,47 +1981,7 @@
   return { count => '*' };
 }
 
-# Returns a SELECT which will end up in the subselect
-# There may or may not be a group_by, as the subquery
-# might have been called to accomodate a limit
-#
-# Most databases would be happy with whatever ends up
-# here, but some choke in various ways.
-#
-sub _subq_count_select {
-  my ($self, $source, $rs_attrs) = @_;
 
-  if (my $groupby = $rs_attrs->{group_by}) {
-
-    my $avail_columns = $self->_resolve_column_info ($rs_attrs->{from});
-
-    my $sel_index;
-    for my $sel (@{$rs_attrs->{select}}) {
-      if (ref $sel eq 'HASH' and $sel->{-as}) {
-        $sel_index->{$sel->{-as}} = $sel;
-      }
-    }
-
-    my @selection;
-    for my $g_part (@$groupby) {
-      if (ref $g_part or $avail_columns->{$g_part}) {
-        push @selection, $g_part;
-      }
-      elsif ($sel_index->{$g_part}) {
-        push @selection, $sel_index->{$g_part};
-      }
-      else {
-        $self->throw_exception ("group_by criteria '$g_part' not contained within current resultset source(s)");
-      }
-    }
-
-    return \@selection;
-  }
-
-  my @pcols = map { join '.', $rs_attrs->{alias}, $_ } ($source->primary_columns);
-  return @pcols ? \@pcols : [ 1 ];
-}
-
 sub source_bind_attributes {
   my ($self, $source) = @_;
 
@@ -2086,7 +2064,8 @@
 
   if ($dbh->can('column_info')) {
     my %result;
-    eval {
+    my $caught;
+    try {
       my ($schema,$tab) = $table =~ /^(.+?)\.(.+)$/ ? ($1,$2) : (undef,$table);
       my $sth = $dbh->column_info( undef,$schema, $tab, '%' );
       $sth->execute();
@@ -2101,8 +2080,10 @@
 
         $result{$col_name} = \%column_info;
       }
+    } catch {
+      $caught = 1;
     };
-    return \%result if !$@ && scalar keys %result;
+    return \%result if !$caught && scalar keys %result;
   }
 
   my %result;
@@ -2152,7 +2133,7 @@
 sub _dbh_last_insert_id {
     my ($self, $dbh, $source, $col) = @_;
 
-    my $id = eval { $dbh->last_insert_id (undef, undef, $source->name, $col) };
+    my $id = try { $dbh->last_insert_id (undef, undef, $source->name, $col) };
 
     return $id if defined $id;
 
@@ -2203,12 +2184,15 @@
 
   # some drivers provide a $dbh attribute (e.g. Sybase and $dbh->{syb_dynamic_supported})
   # but it is inaccurate more often than not
-  eval {
+  return try {
     local $dbh->{PrintError} = 0;
     local $dbh->{RaiseError} = 1;
     $dbh->do('select ?', {}, 1);
+    1;
+  }
+  catch {
+    0;
   };
-  return $@ ? 0 : 1;
 }
 
 # Check if placeholders bound to non-string types throw exceptions
@@ -2217,13 +2201,16 @@
   my $self = shift;
   my $dbh  = $self->_get_dbh;
 
-  eval {
+  return try {
     local $dbh->{PrintError} = 0;
     local $dbh->{RaiseError} = 1;
     # this specifically tests a bind that is NOT a string
     $dbh->do('select 1 where 1 = ?', {}, 1);
+    1;
+  }
+  catch {
+    0;
   };
-  return $@ ? 0 : 1;
 }
 
 =head2 sqlt_type
@@ -2334,6 +2321,9 @@
   unless ($dir) {
     carp "No directory given, using ./\n";
     $dir = './';
+  } else {
+      -d $dir or File::Path::mkpath($dir)
+          or $self->throw_exception("create_ddl_dir: $! creating dir '$dir'");
   }
 
   $self->throw_exception ("Directory '$dir' does not exist\n") unless(-d $dir);
@@ -2537,14 +2527,13 @@
     return if($line =~ /^COMMIT/m);
     return if $line =~ /^\s+$/; # skip whitespace only
     $self->_query_start($line);
-    eval {
+    try {
       # do a dbh_do cycle here, as we need some error checking in
       # place (even though we will ignore errors)
       $self->dbh_do (sub { $_[1]->do($line) });
+    } catch {
+      carp qq{$_ (running "${line}")};
     };
-    if ($@) {
-      carp qq{$@ (running "${line}")};
-    }
     $self->_query_end($line);
   };
   my @statements = $schema->deployment_statements($type, undef, $dir, { %{ $sqltargs || {} }, no_comments => 1 } );
@@ -2649,23 +2638,6 @@
   return $alias;
 }
 
-sub DESTROY {
-  my $self = shift;
-
-  $self->_verify_pid if $self->_dbh;
-
-  # some databases need this to stop spewing warnings
-  if (my $dbh = $self->_dbh) {
-    local $@;
-    eval {
-      %{ $dbh->{CachedKids} } = ();
-      $dbh->disconnect;
-    };
-  }
-
-  $self->_dbh(undef);
-}
-
 1;
 
 =head1 USAGE NOTES

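Finally, on the _arm_global_destructor / END block added to Storage::DBI above: each storage registers itself in a process-wide hash keyed by its refaddr, weakened so the registry never extends object lifetimes, and END sweeps whatever is still alive so a fork()ed child cannot tear down the parent's shared handle first. The bare pattern, with cleanup_hook as a placeholder for _preserve_foreign_dbh (a sketch, not the shipped code):

  use Scalar::Util qw(refaddr weaken);

  my %registry;

  sub register_for_cleanup {
    my ($obj) = @_;
    my $key = refaddr $obj;
    $registry{$key} = $obj;
    weaken $registry{$key};      # the registry must not keep $obj alive
  }

  END {
    local $?;                    # DBI destructors may clobber the exit status
    for my $obj (grep { defined } values %registry) {
      $obj->cleanup_hook;        # placeholder for _preserve_foreign_dbh
    }
  }
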
Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBIHacks.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBIHacks.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/DBIHacks.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -94,6 +94,8 @@
     }
 
     push @$inner_select, $sel;
+
+    push @{$inner_attrs->{as}}, $attrs->{as}[$i];
   }
 
   # construct the inner $from for the subquery

Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/TxnScopeGuard.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/TxnScopeGuard.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage/TxnScopeGuard.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -3,6 +3,7 @@
 use strict;
 use warnings;
 use Carp::Clan qw/^DBIx::Class/;
+use Try::Tiny;
 
 sub new {
   my ($class, $storage) = @_;
@@ -31,10 +32,11 @@
     carp 'A DBIx::Class::Storage::TxnScopeGuard went out of scope without explicit commit or error. Rolling back.'
       unless $exception;
 
-    eval { $storage->txn_rollback };
-    my $rollback_exception = $@;
+    my $rollback_exception;
+    try { $storage->txn_rollback }
+    catch { $rollback_exception = shift };
 
-    if ($rollback_exception && $rollback_exception !~ /DBIx::Class::Storage::NESTED_ROLLBACK_EXCEPTION/) {
+    if (defined $rollback_exception && $rollback_exception !~ /DBIx::Class::Storage::NESTED_ROLLBACK_EXCEPTION/) {
       if ($exception) {
         $exception = "Transaction aborted: ${exception} "
           ."Rollback failed: ${rollback_exception}";

Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class/Storage.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -10,6 +10,7 @@
 use Scalar::Util();
 use IO::File;
 use DBIx::Class::Storage::TxnScopeGuard;
+use Try::Tiny;
 
 __PACKAGE__->mk_group_accessors('simple' => qw/debug debugobj schema/);
 __PACKAGE__->mk_group_accessors('inherited' => 'cursor_class');
@@ -158,16 +159,16 @@
   };
 
   my $rs;
-  eval {
+  try {
     $rs = $schema->txn_do($coderef);
-  };
-
-  if ($@) {                                  # Transaction failed
+  } catch {
+    my $error = shift;
+    # Transaction failed
     die "something terrible has happened!"   #
-      if ($@ =~ /Rollback failed/);          # Rollback failed
+      if ($error =~ /Rollback failed/);          # Rollback failed
 
     deal_with_failed_transaction();
-  }
+  };
 
 In a nested transaction (calling txn_do() from within a txn_do() coderef) only
 the outermost transaction will issue a L</txn_commit>, and txn_do() can be
@@ -195,9 +196,9 @@
   $self->txn_begin; # If this throws an exception, no rollback is needed
 
   my $wantarray = wantarray; # Need to save this since the context
-                             # inside the eval{} block is independent
+                             # inside the try{} block is independent
                              # of the context that called txn_do()
-  eval {
+  try {
 
     # Need to differentiate between scalar/list context to allow for
     # returning a list in scalar context to get the size of the list
@@ -212,28 +213,23 @@
       $coderef->(@args);
     }
     $self->txn_commit;
-  };
+  }
+  catch {
+    my $error = shift;
 
-  if ($@) {
-    my $error = $@;
-
-    eval {
+    try {
       $self->txn_rollback;
-    };
-
-    if ($@) {
-      my $rollback_error = $@;
+    } catch {
       my $exception_class = "DBIx::Class::Storage::NESTED_ROLLBACK_EXCEPTION";
       $self->throw_exception($error)  # propagate nested rollback
-        if $rollback_error =~ /$exception_class/;
+        if $_ =~ /$exception_class/;
 
       $self->throw_exception(
-        "Transaction aborted: $error. Rollback failed: ${rollback_error}"
+        "Transaction aborted: $error. Rollback failed: $_"
       );
-    } else {
-      $self->throw_exception($error); # txn failed but rollback succeeded
     }
-  }
+    $self->throw_exception($error); # txn failed but rollback succeeded
+  };
 
   return $wantarray ? @return_values : $return_value;
 }

Modified: DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/DBIx/Class.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -42,8 +42,11 @@
 sub _attr_cache {
   my $self = shift;
   my $cache = $self->can('__attr_cache') ? $self->__attr_cache : {};
-  my $rest = eval { $self->next::method };
-  return $@ ? $cache : { %$cache, %$rest };
+
+  return {
+    %$cache,
+    %{ $self->maybe::next::method || {} },
+  };
 }
 
 1;
@@ -370,6 +373,8 @@
 
 Tom Hukins
 
+tonvoon: Ton Voon <tonvoon at cpan.org>
+
 triode: Pete Gamache <gamache at cpan.org>
 
 typester: Daisuke Murase <typester at cpan.org>

Modified: DBIx-Class/0.08/branches/pg_cursors/lib/SQL/Translator/Parser/DBIx/Class.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/lib/SQL/Translator/Parser/DBIx/Class.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/lib/SQL/Translator/Parser/DBIx/Class.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -14,8 +14,9 @@
 
 use Exporter;
 use SQL::Translator::Utils qw(debug normalize_name);
-use Carp::Clan qw/^SQL::Translator|^DBIx::Class/;
+use Carp::Clan qw/^SQL::Translator|^DBIx::Class|^Try::Tiny/;
 use Scalar::Util ();
+use Try::Tiny;
 
 use base qw(Exporter);
 
@@ -43,8 +44,12 @@
 
     croak 'No DBIx::Class::Schema' unless ($dbicschema);
     if (!ref $dbicschema) {
-      eval "use $dbicschema;";
-      croak "Can't load $dbicschema ($@)" if($@);
+      try {
+        eval "require $dbicschema;"
+      }
+      catch {
+        croak "Can't load $dbicschema ($_)";
+      }
     }
 
     my $schema      = $tr->schema;

Modified: DBIx-Class/0.08/branches/pg_cursors/t/03podcoverage.t
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/03podcoverage.t	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/t/03podcoverage.t	2010-05-31 07:55:05 UTC (rev 9468)
@@ -46,6 +46,15 @@
             MULTICREATE_DEBUG
         /],
     },
+    'DBIx::Class::FilterColumn' => {
+        ignore => [qw/
+            new
+            update
+            store_column
+            get_column
+            get_columns
+        /],
+    },
     'DBIx::Class::ResultSource' => {
         ignore => [qw/
             compare_relationship_keys

Modified: DBIx-Class/0.08/branches/pg_cursors/t/39load_namespaces_1.t
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/39load_namespaces_1.t	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/t/39load_namespaces_1.t	2010-05-31 07:55:05 UTC (rev 9468)
@@ -7,8 +7,6 @@
 use lib qw(t/lib);
 use DBICTest; # do not remove even though it is not used
 
-plan tests => 8;
-
 my $warnings;
 eval {
     local $SIG{__WARN__} = sub { $warnings .= shift };
@@ -16,9 +14,11 @@
     use base qw/DBIx::Class::Schema/;
     __PACKAGE__->load_namespaces;
 };
-ok(!$@) or diag $@;
-like($warnings, qr/load_namespaces found ResultSet class C with no corresponding Result class/);
+ok(!$@, 'load_namespaces does not die') or diag $@;
+like($warnings, qr/load_namespaces found ResultSet class C with no corresponding Result class/, 'Found warning about extra ResultSet classes');
 
+like($warnings, qr/load_namespaces found ResultSet class DBICNSTest::ResultSet::D that does not subclass DBIx::Class::ResultSet/, 'Found warning about ResultSets with incorrect subclass');
+
 my $source_a = DBICNSTest->source('A');
 isa_ok($source_a, 'DBIx::Class::ResultSource::Table');
 my $rset_a   = DBICNSTest->resultset('A');
@@ -31,5 +31,7 @@
 
 for my $moniker (qw/A B/) {
   my $class = "DBICNSTest::Result::$moniker";
-  ok(!defined($class->result_source_instance->source_name));
+  ok(!defined($class->result_source_instance->source_name), "Source name of $moniker not defined");
 }
+
+done_testing;

Deleted: DBIx-Class/0.08/branches/pg_cursors/t/41orrible.t
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/41orrible.t	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/t/41orrible.t	2010-05-31 07:55:05 UTC (rev 9468)
@@ -1,89 +0,0 @@
-use strict;
-use warnings;
-
-use Test::More;
-use DBIx::Class::SQLAHacks::OracleJoins;
-
-use lib qw(t/lib);
-use DBICTest; # do not remove even though it is not used
-use DBIC::SqlMakerTest;
-
-plan tests => 4;
-
-my $sa = new DBIx::Class::SQLAHacks::OracleJoins;
-
-$sa->limit_dialect('RowNum');
-
-is($sa->select('rubbish',
-                  [ 'foo.id', 'bar.id', \'TO_CHAR(foo.womble, "blah")' ],
-                  undef, undef, 1, 3),
-   'SELECT * FROM
-(
-    SELECT A.*, ROWNUM r FROM
-    (
-        SELECT foo.id AS col1, bar.id AS col2, TO_CHAR(foo.womble, "blah") AS col3 FROM rubbish 
-    ) A
-    WHERE ROWNUM < 5
-) B
-WHERE r >= 4
-', 'Munged stuff to make Oracle not explode');
-
-# test WhereJoins
-# search with undefined or empty $cond
-
-#  my ($self, $table, $fields, $where, $order, @rest) = @_;
-my ($sql, @bind) = $sa->select(
-    [
-        { me => "cd" },
-        [
-            { "-join_type" => "LEFT", artist => "artist" },
-            { "artist.artistid" => "me.artist" },
-        ],
-    ],
-    [ 'cd.cdid', 'cd.artist', 'cd.title', 'cd.year', 'artist.artistid', 'artist.name' ],
-    undef,
-    undef
-);
-is_same_sql_bind(
-  $sql, \@bind,
-  'SELECT cd.cdid, cd.artist, cd.title, cd.year, artist.artistid, artist.name FROM cd me, artist artist WHERE ( artist.artistid(+) = me.artist )', [],
-  'WhereJoins search with empty where clause'
-);
-
-($sql, @bind) = $sa->select(
-    [
-        { me => "cd" },
-        [
-            { "-join_type" => "", artist => "artist" },
-            { "artist.artistid" => "me.artist" },
-        ],
-    ],
-    [ 'cd.cdid', 'cd.artist', 'cd.title', 'cd.year', 'artist.artistid', 'artist.name' ],
-    { 'artist.artistid' => 3 },
-    undef
-);
-is_same_sql_bind(
-  $sql, \@bind,
-  'SELECT cd.cdid, cd.artist, cd.title, cd.year, artist.artistid, artist.name FROM cd me, artist artist WHERE ( ( ( artist.artistid = me.artist ) AND ( artist.artistid = ? ) ) )', [3],
-  'WhereJoins search with where clause'
-);
-
-($sql, @bind) = $sa->select(
-    [
-        { me => "cd" },
-        [
-            { "-join_type" => "LEFT", artist => "artist" },
-            { "artist.artistid" => "me.artist" },
-        ],
-    ],
-    [ 'cd.cdid', 'cd.artist', 'cd.title', 'cd.year', 'artist.artistid', 'artist.name' ],
-    [{ 'artist.artistid' => 3 }, { 'me.cdid' => 5 }],
-    undef
-);
-is_same_sql_bind(
-  $sql, \@bind,
-  'SELECT cd.cdid, cd.artist, cd.title, cd.year, artist.artistid, artist.name FROM cd me, artist artist WHERE ( ( ( artist.artistid(+) = me.artist ) AND ( ( ( artist.artistid = ? ) OR ( me.cdid = ? ) ) ) ) )', [3, 5],
-  'WhereJoins search with or in where clause'
-);
-
-

Modified: DBIx-Class/0.08/branches/pg_cursors/t/60core.t
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/60core.t	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/t/60core.t	2010-05-31 07:55:05 UTC (rev 9468)
@@ -45,6 +45,8 @@
 is(scalar(keys(%fake_dirty)), 1, '1 fake dirty column');
 ok(grep($_ eq 'name', keys(%fake_dirty)), 'name is fake dirty');
 
+ok($art->update, 'Update run');
+
 my $record_jp = $schema->resultset("Artist")->search(undef, { join => 'cds' })->search(undef, { prefetch => 'cds' })->next;
 
 ok($record_jp, "prefetch on same rel okay");
@@ -67,6 +69,8 @@
 
 is($art->in_storage, 0, "It knows it's dead");
 
+lives_ok { $art->update } 'No changes so update should be OK';
+
 dies_ok ( sub { $art->delete }, "Can't delete twice");
 
 is($art->name, 'We Are In Rehab', 'But the object is still live');

Modified: DBIx-Class/0.08/branches/pg_cursors/t/72pg.t
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/72pg.t	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/t/72pg.t	2010-05-31 07:55:05 UTC (rev 9468)
@@ -238,12 +238,15 @@
         $schema2->source("Artist")->name("dbic_t_schema.artist");
 
         $schema->txn_do( sub {
-          my $artist = $schema->resultset('Artist')->search(
+          my $rs = $schema->resultset('Artist')->search(
               {
                   artistid => 1
               },
               $t->{update_lock} ? { for => 'update' } : {}
-          )->first;
+          );
+          ok ($rs->count, 'Count works');
+
+          my $artist = $rs->next;
           is($artist->artistid, 1, "select returns artistid = 1");
 
           $timed_out = 0;

Modified: DBIx-Class/0.08/branches/pg_cursors/t/73oracle.t
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/73oracle.t	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/t/73oracle.t	2010-05-31 07:55:05 UTC (rev 9468)
@@ -87,7 +87,7 @@
 });
 $dbh->do(qq{
   CREATE OR REPLACE TRIGGER cd_insert_trg
-  BEFORE INSERT ON cd
+  BEFORE INSERT OR UPDATE ON cd
   FOR EACH ROW
   BEGIN
     IF :new.cdid IS NULL THEN

Modified: DBIx-Class/0.08/branches/pg_cursors/t/746mssql.t
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/746mssql.t	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/t/746mssql.t	2010-05-31 07:55:05 UTC (rev 9468)
@@ -59,8 +59,6 @@
   { on_connect_call => 'use_dynamic_cursors' },
   {},
 );
-my $new;
-
 # test Auto-PK with different options
 for my $opts (@opts) {
   SKIP: {
@@ -77,384 +75,408 @@
 
     $schema->resultset('Artist')->search({ name => 'foo' })->delete;
 
-    $new = $schema->resultset('Artist')->create({ name => 'foo' });
+    my $new = $schema->resultset('Artist')->create({ name => 'foo' });
 
     ok($new->artistid > 0, "Auto-PK worked");
   }
 }
 
-$seen_id{$new->artistid}++;
 
-# test LIMIT support
-for (1..6) {
-    $new = $schema->resultset('Artist')->create({ name => 'Artist ' . $_ });
-    is ( $seen_id{$new->artistid}, undef, "id for Artist $_ is unique" );
-    $seen_id{$new->artistid}++;
-}
 
-my $it = $schema->resultset('Artist')->search( {}, {
-    rows => 3,
-    order_by => 'artistid',
-});
+# Test populate
 
-is( $it->count, 3, "LIMIT count ok" );
-is( $it->next->name, "foo", "iterator->next ok" );
-$it->next;
-is( $it->next->name, "Artist 2", "iterator->next ok" );
-is( $it->next, undef, "next past end of resultset ok" );
-
-# test GUID columns
-
-$schema->storage->dbh_do (sub {
+{
+  $schema->storage->dbh_do (sub {
     my ($storage, $dbh) = @_;
-    eval { $dbh->do("DROP TABLE artist") };
+    eval { $dbh->do("DROP TABLE owners") };
+    eval { $dbh->do("DROP TABLE books") };
     $dbh->do(<<'SQL');
-CREATE TABLE artist (
-   artistid UNIQUEIDENTIFIER NOT NULL,
+CREATE TABLE books (
+   id INT IDENTITY (1, 1) NOT NULL,
+   source VARCHAR(100),
+   owner INT,
+   title VARCHAR(10),
+   price INT NULL
+)
+
+CREATE TABLE owners (
+   id INT IDENTITY (1, 1) NOT NULL,
    name VARCHAR(100),
-   rank INT NOT NULL DEFAULT '13',
-   charfield CHAR(10) NULL,
-   a_guid UNIQUEIDENTIFIER,
-   primary key(artistid)
 )
 SQL
-});
 
-# start disconnected to make sure insert works on an un-reblessed storage
-$schema = DBICTest::Schema->connect($dsn, $user, $pass);
+  });
 
-my $row;
-lives_ok {
-  $row = $schema->resultset('ArtistGUID')->create({ name => 'mtfnpy' })
-} 'created a row with a GUID';
+  lives_ok ( sub {
+    # start a new connection, make sure rebless works
+    my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
+    $schema->populate ('Owners', [
+      [qw/id  name  /],
+      [qw/1   wiggle/],
+      [qw/2   woggle/],
+      [qw/3   boggle/],
+      [qw/4   fRIOUX/],
+      [qw/5   fRUE/],
+      [qw/6   fREW/],
+      [qw/7   fROOH/],
+      [qw/8   fISMBoC/],
+      [qw/9   station/],
+      [qw/10   mirror/],
+      [qw/11   dimly/],
+      [qw/12   face_to_face/],
+      [qw/13   icarus/],
+      [qw/14   dream/],
+      [qw/15   dyrstyggyr/],
+    ]);
+  }, 'populate with PKs supplied ok' );
 
-ok(
-  eval { $row->artistid },
-  'row has GUID PK col populated',
-);
-diag $@ if $@;
 
-ok(
-  eval { $row->a_guid },
-  'row has a GUID col with auto_nextval populated',
-);
-diag $@ if $@;
+  lives_ok (sub {
+    # start a new connection, make sure rebless works
+    # test an insert with a supplied identity, followed by one without
+    my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
+    for (2, 1) {
+      my $id = $_ * 20 ;
+      $schema->resultset ('Owners')->create ({ id => $id, name => "troglodoogle $id" });
+      $schema->resultset ('Owners')->create ({ name => "troglodoogle " . ($id + 1) });
+    }
+  }, 'create with/without PKs ok' );
 
-my $row_from_db = $schema->resultset('ArtistGUID')
-  ->search({ name => 'mtfnpy' })->first;
+  is ($schema->resultset ('Owners')->count, 19, 'owner rows really in db' );
 
-is $row_from_db->artistid, $row->artistid,
-  'PK GUID round trip';
+  lives_ok ( sub {
+    # start a new connection, make sure rebless works
+    my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
+    $schema->populate ('BooksInLibrary', [
+      [qw/source  owner title   /],
+      [qw/Library 1     secrets0/],
+      [qw/Library 1     secrets1/],
+      [qw/Eatery  1     secrets2/],
+      [qw/Library 2     secrets3/],
+      [qw/Library 3     secrets4/],
+      [qw/Eatery  3     secrets5/],
+      [qw/Library 4     secrets6/],
+      [qw/Library 5     secrets7/],
+      [qw/Eatery  5     secrets8/],
+      [qw/Library 6     secrets9/],
+      [qw/Library 7     secrets10/],
+      [qw/Eatery  7     secrets11/],
+      [qw/Library 8     secrets12/],
+    ]);
+  }, 'populate without PKs supplied ok' );
+}
 
-is $row_from_db->a_guid, $row->a_guid,
-  'NON-PK GUID round trip';
+# test simple, complex LIMIT and limited prefetch support, with both dialects and quote combinations (if possible)
+for my $dialect (
+  'Top',
+  ($schema->storage->_server_info->{normalized_dbms_version} || 0 ) >= 9
+    ? ('RowNumberOver')
+    : ()
+  ,
+) {
+  for my $quoted (0, 1) {
 
-# test MONEY type
-$schema->storage->dbh_do (sub {
-    my ($storage, $dbh) = @_;
-    eval { $dbh->do("DROP TABLE money_test") };
-    $dbh->do(<<'SQL');
-CREATE TABLE money_test (
-   id INT IDENTITY PRIMARY KEY,
-   amount MONEY NULL
-)
-SQL
-});
+    $schema = DBICTest::Schema->connect($dsn, $user, $pass, {
+        limit_dialect => $dialect,
+        $quoted
+          ? ( quote_char => [ qw/ [ ] / ], name_sep => '.' )
+          : ()
+        ,
+      });
 
-my $rs = $schema->resultset('Money');
+    my $test_type = "Dialect:$dialect Quoted:$quoted";
 
-lives_ok {
-  $row = $rs->create({ amount => 100 });
-} 'inserted a money value';
+    # basic limit support
+    TODO: {
+      my $art_rs = $schema->resultset ('Artist');
+      $art_rs->delete;
+      $art_rs->create({ name => 'Artist ' . $_ }) for (1..6);
 
-cmp_ok $rs->find($row->id)->amount, '==', 100, 'money value round-trip';
+      my $it = $schema->resultset('Artist')->search( {}, {
+        rows => 4,
+        offset => 3,
+        order_by => 'artistid',
+      });
 
-lives_ok {
-  $row->update({ amount => 200 });
-} 'updated a money value';
+      is( $it->count, 3, "$test_type: LIMIT count ok" );
 
-cmp_ok $rs->find($row->id)->amount, '==', 200,
-  'updated money value round-trip';
+      local $TODO = "Top-limit does not work when your limit ends up past the resultset"
+        if $dialect eq 'Top';
 
-lives_ok {
-  $row->update({ amount => undef });
-} 'updated a money value to NULL';
+      is( $it->next->name, 'Artist 4', "$test_type: iterator->next ok" );
+      $it->next;
+      is( $it->next->name, 'Artist 6', "$test_type: iterator->next ok" );
+      is( $it->next, undef, "$test_type: next past end of resultset ok" );
+    }
 
-is $rs->find($row->id)->amount, undef,'updated money value to NULL round-trip';
+    # plain ordered subqueries throw
+    throws_ok (sub {
+      $schema->resultset('Owners')->search ({}, { order_by => 'name' })->as_query
+    }, qr/ordered subselect encountered/, "$test_type: Ordered Subselect detection throws ok");
 
-$schema->storage->dbh_do (sub {
-    my ($storage, $dbh) = @_;
-    eval { $dbh->do("DROP TABLE owners") };
-    eval { $dbh->do("DROP TABLE books") };
-    $dbh->do(<<'SQL');
-CREATE TABLE books (
-   id INT IDENTITY (1, 1) NOT NULL,
-   source VARCHAR(100),
-   owner INT,
-   title VARCHAR(10),
-   price INT NULL
-)
+    # make sure ordered subselects *somewhat* work
+    {
+      my $owners = $schema->resultset ('Owners')->search ({}, { order_by => 'name', offset => 2, rows => 3, unsafe_subselect_ok => 1 });
+      my $sealed_owners = $owners->as_subselect_rs;
 
-CREATE TABLE owners (
-   id INT IDENTITY (1, 1) NOT NULL,
-   name VARCHAR(100),
-)
-SQL
+      is_deeply (
+        [ map { $_->name } ($sealed_owners->all) ],
+        [ map { $_->name } ($owners->all) ],
+        "$test_type: Sort preserved from within a subquery",
+      );
+    }
 
-});
+    {
+      my $book_owner_ids = $schema->resultset ('BooksInLibrary')->search ({}, {
+        rows => 6,
+        offset => 2,
+        join => 'owner',
+        distinct => 1,
+        order_by => 'owner.name',
+        unsafe_subselect_ok => 1
+      })->get_column ('owner');
 
-lives_ok ( sub {
-  # start a new connection, make sure rebless works
-  my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
-  $schema->populate ('Owners', [
-    [qw/id  name  /],
-    [qw/1   wiggle/],
-    [qw/2   woggle/],
-    [qw/3   boggle/],
-    [qw/4   fRIOUX/],
-    [qw/5   fRUE/],
-    [qw/6   fREW/],
-    [qw/7   fROOH/],
-    [qw/8   fISMBoC/],
-    [qw/9   station/],
-    [qw/10   mirror/],
-    [qw/11   dimly/],
-    [qw/12   face_to_face/],
-    [qw/13   icarus/],
-    [qw/14   dream/],
-    [qw/15   dyrstyggyr/],
-  ]);
-}, 'populate with PKs supplied ok' );
+      my @ids = $book_owner_ids->all;
 
+      is (@ids, 6, "$test_type: Limit works");
 
-lives_ok (sub {
-  # start a new connection, make sure rebless works
-  # test an insert with a supplied identity, followed by one without
-  my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
-  for (2, 1) {
-    my $id = $_ * 20 ;
-    $schema->resultset ('Owners')->create ({ id => $id, name => "troglodoogle $id" });
-    $schema->resultset ('Owners')->create ({ name => "troglodoogle " . ($id + 1) });
-  }
-}, 'create with/without PKs ok' );
+      my $book_owners = $schema->resultset ('Owners')->search ({
+        id => { -in => $book_owner_ids->as_query }
+      });
 
-is ($schema->resultset ('Owners')->count, 19, 'owner rows really in db' );
+      TODO: {
+        local $TODO = "Correlated limited IN subqueries will probably never preserve order";
 
-lives_ok ( sub {
-  # start a new connection, make sure rebless works
-  my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
-  $schema->populate ('BooksInLibrary', [
-    [qw/source  owner title   /],
-    [qw/Library 1     secrets0/],
-    [qw/Library 1     secrets1/],
-    [qw/Eatery  1     secrets2/],
-    [qw/Library 2     secrets3/],
-    [qw/Library 3     secrets4/],
-    [qw/Eatery  3     secrets5/],
-    [qw/Library 4     secrets6/],
-    [qw/Library 5     secrets7/],
-    [qw/Eatery  5     secrets8/],
-    [qw/Library 6     secrets9/],
-    [qw/Library 7     secrets10/],
-    [qw/Eatery  7     secrets11/],
-    [qw/Library 8     secrets12/],
-  ]);
-}, 'populate without PKs supplied ok' );
+        is_deeply (
+          [ map { $_->id } ($book_owners->all) ],
+          [ $book_owner_ids->all ],
+          "$test_type: Sort is preserved across IN subqueries",
+        );
+      }
+    }
 
-# plain ordered subqueries throw
-throws_ok (sub {
-  $schema->resultset('Owners')->search ({}, { order_by => 'name' })->as_query
-}, qr/ordered subselect encountered/, 'Ordered Subselect detection throws ok');
+    # still even with lost order of IN, we should be getting correct
+    # sets
+    {
+      my $owners = $schema->resultset ('Owners')->search ({}, { order_by => 'name', offset => 2, rows => 3, unsafe_subselect_ok => 1 });
+      my $corelated_owners = $owners->result_source->resultset->search (
+        {
+          id => { -in => $owners->get_column('id')->as_query },
+        },
+        {
+          order_by => 'name' #reorder because of what is shown above
+        },
+      );
 
-# make sure ordered subselects *somewhat* work
-{
-  my $owners = $schema->resultset ('Owners')->search ({}, { order_by => 'name', offset => 2, rows => 3, unsafe_subselect_ok => 1 });
+      is (
+        join ("\x00", map { $_->name } ($corelated_owners->all) ),
+        join ("\x00", map { $_->name } ($owners->all) ),
+        "$test_type: With an outer order_by, everything still matches",
+      );
+    }
 
-  my $al = $owners->current_source_alias;
-  my $sealed_owners = $owners->result_source->resultset->search (
-    {},
+    # make sure right-join-side single-prefetch ordering limit works
     {
-      alias => $al,
-      from => [{
-        -alias => $al,
-        -source_handle => $owners->result_source->handle,
-        $al => $owners->as_query,
-      }],
-    },
-  );
+      my $rs = $schema->resultset ('BooksInLibrary')->search (
+        {
+          'owner.name' => { '!=', 'woggle' },
+        },
+        {
+          prefetch => 'owner',
+          order_by => 'owner.name',
+        }
+      );
+      # this is the order in which they should come from the above query
+      my @owner_names = qw/boggle fISMBoC fREW fRIOUX fROOH fRUE wiggle wiggle/;
 
-  is_deeply (
-    [ map { $_->name } ($sealed_owners->all) ],
-    [ map { $_->name } ($owners->all) ],
-    'Sort preserved from within a subquery',
-  );
-}
+      is ($rs->all, 8, "$test_type: Correct amount of objects from right-sorted joined resultset");
+      is_deeply (
+        [map { $_->owner->name } ($rs->all) ],
+        \@owner_names,
+        "$test_type: Prefetched rows were properly ordered"
+      );
 
-TODO: {
-  local $TODO = "This porbably will never work, but it isn't critical either afaik";
+      my $limited_rs = $rs->search ({}, {rows => 6, offset => 2, unsafe_subselect_ok => 1});
+      is ($limited_rs->count, 6, "$test_type: Correct count of limited right-sorted joined resultset");
+      is ($limited_rs->count_rs->next, 6, "$test_type: Correct count_rs of limited right-sorted joined resultset");
 
-  my $book_owner_ids = $schema->resultset ('BooksInLibrary')
-                               ->search ({}, { join => 'owner', distinct => 1, order_by => 'owner.name', unsafe_subselect_ok => 1 })
-                                ->get_column ('owner');
+      my $queries;
+      $schema->storage->debugcb(sub { $queries++; });
+      $schema->storage->debug(1);
 
-  my $book_owners = $schema->resultset ('Owners')->search ({
-    id => { -in => $book_owner_ids->as_query }
-  });
+      is_deeply (
+        [map { $_->owner->name } ($limited_rs->all) ],
+        [@owner_names[2 .. 7]],
+        "$test_type: Prefetch-limited rows were properly ordered"
+      );
+      is ($queries, 1, "$test_type: Only one query with prefetch");
 
-  is_deeply (
-    [ map { $_->id } ($book_owners->all) ],
-    [ $book_owner_ids->all ],
-    'Sort is preserved across IN subqueries',
-  );
-}
+      $schema->storage->debugcb(undef);
+      $schema->storage->debug(0);
 
-# This is known not to work - thus the negative test
-{
-  my $owners = $schema->resultset ('Owners')->search ({}, { order_by => 'name', offset => 2, rows => 3, unsafe_subselect_ok => 1 });
-  my $corelated_owners = $owners->result_source->resultset->search (
-    {
-      id => { -in => $owners->get_column('id')->as_query },
-    },
-    {
-      order_by => 'name' #reorder because of what is shown above
-    },
-  );
+      is_deeply (
+        [map { $_->name } ($limited_rs->search_related ('owner')->all) ],
+        [@owner_names[2 .. 7]],
+        "$test_type: Rows are still properly ordered after search_related",
+      );
+    }
 
-  cmp_ok (
-    join ("\x00", map { $_->name } ($corelated_owners->all) ),
-      'ne',
-    join ("\x00", map { $_->name } ($owners->all) ),
-    'Sadly sort not preserved from within a corelated subquery',
-  );
+    # try a ->has_many direction with duplicates
+    my $owners = $schema->resultset ('Owners')->search (
+      {
+        'books.id' => { '!=', undef },
+        'me.name' => { '!=', 'somebogusstring' },
+      },
+      {
+        prefetch => 'books',
+        order_by => { -asc => \['name + ?', [ test => 'xxx' ]] }, # test bindvar propagation
+        rows     => 3,  # 8 results total
+        unsafe_subselect_ok => 1,
+      },
+    );
 
-  cmp_ok (
-    join ("\x00", sort map { $_->name } ($corelated_owners->all) ),
-      'ne',
-    join ("\x00", sort map { $_->name } ($owners->all) ),
-    'Which in fact gives a completely wrong dataset',
-  );
-}
+    my ($sql, @bind) = @${$owners->page(3)->as_query};
+    is_deeply (
+      \@bind,
+      [ ([ 'me.name' => 'somebogusstring' ], [ test => 'xxx' ]) x 2 ],  # double because of the prefetch subq
+    );
 
+    is ($owners->page(1)->all, 3, "$test_type: has_many prefetch returns correct number of rows");
+    is ($owners->page(1)->count, 3, "$test_type: has-many prefetch returns correct count");
 
-# make sure right-join-side single-prefetch ordering limit works
-{
-  my $rs = $schema->resultset ('BooksInLibrary')->search (
-    {
-      'owner.name' => { '!=', 'woggle' },
-    },
-    {
-      prefetch => 'owner',
-      order_by => 'owner.name',
+    is ($owners->page(3)->count, 2, "$test_type: has-many prefetch returns correct count");
+    TODO: {
+      local $TODO = "Top-limit does not work when your limit ends up past the resultset"
+        if $dialect eq 'Top';
+      is ($owners->page(3)->all, 2, "$test_type: has_many prefetch returns correct number of rows");
+      is ($owners->page(3)->count_rs->next, 2, "$test_type: has-many prefetch returns correct count_rs");
     }
-  );
-  # this is the order in which they should come from the above query
-  my @owner_names = qw/boggle fISMBoC fREW fRIOUX fROOH fRUE wiggle wiggle/;
 
-  is ($rs->all, 8, 'Correct amount of objects from right-sorted joined resultset');
-  is_deeply (
-    [map { $_->owner->name } ($rs->all) ],
-    \@owner_names,
-    'Rows were properly ordered'
-  );
 
-  my $limited_rs = $rs->search ({}, {rows => 7, offset => 2, unsafe_subselect_ok => 1});
-  is ($limited_rs->count, 6, 'Correct count of limited right-sorted joined resultset');
-  is ($limited_rs->count_rs->next, 6, 'Correct count_rs of limited right-sorted joined resultset');
+    # try a ->belongs_to direction (no select collapse, group_by should work)
+    my $books = $schema->resultset ('BooksInLibrary')->search (
+      {
+        'owner.name' => [qw/wiggle woggle/],
+      },
+      {
+        distinct => 1,
+        having => \['1 = ?', [ test => 1 ] ], #test having propagation
+        prefetch => 'owner',
+        rows     => 2,  # 3 results total
+        order_by => { -desc => 'me.owner' },
+        unsafe_subselect_ok => 1,
+      },
+    );
 
-  my $queries;
-  $schema->storage->debugcb(sub { $queries++; });
-  $schema->storage->debug(1);
+    ($sql, @bind) = @${$books->page(3)->as_query};
+    is_deeply (
+      \@bind,
+      [
+        # inner
+        [ 'owner.name' => 'wiggle' ], [ 'owner.name' => 'woggle' ], [ source => 'Library' ], [ test => '1' ],
+        # outer
+        [ 'owner.name' => 'wiggle' ], [ 'owner.name' => 'woggle' ], [ source => 'Library' ],
+      ],
+    );
 
-  is_deeply (
-    [map { $_->owner->name } ($limited_rs->all) ],
-    [@owner_names[2 .. 7]],
-    'Limited rows were properly ordered'
-  );
-  is ($queries, 1, 'Only one query with prefetch');
+    is ($books->page(1)->all, 2, "$test_type: Prefetched grouped search returns correct number of rows");
+    is ($books->page(1)->count, 2, "$test_type: Prefetched grouped search returns correct count");
 
-  $schema->storage->debugcb(undef);
-  $schema->storage->debug(0);
+    is ($books->page(2)->count, 1, "$test_type: Prefetched grouped search returns correct count");
+    TODO: {
+      local $TODO = "Top-limit does not work when your limit ends up past the resultset"
+        if $dialect eq 'Top';
+      is ($books->page(2)->all, 1, "$test_type: Prefetched grouped search returns correct number of rows");
+      is ($books->page(2)->count_rs->next, 1, "$test_type: Prefetched grouped search returns correct count_rs");
+    }
+  }
+}
 
 
-  is_deeply (
-    [map { $_->name } ($limited_rs->search_related ('owner')->all) ],
-    [@owner_names[2 .. 7]],
-    'Rows are still properly ordered after search_related'
+# test GUID columns
+{
+  $schema->storage->dbh_do (sub {
+    my ($storage, $dbh) = @_;
+    eval { $dbh->do("DROP TABLE artist") };
+    $dbh->do(<<'SQL');
+CREATE TABLE artist (
+   artistid UNIQUEIDENTIFIER NOT NULL,
+   name VARCHAR(100),
+   rank INT NOT NULL DEFAULT '13',
+   charfield CHAR(10) NULL,
+   a_guid UNIQUEIDENTIFIER,
+   primary key(artistid)
+)
+SQL
+  });
+
+  # start disconnected to make sure insert works on an un-reblessed storage
+  $schema = DBICTest::Schema->connect($dsn, $user, $pass);
+
+  my $row;
+  lives_ok {
+    $row = $schema->resultset('ArtistGUID')->create({ name => 'mtfnpy' })
+  } 'created a row with a GUID';
+
+  ok(
+    eval { $row->artistid },
+    'row has GUID PK col populated',
   );
-}
+  diag $@ if $@;
 
+  ok(
+    eval { $row->a_guid },
+    'row has a GUID col with auto_nextval populated',
+  );
+  diag $@ if $@;
 
-#
-# try a prefetch on tables with identically named columns
-#
+  my $row_from_db = $schema->resultset('ArtistGUID')
+    ->search({ name => 'mtfnpy' })->first;
 
-# set quote char - make sure things work while quoted
-$schema->storage->_sql_maker->{quote_char} = [qw/[ ]/];
-$schema->storage->_sql_maker->{name_sep} = '.';
+  is $row_from_db->artistid, $row->artistid,
+    'PK GUID round trip';
 
+  is $row_from_db->a_guid, $row->a_guid,
+    'NON-PK GUID round trip';
+}
+
+# test MONEY type
 {
-  # try a ->has_many direction
-  my $owners = $schema->resultset ('Owners')->search (
-    {
-      'books.id' => { '!=', undef },
-      'me.name' => { '!=', 'somebogusstring' },
-    },
-    {
-      prefetch => 'books',
-      order_by => { -asc => \['name + ?', [ test => 'xxx' ]] }, # test bindvar propagation
-      rows     => 3,  # 8 results total
-      unsafe_subselect_ok => 1,
-    },
-  );
+  $schema->storage->dbh_do (sub {
+    my ($storage, $dbh) = @_;
+    eval { $dbh->do("DROP TABLE money_test") };
+    $dbh->do(<<'SQL');
+CREATE TABLE money_test (
+   id INT IDENTITY PRIMARY KEY,
+   amount MONEY NULL
+)
+SQL
+  });
 
-  my ($sql, @bind) = @${$owners->page(3)->as_query};
-  is_deeply (
-    \@bind,
-    [ ([ 'me.name' => 'somebogusstring' ], [ test => 'xxx' ]) x 2 ],  # double because of the prefetch subq
-  );
+  my $rs = $schema->resultset('Money');
+  my $row;
 
-  is ($owners->page(1)->all, 3, 'has_many prefetch returns correct number of rows');
-  is ($owners->page(1)->count, 3, 'has-many prefetch returns correct count');
+  lives_ok {
+    $row = $rs->create({ amount => 100 });
+  } 'inserted a money value';
 
-  is ($owners->page(3)->all, 2, 'has_many prefetch returns correct number of rows');
-  is ($owners->page(3)->count, 2, 'has-many prefetch returns correct count');
-  is ($owners->page(3)->count_rs->next, 2, 'has-many prefetch returns correct count_rs');
+  cmp_ok $rs->find($row->id)->amount, '==', 100, 'money value round-trip';
 
+  lives_ok {
+    $row->update({ amount => 200 });
+  } 'updated a money value';
 
-  # try a ->belongs_to direction (no select collapse, group_by should work)
-  my $books = $schema->resultset ('BooksInLibrary')->search (
-    {
-      'owner.name' => [qw/wiggle woggle/],
-    },
-    {
-      distinct => 1,
-      having => \['1 = ?', [ test => 1 ] ], #test having propagation
-      prefetch => 'owner',
-      rows     => 2,  # 3 results total
-      order_by => { -desc => 'me.owner' },
-      unsafe_subselect_ok => 1,
-    },
-  );
+  cmp_ok $rs->find($row->id)->amount, '==', 200,
+    'updated money value round-trip';
 
-  ($sql, @bind) = @${$books->page(3)->as_query};
-  is_deeply (
-    \@bind,
-    [
-      # inner
-      [ 'owner.name' => 'wiggle' ], [ 'owner.name' => 'woggle' ], [ source => 'Library' ], [ test => '1' ],
-      # outer
-      [ 'owner.name' => 'wiggle' ], [ 'owner.name' => 'woggle' ], [ source => 'Library' ],
-    ],
-  );
+  lives_ok {
+    $row->update({ amount => undef });
+  } 'updated a money value to NULL';
 
-  is ($books->page(1)->all, 2, 'Prefetched grouped search returns correct number of rows');
-  is ($books->page(1)->count, 2, 'Prefetched grouped search returns correct count');
-
-  is ($books->page(2)->all, 1, 'Prefetched grouped search returns correct number of rows');
-  is ($books->page(2)->count, 1, 'Prefetched grouped search returns correct count');
-  is ($books->page(2)->count_rs->next, 1, 'Prefetched grouped search returns correct count_rs');
+  is $rs->find($row->id)->amount, undef,'updated money value to NULL round-trip';
 }
 
+
 done_testing;
 
 # clean up our mess

Modified: DBIx-Class/0.08/branches/pg_cursors/t/74mssql.t
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/74mssql.t	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/t/74mssql.t	2010-05-31 07:55:05 UTC (rev 9468)
@@ -172,6 +172,47 @@
 
   is $rs->first, undef, 'rolled back';
   $rs->reset;
+
+  # test RNO detection when version detection fails
+  SKIP: {
+    my $storage = $schema->storage;
+    my $version = $storage->_server_info->{normalized_dbms_version};
+    
+    skip 'could not detect SQL Server version', 1 if not defined $version;
+
+    my $have_rno = $version >= 9 ? 1 : 0;
+
+    # Delete version information to force RNO check when rebuilding SQLA
+    # instance.
+    no strict 'refs';
+    no warnings 'redefine';
+    local *{(ref $storage).'::_get_server_version'} = sub { undef };
+
+    my $server_info = { %{ $storage->_server_info_hash } }; # clone
+
+    delete @$server_info{qw/dbms_version normalized_dbms_version/};
+
+    local $storage->{_server_info_hash} = $server_info;
+    local $storage->{_sql_maker}        = undef;
+    local $storage->{_sql_maker_opts}   = undef;
+
+    $storage->sql_maker;
+
+    my $rno_detected =
+      ($storage->{_sql_maker_opts}{limit_dialect} eq 'RowNumberOver') ? 1 : 0;
+
+    ok (($have_rno == $rno_detected),
+      'row_number() over support detected correctly');
+  }
+
+  {
+    my $schema = DBICTest::Schema->clone;
+    $schema->connection($dsn, $user, $pass);
+
+    like $schema->storage->sql_maker->{limit_dialect},
+      qr/^(?:Top|RowNumberOver)\z/,
+      'sql_maker is correct on unconnected schema';
+  }
 }
 
 # test op-induced autoconnect
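
The version-detection test above temporarily replaces a storage method via a localized glob assignment. A minimal standalone sketch of that technique, using a hypothetical package rather than the real storage class:

use strict;
use warnings;

{
  package My::Storage;
  sub server_version { '10.0' }
}

my $storage = bless {}, 'My::Storage';

{
  no strict 'refs';
  no warnings 'redefine';
  # the override lasts only until the end of this block
  local *{'My::Storage::server_version'} = sub { undef };

  printf "inside:  %s\n", defined $storage->server_version ? 'defined' : 'undef';
}

printf "outside: %s\n", $storage->server_version;   # back to '10.0'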

Modified: DBIx-Class/0.08/branches/pg_cursors/t/85utf8.t
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/85utf8.t	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/t/85utf8.t	2010-05-31 07:55:05 UTC (rev 9468)
@@ -5,6 +5,7 @@
 use Test::Warn;
 use lib qw(t/lib);
 use DBICTest;
+use DBIC::DebugObj;
 
 {
   package A::Comp;
@@ -74,24 +75,6 @@
 DBICTest::Schema::CD->utf8_columns('title');
 Class::C3->reinitialize();
 
-{
-  package DBICTest::UTF8::Debugger;
-
-  use base 'DBIx::Class::Storage::Statistics';
-
-  __PACKAGE__->mk_group_accessors(simple => 'call_stack');
-
-  sub query_start {
-    my $self = shift;
-    my $sql = shift;
-
-    my @bind = map { substr $_, 1, -1 } (@_); # undo the effect of _fix_bind_params
-
-    $self->call_stack ( [ @{$self->call_stack || [] }, [$sql, @bind] ] );
-    $self->next::method ($sql, @_);
-  }
-}
-
 # as per http://search.cpan.org/dist/Test-Simple/lib/Test/More.pm#utf8
 binmode (Test::More->builder->$_, ':utf8') for qw/output failure_output todo_output/;
 
@@ -100,16 +83,22 @@
 cmp_ok ($bytestream_title, 'ne', $utf8_title, 'unicode/raw differ (sanity check)');
 
 my $storage = $schema->storage;
-$storage->debugobj (DBICTest::UTF8::Debugger->new);
-$storage->debugobj->silence (1);
+my ($sql, @bind);
+my $debugobj = DBIC::DebugObj->new (\$sql, \@bind);
+my ($orig_debug, $orig_debugobj) = ($storage->debug, $storage->debugobj);
+$storage->debugobj ($debugobj);
 $storage->debug (1);
 
 my $cd = $schema->resultset('CD')->create( { artist => 1, title => $utf8_title, year => '2048' } );
 
-# bind values are always alphabetically ordered by column, thus [2]
+$storage->debugobj ($orig_debugobj);
+$storage->debug ($orig_debug);
+
+# bind values are always alphabetically ordered by column, thus [1]
+# the single quotes are an artefact of the debug-system
 TODO: {
   local $TODO = "This has been broken since rev 1191, Mar 2006";
-  is ($storage->debugobj->call_stack->[-1][2], $bytestream_title, 'INSERT: raw bytes sent to the database');
+  is ($bind[1], "'$bytestream_title'", 'INSERT: raw bytes sent to the database');
 }
 
 # this should be using the cursor directly, no inflation/processing of any sort
@@ -145,8 +134,16 @@
 $bytestream_title = $utf8_title = "something \x{219} else";
 utf8::encode($bytestream_title);
 
+
+$storage->debugobj ($debugobj);
+$storage->debug (1);
+
 $cd->update ({ title => $utf8_title });
-is ($storage->debugobj->call_stack->[-1][1], $bytestream_title, 'UPDATE: raw bytes sent to the database');
+
+$storage->debugobj ($orig_debugobj);
+$storage->debug ($orig_debug);
+
+is ($bind[0], "'$bytestream_title'", 'UPDATE: raw bytes sent to the database');
 ($raw_db_title) = $schema->resultset('CD')
                              ->search ($cd->ident_condition)
                                ->get_column('title')

Modified: DBIx-Class/0.08/branches/pg_cursors/t/90join_torture.t
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/90join_torture.t	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/t/90join_torture.t	2010-05-31 07:55:05 UTC (rev 9468)
@@ -146,7 +146,7 @@
             JOIN cd cd ON cd.cdid = me.cd_id
             JOIN artist artist_2 ON artist_2.artistid = cd.artist
           GROUP BY me.cd_id
-        ) count_subq
+        ) me
     )',
     [],
   );

Modified: DBIx-Class/0.08/branches/pg_cursors/t/93single_accessor_object.t
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/93single_accessor_object.t	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/t/93single_accessor_object.t	2010-05-31 07:55:05 UTC (rev 9468)
@@ -45,20 +45,20 @@
 $schema = DBICTest->init_schema();
 
 {
-	my $artist = $schema->resultset('Artist')->create({ artistid => 666, name => 'bad religion' });
-	my $cd = $schema->resultset('CD')->create({ cdid => 187, artist => 1, title => 'how could hell be any worse?', year => 1982, genreid => undef });
+  my $artist = $schema->resultset('Artist')->create({ artistid => 666, name => 'bad religion' });
+  my $cd = $schema->resultset('CD')->create({ cdid => 187, artist => 1, title => 'how could hell be any worse?', year => 1982, genreid => undef });
 
-	ok(!defined($cd->get_column('genreid')), 'genreid is NULL');  #no accessor was defined for this column
-	ok(!defined($cd->genre), 'genre accessor returns undef');
+  ok(!defined($cd->get_column('genreid')), 'genreid is NULL');  #no accessor was defined for this column
+  ok(!defined($cd->genre), 'genre accessor returns undef');
 }
 
 $schema = DBICTest->init_schema();
 
 {
-	my $artist = $schema->resultset('Artist')->create({ artistid => 666, name => 'bad religion' });
-	my $genre = $schema->resultset('Genre')->create({ genreid => 88, name => 'disco' });
-	my $cd = $schema->resultset('CD')->create({ cdid => 187, artist => 1, title => 'how could hell be any worse?', year => 1982 });
+  my $artist = $schema->resultset('Artist')->create({ artistid => 666, name => 'bad religion' });
+  my $genre = $schema->resultset('Genre')->create({ genreid => 88, name => 'disco' });
+  my $cd = $schema->resultset('CD')->create({ cdid => 187, artist => 1, title => 'how could hell be any worse?', year => 1982 });
 
-	dies_ok { $cd->genre } 'genre accessor throws without column';
+  dies_ok { $cd->genre } 'genre accessor throws without column';
 }
 

Modified: DBIx-Class/0.08/branches/pg_cursors/t/94versioning.t
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/94versioning.t	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/t/94versioning.t	2010-05-31 07:55:05 UTC (rev 9468)
@@ -245,6 +245,33 @@
   is($schema_v2->get_db_version(), '3.0', 'Fast deploy/upgrade');
 };
 
+# Check that Schema::Versioned deals with all forms of connect arguments.
+{
+  my $get_db_version_run = 0;
+
+  no warnings qw/once redefine/;
+  local *DBIx::Class::Schema::Versioned::get_db_version = sub {
+    $get_db_version_run = 1;
+    return $_[0]->schema_version;
+  };
+
+  # Make sure the env var isn't what's triggering it
+  local $ENV{DBIC_NO_VERSION_CHECK} = 0;
+
+  DBICVersion::Schema->connect({
+    dsn => $dsn,
+    user => $user, 
+    pass => $pass,
+    ignore_version => 1
+  });
+  
+  ok($get_db_version_run == 0, "attributes pulled from hashref connect_info");
+  $get_db_version_run = 0;
+
+  DBICVersion::Schema->connect( $dsn, $user, $pass, { ignore_version => 1 } );
+  ok($get_db_version_run == 0, "attributes pulled from list connect_info");
+}
+
 unless ($ENV{DBICTEST_KEEP_VERSIONING_DDL}) {
     unlink $_ for (values %$fn);
 }

Modified: DBIx-Class/0.08/branches/pg_cursors/t/bind/order_by.t
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/bind/order_by.t	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/t/bind/order_by.t	2010-05-31 07:55:05 UTC (rev 9468)
@@ -3,6 +3,7 @@
 
 use Test::More;
 use Test::Exception;
+use Data::Dumper::Concise;
 use lib qw(t/lib);
 use DBICTest;
 use DBIC::SqlMakerTest;
@@ -25,7 +26,7 @@
             {
                 order_by => $args->{order_by},
                 having =>
-                  [ { read_count => { '>' => 5 } }, \[ 'read_count < ?', 8 ] ]
+                  [ { read_count => { '>' => 5 } }, \[ 'read_count < ?', [ read_count => 8  ] ] ]
             }
           )->as_query,
         "(
@@ -38,14 +39,13 @@
         [
             [qw(foo bar)],
             [qw(read_count 5)],
-            8,
+            [qw(read_count 8)],
             $args->{bind}
               ? @{ $args->{bind} }
               : ()
         ],
-      );
+      ) || diag Dumper $args->{order_by};
     };
-    fail('Fail the unfinished is_same_sql_bind') if $@;
   }
 }
 
@@ -61,46 +61,42 @@
         bind      => [],
     },
     {
-        order_by  => { -desc => \[ 'colA LIKE ?', 'test' ] },
+        order_by  => { -desc => \[ 'colA LIKE ?', [ colA => 'test' ] ] },
         order_req => 'colA LIKE ? DESC',
-        bind      => [qw(test)],
+        bind      => [ [ colA => 'test' ] ],
     },
     {
-        order_by  => \[ 'colA LIKE ? DESC', 'test' ],
+        order_by  => \[ 'colA LIKE ? DESC', [ colA => 'test' ] ],
         order_req => 'colA LIKE ? DESC',
-        bind      => [qw(test)],
+        bind      => [ [ colA => 'test' ] ],
     },
     {
         order_by => [
             { -asc  => \['colA'] },
-            { -desc => \[ 'colB LIKE ?', 'test' ] },
-            { -asc  => \[ 'colC LIKE ?', 'tost' ] }
+            { -desc => \[ 'colB LIKE ?', [ colB => 'test' ] ] },
+            { -asc  => \[ 'colC LIKE ?', [ colC => 'tost' ] ] },
         ],
         order_req => 'colA ASC, colB LIKE ? DESC, colC LIKE ? ASC',
-        bind      => [qw(test tost)],
+        bind      => [ [ colB => 'test' ], [ colC => 'tost' ] ],
     },
-
-    # (mo) this would be really really nice!
-    # (ribasushi) I don't think so, not writing it - patches welcome
     {
+        todo => 1,
         order_by => [
             { -asc  => 'colA' },
             { -desc => { colB => { 'LIKE' => 'test' } } },
             { -asc  => { colC => { 'LIKE' => 'tost' } } }
         ],
         order_req => 'colA ASC, colB LIKE ? DESC, colC LIKE ? ASC',
-        bind      => [ [ colB => 'test' ], [ colC => 'tost' ] ],      # ???
-        todo => 1,
+        bind      => [ [ colB => 'test' ], [ colC => 'tost' ] ],
     },
     {
+        todo => 1,
         order_by  => { -desc => { colA  => { LIKE  => 'test' } } },
         order_req => 'colA LIKE ? DESC',
-        bind      => [qw(test)],
-        todo => 1,
+        bind      => [ [ colA => 'test' ] ],
     },
 );
 
-plan( tests => scalar @tests * 2 );
-
 test_order($_) for @tests;
 
+done_testing;

Modified: DBIx-Class/0.08/branches/pg_cursors/t/cdbi/columns_as_hashes.t
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/cdbi/columns_as_hashes.t	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/t/cdbi/columns_as_hashes.t	2010-05-31 07:55:05 UTC (rev 9468)
@@ -26,7 +26,7 @@
         my $rating = $waves->{rating};
         $waves->Rating("PG");
         is $rating, "R", 'evaluation of column value is not deferred';
-    } qr{^Column 'rating' of 'Film/$waves' was fetched as a hash at \Q$0};
+    } qr{^Column 'rating' of 'Film/$waves' was fetched as a hash at\b};
 
     warnings_like {
         is $waves->{title}, $waves->Title, "columns can be accessed as hashes";

Modified: DBIx-Class/0.08/branches/pg_cursors/t/count/count_rs.t
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/count/count_rs.t	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/t/count/count_rs.t	2010-05-31 07:55:05 UTC (rev 9468)
@@ -54,7 +54,7 @@
           JOIN cd disc ON disc.cdid = tracks.cd
         WHERE ( ( position = ? OR position = ? ) )
         LIMIT 3 OFFSET 8
-       ) count_subq
+       ) tracks
     )',
     [ [ position => 1 ], [ position => 2 ] ],
     'count_rs db-side limit applied',
@@ -88,7 +88,7 @@
           JOIN artist artist ON artist.artistid = cds.artist
         WHERE tracks.position = ? OR tracks.position = ?
         GROUP BY cds.cdid
-      ) count_subq
+      ) cds
     ',
     [ qw/'1' '2'/ ],
     'count softlimit applied',
@@ -109,7 +109,7 @@
         WHERE tracks.position = ? OR tracks.position = ?
         GROUP BY cds.cdid
         LIMIT 3 OFFSET 4
-      ) count_subq
+      ) cds
     )',
     [ [ 'tracks.position' => 1 ], [ 'tracks.position' => 2 ] ],
     'count_rs db-side limit applied',

Modified: DBIx-Class/0.08/branches/pg_cursors/t/count/prefetch.t
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/count/prefetch.t	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/t/count/prefetch.t	2010-05-31 07:55:05 UTC (rev 9468)
@@ -31,7 +31,7 @@
             JOIN artist artist ON artist.artistid = cds.artist
           WHERE tracks.position = ? OR tracks.position = ?
           GROUP BY cds.cdid
-        ) count_subq
+        ) cds
     )',
     [ map { [ 'tracks.position' => $_ ] } (1, 2) ],
   );
@@ -63,7 +63,7 @@
           WHERE ( genre.name = ? )
           GROUP BY genre.genreid
         )
-      count_subq
+      genre
     )',
     [ [ 'genre.name' => 'emo' ] ],
   );

Modified: DBIx-Class/0.08/branches/pg_cursors/t/inflate/datetime_mssql.t
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/inflate/datetime_mssql.t	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/t/inflate/datetime_mssql.t	2010-05-31 07:55:05 UTC (rev 9468)
@@ -95,8 +95,8 @@
       ->first
     );
     is( $row->$col, $dt, "$type roundtrip" );
-    
-    is( $row->$col->nanosecond, $sample_dt->{nanosecond},
+
+    cmp_ok( $row->$col->nanosecond, '==', $sample_dt->{nanosecond},
       'DateTime fractional portion roundtrip' )
       if exists $sample_dt->{nanosecond};
   }

Modified: DBIx-Class/0.08/branches/pg_cursors/t/inflate/datetime_sybase_asa.t
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/inflate/datetime_sybase_asa.t	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/t/inflate/datetime_sybase_asa.t	2010-05-31 07:55:05 UTC (rev 9468)
@@ -3,6 +3,7 @@
 
 use Test::More;
 use Test::Exception;
+use Scope::Guard ();
 use lib qw(t/lib);
 use DBICTest;
 
@@ -27,20 +28,20 @@
   [ $dsn2, $user2, $pass2 ],
 );
 
-my @handles_to_clean;
+my $schema;
 
 foreach my $info (@info) {
   my ($dsn, $user, $pass) = @$info;
 
   next unless $dsn;
 
-  my $schema = DBICTest::Schema->clone;
+  $schema = DBICTest::Schema->clone;
 
   $schema->connection($dsn, $user, $pass, {
     on_connect_call => [ 'datetime_setup' ],
   });
 
-  push @handles_to_clean, $schema->storage->dbh;
+  my $sg = Scope::Guard->new(\&cleanup); 
 
 # coltype, col, date
   my @dt_types = (
@@ -72,7 +73,7 @@
       ->search({ trackid => $row->trackid }, { select => [$col] })
       ->first
     );
-    is( $row->$col, $dt, 'DateTime roundtrip' );
+    is( $row->$col, $dt, "$type roundtrip" );
 
     is $row->$col->nanosecond, $dt->nanosecond,
         'nanoseconds survived' if 0+$dt->nanosecond;
@@ -82,8 +83,8 @@
 done_testing;
 
 # clean up our mess
-END {
-  foreach my $dbh (@handles_to_clean) {
+sub cleanup {
+  if (my $dbh = $schema->storage->dbh) {
     eval { $dbh->do("DROP TABLE $_") } for qw/track/;
   }
 }

Modified: DBIx-Class/0.08/branches/pg_cursors/t/inflate/hri.t
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/inflate/hri.t	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/t/inflate/hri.t	2010-05-31 07:55:05 UTC (rev 9468)
@@ -9,14 +9,15 @@
 # Under some versions of SQLite if the $rs is left hanging around it will lock
 # So we create a scope here cos I'm lazy
 {
-    my $rs = $schema->resultset('CD')->search ({}, { order_by => 'cdid' });
+    my $rs = $schema->resultset('CD')->search ({}, {
+        order_by => 'cdid',
+        # use the hashref inflator class as result class
+        result_class => 'DBIx::Class::ResultClass::HashRefInflator',
+    });
 
     # get the defined columns
     my @dbic_cols = sort $rs->result_source->columns;
 
-    # use the hashref inflator class as result class
-    $rs->result_class('DBIx::Class::ResultClass::HashRefInflator');
-
     # fetch first record
     my $datahashref1 = $rs->first;
 
@@ -29,6 +30,16 @@
 
     my $cd2 = $rs->search({ cdid => 1 })->single;
     is_deeply ( $cd2, $datahashref1, 'first/search+single return the same thing');
+
+    $rs->result_class('DBIx::Class::Row');
+
+    is( $rs->result_class, 'DBIx::Class::Row', 'result_class set' );
+
+    is(
+        $rs->search->result_class, 'DBIx::Class::ResultClass::HashRefInflator',
+        'result_class set using accessor does not propagate over search'
+    );
+
 }
 
 sub check_cols_of {

Modified: DBIx-Class/0.08/branches/pg_cursors/t/lib/DBIC/DebugObj.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/lib/DBIC/DebugObj.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/t/lib/DBIC/DebugObj.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -41,7 +41,7 @@
 
 sub query_end { }
 
-sub txn_start { }
+sub txn_begin { }
 
 sub txn_commit { }
 

Added: DBIx-Class/0.08/branches/pg_cursors/t/lib/DBICNSTest/Result/D.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/lib/DBICNSTest/Result/D.pm	                        (rev 0)
+++ DBIx-Class/0.08/branches/pg_cursors/t/lib/DBICNSTest/Result/D.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -0,0 +1,5 @@
+package DBICNSTest::Result::D;
+use base qw/DBIx::Class::Core/;
+__PACKAGE__->table('d');
+__PACKAGE__->add_columns('d');
+1;

Added: DBIx-Class/0.08/branches/pg_cursors/t/lib/DBICNSTest/ResultSet/D.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/lib/DBICNSTest/ResultSet/D.pm	                        (rev 0)
+++ DBIx-Class/0.08/branches/pg_cursors/t/lib/DBICNSTest/ResultSet/D.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -0,0 +1,2 @@
+package DBICNSTest::ResultSet::D;
+1;

Modified: DBIx-Class/0.08/branches/pg_cursors/t/lib/DBICTest/AuthorCheck.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/lib/DBICTest/AuthorCheck.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/t/lib/DBICTest/AuthorCheck.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -43,17 +43,18 @@
     push @fail_reasons, "Missing ./inc directory";
   }
 
-  if (not $mf_mtime) {
+  if(not $mf_mtime) {
     push @fail_reasons, "Missing ./Makefile";
   }
-  elsif($mf_mtime < $mf_pl_mtime) {
-    push @fail_reasons, "./Makefile.PL is newer than ./Makefile";
+  else {
+    if($mf_mtime < $mf_pl_mtime) {
+      push @fail_reasons, "./Makefile.PL is newer than ./Makefile";
+    }
+    if($mf_mtime < $optdeps_mtime) {
+      push @fail_reasons, "./$optdeps is newer than ./Makefile";
+    }
   }
 
-  if ($mf_mtime < $optdeps_mtime) {
-    push @fail_reasons, "./$optdeps is newer than ./Makefile";
-  }
-
   if (@fail_reasons) {
     print STDERR <<'EOE';
 

Modified: DBIx-Class/0.08/branches/pg_cursors/t/lib/DBICTest/Schema/BooksInLibrary.pm
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/lib/DBICTest/Schema/BooksInLibrary.pm	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/t/lib/DBICTest/Schema/BooksInLibrary.pm	2010-05-31 07:55:05 UTC (rev 9468)
@@ -27,6 +27,8 @@
 );
 __PACKAGE__->set_primary_key('id');
 
+__PACKAGE__->add_unique_constraint (['title']);
+
 __PACKAGE__->resultset_attributes({where => { source => "Library" } });
 
 __PACKAGE__->belongs_to ( owner => 'DBICTest::Schema::Owners', 'owner' );

Modified: DBIx-Class/0.08/branches/pg_cursors/t/prefetch/grouped.t
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/prefetch/grouped.t	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/t/prefetch/grouped.t	2010-05-31 07:55:05 UTC (rev 9468)
@@ -76,7 +76,7 @@
           WHERE ( me.cd IN ( ?, ?, ?, ?, ? ) )
           GROUP BY me.cd
         )
-      count_subq
+      me
     )',
     [ map { [ 'me.cd' => $_] } ($cd_rs->get_column ('cdid')->all) ],
     'count() query generated expected SQL',
@@ -151,7 +151,7 @@
           WHERE ( me.cdid IS NOT NULL )
           GROUP BY me.cdid
           LIMIT 2
-        ) count_subq
+        ) me
     )',
     [],
     'count() query generated expected SQL',
@@ -262,7 +262,7 @@
           WHERE ( me.cd IN ( ?, ?, ?, ?, ? ) )
           GROUP BY SUBSTR(me.cd, 1, 1)
         )
-      count_subq
+      me
     )',
     [ map { [ 'me.cd' => $_] } ($cd_rs->get_column ('cdid')->all) ],
     'count() query generated expected SQL',

Modified: DBIx-Class/0.08/branches/pg_cursors/t/resultset/as_subselect_rs.t
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/resultset/as_subselect_rs.t	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/t/resultset/as_subselect_rs.t	2010-05-31 07:55:05 UTC (rev 9468)
@@ -22,4 +22,21 @@
    '... and chaining off the virtual view works';
 dies_ok  { $new_rs->as_subselect_rs->search({'artwork_to_artist.artwork_cd_id'=> 1})->count }
    q{... but chaining off of a virtual view using join doesn't work};
+
+my $book_rs = $schema->resultset ('BooksInLibrary')->search ({}, { join => 'owner' });
+
+is_same_sql_bind (
+  $book_rs->as_subselect_rs->as_query,
+  '(SELECT me.id, me.source, me.owner, me.title, me.price 
+      FROM (
+        SELECT me.id, me.source, me.owner, me.title, me.price
+          FROM books me
+          JOIN owners owner ON owner.id = me.owner
+        WHERE ( source = ? )
+      ) me
+  )',
+  [ [ source => 'Library' ] ],
+  'Resultset-class attributes do not seep outside of the subselect',
+);
+
 done_testing;

Modified: DBIx-Class/0.08/branches/pg_cursors/t/resultset/update_delete.t
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/resultset/update_delete.t	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/t/resultset/update_delete.t	2010-05-31 07:55:05 UTC (rev 9468)
@@ -6,9 +6,6 @@
 use Test::Exception;
 use DBICTest;
 
-#plan tests => 5;
-plan 'no_plan';
-
 my $schema = DBICTest->init_schema();
 
 my $tkfks = $schema->resultset('FourKeys_to_TwoKeys');
@@ -110,3 +107,10 @@
 $sub_rs->delete;
 
 is ($tkfks->count, $tkfk_cnt -= 2, 'Only two rows deleted');
+
+# make sure limit-only deletion works
+cmp_ok ($tkfk_cnt, '>', 1, 'More than 1 row left');
+$tkfks->search ({}, { rows => 1 })->delete;
+is ($tkfks->count, $tkfk_cnt -= 1, 'Only one row deleted');
+
+done_testing;

Added: DBIx-Class/0.08/branches/pg_cursors/t/row/filter_column.t
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/row/filter_column.t	                        (rev 0)
+++ DBIx-Class/0.08/branches/pg_cursors/t/row/filter_column.t	2010-05-31 07:55:05 UTC (rev 9468)
@@ -0,0 +1,142 @@
+use strict;
+use warnings;
+
+use Test::More;
+use Test::Exception;
+use lib qw(t/lib);
+use DBICTest;
+
+my $from_storage_ran = 0;
+my $to_storage_ran = 0;
+my $schema = DBICTest->init_schema();
+DBICTest::Schema::Artist->load_components(qw(FilterColumn InflateColumn));
+DBICTest::Schema::Artist->filter_column(rank => {
+  filter_from_storage => sub { $from_storage_ran++; $_[1] * 2 },
+  filter_to_storage   => sub { $to_storage_ran++; $_[1] / 2 },
+});
+Class::C3->reinitialize();
+
+my $artist = $schema->resultset('Artist')->create( { rank => 20 } );
+
+# this should be using the cursor directly, no inflation/processing of any sort
+my ($raw_db_rank) = $schema->resultset('Artist')
+                             ->search ($artist->ident_condition)
+                               ->get_column('rank')
+                                ->_resultset
+                                 ->cursor
+                                  ->next;
+
+is ($raw_db_rank, 10, 'INSERT: correctly unfiltered on insertion');
+
+for my $reloaded (0, 1) {
+  my $test = $reloaded ? 'reloaded' : 'stored';
+  $artist->discard_changes if $reloaded;
+
+  is( $artist->rank , 20, "got $test filtered rank" );
+}
+
+$artist->update;
+$artist->discard_changes;
+is( $artist->rank , 20, "got filtered rank" );
+
+$artist->update ({ rank => 40 });
+($raw_db_rank) = $schema->resultset('Artist')
+                             ->search ($artist->ident_condition)
+                               ->get_column('rank')
+                                ->_resultset
+                                 ->cursor
+                                  ->next;
+is ($raw_db_rank, 20, 'UPDATE: correctly unfiltered on update');
+
+$artist->discard_changes;
+$artist->rank(40);
+ok( !$artist->is_column_changed('rank'), 'column is not dirty after setting the same value' );
+
+MC: {
+   my $cd = $schema->resultset('CD')->create({
+      artist => { rank => 20 },
+      title => 'fun time city!',
+      year => 'forevertime',
+   });
+   ($raw_db_rank) = $schema->resultset('Artist')
+                                ->search ($cd->artist->ident_condition)
+                                  ->get_column('rank')
+                                   ->_resultset
+                                    ->cursor
+                                     ->next;
+
+   is $raw_db_rank, 10, 'artist rank gets correctly unfiltered w/ MC';
+   is $cd->artist->rank, 20, 'artist rank gets correctly filtered w/ MC';
+}
+
+CACHE_TEST: {
+  my $expected_from = $from_storage_ran;
+  my $expected_to   = $to_storage_ran;
+
+  # ensure we are creating a fresh obj
+  $artist = $schema->resultset('Artist')->single($artist->ident_condition);
+
+  is $from_storage_ran, $expected_from, 'from has not run yet';
+  is $to_storage_ran, $expected_to, 'to has not run yet';
+
+  $artist->rank;
+  cmp_ok (
+    $artist->get_filtered_column('rank'),
+      '!=',
+    $artist->get_column('rank'),
+    'filter/unfilter differ'
+  );
+  is $from_storage_ran, ++$expected_from, 'from ran once, therefore caches';
+  is $to_storage_ran, $expected_to,  'to did not run';
+
+  $artist->rank(6);
+  is $from_storage_ran, $expected_from, 'from did not run';
+  is $to_storage_ran, ++$expected_to,  'to ran once';
+
+  ok ($artist->is_column_changed ('rank'), 'Column marked as dirty');
+
+  $artist->rank;
+  is $from_storage_ran, $expected_from, 'from did not run';
+  is $to_storage_ran, $expected_to,  'to did not run';
+
+  $artist->update;
+
+  $artist->set_column(rank => 3);
+  ok (! $artist->is_column_changed ('rank'), 'Column not marked as dirty on same set_column value');
+  is ($artist->rank, '6', 'Column set properly (cache blown)');
+  is $from_storage_ran, ++$expected_from, 'from ran once (set_column blew cache)';
+  is $to_storage_ran, $expected_to,  'to did not run';
+
+  $artist->rank(6);
+  ok (! $artist->is_column_changed ('rank'), 'Column not marked as dirty on same accessor-set value');
+  is ($artist->rank, '6', 'Column set properly');
+  is $from_storage_ran, $expected_from, 'from did not run';
+  is $to_storage_ran, $expected_to,  'to did not run';
+
+  $artist->store_column(rank => 4);
+  ok (! $artist->is_column_changed ('rank'), 'Column not marked as dirty on differing store_column value');
+  is ($artist->rank, '8', 'Cache properly blown');
+  is $from_storage_ran, ++$expected_from, 'from ran once (store_column blew cache)';
+  is $to_storage_ran, $expected_to,  'to did not run';
+}
+
+IC_DIE: {
+  dies_ok {
+     DBICTest::Schema::Artist->inflate_column(rank =>
+        { inflate => sub {}, deflate => sub {} }
+     );
+  } q(Can't inflate column after filter column);
+
+  DBICTest::Schema::Artist->inflate_column(name =>
+     { inflate => sub {}, deflate => sub {} }
+  );
+
+  dies_ok {
+     DBICTest::Schema::Artist->filter_column(name => {
+        filter_to_storage => sub {},
+        filter_from_storage => sub {}
+     });
+  } q(Can't filter column after inflate column);
+}
+
+done_testing;

Modified: DBIx-Class/0.08/branches/pg_cursors/t/search/subquery.t
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/search/subquery.t	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/t/search/subquery.t	2010-05-31 07:55:05 UTC (rev 9468)
@@ -16,12 +16,12 @@
 my @tests = (
   {
     rs => $cdrs,
-    search => \[ "title = ? AND year LIKE ?", 'buahaha', '20%' ],
+    search => \[ "title = ? AND year LIKE ?", [ title => 'buahaha' ], [ year => '20%' ] ],
     attrs => { rows => 5 },
     sqlbind => \[
       "( SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track FROM cd me WHERE (title = ? AND year LIKE ?) LIMIT 5)",
-      'buahaha',
-      '20%',
+      [ title => 'buahaha' ],
+      [ year => '20%' ],
     ],
   },
 
@@ -157,8 +157,6 @@
 );
 
 
-plan tests => @tests * 2;
-
 for my $i (0 .. $#tests) {
   my $t = $tests[$i];
   for my $p (1, 2) {  # repeat everything twice, make sure we do not clobber search arguments
@@ -169,3 +167,5 @@
     );
   }
 }
+
+done_testing;

Added: DBIx-Class/0.08/branches/pg_cursors/t/sqlahacks/limit_dialects/generic_subq.t
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/sqlahacks/limit_dialects/generic_subq.t	                        (rev 0)
+++ DBIx-Class/0.08/branches/pg_cursors/t/sqlahacks/limit_dialects/generic_subq.t	2010-05-31 07:55:05 UTC (rev 9468)
@@ -0,0 +1,125 @@
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+use DBIC::SqlMakerTest;
+
+my $schema = DBICTest->init_schema;
+
+$schema->storage->_sql_maker->limit_dialect ('GenericSubQ');
+
+my $rs = $schema->resultset ('BooksInLibrary')->search ({}, {
+  '+columns' => [{ owner_name => 'owner.name' }],
+  join => 'owner',
+  rows => 2,
+  order_by => 'me.title',
+});
+
+is_same_sql_bind(
+  $rs->as_query,
+  '(
+    SELECT  id, source, owner, title, price,
+            owner_name
+      FROM (
+        SELECT  me.id, me.source, me.owner, me.title, me.price,
+                owner.name AS owner_name
+          FROM books me
+          JOIN owners owner ON owner.id = me.owner
+        WHERE ( source = ? )
+        ORDER BY me.title
+      ) me
+    WHERE
+      (
+        SELECT COUNT(*)
+          FROM books rownum__emulation
+        WHERE rownum__emulation.title < me.title
+      ) < 2
+  )',
+  [  [ 'source', 'Library' ] ],
+);
+
+is_deeply (
+  [ $rs->get_column ('title')->all ],
+  ['Best Recipe Cookbook', 'Dynamical Systems'],
+  'Correct columns selected with rows',
+);
+
+$schema->storage->_sql_maker->quote_char ('"');
+$schema->storage->_sql_maker->name_sep ('.');
+
+$rs = $schema->resultset ('BooksInLibrary')->search ({}, {
+  order_by => { -desc => 'title' },
+  '+select' => ['owner.name'],
+  '+as' => ['owner.name'],
+  join => 'owner',
+  rows => 3,
+  offset => 1,
+});
+
+is_same_sql_bind(
+  $rs->as_query,
+  '(
+    SELECT  "id", "source", "owner", "title", "price",
+            "owner__name"
+      FROM (
+        SELECT  "me"."id", "me"."source", "me"."owner", "me"."title", "me"."price",
+                "owner"."name" AS "owner__name"
+          FROM "books" "me"
+          JOIN "owners" "owner" ON "owner"."id" = "me"."owner"
+        WHERE ( "source" = ? )
+        ORDER BY "title" DESC
+      ) "me"
+    WHERE
+      (
+        SELECT COUNT(*)
+          FROM "books" "rownum__emulation"
+        WHERE "rownum__emulation"."title" > "me"."title"
+      ) BETWEEN 1 AND 3
+  )',
+  [ [ 'source', 'Library' ] ],
+);
+
+is_deeply (
+  [ $rs->get_column ('title')->all ],
+  [ 'Dynamical Systems', 'Best Recipe Cookbook' ],
+  'Correct columns selected with rows',
+);
+
+$rs = $schema->resultset ('BooksInLibrary')->search ({}, {
+  order_by => 'title',
+  'select' => ['owner.name'],
+  'as' => ['owner_name'],
+  join => 'owner',
+  offset => 1,
+});
+
+is_same_sql_bind(
+  $rs->as_query,
+  '(
+    SELECT "owner_name"
+      FROM (
+        SELECT "owner"."name" AS "owner_name", "title"
+          FROM "books" "me"
+          JOIN "owners" "owner" ON "owner"."id" = "me"."owner"
+        WHERE ( "source" = ? )
+        ORDER BY "title"
+      ) "me"
+    WHERE
+      (
+        SELECT COUNT(*)
+          FROM "books" "rownum__emulation"
+        WHERE "rownum__emulation"."title" < "me"."title"
+      ) BETWEEN 1 AND 4294967295
+  )',
+  [ [ 'source', 'Library' ] ],
+);
+
+is_deeply (
+  [ $rs->get_column ('owner_name')->all ],
+  [ ('Newton') x 2 ],
+  'Correct columns selected with offset only',
+);
+
+done_testing;

Added: DBIx-Class/0.08/branches/pg_cursors/t/sqlahacks/limit_dialects/rno.t
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/sqlahacks/limit_dialects/rno.t	                        (rev 0)
+++ DBIx-Class/0.08/branches/pg_cursors/t/sqlahacks/limit_dialects/rno.t	2010-05-31 07:55:05 UTC (rev 9468)
@@ -0,0 +1,74 @@
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+use DBIC::SqlMakerTest;
+
+my $schema = DBICTest->init_schema;
+
+$schema->storage->_sql_maker->limit_dialect ('RowNumberOver');
+
+my $rs_selectas_col = $schema->resultset ('BooksInLibrary')->search ({}, {
+  '+select' => ['owner.name'],
+  '+as' => ['owner.name'],
+  join => 'owner',
+  rows => 1,
+});
+
+is_same_sql_bind(
+  $rs_selectas_col->as_query,
+  '(
+    SELECT  id, source, owner, title, price,
+            owner__name
+      FROM (
+        SELECT  id, source, owner, title, price,
+                owner__name,
+                ROW_NUMBER() OVER( ) AS rno__row__index
+          FROM (
+            SELECT  me.id, me.source, me.owner, me.title, me.price,
+                    owner.name AS owner__name
+              FROM books me
+              JOIN owners owner ON owner.id = me.owner
+            WHERE ( source = ? )
+          ) me
+      ) me
+    WHERE rno__row__index BETWEEN 1 AND 1
+  )',
+  [  [ 'source', 'Library' ] ],
+);
+
+$schema->storage->_sql_maker->quote_char ([qw/ [ ] /]);
+$schema->storage->_sql_maker->name_sep ('.');
+
+my $rs_selectas_rel = $schema->resultset ('BooksInLibrary')->search ({}, {
+  '+select' => ['owner.name'],
+  '+as' => ['owner_name'],
+  join => 'owner',
+  rows => 1,
+});
+
+is_same_sql_bind(
+  $rs_selectas_rel->as_query,
+  '(
+    SELECT  [id], [source], [owner], [title], [price],
+            [owner_name]
+      FROM (
+        SELECT  [id], [source], [owner], [title], [price],
+                [owner_name],
+                ROW_NUMBER() OVER( ) AS [rno__row__index]
+          FROM (
+            SELECT  [me].[id], [me].[source], [me].[owner], [me].[title], [me].[price],
+                    [owner].[name] AS [owner_name]
+              FROM [books] [me]
+              JOIN [owners] [owner] ON [owner].[id] = [me].[owner]
+            WHERE ( [source] = ? )
+          ) [me]
+      ) [me]
+    WHERE [rno__row__index] BETWEEN 1 AND 1
+  )',
+  [ [ 'source', 'Library' ] ],
+);
+
+done_testing;

Copied: DBIx-Class/0.08/branches/pg_cursors/t/sqlahacks/limit_dialects/rownum.t (from rev 9185, DBIx-Class/0.08/branches/pg_cursors/t/41orrible.t)
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/sqlahacks/limit_dialects/rownum.t	                        (rev 0)
+++ DBIx-Class/0.08/branches/pg_cursors/t/sqlahacks/limit_dialects/rownum.t	2010-05-31 07:55:05 UTC (rev 9468)
@@ -0,0 +1,35 @@
+use strict;
+use warnings;
+
+use Test::More;
+
+use lib qw(t/lib);
+use DBICTest;
+use DBIC::SqlMakerTest;
+
+my $s = DBICTest->init_schema (no_deploy => 1, );
+$s->storage->sql_maker->limit_dialect ('RowNum');
+
+my $rs = $s->resultset ('CD');
+
+is_same_sql_bind (
+  $rs->search ({}, { rows => 1, offset => 3,columns => [
+      { id => 'foo.id' },
+      { 'bar.id' => 'bar.id' },
+      { bleh => \ 'TO_CHAR (foo.womble, "blah")' },
+    ]})->as_query,
+  '(SELECT id, bar__id, bleh
+      FROM (
+        SELECT id, bar__id, bleh, ROWNUM rownum__index
+          FROM (
+            SELECT foo.id AS id, bar.id AS bar__id, TO_CHAR(foo.womble, "blah") AS bleh
+              FROM cd me
+          ) me
+      ) me
+    WHERE rownum__index BETWEEN 4 AND 4
+  )',
+  [],
+  'Rownum subselect aliasing works correctly'
+);
+
+done_testing;

Modified: DBIx-Class/0.08/branches/pg_cursors/t/sqlahacks/limit_dialects/toplimit.t
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/sqlahacks/limit_dialects/toplimit.t	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/t/sqlahacks/limit_dialects/toplimit.t	2010-05-31 07:55:05 UTC (rev 9468)
@@ -14,139 +14,184 @@
 delete $schema->storage->_sql_maker->{_cached_syntax};
 $schema->storage->_sql_maker->limit_dialect ('Top');
 
-my $rs = $schema->resultset ('BooksInLibrary')->search ({}, { prefetch => 'owner', rows => 1, offset => 3 });
+my $books_45_and_owners = $schema->resultset ('BooksInLibrary')->search ({}, { prefetch => 'owner', rows => 2, offset => 3 });
 
-sub default_test_order {
-   my $order_by = shift;
-   is_same_sql_bind(
-      $rs->search ({}, {order_by => $order_by})->as_query,
-      "(SELECT
-        TOP 1 me__id, source, owner, title, price, owner__id, name FROM
-         (SELECT
-           TOP 4 me.id AS me__id, me.source, me.owner, me.title, me.price, owner.id AS owner__id, owner.name
-           FROM books me
-           JOIN owners owner ON
-           owner.id = me.owner
-           WHERE ( source = ? )
-           ORDER BY me__id ASC
-         ) me ORDER BY me__id DESC
-       )",
+for my $null_order (
+  undef,
+  '',
+  {},
+  [],
+  [{}],
+) {
+  my $rs = $books_45_and_owners->search ({}, {order_by => $null_order });
+  is_same_sql_bind(
+      $rs->as_query,
+      '(SELECT TOP 2
+            id, source, owner, title, price, owner__id, owner__name
+          FROM (
+            SELECT TOP 5
+                me.id, me.source, me.owner, me.title, me.price, owner.id AS owner__id, owner.name AS owner__name
+              FROM books me
+              JOIN owners owner ON owner.id = me.owner
+            WHERE ( source = ? )
+            ORDER BY me.id
+          ) me
+        ORDER BY me.id DESC
+       )',
     [ [ source => 'Library' ] ],
   );
 }
 
-sub test_order {
-  my $args = shift;
 
-  my $req_order = $args->{order_req}
-    ? "ORDER BY $args->{order_req}"
-    : ''
-  ;
-
-  is_same_sql_bind(
-    $rs->search ({}, {order_by => $args->{order_by}})->as_query,
-    "(SELECT
-      me__id, source, owner, title, price, owner__id, name FROM
-      (SELECT
-        TOP 1 me__id, source, owner, title, price, owner__id, name FROM
-         (SELECT
-           TOP 4 me.id AS me__id, me.source, me.owner, me.title, me.price, owner.id AS owner__id, owner.name FROM
-           books me
-           JOIN owners owner ON owner.id = me.owner
-           WHERE ( source = ? )
-           ORDER BY $args->{order_inner}
-         ) me ORDER BY $args->{order_outer}
-      ) me $req_order
-    )",
-    [ [ source => 'Library' ] ],
-  );
-}
-
-my @tests = (
+for my $ord_set (
   {
     order_by => \'foo DESC',
-    order_req => 'foo DESC',
     order_inner => 'foo DESC',
-    order_outer => 'foo ASC'
+    order_outer => 'ORDER__BY__1 ASC',
+    order_req => 'ORDER__BY__1 DESC',
+    exselect_outer => 'ORDER__BY__1',
+    exselect_inner => 'foo AS ORDER__BY__1',
   },
   {
     order_by => { -asc => 'foo'  },
-    order_req => 'foo ASC',
     order_inner => 'foo ASC',
-    order_outer => 'foo DESC',
+    order_outer => 'ORDER__BY__1 DESC',
+    order_req => 'ORDER__BY__1 ASC',
+    exselect_outer => 'ORDER__BY__1',
+    exselect_inner => 'foo AS ORDER__BY__1',
   },
   {
-    order_by => 'foo',
-    order_req => 'foo',
-    order_inner => 'foo ASC',
-    order_outer => 'foo DESC',
+    order_by => { -desc => 'foo' },
+    order_inner => 'foo DESC',
+    order_outer => 'ORDER__BY__1 ASC',
+    order_req => 'ORDER__BY__1 DESC',
+    exselect_outer => 'ORDER__BY__1',
+    exselect_inner => 'foo AS ORDER__BY__1',
   },
   {
-    order_by => [ qw{ foo bar}   ],
-    order_req => 'foo, bar',
-    order_inner => 'foo ASC, bar ASC',
-    order_outer => 'foo DESC, bar DESC',
+    order_by => 'foo',
+    order_inner => 'foo',
+    order_outer => 'ORDER__BY__1 DESC',
+    order_req => 'ORDER__BY__1',
+    exselect_outer => 'ORDER__BY__1',
+    exselect_inner => 'foo AS ORDER__BY__1',
   },
   {
-    order_by => { -desc => 'foo' },
-    order_req => 'foo DESC',
-    order_inner => 'foo DESC',
-    order_outer => 'foo ASC',
+    order_by => [ qw{ foo me.owner}   ],
+    order_inner => 'foo, me.owner',
+    order_outer => 'ORDER__BY__1 DESC, me.owner DESC',
+    order_req => 'ORDER__BY__1, me.owner',
+    exselect_outer => 'ORDER__BY__1',
+    exselect_inner => 'foo AS ORDER__BY__1',
   },
   {
     order_by => ['foo', { -desc => 'bar' } ],
-    order_req => 'foo, bar DESC',
-    order_inner => 'foo ASC, bar DESC',
-    order_outer => 'foo DESC, bar ASC',
+    order_inner => 'foo, bar DESC',
+    order_outer => 'ORDER__BY__1 DESC, ORDER__BY__2 ASC',
+    order_req => 'ORDER__BY__1, ORDER__BY__2 DESC',
+    exselect_outer => 'ORDER__BY__1, ORDER__BY__2',
+    exselect_inner => 'foo AS ORDER__BY__1, bar AS ORDER__BY__2',
   },
   {
     order_by => { -asc => [qw{ foo bar }] },
-    order_req => 'foo ASC, bar ASC',
     order_inner => 'foo ASC, bar ASC',
-    order_outer => 'foo DESC, bar DESC',
+    order_outer => 'ORDER__BY__1 DESC, ORDER__BY__2 DESC',
+    order_req => 'ORDER__BY__1 ASC, ORDER__BY__2 ASC',
+    exselect_outer => 'ORDER__BY__1, ORDER__BY__2',
+    exselect_inner => 'foo AS ORDER__BY__1, bar AS ORDER__BY__2',
   },
   {
     order_by => [
-      { -asc => 'foo' },
+      'foo',
       { -desc => [qw{bar}] },
-      { -asc  => [qw{hello sensors}]},
+      { -asc  => [qw{me.owner sensors}]},
     ],
-    order_req => 'foo ASC, bar DESC, hello ASC, sensors ASC',
-    order_inner => 'foo ASC, bar DESC, hello ASC, sensors ASC',
-    order_outer => 'foo DESC, bar ASC, hello DESC, sensors DESC',
+    order_inner => 'foo, bar DESC, me.owner ASC, sensors ASC',
+    order_outer => 'ORDER__BY__1 DESC, ORDER__BY__2 ASC, me.owner DESC, ORDER__BY__3 DESC',
+    order_req => 'ORDER__BY__1, ORDER__BY__2 DESC, me.owner ASC, ORDER__BY__3 ASC',
+    exselect_outer => 'ORDER__BY__1, ORDER__BY__2, ORDER__BY__3',
+    exselect_inner => 'foo AS ORDER__BY__1, bar AS ORDER__BY__2, sensors AS ORDER__BY__3',
   },
-);
+) {
+  my $o_sel = $ord_set->{exselect_outer}
+    ? ', ' . $ord_set->{exselect_outer}
+    : ''
+  ;
+  my $i_sel = $ord_set->{exselect_inner}
+    ? ', ' . $ord_set->{exselect_inner}
+    : ''
+  ;
 
-my @default_tests = ( undef, '', {}, [] );
+  is_same_sql_bind(
+    $books_45_and_owners->search ({}, {order_by => $ord_set->{order_by}})->as_query,
+    "(SELECT TOP 2
+          id, source, owner, title, price, owner__id, owner__name
+        FROM (
+          SELECT TOP 2
+              id, source, owner, title, price, owner__id, owner__name$o_sel
+            FROM (
+              SELECT TOP 5
+                  me.id, me.source, me.owner, me.title, me.price, owner.id AS owner__id, owner.name AS owner__name$i_sel
+                FROM books me
+                JOIN owners owner ON owner.id = me.owner
+              WHERE ( source = ? )
+              ORDER BY $ord_set->{order_inner}
+            ) me
+          ORDER BY $ord_set->{order_outer}
+        ) me
+      ORDER BY $ord_set->{order_req}
+    )",
+    [ [ source => 'Library' ] ],
+  );
+}
 
-plan (tests => scalar @tests + scalar @default_tests + 1);
-
-test_order ($_) for @tests;
-default_test_order ($_) for @default_tests;
-
-
+# with groupby
 is_same_sql_bind (
-  $rs->search ({}, { group_by => 'title', order_by => 'title' })->as_query,
-'(SELECT
-me.id, me.source, me.owner, me.title, me.price, owner.id, owner.name FROM
-   ( SELECT
-      id, source, owner, title, price FROM
-      ( SELECT
-         TOP 1 id, source, owner, title, price FROM
-         ( SELECT
-            TOP 4 me.id, me.source, me.owner, me.title, me.price FROM
-            books me  JOIN
-            owners owner ON owner.id = me.owner
-            WHERE ( source = ? )
-            GROUP BY title
-            ORDER BY title ASC
-         ) me
-         ORDER BY title DESC
+  $books_45_and_owners->search ({}, { group_by => 'title', order_by => 'title' })->as_query,
+  '(SELECT me.id, me.source, me.owner, me.title, me.price, owner.id, owner.name
+      FROM (
+        SELECT TOP 2 id, source, owner, title, price
+          FROM (
+            SELECT TOP 2
+                id, source, owner, title, price
+              FROM (
+                SELECT TOP 5
+                    me.id, me.source, me.owner, me.title, me.price
+                  FROM books me
+                  JOIN owners owner ON owner.id = me.owner
+                WHERE ( source = ? )
+                GROUP BY title
+                ORDER BY title
+              ) me
+            ORDER BY title DESC
+          ) me
+        ORDER BY title
       ) me
-      ORDER BY title
-   ) me  JOIN
-   owners owner ON owner.id = me.owner WHERE
-   ( source = ? )
-   ORDER BY title)' ,
+      JOIN owners owner ON owner.id = me.owner
+    WHERE ( source = ? )
+    ORDER BY title
+  )',
   [ [ source => 'Library' ], [ source => 'Library' ] ],
 );
+
+# test deprecated column mixing over join boundaries
+my $rs_selectas_top = $schema->resultset ('BooksInLibrary')->search ({}, {
+  '+select' => ['owner.name'],
+  '+as' => ['owner_name'],
+  join => 'owner',
+  rows => 1,
+});
+
+is_same_sql_bind( $rs_selectas_top->search({})->as_query,
+                  '(SELECT
+                      TOP 1 me.id, me.source, me.owner, me.title, me.price,
+                      owner.name AS owner_name
+                    FROM books me
+                    JOIN owners owner ON owner.id = me.owner
+                    WHERE ( source = ? )
+                    ORDER BY me.id
+                   )',
+                   [ [ 'source', 'Library' ] ],
+                );
+
+done_testing;

Copied: DBIx-Class/0.08/branches/pg_cursors/t/sqlahacks/oraclejoin.t (from rev 9185, DBIx-Class/0.08/branches/pg_cursors/t/41orrible.t)
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/sqlahacks/oraclejoin.t	                        (rev 0)
+++ DBIx-Class/0.08/branches/pg_cursors/t/sqlahacks/oraclejoin.t	2010-05-31 07:55:05 UTC (rev 9468)
@@ -0,0 +1,71 @@
+use strict;
+use warnings;
+
+use Test::More;
+
+use lib qw(t/lib);
+use DBIx::Class::SQLAHacks::OracleJoins;
+use DBICTest;
+use DBIC::SqlMakerTest;
+
+my $sa = new DBIx::Class::SQLAHacks::OracleJoins;
+
+# search with undefined or empty $cond
+
+#  my ($self, $table, $fields, $where, $order, @rest) = @_;
+my ($sql, @bind) = $sa->select(
+    [
+        { me => "cd" },
+        [
+            { "-join_type" => "LEFT", artist => "artist" },
+            { "artist.artistid" => "me.artist" },
+        ],
+    ],
+    [ 'cd.cdid', 'cd.artist', 'cd.title', 'cd.year', 'artist.artistid', 'artist.name' ],
+    undef,
+    undef
+);
+is_same_sql_bind(
+  $sql, \@bind,
+  'SELECT cd.cdid, cd.artist, cd.title, cd.year, artist.artistid, artist.name FROM cd me, artist artist WHERE ( artist.artistid(+) = me.artist )', [],
+  'WhereJoins search with empty where clause'
+);
+
+($sql, @bind) = $sa->select(
+    [
+        { me => "cd" },
+        [
+            { "-join_type" => "", artist => "artist" },
+            { "artist.artistid" => "me.artist" },
+        ],
+    ],
+    [ 'cd.cdid', 'cd.artist', 'cd.title', 'cd.year', 'artist.artistid', 'artist.name' ],
+    { 'artist.artistid' => 3 },
+    undef
+);
+is_same_sql_bind(
+  $sql, \@bind,
+  'SELECT cd.cdid, cd.artist, cd.title, cd.year, artist.artistid, artist.name FROM cd me, artist artist WHERE ( ( ( artist.artistid = me.artist ) AND ( artist.artistid = ? ) ) )', [3],
+  'WhereJoins search with where clause'
+);
+
+($sql, @bind) = $sa->select(
+    [
+        { me => "cd" },
+        [
+            { "-join_type" => "LEFT", artist => "artist" },
+            { "artist.artistid" => "me.artist" },
+        ],
+    ],
+    [ 'cd.cdid', 'cd.artist', 'cd.title', 'cd.year', 'artist.artistid', 'artist.name' ],
+    [{ 'artist.artistid' => 3 }, { 'me.cdid' => 5 }],
+    undef
+);
+is_same_sql_bind(
+  $sql, \@bind,
+  'SELECT cd.cdid, cd.artist, cd.title, cd.year, artist.artistid, artist.name FROM cd me, artist artist WHERE ( ( ( artist.artistid(+) = me.artist ) AND ( ( ( artist.artistid = ? ) OR ( me.cdid = ? ) ) ) ) )', [3, 5],
+  'WhereJoins search with or in where clause'
+);
+
+done_testing;
+

Added: DBIx-Class/0.08/branches/pg_cursors/t/sqlahacks/order_by_func.t
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/sqlahacks/order_by_func.t	                        (rev 0)
+++ DBIx-Class/0.08/branches/pg_cursors/t/sqlahacks/order_by_func.t	2010-05-31 07:55:05 UTC (rev 9468)
@@ -0,0 +1,35 @@
+use strict;
+use warnings;
+use Test::More;
+
+use lib qw(t/lib);
+use DBICTest;
+use DBIC::SqlMakerTest;
+
+my $schema = DBICTest->init_schema();
+
+my $rs = $schema->resultset('CD')->search({}, {
+    'join' => 'tracks',
+    order_by => {
+        -desc => {
+            count => 'tracks.track_id',
+        },
+    },
+    distinct => 1,
+    rows => 2,
+    page => 1,
+});
+my $match = q{
+    SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track FROM cd me
+    GROUP BY me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track
+    ORDER BY COUNT(tracks.trackid) DESC
+};
+
+TODO: {
+    todo_skip 'order_by using function', 2;
+    is_same_sql($rs->as_query, $match, 'order by with func query');
+
+    ok($rs->count == 2, 'correct number of rows returned in order_by func query');
+}
+
+done_testing;

Modified: DBIx-Class/0.08/branches/pg_cursors/t/sqlahacks/sql_maker/sql_maker_quote.t
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/sqlahacks/sql_maker/sql_maker_quote.t	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/t/sqlahacks/sql_maker/sql_maker_quote.t	2010-05-31 07:55:05 UTC (rev 9468)
@@ -48,7 +48,7 @@
             'artist.name' => 'Caterwauler McCrae',
             'me.year' => 2001
           },
-          [],
+          {},
           undef,
           undef
 );
@@ -80,7 +80,7 @@
             'me.year'
           ],
           undef,
-          'year DESC',
+          { order_by => 'year DESC' },
           undef,
           undef
 );
@@ -105,10 +105,10 @@
             'me.year'
           ],
           undef,
-          [
+          { order_by => [
             'year DESC',
             'title ASC'
-          ],
+          ]},
           undef,
           undef
 );
@@ -133,7 +133,7 @@
               'me.year'
             ],
             undef,
-            { -desc => 'year' },
+            { order_by => { -desc => 'year' } },
             undef,
             undef
   );
@@ -158,10 +158,10 @@
               'me.year'
             ],
             undef,
-            [
+            { order_by => [
               { -desc => 'year' },
-              { -asc => 'title' }
-            ],
+              { -asc => 'title' },
+            ]},
             undef,
             undef
   );
@@ -188,7 +188,7 @@
             'me.year'
           ],
           undef,
-          \'year DESC',
+          { order_by => \'year DESC' },
           undef,
           undef
 );
@@ -213,10 +213,10 @@
             'me.year'
           ],
           undef,
-          [
+          { order_by => [
             \'year DESC',
             \'title ASC'
-          ],
+          ]},
           undef,
           undef
 );
@@ -283,9 +283,9 @@
           'me.*'
         ],
         undef,
-        [],
         undef,
-        undef    
+        undef,
+        undef,
   );
 
   is_same_sql_bind(
@@ -328,9 +328,9 @@
             'artist.name' => 'Caterwauler McCrae',
             'me.year' => 2001
           },
-          [],
           undef,
-          undef
+          undef,
+          undef,
 );
 
 is_same_sql_bind(

Modified: DBIx-Class/0.08/branches/pg_cursors/t/storage/dbi_env.t
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/storage/dbi_env.t	2010-05-31 07:09:30 UTC (rev 9467)
+++ DBIx-Class/0.08/branches/pg_cursors/t/storage/dbi_env.t	2010-05-31 07:55:05 UTC (rev 9468)
@@ -70,7 +70,7 @@
 lives_ok { count_sheep($schema) } 'SQLite in DBI_DRIVER';
 isa_ok $schema->storage, 'DBIx::Class::Storage::DBI::SQLite';
 
-undef $ENV{DBI_DRIVER};
+delete $ENV{DBI_DRIVER};
 $ENV{DBI_DSN} = "dbi:SQLite:$dbname";
 $schema = DBICTest::Schema->connect;
 lives_ok { count_sheep($schema) } 'SQLite in DBI_DSN';

Added: DBIx-Class/0.08/branches/pg_cursors/t/storage/deploy.t
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/storage/deploy.t	                        (rev 0)
+++ DBIx-Class/0.08/branches/pg_cursors/t/storage/deploy.t	2010-05-31 07:55:05 UTC (rev 9468)
@@ -0,0 +1,32 @@
+use strict;
+use warnings;
+
+use Test::More;
+
+use lib qw(t/lib);
+use DBICTest;
+
+use File::Spec;
+use File::Path qw/ mkpath rmtree /;
+
+
+my $schema = DBICTest->init_schema();
+
+my $var = File::Spec->catfile(qw| t var create_ddl_dir |);
+-d $var
+    or mkpath($var)
+    or die "can't create $var";
+
+my $test_dir_1 =  File::Spec->catdir( $var, 'test1', 'foo', 'bar' );
+rmtree( $test_dir_1 ) if -d $test_dir_1;
+$schema->create_ddl_dir( undef, undef, $test_dir_1 );
+
+ok( -d $test_dir_1, 'create_ddl_dir did a mkpath on its target dir' );
+ok( scalar( glob $test_dir_1.'/*.sql' ), 'there are sql files in there' );
+
+TODO: {
+    local $TODO = 'we should probably add some tests here for actual deployability of the DDL?';
+    ok( 0 );
+}
+
+done_testing;

Added: DBIx-Class/0.08/branches/pg_cursors/t/storage/global_destruction.t
===================================================================
--- DBIx-Class/0.08/branches/pg_cursors/t/storage/global_destruction.t	                        (rev 0)
+++ DBIx-Class/0.08/branches/pg_cursors/t/storage/global_destruction.t	2010-05-31 07:55:05 UTC (rev 9468)
@@ -0,0 +1,57 @@
+use strict;
+use warnings;
+
+use Test::More;
+use Test::Exception;
+
+use lib qw(t/lib);
+use DBICTest;
+
+for my $type (qw/PG MYSQL/) {
+
+  SKIP: {
+    skip "Skipping $type tests without DBICTEST_${type}_DSN", 1
+      unless $ENV{"DBICTEST_${type}_DSN"};
+
+    my $schema = DBICTest::Schema->connect (@ENV{map { "DBICTEST_${type}_${_}" } qw/DSN USER PASS/});
+
+    # emulate a singleton-factory, just cache the object *somewhere in a different package*
+    # to induce out-of-order destruction
+    $DBICTest::FakeSchemaFactory::schema = $schema;
+
+    # so we can see the retry exceptions (if any)
+    $ENV{DBIC_DBIRETRY_DEBUG} = 1;
+
+    ok (!$schema->storage->connected, "$type: start disconnected");
+
+    lives_ok (sub {
+      $schema->txn_do (sub {
+
+        ok ($schema->storage->connected, "$type: transaction starts connected");
+
+        my $pid = fork();
+        SKIP: {
+          skip "Fork failed: $!", 1 if (! defined $pid);
+
+          if ($pid) {
+            note "Parent $$ sleeping...";
+            wait();
+            note "Parent $$ woken up after child $pid exit";
+          }
+          else {
+            note "Child $$ terminating";
+            exit 0;
+          }
+
+          ok ($schema->storage->connected, "$type: parent still connected (in txn_do)");
+        }
+      });
+    });
+
+    ok ($schema->storage->connected, "$type: parent still connected (outside of txn_do)");
+
+    undef $DBICTest::FakeSchemaFactory::schema;
+  }
+}
+
+done_testing;



