[Bast-commits] r7592 - in DBIx-Class/0.08/branches/prefetch: . lib/DBIx lib/DBIx/Class lib/DBIx/Class/InflateColumn lib/DBIx/Class/Manual lib/DBIx/Class/Serialize lib/DBIx/Class/Storage lib/DBIx/Class/Storage/DBI script t t/cdbi/testlib t/count t/inflate t/lib/DBICTest t/lib/DBICTest/Schema t/prefetch t/resultset t/search t/storage

ribasushi at dev.catalyst.perl.org
Mon Sep 7 07:20:46 GMT 2009


Author: ribasushi
Date: 2009-09-07 07:20:44 +0000 (Mon, 07 Sep 2009)
New Revision: 7592

Added:
   DBIx-Class/0.08/branches/prefetch/.gitignore
   DBIx-Class/0.08/branches/prefetch/t/storage/exception.t
Removed:
   DBIx-Class/0.08/branches/prefetch/t/cdbi/testlib/Binary.pm
   DBIx-Class/0.08/branches/prefetch/t/cdbi/testlib/PgBase.pm
Modified:
   DBIx-Class/0.08/branches/prefetch/
   DBIx-Class/0.08/branches/prefetch/Changes
   DBIx-Class/0.08/branches/prefetch/MANIFEST.SKIP
   DBIx-Class/0.08/branches/prefetch/Makefile.PL
   DBIx-Class/0.08/branches/prefetch/TODO
   DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class.pm
   DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Core.pm
   DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/InflateColumn/DateTime.pm
   DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Manual/Component.pod
   DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Manual/Cookbook.pod
   DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Manual/FAQ.pod
   DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Serialize/Storable.pm
   DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI.pm
   DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI/MSSQL.pm
   DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI/Pg.pm
   DBIx-Class/0.08/branches/prefetch/script/dbicadmin
   DBIx-Class/0.08/branches/prefetch/t/02pod.t
   DBIx-Class/0.08/branches/prefetch/t/03podcoverage.t
   DBIx-Class/0.08/branches/prefetch/t/103many_to_many_warning.t
   DBIx-Class/0.08/branches/prefetch/t/26dumper.t
   DBIx-Class/0.08/branches/prefetch/t/46where_attribute.t
   DBIx-Class/0.08/branches/prefetch/t/60core.t
   DBIx-Class/0.08/branches/prefetch/t/72pg.t
   DBIx-Class/0.08/branches/prefetch/t/746mssql.t
   DBIx-Class/0.08/branches/prefetch/t/76joins.t
   DBIx-Class/0.08/branches/prefetch/t/83cache.t
   DBIx-Class/0.08/branches/prefetch/t/count/grouped_pager.t
   DBIx-Class/0.08/branches/prefetch/t/count/in_subquery.t
   DBIx-Class/0.08/branches/prefetch/t/inflate/serialize.t
   DBIx-Class/0.08/branches/prefetch/t/lib/DBICTest/AuthorCheck.pm
   DBIx-Class/0.08/branches/prefetch/t/lib/DBICTest/Schema/Artist.pm
   DBIx-Class/0.08/branches/prefetch/t/prefetch/attrs_untouched.t
   DBIx-Class/0.08/branches/prefetch/t/prefetch/standard.t
   DBIx-Class/0.08/branches/prefetch/t/resultset/as_query.t
   DBIx-Class/0.08/branches/prefetch/t/search/preserve_original_rs.t
   DBIx-Class/0.08/branches/prefetch/t/search/subquery.t
   DBIx-Class/0.08/branches/prefetch/t/storage/ping_count.t
   DBIx-Class/0.08/branches/prefetch/t/zzzzzzz_perl_perf_bug.t
Log:
 r7506 at Thesaurus (orig r7503):  ribasushi | 2009-09-03 17:16:17 +0200
 Add podcoverage skip
 r7507 at Thesaurus (orig r7504):  ribasushi | 2009-09-03 17:23:19 +0200
 Consolidate _verify_pid calls
 r7511 at Thesaurus (orig r7508):  matthewt | 2009-09-03 20:12:53 +0200
 get the COPYRIGHT in the right place to not confuse META.yml generation
 r7513 at Thesaurus (orig r7510):  ribasushi | 2009-09-03 20:41:22 +0200
 
 r7514 at Thesaurus (orig r7511):  ribasushi | 2009-09-03 20:41:34 +0200
  r7472 at Thesaurus (orig r7469):  norbi | 2009-09-01 21:43:08 +0200
   r7635 at vger:  mendel | 2009-09-01 21:02:23 +0200
   Added pointer to 'SQL functions on the lhs' to the 'using stored procs' section.
  
 
 r7515 at Thesaurus (orig r7512):  ribasushi | 2009-09-03 20:41:44 +0200
  r7473 at Thesaurus (orig r7470):  norbi | 2009-09-01 21:43:19 +0200
   r7636 at vger:  mendel | 2009-09-01 21:09:43 +0200
   Mentions the possibility of creating indexes on SQL function return values.
  
 
 r7516 at Thesaurus (orig r7513):  ribasushi | 2009-09-03 20:41:52 +0200
  r7474 at Thesaurus (orig r7471):  norbi | 2009-09-01 21:43:31 +0200
   r7637 at vger:  mendel | 2009-09-01 21:19:14 +0200
   Rewrote 'SQL functions on the lhs' to use the new SQLA literal SQL + bind feature.
  
 
 r7517 at Thesaurus (orig r7514):  ribasushi | 2009-09-03 20:41:59 +0200
  r7475 at Thesaurus (orig r7472):  norbi | 2009-09-01 21:43:42 +0200
   r7638 at vger:  mendel | 2009-09-01 21:20:17 +0200
   Added a comment to the example code to stress that it does not work.
  
 
 r7518 at Thesaurus (orig r7515):  ribasushi | 2009-09-03 20:42:10 +0200
  r7476 at Thesaurus (orig r7473):  norbi | 2009-09-01 21:43:54 +0200
   r7639 at vger:  mendel | 2009-09-01 21:28:18 +0200
   Added pointer to DBIx::Class::DynamicSubclass.
  
 
 r7519 at Thesaurus (orig r7516):  ribasushi | 2009-09-03 20:42:15 +0200
  r7477 at Thesaurus (orig r7474):  norbi | 2009-09-01 21:44:03 +0200
   r7640 at vger:  mendel | 2009-09-01 21:30:13 +0200
   Replaced deprecated \'colname DESC' order_by syntax with { -desc => 'colname' } syntax.
  
 
 r7520 at Thesaurus (orig r7517):  ribasushi | 2009-09-03 20:42:22 +0200
  r7478 at Thesaurus (orig r7475):  norbi | 2009-09-01 21:44:17 +0200
   r7641 at vger:  mendel | 2009-09-01 21:32:48 +0200
   Rewrote 'SQL functions on the lhs' to use the new SQLA literal SQL + bind feature.
  
 
 r7521 at Thesaurus (orig r7518):  ribasushi | 2009-09-03 20:42:26 +0200
  r7479 at Thesaurus (orig r7476):  norbi | 2009-09-01 21:44:28 +0200
   r7642 at vger:  mendel | 2009-09-01 21:42:25 +0200
   Added many-to-many add_to_*() example to stress that it returns the related row and not the linking table row.
  
 
 r7522 at Thesaurus (orig r7519):  ribasushi | 2009-09-03 20:42:32 +0200
  r7480 at Thesaurus (orig r7477):  norbi | 2009-09-01 22:14:25 +0200
   r7653 at vger:  mendel | 2009-09-01 22:14:11 +0200
   Fixed wrong literal SQL + bind examples (missing operator and placeholders).
  
 
 r7523 at Thesaurus (orig r7520):  ribasushi | 2009-09-03 20:42:37 +0200
  r7481 at Thesaurus (orig r7478):  norbi | 2009-09-01 22:30:48 +0200
   r7655 at vger:  mendel | 2009-09-01 22:30:35 +0200
   Fixed the bind value column names in the SQL literal + bind examples.
  
 
 r7524 at Thesaurus (orig r7521):  ribasushi | 2009-09-03 20:42:45 +0200
  r7482 at Thesaurus (orig r7479):  norbi | 2009-09-01 22:52:21 +0200
   r7657 at vger:  mendel | 2009-09-01 22:52:09 +0200
   Further improvement in the bind value column names in the SQL literal + bind examples.
  
 
 r7549 at Thesaurus (orig r7546):  ribasushi | 2009-09-04 08:47:19 +0200
 Stop connecting to determine dt-parser (test is in pg branch)
 r7553 at Thesaurus (orig r7550):  ribasushi | 2009-09-04 11:20:48 +0200
 Require sqla with bool support
 r7560 at Thesaurus (orig r7557):  ribasushi | 2009-09-04 19:17:32 +0200
 Dumper follies
 r7561 at Thesaurus (orig r7558):  ribasushi | 2009-09-04 19:27:50 +0200
 Even better sqla
 r7570 at Thesaurus (orig r7567):  ribasushi | 2009-09-04 20:49:53 +0200
  r7459 at Thesaurus (orig r7456):  rbuels | 2009-09-01 12:46:46 +0200
  making another pg_unqualified_schema branch, for real this time
  r7460 at Thesaurus (orig r7457):  rbuels | 2009-09-01 12:51:31 +0200
  reworked tests for pg last_insert_id in presence of un-schema-qualified things. adds some todo tests, including a case for which it does not seem to be possible to correctly guess the sequence to use for the liid
  r7461 at Thesaurus (orig r7458):  rbuels | 2009-09-01 12:54:34 +0200
  in Pg storage, added a warning for case when the nextval sequence is not schema qualified
  r7462 at Thesaurus (orig r7459):  rbuels | 2009-09-01 13:01:31 +0200
  tweak to Pg test, warnings_like -> warnings_exist
  r7463 at Thesaurus (orig r7460):  ribasushi | 2009-09-01 13:34:59 +0200
  Rewrap todo properly
  r7490 at Thesaurus (orig r7487):  ribasushi | 2009-09-02 14:16:01 +0200
  Make pg sequence autodetect deterministic (or throw exceptions). Test needs adjusting
  r7491 at Thesaurus (orig r7488):  rbuels | 2009-09-02 19:15:01 +0200
  some reorganization and cleanup of pg-specific tests
  r7492 at Thesaurus (orig r7489):  rbuels | 2009-09-02 20:08:31 +0200
  more cleanup of 72pg.t
  r7495 at Thesaurus (orig r7492):  rbuels | 2009-09-02 20:48:12 +0200
  more cleanup of pg tests, added cascade to drop function, cleaned up create and drop of schemas to use dbh_do
  r7496 at Thesaurus (orig r7493):  rbuels | 2009-09-02 20:50:42 +0200
  oops, missed something screwed up by the pull
  r7525 at Thesaurus (orig r7522):  rbuels | 2009-09-03 20:45:53 +0200
  added __END__ before pod in Pg storage
  r7526 at Thesaurus (orig r7523):  rbuels | 2009-09-03 20:46:00 +0200
  renamed pg test schemas to be more organized
  r7531 at Thesaurus (orig r7528):  rbuels | 2009-09-04 00:28:11 +0200
  more pg test cleanup
  r7532 at Thesaurus (orig r7529):  rbuels | 2009-09-04 00:28:17 +0200
  more pg test cleanup
  r7533 at Thesaurus (orig r7530):  rbuels | 2009-09-04 00:28:25 +0200
  starting work on extended set of Pg auto-pk tests
  r7534 at Thesaurus (orig r7531):  rbuels | 2009-09-04 00:28:31 +0200
  more work on extended set of Pg auto-pk tests
  r7535 at Thesaurus (orig r7532):  rbuels | 2009-09-04 00:28:39 +0200
  more work on pg tests
  r7536 at Thesaurus (orig r7533):  rbuels | 2009-09-04 00:28:45 +0200
  more work on extended set of Pg auto-pk tests
  r7537 at Thesaurus (orig r7534):  rbuels | 2009-09-04 00:28:50 +0200
  added .gitignore for users of git-svn
  r7538 at Thesaurus (orig r7535):  rbuels | 2009-09-04 00:28:58 +0200
  more work on extended set of Pg auto-pk tests
  r7539 at Thesaurus (orig r7536):  rbuels | 2009-09-04 00:29:04 +0200
  added darcs and git to MANIFEST.SKIP version control skipping section
  r7540 at Thesaurus (orig r7537):  rbuels | 2009-09-04 00:41:26 +0200
  more work on extended set of Pg auto-pk tests
  r7541 at Thesaurus (orig r7538):  rbuels | 2009-09-04 00:41:32 +0200
  more work on extended set of Pg auto-pk tests
  r7542 at Thesaurus (orig r7539):  rbuels | 2009-09-04 00:41:38 +0200
  more work on extended set of Pg auto-pk tests
  r7543 at Thesaurus (orig r7540):  rbuels | 2009-09-04 02:20:23 +0200
  more work on extended set of Pg auto-pk tests
  r7544 at Thesaurus (orig r7541):  rbuels | 2009-09-04 02:20:32 +0200
  rewrote autoinc fetcher as a query into the pg_catalog.  all the old tests pass now, but not my new tests.  the new tests might be buggy
  r7545 at Thesaurus (orig r7542):  rbuels | 2009-09-04 02:20:39 +0200
  oops, forgot to put the drop for the extended tests back in the pg tests
  r7546 at Thesaurus (orig r7543):  rbuels | 2009-09-04 02:41:56 +0200
  couple of comment/documentation tweaks to pg storage driver
  r7547 at Thesaurus (orig r7544):  rbuels | 2009-09-04 02:42:02 +0200
  fixed my tests
  r7548 at Thesaurus (orig r7545):  rbuels | 2009-09-04 02:42:09 +0200
  clarified the POD in Pg storage driver regarding multi-schema support
  r7551 at Thesaurus (orig r7548):  ribasushi | 2009-09-04 08:51:30 +0200
  Proper unconnected test
  r7554 at Thesaurus (orig r7551):  ribasushi | 2009-09-04 11:26:12 +0200
  Fixes to pg test after review:
  - Move the store_column test to 60core.t
  - Streamline the select ... for update test
  - Disable all exception warnings for normal test runs
  
  r7555 at Thesaurus (orig r7552):  ribasushi | 2009-09-04 11:56:00 +0200
  Rewrite selector using sqla
  r7562 at Thesaurus (orig r7559):  rbuels | 2009-09-04 19:42:52 +0200
  moved search_path querying function from Pg storage driver into tests
  r7563 at Thesaurus (orig r7560):  rbuels | 2009-09-04 19:43:00 +0200
  refactored how Pg storage driver calls sequence search, made error message more informative when query into pg_catalog fails
  r7564 at Thesaurus (orig r7561):  rbuels | 2009-09-04 19:43:08 +0200
  tweaked pg sequence discovery error message a bit more
  r7565 at Thesaurus (orig r7562):  rbuels | 2009-09-04 19:43:17 +0200
  added big block comment explaining Pg sequence discovery strategy
  r7566 at Thesaurus (orig r7563):  rbuels | 2009-09-04 20:35:10 +0200
  added code to use DBD::Pg column_info to fetch column default if recent enough
  r7567 at Thesaurus (orig r7564):  rbuels | 2009-09-04 20:35:18 +0200
  tweaked comment
  r7568 at Thesaurus (orig r7565):  rbuels | 2009-09-04 20:35:30 +0200
  oops, DBD::Pg 2.15.1 should be included in working versions
 
 r7572 at Thesaurus (orig r7569):  ribasushi | 2009-09-04 21:32:01 +0200
 Stop double-caching datetime_parser - keep it in the storage only
 r7573 at Thesaurus (orig r7570):  ribasushi | 2009-09-04 21:36:39 +0200
 No Serialize::Storable in core
 r7574 at Thesaurus (orig r7571):  ribasushi | 2009-09-04 21:49:54 +0200
 Changes
 r7580 at Thesaurus (orig r7577):  ribasushi | 2009-09-06 12:28:44 +0200
 Add mysterious exception test
 r7582 at Thesaurus (orig r7579):  ribasushi | 2009-09-06 15:43:10 +0200
 No connection - no cleanup
 r7583 at Thesaurus (orig r7580):  ribasushi | 2009-09-06 15:45:51 +0200
 Streamline test
 r7584 at Thesaurus (orig r7581):  ribasushi | 2009-09-06 17:39:03 +0200
 Test cleanup:
 Benchmark and Data::Dumper have been in core forever
 Make POD testing conditional as shown in http://use.perl.org/~Alias/journal/38822
 Remove some dead cdbi test files
 Stop openly giving contributors an option to override the authorcheck
 
 r7585 at Thesaurus (orig r7582):  ribasushi | 2009-09-06 17:48:32 +0200
 Done long time ago
 r7586 at Thesaurus (orig r7583):  ribasushi | 2009-09-06 17:56:27 +0200
 Release 0.08110
 r7588 at Thesaurus (orig r7585):  ribasushi | 2009-09-06 18:33:46 +0200
 Stop eating exceptions in ::Storage::DBI::DESTROY
 r7589 at Thesaurus (orig r7586):  ribasushi | 2009-09-06 20:35:30 +0200
 Centralize identity insert control for mssql (it seems that issuing an OFF is not necessary)
 r7590 at Thesaurus (orig r7587):  ribasushi | 2009-09-06 20:45:41 +0200
 Clearer MSSQL error message
 r7591 at Thesaurus (orig r7588):  ribasushi | 2009-09-06 23:58:22 +0200
 Fix mssql pod
 r7592 at Thesaurus (orig r7589):  ribasushi | 2009-09-07 09:06:05 +0200
 Release 0.08111



Property changes on: DBIx-Class/0.08/branches/prefetch
___________________________________________________________________
Name: svk:merge
   - 168d5346-440b-0410-b799-f706be625ff1:/DBIx-Class-current:2207
462d4d0c-b505-0410-bf8e-ce8f877b3390:/local/bast/DBIx-Class:3159
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/resultsetcolumn_custom_columns:5160
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/sqla_1.50_compat:5414
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/trunk:7237
9c88509d-e914-0410-b01c-b9530614cbfe:/local/DBIx-Class:32260
9c88509d-e914-0410-b01c-b9530614cbfe:/local/DBIx-Class-CDBICompat:54993
9c88509d-e914-0410-b01c-b9530614cbfe:/vendor/DBIx-Class:31122
ab17426e-7cd3-4704-a2a2-80b7c0a611bb:/local/dbic_column_attr:10946
ab17426e-7cd3-4704-a2a2-80b7c0a611bb:/local/dbic_trunk:11788
bd5ac9a7-f185-4d95-9186-dbb8b392a572:/local/os/bast/DBIx-Class/0.08/trunk:2798
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/_abandoned_but_possibly_useful/table_name_ref:7266
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/autocast:7418
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/belongs_to_null_col_fix:5244
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/cdbicompat_integration:4160
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/column_attr:5074
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/complex_join_rels:4589
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/connect_info_hash:7435
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/count_distinct:6218
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/count_rs:6741
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/diamond_relationships:6310
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/discard_changes_replication_fix:7252
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/file_column:3920
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/fix-update-and-delete-as_query:6162
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/grouped_has_many_join:7382
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/grouped_prefetch:6885
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/joined_count:6323
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mc_fixes:6645
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_money_type:7096
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_storage_minor_refactor:7210
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_top_fixes:6971
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/multi_stuff:5565
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/multicreate_fixes:7275
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mysql_ansi:7175
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mystery_join:6589
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/new_replication_transaction_fixup:7058
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/on_connect_call:6854
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/on_disconnect_do:3694
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle-tweaks:6222
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle_sequence:4173
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/order_by_refactor:6475
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/parser_fk_index:4485
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/pg_unqualified_schema:7331
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_limit:6724
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_redux:7206
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/reduce_pings:7261
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/replication_dedux:4600
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/rsrc_in_storage:6577
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/rt_bug_41083:5437
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/savepoints:4223
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/search_related_prefetch:6818
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sqla_1.50_compat:5321
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/storage-ms-access:4142
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/storage-tweaks:6262
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/subclassed_rsset:5930
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/subquery:5617
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/syb_connected:6919
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase:5651
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_mssql:6125
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/table_name_ref:7132
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/top_limit_altfix:6429
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/type_aware_update:6619
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/unresolvable_prefetch:6949
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/versioned_enhancements:4125
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/versioning:4578
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/views:5585
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/tags/0.08108_prerelease_please_do_not_pull_into_it:7008
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/tags/pre_0.08109_please_do_not_merge:7336
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/trunk:7500
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-C3:318
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-current:2222
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-joins:173
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-resultset:570
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/datetime:1716
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/find_compat:1855
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/find_unique_query_fixes:2142
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/inflate:1988
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/many_to_many:2025
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/re_refactor_bugfix:1944
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/reorganize_tests:1827
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset-new-refactor:1766
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset_2_electric_boogaloo:2175
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset_cleanup:2102
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/sqlt_tests_refactor:2043
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/trunk/DBIx-Class:3606
fe160bb6-dc1c-0410-9f2b-d64a711b54a5:/local/DBIC-trunk-0.08:10510
   + 168d5346-440b-0410-b799-f706be625ff1:/DBIx-Class-current:2207
462d4d0c-b505-0410-bf8e-ce8f877b3390:/local/bast/DBIx-Class:3159
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/cookbook_fixes:7657
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/resultsetcolumn_custom_columns:5160
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/sqla_1.50_compat:5414
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/trunk:7237
9c88509d-e914-0410-b01c-b9530614cbfe:/local/DBIx-Class:32260
9c88509d-e914-0410-b01c-b9530614cbfe:/local/DBIx-Class-CDBICompat:54993
9c88509d-e914-0410-b01c-b9530614cbfe:/vendor/DBIx-Class:31122
ab17426e-7cd3-4704-a2a2-80b7c0a611bb:/local/dbic_column_attr:10946
ab17426e-7cd3-4704-a2a2-80b7c0a611bb:/local/dbic_trunk:11788
bd5ac9a7-f185-4d95-9186-dbb8b392a572:/local/os/bast/DBIx-Class/0.08/trunk:2798
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/_abandoned_but_possibly_useful/table_name_ref:7266
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/autocast:7418
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/belongs_to_null_col_fix:5244
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/cdbicompat_integration:4160
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/column_attr:5074
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/complex_join_rels:4589
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/connect_info_hash:7435
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/cookbook_fixes:7479
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/count_distinct:6218
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/count_rs:6741
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/diamond_relationships:6310
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/discard_changes_replication_fix:7252
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/file_column:3920
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/fix-update-and-delete-as_query:6162
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/grouped_has_many_join:7382
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/grouped_prefetch:6885
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/joined_count:6323
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mc_fixes:6645
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_money_type:7096
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_storage_minor_refactor:7210
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_top_fixes:6971
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/multi_stuff:5565
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/multicreate_fixes:7275
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mysql_ansi:7175
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mystery_join:6589
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/new_replication_transaction_fixup:7058
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/on_connect_call:6854
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/on_disconnect_do:3694
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle-tweaks:6222
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle_sequence:4173
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/order_by_refactor:6475
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/parser_fk_index:4485
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/pg_unqualified_schema:7566
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_limit:6724
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_redux:7206
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/reduce_pings:7261
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/replication_dedux:4600
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/rsrc_in_storage:6577
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/rt_bug_41083:5437
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/savepoints:4223
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/search_related_prefetch:6818
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sqla_1.50_compat:5321
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/storage-ms-access:4142
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/storage-tweaks:6262
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/subclassed_rsset:5930
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/subquery:5617
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/syb_connected:6919
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase:5651
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_mssql:6125
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/table_name_ref:7132
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/top_limit_altfix:6429
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/type_aware_update:6619
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/unresolvable_prefetch:6949
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/versioned_enhancements:4125
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/versioning:4578
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/views:5585
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/tags/0.08108_prerelease_please_do_not_pull_into_it:7008
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/tags/pre_0.08109_please_do_not_merge:7336
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/trunk:7589
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-C3:318
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-current:2222
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-joins:173
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-resultset:570
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/datetime:1716
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/find_compat:1855
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/find_unique_query_fixes:2142
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/inflate:1988
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/many_to_many:2025
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/re_refactor_bugfix:1944
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/reorganize_tests:1827
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset-new-refactor:1766
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset_2_electric_boogaloo:2175
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset_cleanup:2102
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/sqlt_tests_refactor:2043
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/trunk/DBIx-Class:3606
fe160bb6-dc1c-0410-9f2b-d64a711b54a5:/local/DBIC-trunk-0.08:10510

Added: DBIx-Class/0.08/branches/prefetch/.gitignore
===================================================================
--- DBIx-Class/0.08/branches/prefetch/.gitignore	                        (rev 0)
+++ DBIx-Class/0.08/branches/prefetch/.gitignore	2009-09-07 07:20:44 UTC (rev 7592)
@@ -0,0 +1,7 @@
+META.yml
+Makefile
+README
+blib/
+inc/
+pm_to_blib
+t/var/

Modified: DBIx-Class/0.08/branches/prefetch/Changes
===================================================================
--- DBIx-Class/0.08/branches/prefetch/Changes	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/Changes	2009-09-07 07:20:44 UTC (rev 7592)
@@ -1,5 +1,6 @@
 Revision history for DBIx::Class
 
+0.08111 2009-09-06 21:58:00 (UTC)
         - The hashref to connection_info now accepts a 'dbh_maker'
           coderef, allowing better intergration with Catalyst
         - Fixed a complex prefetch + regular join regression introduced
@@ -9,10 +10,14 @@
           cleanup
         - SQLT related fixes:
           - sqlt_type is now called on the correct storage object
-          - hooks can now see the correct producer_type
+          - hooks can now see the correct producer_type (RT#47891)
           - optional SQLT requirements for e.g. deploy() bumped to 0.11002
+        - Really fixed (and greatly cleaned up) postgresql autoinc sequence
+          autodetection
         - Automatically detect MySQL v3 and use INNER JOIN instead of JOIN
-        - POD improvements
+        - POD improvements (including RT#48769)
+        - Test suite tweaks (including fixes for recent CPANTS fails)
+        - Better support for MSSQL IDENTITY_INSERT ON
 
 0.08109 2009-08-18 08:35:00 (UTC)
         - Replication updates:

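The 'dbh_maker' option noted in the Changes entry above takes a coderef that returns a connected database handle in place of the usual DSN/user/password list. A minimal sketch (the schema class name and DSN are assumptions, not part of this commit):

  use DBI;
  use My::Schema;

  my $schema = My::Schema->connect({
    # the coderef is invoked whenever DBIx::Class needs a fresh handle
    dbh_maker => sub {
      DBI->connect('dbi:SQLite:dbname=example.db', '', '', { RaiseError => 1 });
    },
  });
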
Modified: DBIx-Class/0.08/branches/prefetch/MANIFEST.SKIP
===================================================================
--- DBIx-Class/0.08/branches/prefetch/MANIFEST.SKIP	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/MANIFEST.SKIP	2009-09-07 07:20:44 UTC (rev 7592)
@@ -6,6 +6,9 @@
 \bCVS\b
 ,v$
 \B\.svn\b
+\B\.git\b
+\B\.gitignore\b
+\b_darcs\b
 
 # Avoid Makemaker generated and utility files.
 \bMakefile$

Modified: DBIx-Class/0.08/branches/prefetch/Makefile.PL
===================================================================
--- DBIx-Class/0.08/branches/prefetch/Makefile.PL	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/Makefile.PL	2009-09-07 07:20:44 UTC (rev 7592)
@@ -40,7 +40,7 @@
 requires 'Module::Find'             => '0.06';
 requires 'Path::Class'              => '0.16';
 requires 'Scope::Guard'             => '0.03';
-requires 'SQL::Abstract'            => '1.56';
+requires 'SQL::Abstract'            => '1.58';
 requires 'SQL::Abstract::Limit'     => '0.13';
 requires 'Sub::Name'                => '0.04';
 
@@ -61,9 +61,15 @@
   %replication_requires,
 
 #  'Module::Install::Pod::Inherit' => '0.01',
-  'Test::Pod::Coverage'       => '1.04',
   'SQL::Translator'           => $sqlt_recommends,
 
+  # when changing also adjust version in t/02pod.t
+  'Test::Pod'                 => '1.26',
+
+  # when changing also adjust version in t/03podcoverage.t
+  'Test::Pod::Coverage'       => '1.08',
+  'Pod::Coverage'             => '0.20',
+
   # CDBI-compat related
   'DBIx::ContextualFetch'     => '0',
   'Class::DBI::Plugin::DeepAbstractSearch' => '0',

Modified: DBIx-Class/0.08/branches/prefetch/TODO
===================================================================
--- DBIx-Class/0.08/branches/prefetch/TODO	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/TODO	2009-09-07 07:20:44 UTC (rev 7592)
@@ -25,13 +25,6 @@
    __PACKAGE__->table(__PACKAGE__->table()); for the result set to 
    return the correct object type.
 
-2006-03-27 by mst
- Add the ability for deploy to be given a directory and grab <dbname>.sql 
- out of there if available. Try SQL::Translator if not. If none of the above, 
- cry (and die()).  Then you can have a script that pre-gens for all available 
- SQLT modules so an app can do its own deploy without SQLT on the target 
- system
-
 2006-05-25 by mst (TODOed by bluefeet)
  Add the search attributes "limit" and "rows_per_page".
  limit: work as expected just like offset does

Modified: DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Core.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Core.pm	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Core.pm	2009-09-07 07:20:44 UTC (rev 7592)
@@ -7,7 +7,6 @@
 use base qw/DBIx::Class/;
 
 __PACKAGE__->load_components(qw/
-  Serialize::Storable
   Relationship
   InflateColumn
   PK::Auto
@@ -35,8 +34,6 @@
 
 =over 4
 
-=item L<DBIx::Class::Serialize::Storable>
-
 =item L<DBIx::Class::InflateColumn>
 
 =item L<DBIx::Class::Relationship>

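With Serialize::Storable dropped from the Core component stack, result classes that need the Storable freeze/thaw hooks must now load the component themselves. A minimal sketch (the result class and table name are assumptions):

  package My::Schema::Result::Artist;

  use strict;
  use warnings;
  use base qw/DBIx::Class/;

  # Serialize::Storable is no longer pulled in by Core, so list it explicitly
  __PACKAGE__->load_components(qw/Serialize::Storable Core/);
  __PACKAGE__->table('artist');

  1;
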
Modified: DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/InflateColumn/DateTime.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/InflateColumn/DateTime.pm	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/InflateColumn/DateTime.pm	2009-09-07 07:20:44 UTC (rev 7592)
@@ -86,8 +86,6 @@
 
 __PACKAGE__->load_components(qw/InflateColumn/);
 
-__PACKAGE__->mk_group_accessors('simple' => '__datetime_parser');
-
 =head2 register_column
 
 Chains with the L<DBIx::Class::Row/register_column> method, and sets
@@ -224,12 +222,7 @@
 }
 
 sub _datetime_parser {
-  my $self = shift;
-  if (my $parser = $self->__datetime_parser) {
-    return $parser;
-  }
-  my $parser = $self->result_source->storage->datetime_parser(@_);
-  return $self->__datetime_parser($parser);
+  shift->result_source->storage->datetime_parser (@_);
 }
 
 1;

Modified: DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Manual/Component.pod
===================================================================
--- DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Manual/Component.pod	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Manual/Component.pod	2009-09-07 07:20:44 UTC (rev 7592)
@@ -84,6 +84,8 @@
 These components provide extra functionality beyond 
 basic functionality that you can't live without.
 
+L<DBIx::Class::Serialize::Storable> - Hooks for Storable freeze/thaw.
+
 L<DBIx::Class::CDBICompat> - Class::DBI Compatibility layer.
 
 L<DBIx::Class::FormTools> - Build forms with multiple interconnected objects.
@@ -132,8 +134,6 @@
 
 L<DBIx::Class::ResultSourceProxy::Table> - Provides a classdata table object and method proxies.
 
-L<DBIx::Class::Serialize::Storable> - Hooks for Storable freeze/thaw.
-
 L<DBIx::Class::Row> - Basic row methods.
 
 =head1 SEE ALSO

Modified: DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Manual/Cookbook.pod
===================================================================
--- DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Manual/Cookbook.pod	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Manual/Cookbook.pod	2009-09-07 07:20:44 UTC (rev 7592)
@@ -37,8 +37,11 @@
 
 This results in something like the following C<WHERE> clause:
 
-  WHERE artist LIKE '%Lamb%' AND title LIKE '%Fear of Fours%'
+  WHERE artist LIKE ? AND title LIKE ?
 
+And the following bind values for the placeholders: C<'%Lamb%'>, C<'%Fear of
+Fours%'>.
+
 Other queries might require slightly more complex logic:
 
   my @albums = $schema->resultset('Album')->search({
@@ -244,6 +247,8 @@
   # Or use DBIx::Class::AccessorGroup:
   __PACKAGE__->mk_group_accessors('column' => 'name_length');
 
+See also L</Using SQL functions on the left hand side of a comparison>.
+
 =head2 SELECT DISTINCT with multiple columns
 
   my $rs = $schema->resultset('Artist')->search(
@@ -331,7 +336,7 @@
 The following will B<not> work:
 
   my $rs = $schema->resultset('CD')->search({
-    artist_id => $inside_rs->get_column('id')->as_query,
+    artist_id => $inside_rs->get_column('id')->as_query,  # does NOT work
   });
 
 =head3 Support
@@ -404,8 +409,10 @@
 
 =head2 Using SQL functions on the left hand side of a comparison
 
-Using SQL functions on the left hand side of a comparison is generally
-not a good idea since it requires a scan of the entire table.  However,
+Using SQL functions on the left hand side of a comparison is generally not a
+good idea since it requires a scan of the entire table. (Unless your RDBMS
+supports indexes on expressions - including return values of functions -, and
+you create an index on the return value of the function in question.) However,
 it can be accomplished with C<DBIx::Class> when necessary.
 
 If you do not have quoting on, simply include the function in your search
@@ -413,25 +420,30 @@
 
   $rs->search({ 'YEAR(date_of_birth)' => 1979 });
 
-With quoting on, or for a more portable solution, use the C<where>
-attribute:
+With quoting on, or for a more portable solution, use literal SQL values with
+placeholders:
 
-  $rs->search({}, { where => \'YEAR(date_of_birth) = 1979' });
+  $rs->search(\[ 'YEAR(date_of_birth) = ?', [ plain_value => 1979 ] ]);
 
-=begin hidden
+  # Equivalent SQL:
+  # SELECT * FROM employee WHERE YEAR(date_of_birth) = ?
 
-(When the bind args ordering bug is fixed, this technique will be better
-and can replace the one above.)
+  $rs->search({
+    name => 'Bob',
+    -nest => \[ 'YEAR(date_of_birth) = ?', [ plain_value => 1979 ] ],
+  });
 
-With quoting on, or for a more portable solution, use the C<where> and
-C<bind> attributes:
+  # Equivalent SQL:
+  # SELECT * FROM employee WHERE name = ? AND YEAR(date_of_birth) = ?
 
-  $rs->search({}, {
-      where => \'YEAR(date_of_birth) = ?',
-      bind  => [ 1979 ]
-  });
+Note: the C<plain_value> string in the C<< [ plain_value => 1979 ] >> part
+should be either the same as the name of the column (do this if the type of the
+return value of the function is the same as the type of the column) or
+otherwise it's essentially a dummy string currently (use C<plain_value> as a
+habit). It is used by L<DBIx::Class> to handle special column types.
 
-=end hidden
+See also L<SQL::Abstract/Literal SQL with placeholders and bind values
+(subqueries)>.
 
 =head1 JOINS AND PREFETCHING
 
@@ -922,6 +934,9 @@
     ### The statement below will print
     print "I can do admin stuff\n" if $admin->can('do_admin_stuff');
 
+Alternatively you can use L<DBIx::Class::DynamicSubclass> that implements
+exactly the above functionality.
+
 =head2 Skip row object creation for faster results
 
 DBIx::Class is not built for speed, it's built for convenience and
@@ -1062,7 +1077,7 @@
 To order C<< $book->pages >> by descending page_number, create the relation
 as follows:
 
-  __PACKAGE__->has_many('pages' => 'Page', 'book', { order_by => \'page_number DESC'} );
+  __PACKAGE__->has_many('pages' => 'Page', 'book', { order_by => { -desc => 'page_number'} } );
 
 =head2 Filtering a relationship result set
 
@@ -1104,6 +1119,16 @@
   $rs = $user->addresses(); # get all addresses for a user
   $rs = $address->users(); # get all users for an address
 
+  my $address = $user->add_to_addresses(    # returns a My::Address instance,
+                                            # NOT a My::UserAddress instance!
+    {
+      country => 'United Kingdom',
+      area_code => 'XYZ',
+      town => 'London',
+      street => 'Sesame',
+    }
+  );
+
 =head2 Relationships across DB schemas
 
 Mapping relationships across L<DB schemas|DBIx::Class::Manual::Glossary/DB schema>

Modified: DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Manual/FAQ.pod
===================================================================
--- DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Manual/FAQ.pod	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Manual/FAQ.pod	2009-09-07 07:20:44 UTC (rev 7592)
@@ -247,17 +247,14 @@
 
 To use an SQL function on the left hand side of a comparison:
 
- ->search({}, { where => \'YEAR(date_of_birth)=1979' });
+ ->search({ -nest => \[ 'YEAR(date_of_birth) = ?', [ plain_value => 1979 ] ] });
 
-=begin hidden
+Note: the C<plain_value> string in the C<< [ plain_value => 1979 ] >> part
+should be either the same as the name of the column (do this if the type of the
+return value of the function is the same as the type of the column) or
+otherwise it's essentially a dummy string currently (use C<plain_value> as a
+habit). It is used by L<DBIx::Class> to handle special column types.
 
-(When the bind arg ordering bug is fixed, the previous example can be
-replaced with the following.)
-
- ->search({}, { where => \'YEAR(date_of_birth)=?', bind => [ 1979 ] });
-
-=end hidden
-
 Or, if you have quoting off:
 
  ->search({ 'YEAR(date_of_birth)' => 1979 });

Modified: DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Serialize/Storable.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Serialize/Storable.pm	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Serialize/Storable.pm	2009-09-07 07:20:44 UTC (rev 7592)
@@ -11,11 +11,6 @@
     # reattached in the thaw handler below
     delete $to_serialize->{result_source};
 
-    # If the parser is cached there is a chance that the interpeter
-    # which receives the ice will not have the parser loaded
-    # A re-determination will force an implicit load
-    delete $to_serialize->{__datetime_parser};
-
     # Dynamic values, easy to recalculate
     delete $to_serialize->{$_} for qw/related_resultsets _inflated_column/;
 

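Since the datetime parser now lives only in the storage object, a frozen row no longer drags a parser class along to an interpreter that may not have it loaded. A minimal freeze/thaw sketch (assumes a result class with Serialize::Storable loaded, as above):

  use Storable qw/freeze thaw/;

  my $artist = $schema->resultset('Artist')->find(1);
  my $copy   = thaw( freeze($artist) );   # result_source is re-attached in the thaw hook
  print $copy->name, "\n";
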
Modified: DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI/MSSQL.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI/MSSQL.pm	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI/MSSQL.pm	2009-09-07 07:20:44 UTC (rev 7592)
@@ -14,31 +14,36 @@
 
 __PACKAGE__->sql_maker_class('DBIx::Class::SQLAHacks::MSSQL');
 
+sub _set_identity_insert {
+  my ($self, $table) = @_;
+
+  my $sql = sprintf (
+    'SET IDENTITY_INSERT %s ON',
+    $self->sql_maker->_quote ($table),
+  );
+
+  my $dbh = $self->_get_dbh;
+  eval { $dbh->do ($sql) };
+  if ($@) {
+    $self->throw_exception (sprintf "Error executing '%s': %s",
+      $sql,
+      $dbh->errstr,
+    );
+  }
+}
+
 sub insert_bulk {
   my $self = shift;
   my ($source, $cols, $data) = @_;
 
-  my $identity_insert = 0;
-
-  COLUMNS:
-  foreach my $col (@{$cols}) {
-    if ($source->column_info($col)->{is_auto_increment}) {
-      $identity_insert = 1;
-      last COLUMNS;
-    }
+  if (List::Util::first
+      { $source->column_info ($_)->{is_auto_increment} }
+      (@{$cols})
+  ) {
+      $self->_set_identity_insert ($source->name);
   }
 
-  if ($identity_insert) {
-    my $table = $source->from;
-    $self->_get_dbh->do("SET IDENTITY_INSERT $table ON");
-  }
-
   $self->next::method(@_);
-
-  if ($identity_insert) {
-    my $table = $source->from;
-    $self->_get_dbh->do("SET IDENTITY_INSERT $table OFF");
-  }
 }
 
 # support MSSQL GUID column types
@@ -47,7 +52,7 @@
   my $self = shift;
   my ($source, $to_insert) = @_;
 
-  my $updated_cols = {};
+  my $supplied_col_info = $self->_resolve_column_info($source, [keys %$to_insert] );
 
   my %guid_cols;
   my @pk_cols = $source->primary_columns;
@@ -71,11 +76,17 @@
   my @get_guids_for =
     grep { not exists $to_insert->{$_} } (@pk_guids, @auto_guids);
 
+  my $updated_cols = {};
+
   for my $guid_col (@get_guids_for) {
     my ($new_guid) = $self->_get_dbh->selectrow_array('SELECT NEWID()');
     $updated_cols->{$guid_col} = $to_insert->{$guid_col} = $new_guid;
   }
 
+  if (List::Util::first { $_->{is_auto_increment} } (values %$supplied_col_info) ) {
+    $self->_set_identity_insert ($source->name);
+  }
+
   $updated_cols = { %$updated_cols, %{ $self->next::method(@_) } };
 
   return $updated_cols;
@@ -105,14 +116,6 @@
   if ($op eq 'insert') {
     $sql .= ';SELECT SCOPE_IDENTITY()';
 
-    my $col_info = $self->_resolve_column_info($ident, [map $_->[0], @{$bind}]);
-    if (List::Util::first { $_->{is_auto_increment} } (values %$col_info) ) {
-
-      my $table = $ident->from;
-      my $identity_insert_on = "SET IDENTITY_INSERT $table ON";
-      my $identity_insert_off = "SET IDENTITY_INSERT $table OFF";
-      $sql = "$identity_insert_on; $sql; $identity_insert_off";
-    }
   }
 
   return ($sql, $bind);
@@ -218,13 +221,15 @@
 inserts into another table with an identity will give erroneous results on
 recent versions of SQL Server.
 
-=head2 bulk_insert
+=head2 identity insert
 
 Be aware that we have tried to make things as simple as possible for our users.
-For MSSQL that means that when a user tries to do a populate/bulk_insert which
-includes an autoincrementing column, we will try to tell the database to allow
-the insertion of the autoinc column.  But the user must have the db_ddladmin
-role membership, otherwise you will get a fairly opaque error message.
+For MSSQL that means that when a user tries to create a row, while supplying an
+explicit value for an autoincrementing column, we will try to issue the
+appropriate database call to make this possible, namely C<SET IDENTITY_INSERT
+$table_name ON>. Unfortunately this operation in MSSQL requires the
+C<db_ddladmin> privilege, which is normally not included in the standard
+write-permissions.
 
 =head1 AUTHOR
 

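From the resultset level, the centralized identity handling means that supplying an explicit value for an IDENTITY column simply works, provided the connecting user has the db_ddladmin privilege described in the POD above. A minimal sketch (the result source and column names are assumptions):

  # an explicit value for the autoincrementing PK makes the storage issue
  # 'SET IDENTITY_INSERT <table> ON' before the actual INSERT
  my $row = $schema->resultset('Artist')->create({
    artistid => 42,
    name     => 'Explicitly numbered artist',
  });
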
Modified: DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI/Pg.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI/Pg.pm	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI/Pg.pm	2009-09-07 07:20:44 UTC (rev 7592)
@@ -21,10 +21,12 @@
 
 sub last_insert_id {
   my ($self,$source,$col) = @_;
-  my $seq = ($source->column_info($col)->{sequence} ||= $self->get_autoinc_seq($source,$col));
-  $self->throw_exception("could not fetch primary key for " . $source->name . ", could not "
-    . "get autoinc sequence for $col (check that table and column specifications are correct "
-    . "and in the correct case)") unless defined $seq;
+  my $seq = ( $source->column_info($col)->{sequence} ||= $self->dbh_do('_dbh_get_autoinc_seq', $source, $col) )
+      or $self->throw_exception( "could not determine sequence for "
+                                 . $source->name
+                                 . ".$col, please consider adding a "
+                                 . "schema-qualified sequence to its column info"
+                               );
 
   $self->_dbh_last_insert_id ($self->_dbh, $seq);
 }
@@ -37,76 +39,93 @@
 }
 
 
-sub _get_pg_search_path {
-    my ($self,$dbh) = @_;
-    # cache the search path as ['schema','schema',...] in the storage
-    # obj
-    $self->{_pg_search_path} ||= do {
-        my @search_path;
-        my ($sp_string) = $dbh->selectrow_array('SHOW search_path');
-        while( $sp_string =~ s/("[^"]+"|[^,]+),?// ) {
-            unless( defined $1 and length $1 ) {
-                $self->throw_exception("search path sanity check failed: '$1'")
-            }
-            push @search_path, $1;
-        }
-        \@search_path
-    };
-}
-
 sub _dbh_get_autoinc_seq {
-  my ($self, $dbh, $schema, $table, @pri) = @_;
+  my ($self, $dbh, $source, $col) = @_;
 
-  # get the list of postgres schemas to search.  if we have a schema
-  # specified, use that.  otherwise, use the search path
-  my @search_path;
-  if( defined $schema and length $schema ) {
-      @search_path = ( $schema );
-  } else {
-      @search_path = @{ $self->_get_pg_search_path($dbh) };
+  my $schema;
+  my $table = $source->name;
+
+  # deref table name if it needs it
+  $table = $$table
+      if ref $table eq 'SCALAR';
+
+  # parse out schema name if present
+  if( $table =~ /^(.+)\.(.+)$/ ) {
+    ( $schema, $table ) = ( $1, $2 );
   }
 
-  foreach my $search_schema (@search_path) {
-      foreach my $col (@pri) {
-          my $info = $dbh->column_info(undef,$search_schema,$table,$col)->fetchrow_hashref;
-          if($info) {
-              # if we get here, we have definitely found the right
-              # column.
-              if( defined $info->{COLUMN_DEF} and
-                  $info->{COLUMN_DEF}
-                    =~ /^nextval\(+'([^']+)'::(?:text|regclass)\)/i
-                ) {
-                  my $seq = $1;
-                  return $seq =~ /\./ ? $seq : $info->{TABLE_SCHEM} . "." . $seq;
-              } else {
-                  # we have found the column, but cannot figure out
-                  # the nextval seq
-                  return;
-              }
-          }
-      }
+  # use DBD::Pg to fetch the column info if it is recent enough to
+  # work. otherwise, use custom SQL
+  my $seq_expr =  $DBD::Pg::VERSION >= 2.015001
+      ? eval{ $dbh->column_info(undef,$schema,$table,$col)->fetchrow_hashref->{COLUMN_DEF} }
+      : $self->_dbh_get_column_default( $dbh, $schema, $table, $col );
+
+  # if no default value is set on the column, or if we can't parse the
+  # default value as a sequence, throw.
+  unless ( defined $seq_expr and $seq_expr =~ /^nextval\(+'([^']+)'::(?:text|regclass)\)/i ){
+    $seq_expr = '' unless defined $seq_expr;
+    $schema = "$schema." if defined $schema && length $schema;
+    $self->throw_exception( "no sequence found for $schema$table.$col, check table definition, "
+                            . "or explicitly set the 'sequence' for this column in the "
+                            . $source->source_name
+                            . " class"
+                          );
   }
-  return;
+
+  return $1;
 }
 
-sub get_autoinc_seq {
-  my ($self,$source,$col) = @_;
+# custom method for fetching column default, since column_info has a
+# bug with older versions of DBD::Pg
+sub _dbh_get_column_default {
+  my ( $self, $dbh, $schema, $table, $col ) = @_;
 
-  my @pri = $source->primary_columns;
+  # Build and execute a query into the pg_catalog to find the Pg
+  # expression for the default value for this column in this table.
+  # If the table name is schema-qualified, query using that specific
+  # schema name.
 
-  my $schema;
-  my $table = $source->name;
+  # Otherwise, find the table in the standard Postgres way, using the
+  # search path.  This is done with the pg_catalog.pg_table_is_visible
+  # function, which returns true if a given table is 'visible',
+  # meaning the first table of that name to be found in the search
+  # path.
 
-  if (ref $table eq 'SCALAR') {
-    $table = $$table;
-  }
-  elsif ($table =~ /^(.+)\.(.+)$/) {
-    ($schema, $table) = ($1, $2);
-  }
+  # I *think* we can be assured that this query will always find the
+  # correct column according to standard Postgres semantics.
+  #
+  # -- rbuels
 
-  $self->dbh_do('_dbh_get_autoinc_seq', $schema, $table, @pri);
+  my $sqlmaker = $self->sql_maker;
+  local $sqlmaker->{bindtype} = 'normal';
+
+  my ($where, @bind) = $sqlmaker->where ({
+    'a.attnum' => {'>', 0},
+    'c.relname' => $table,
+    'a.attname' => $col,
+    -not_bool => 'a.attisdropped',
+    (defined $schema && length $schema)
+      ? ( 'n.nspname' => $schema )
+      : ( -bool => \'pg_catalog.pg_table_is_visible(c.oid)' )
+  });
+
+  my ($seq_expr) = $dbh->selectrow_array(<<EOS,undef,@bind);
+
+SELECT
+  (SELECT pg_catalog.pg_get_expr(d.adbin, d.adrelid)
+   FROM pg_catalog.pg_attrdef d
+   WHERE d.adrelid = a.attrelid AND d.adnum = a.attnum AND a.atthasdef)
+FROM pg_catalog.pg_class c
+     LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
+     JOIN pg_catalog.pg_attribute a ON a.attrelid = c.oid
+$where
+
+EOS
+
+  return $seq_expr;
 }
 
+
 sub sqlt_type {
   return 'PostgreSQL';
 }
@@ -155,6 +174,8 @@
 
 1;
 
+__END__
+
 =head1 NAME
 
 DBIx::Class::Storage::DBI::Pg - Automatic primary key class for PostgreSQL
@@ -172,14 +193,18 @@
 
 =head1 POSTGRESQL SCHEMA SUPPORT
 
-This supports multiple PostgreSQL schemas, with one caveat: for
-performance reasons, the schema search path is queried the first time it is
-needed and CACHED for subsequent uses.
+This driver supports multiple PostgreSQL schemas, with one caveat: for
+performance reasons, data about the search path, sequence names, and
+so forth is queried as needed and CACHED for subsequent uses.
 
-For this reason, you should do any necessary manipulation of the
-PostgreSQL search path BEFORE instantiating your schema object, or as
-part of the on_connect_do option to connect(), for example:
+For this reason, once your schema is instantiated, you should not
+change the PostgreSQL schema search path for that schema's database
+connection. If you do, Bad Things may happen.
 
+You should do any necessary manipulation of the search path BEFORE
+instantiating your schema object, or as part of the on_connect_do
+option to connect(), for example:
+
    my $schema = My::Schema->connect
                   ( $dsn,$user,$pass,
                     { on_connect_do =>

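When the autodetection above cannot work out the sequence, the new exception suggests setting it explicitly; that is done through the 'sequence' key in the column info. A minimal sketch (the class, column and sequence names are assumptions):

  package My::Schema::Result::Artist;

  __PACKAGE__->add_columns(
    artistid => {
      data_type         => 'integer',
      is_auto_increment => 1,
      # schema-qualified, as recommended by the new exception message
      sequence          => 'myschema.artist_artistid_seq',
    },
  );
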
Modified: DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI.pm	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI.pm	2009-09-07 07:20:44 UTC (rev 7592)
@@ -563,7 +563,7 @@
   my $self = shift;
   my $code = shift;
 
-  my $dbh = $self->_dbh;
+  my $dbh = $self->_get_dbh;
 
   return $self->$code($dbh, @_) if $self->{_in_dbh_do}
       || $self->{transaction_depth};
@@ -574,11 +574,6 @@
   my $want_array = wantarray;
 
   eval {
-    $self->_verify_pid if $dbh;
-    if(!$self->_dbh) {
-        $self->_populate_dbh;
-        $dbh = $self->_dbh;
-    }
 
     if($want_array) {
         @result = $self->$code($dbh, @_);
@@ -625,8 +620,7 @@
   my $tried = 0;
   while(1) {
     eval {
-      $self->_verify_pid if $self->_dbh;
-      $self->_populate_dbh if !$self->_dbh;
+      $self->_get_dbh;
 
       $self->txn_begin;
       if($want_array) {
@@ -815,6 +809,7 @@
 # this is the internal "get dbh or connect (don't check)" method
 sub _get_dbh {
   my $self = shift;
+  $self->_verify_pid if $self->_dbh;
   $self->_populate_dbh unless $self->_dbh;
   return $self->_dbh;
 }
@@ -966,7 +961,7 @@
     my @bind = map { [ undef, $_ ] } @do_args;
 
     $self->_query_start($sql, @bind);
-    $self->_dbh->do($sql, $attrs, @do_args);
+    $self->_get_dbh->do($sql, $attrs, @do_args);
     $self->_query_end($sql, @bind);
   }
 
@@ -1364,6 +1359,7 @@
     local $Data::Dumper::Indent = 1;
     local $Data::Dumper::Useqq = 1;
     local $Data::Dumper::Quotekeys = 0;
+    local $Data::Dumper::Sortkeys = 1;
 
     $self->throw_exception(sprintf "%s for populate slice:\n%s",
       $tuple_status->[$i][1],
@@ -2492,7 +2488,6 @@
 sub datetime_parser {
   my $self = shift;
   return $self->{datetime_parser} ||= do {
-    $self->_populate_dbh unless $self->_dbh;
     $self->build_datetime_parser(@_);
   };
 }
@@ -2513,6 +2508,11 @@
 =cut
 
 sub build_datetime_parser {
+  if (not $_[0]->_driver_determined) {
+    $_[0]->_determine_driver;
+    goto $_[0]->can('build_datetime_parser');
+  }
+
   my $self = shift;
   my $type = $self->datetime_parser_type(@_);
   $self->ensure_class_loaded ($type);
@@ -2547,10 +2547,12 @@
 
 sub DESTROY {
   my $self = shift;
+
   $self->_verify_pid if $self->_dbh;
 
   # some databases need this to stop spewing warnings
   if (my $dbh = $self->_dbh) {
+    local $@;
     eval { $dbh->disconnect };
   }
 

Modified: DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class.pm	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class.pm	2009-09-07 07:20:44 UTC (rev 7592)
@@ -25,7 +25,7 @@
 # i.e. first release of 0.XX *must* be 0.XX000. This avoids fBSD ports
 # brain damage and presumably various other packaging systems too
 
-$VERSION = '0.08109';
+$VERSION = '0.08111';
 
 $VERSION = eval $VERSION; # numify for warning-free dev releases
 
@@ -233,11 +233,6 @@
 L<DBIx::Class::Manual::DocMap> lists each task you might want help on, and
 the modules where you will find documentation.
 
-=head1 COPYRIGHT
-
-Copyright (c) 2005 - 2009 the DBIx::Class L</AUTHOR> and L</CONTRIBUTORS>
-as listed below.
-
 =head1 AUTHOR
 
 mst: Matt S. Trout <mst at shadowcatsystems.co.uk>
@@ -389,6 +384,11 @@
 
 zamolxes: Bogdan Lucaciu <bogdan at wiz.ro>
 
+=head1 COPYRIGHT
+
+Copyright (c) 2005 - 2009 the DBIx::Class L</AUTHOR> and L</CONTRIBUTORS>
+as listed above.
+
 =head1 LICENSE
 
 This library is free software and may be distributed under the same terms

Modified: DBIx-Class/0.08/branches/prefetch/script/dbicadmin
===================================================================
--- DBIx-Class/0.08/branches/prefetch/script/dbicadmin	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/script/dbicadmin	2009-09-07 07:20:44 UTC (rev 7592)
@@ -30,7 +30,7 @@
 }
 
 pod2usage(1) if ($help);
-$ENV{DBIX_CLASS_STORAGE_DBI_DEBUG} = 1 if ($trace);
+$ENV{DBIC_TRACE} = 1 if ($trace);
 
 die('No op specified') if(!$op);
 die('Invalid op') if ($op!~/^insert|update|delete|select$/s);
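
For illustration only, the renamed environment switch and the equivalent per-schema call (assuming $schema is an already-connected DBIx::Class::Schema object):

    $ENV{DBIC_TRACE} = 1;         # process-wide SQL trace to STDERR
    $schema->storage->debug(1);   # same effect, scoped to one schema's storage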

Modified: DBIx-Class/0.08/branches/prefetch/t/02pod.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/02pod.t	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/t/02pod.t	2009-09-07 07:20:44 UTC (rev 7592)
@@ -1,6 +1,27 @@
+use warnings;
+use strict;
+
 use Test::More;
+use lib qw(t/lib);
+use DBICTest;
 
-eval "use Test::Pod 1.14";
-plan skip_all => 'Test::Pod 1.14 required' if $@;
+my @MODULES = (
+  'Test::Pod 1.26',
+);
 
+# Don't run tests for installs
+unless ( DBICTest::AuthorCheck->is_author || $ENV{AUTOMATED_TESTING} || $ENV{RELEASE_TESTING} ) {
+  plan( skip_all => "Author tests not required for installation" );
+}
+
+# Load the testing modules
+foreach my $MODULE ( @MODULES ) {
+  eval "use $MODULE";
+  if ( $@ ) {
+    $ENV{RELEASE_TESTING}
+    ? die( "Failed to load required release-testing module $MODULE" )
+    : plan( skip_all => "$MODULE not available for testing" );
+  }
+}
+
 all_pod_files_ok();

Modified: DBIx-Class/0.08/branches/prefetch/t/03podcoverage.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/03podcoverage.t	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/t/03podcoverage.t	2009-09-07 07:20:44 UTC (rev 7592)
@@ -1,14 +1,30 @@
+use warnings;
+use strict;
+
 use Test::More;
 use List::Util ();
+use lib qw(t/lib);
+use DBICTest;
 
-eval "use Pod::Coverage 0.19";
-plan skip_all => 'Pod::Coverage 0.19 required' if $@;
-eval "use Test::Pod::Coverage 1.04";
-plan skip_all => 'Test::Pod::Coverage 1.04 required' if $@;
+my @MODULES = (
+  'Test::Pod::Coverage 1.08',
+  'Pod::Coverage 0.20',
+);
 
-plan skip_all => 'set TEST_POD to enable this test'
-  unless ($ENV{TEST_POD} || -e 'MANIFEST.SKIP');
+# Don't run tests for installs
+unless ( DBICTest::AuthorCheck->is_author || $ENV{AUTOMATED_TESTING} || $ENV{RELEASE_TESTING} ) {
+  plan( skip_all => "Author tests not required for installation" );
+}
 
+# Load the testing modules
+foreach my $MODULE ( @MODULES ) {
+  eval "use $MODULE";
+  if ( $@ ) {
+    $ENV{RELEASE_TESTING}
+    ? die( "Failed to load required release-testing module $MODULE" )
+    : plan( skip_all => "$MODULE not available for testing" );
+  }
+}
 
 # Since this is about checking documentation, a little documentation
 # of what this is doing might be in order.
@@ -76,6 +92,7 @@
     'DBIx::Class::ResultSetProxy'                   => { skip => 1 },
     'DBIx::Class::ResultSourceProxy'                => { skip => 1 },
     'DBIx::Class::Storage::Statistics'              => { skip => 1 },
+    'DBIx::Class::Storage::DBI::Replicated::Types'  => { skip => 1 },
 
 # test some specific components whose parents are exempt below
     'DBIx::Class::Storage::DBI::Replicated*'        => {},

Modified: DBIx-Class/0.08/branches/prefetch/t/103many_to_many_warning.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/103many_to_many_warning.t	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/t/103many_to_many_warning.t	2009-09-07 07:20:44 UTC (rev 7592)
@@ -3,7 +3,6 @@
 use Test::More;
 
 use lib qw(t/lib);
-use Data::Dumper;
 
 plan tests => 4;
 my $exp_warn = qr/The many-to-many relationship 'bars' is trying to create/;

Modified: DBIx-Class/0.08/branches/prefetch/t/26dumper.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/26dumper.t	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/t/26dumper.t	2009-09-07 07:20:44 UTC (rev 7592)
@@ -6,15 +6,6 @@
 $Data::Dumper::Sortkeys = 1;
 
 use lib qw(t/lib);
-
-BEGIN {
-    eval "use DBD::SQLite";
-    plan $ENV{DATA_DUMPER_TEST}
-        ? ( tests => 2 )
-        : ( skip_all => 'Set $ENV{DATA_DUMPER_TEST} to run this test' );
-}
-
-
 use_ok('DBICTest');
 
 my $schema = DBICTest->init_schema();
@@ -36,4 +27,4 @@
 
 cmp_ok( $rs->count(), '==', 1, "Single record in after death with dumper");
 
-1;
+done_testing;

Modified: DBIx-Class/0.08/branches/prefetch/t/46where_attribute.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/46where_attribute.t	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/t/46where_attribute.t	2009-09-07 07:20:44 UTC (rev 7592)
@@ -2,7 +2,6 @@
 use warnings;
 
 use Test::More;
-use Data::Dumper;
 use lib qw(t/lib);
 use DBICTest;
 my $schema = DBICTest->init_schema();

Modified: DBIx-Class/0.08/branches/prefetch/t/60core.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/60core.t	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/t/60core.t	2009-09-07 07:20:44 UTC (rev 7592)
@@ -104,6 +104,13 @@
 
 is($new_again->ID, 'DBICTest::Artist|artist|artistid=4', 'unique object id generated correctly');
 
+# test that store_column is called once for create() for non-sequence columns
+{
+  ok(my $artist = $schema->resultset('Artist')->create({name => 'store_column test'}));
+  is($artist->name, 'X store_column test'); # used to be 'X X store...'
+  $artist->delete;
+}
+
 # Test backwards compatibility
 {
   my $warnings = '';

Modified: DBIx-Class/0.08/branches/prefetch/t/72pg.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/72pg.t	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/t/72pg.t	2009-09-07 07:20:44 UTC (rev 7592)
@@ -6,150 +6,74 @@
 use lib qw(t/lib);
 use DBICTest;
 
-{
-  package DBICTest::Schema::Casecheck;
 
-  use strict;
-  use warnings;
-  use base 'DBIx::Class';
+my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_PG_${_}" } qw/DSN USER PASS/};
 
-  __PACKAGE__->load_components(qw/Core/);
-  __PACKAGE__->table('testschema.casecheck');
-  __PACKAGE__->add_columns(qw/id name NAME uc_name storecolumn/);
-  __PACKAGE__->column_info_from_storage(1);
-  __PACKAGE__->set_primary_key('id');
+plan skip_all => <<EOM unless $dsn && $user;
+Set \$ENV{DBICTEST_PG_DSN}, _USER and _PASS to run this test
+( NOTE: This test drops and creates tables called 'artist', 'casecheck',
+  'array_test' and 'sequence_test', as well as the following sequences:
+  'pkid1_seq', 'pkid2_seq' and 'nonpkid_seq', and the following
+  schemas: 'dbic_t_schema', 'dbic_t_schema_2', 'dbic_t_schema_3',
+  'dbic_t_schema_4', and 'dbic_t_schema_5'
+)
+EOM
 
-  sub store_column {
-    my ($self, $name, $value) = @_;
-    $value = '#'.$value if($name eq "storecolumn");
-    $self->maybe::next::method($name, $value);
-  }
-}
+### load any test classes that are defined further down in the file via BEGIN blocks
 
-{
-  package DBICTest::Schema::ArrayTest;
+our @test_classes; #< array that will be pushed into by test classes defined in this file
+DBICTest::Schema->load_classes( map {s/.+:://;$_} @test_classes ) if @test_classes;
 
-  use strict;
-  use warnings;
-  use base 'DBIx::Class';
 
-  __PACKAGE__->load_components(qw/Core/);
-  __PACKAGE__->table('testschema.array_test');
-  __PACKAGE__->add_columns(qw/id arrayfield/);
-  __PACKAGE__->column_info_from_storage(1);
-  __PACKAGE__->set_primary_key('id');
+###  pre-connect tests (keep each test separate to make sure rebless() runs)
+{
+  my $s = DBICTest::Schema->connect($dsn, $user, $pass);
 
-}
+  ok (!$s->storage->_dbh, 'definitely not connected');
 
-my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_PG_${_}" } qw/DSN USER PASS/};
+  # Check that datetime_parser returns correctly before we explicitly connect.
+  SKIP: {
+      eval { require DateTime::Format::Pg };
+      skip "DateTime::Format::Pg required", 2 if $@;
 
-plan skip_all => 'Set $ENV{DBICTEST_PG_DSN}, _USER and _PASS to run this test '.
-  '(note: This test drops and creates tables called \'artist\', \'casecheck\', \'array_test\' and \'sequence_test\''.
-  ' as well as following sequences: \'pkid1_seq\', \'pkid2_seq\' and \'nonpkid_seq\''.
-  ' as well as following schemas: \'testschema\',\'anothertestschema\'!)'
-    unless ($dsn && $user);
+      my $store = ref $s->storage;
+      is($store, 'DBIx::Class::Storage::DBI', 'Started with generic storage');
 
-DBICTest::Schema->load_classes( 'Casecheck', 'ArrayTest' );
+      my $parser = $s->storage->datetime_parser;
+      is( $parser, 'DateTime::Format::Pg', 'datetime_parser is as expected');
+  }
 
-# make sure sqlt_type overrides work (::Storage::DBI::Pg does this)
-{
-  my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
-
-  ok (!$schema->storage->_dbh, 'definitely not connected');
-  is ($schema->storage->sqlt_type, 'PostgreSQL', 'sqlt_type correct pre-connection');
+  ok (!$s->storage->_dbh, 'still not connected');
 }
-
-my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
-# Check that datetime_parser returns correctly before we explicitly connect.
-SKIP: {
-    eval { require DateTime::Format::Pg };
-    skip "DateTime::Format::Pg required", 2 if $@;
-
-    my $store = ref $schema->storage;
-    is($store, 'DBIx::Class::Storage::DBI', 'Started with generic storage');
-
-    my $parser = $schema->storage->datetime_parser;
-    is( $parser, 'DateTime::Format::Pg', 'datetime_parser is as expected');
-}
-
-my $dbh = $schema->storage->dbh;
-$schema->source("Artist")->name("testschema.artist");
-$schema->source("SequenceTest")->name("testschema.sequence_test");
 {
-    local $SIG{__WARN__} = sub {};
-    _cleanup ($schema);
-
-    my $artist_table_def = <<EOS;
-(
-  artistid serial PRIMARY KEY
-  , name VARCHAR(100)
-  , rank INTEGER NOT NULL DEFAULT '13'
-  , charfield CHAR(10)
-  , arrayfield INTEGER[]
-)
-EOS
-    $dbh->do("CREATE SCHEMA testschema;");
-    $dbh->do("CREATE TABLE testschema.artist $artist_table_def;");
-    $dbh->do("CREATE TABLE testschema.sequence_test (pkid1 integer, pkid2 integer, nonpkid integer, name VARCHAR(100), CONSTRAINT pk PRIMARY KEY(pkid1, pkid2));");
-    $dbh->do("CREATE SEQUENCE pkid1_seq START 1 MAXVALUE 999999 MINVALUE 0");
-    $dbh->do("CREATE SEQUENCE pkid2_seq START 10 MAXVALUE 999999 MINVALUE 0");
-    $dbh->do("CREATE SEQUENCE nonpkid_seq START 20 MAXVALUE 999999 MINVALUE 0");
-    ok ( $dbh->do('CREATE TABLE testschema.casecheck (id serial PRIMARY KEY, "name" VARCHAR(1), "NAME" VARCHAR(2), "UC_NAME" VARCHAR(3), "storecolumn" VARCHAR(10));'), 'Creation of casecheck table');
-    ok ( $dbh->do('CREATE TABLE testschema.array_test (id serial PRIMARY KEY, arrayfield INTEGER[]);'), 'Creation of array_test table');
-    $dbh->do("CREATE SCHEMA anothertestschema;");
-    $dbh->do("CREATE TABLE anothertestschema.artist $artist_table_def;");
-    $dbh->do("CREATE SCHEMA yetanothertestschema;");
-    $dbh->do("CREATE TABLE yetanothertestschema.artist $artist_table_def;");
-    $dbh->do('set search_path=testschema,public');
+  my $s = DBICTest::Schema->connect($dsn, $user, $pass);
+  # make sure sqlt_type overrides work (::Storage::DBI::Pg does this)
+  ok (!$s->storage->_dbh, 'definitely not connected');
+  is ($s->storage->sqlt_type, 'PostgreSQL', 'sqlt_type correct pre-connection');
+  ok (!$s->storage->_dbh, 'still not connected');
 }
 
-# store_column is called once for create() for non sequence columns
+### connect, create postgres-specific test schema
 
-ok(my $storecolumn = $schema->resultset('Casecheck')->create({'storecolumn' => 'a'}));
+my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
 
-is($storecolumn->storecolumn, '#a'); # was '##a'
+drop_test_schema($schema);
+create_test_schema($schema);
 
+### begin main tests
 
-# This is in Core now, but it's here just to test that it doesn't break
-$schema->class('Artist')->load_components('PK::Auto');
 
-cmp_ok( $schema->resultset('Artist')->count, '==', 0, 'this should start with an empty artist table');
+# run a BIG bunch of tests for last-insert-id / Auto-PK / sequence
+# discovery
+run_apk_tests($schema); #< older set of auto-pk tests
+run_extended_apk_tests($schema); #< new extended set of auto-pk tests
 
-{ # test that auto-pk also works with the defined search path by
-  # un-schema-qualifying the table name
-  my $artist_name_save = $schema->source("Artist")->name;
-  $schema->source("Artist")->name("artist");
 
-  my $unq_new;
-  lives_ok {
-      $unq_new = $schema->resultset('Artist')->create({ name => 'baz' });
-  } 'insert into unqualified, shadowed table succeeds';
 
-  is($unq_new && $unq_new->artistid, 1, "and got correct artistid");
 
-  #test with anothertestschema
-  $schema->source('Artist')->name('anothertestschema.artist');
-  my $another_new = $schema->resultset('Artist')->create({ name => 'ribasushi'});
-  is( $another_new->artistid,1, 'got correct artistid for yetanotherschema');
 
-  #test with yetanothertestschema
-  $schema->source('Artist')->name('yetanothertestschema.artist');
-  my $yetanother_new = $schema->resultset('Artist')->create({ name => 'ribasushi'});
-  is( $yetanother_new->artistid,1, 'got correct artistid for yetanotherschema');
-  is( $yetanother_new->artistid,1, 'got correct artistid for yetanotherschema');
+### type_info tests
 
-  $schema->source("Artist")->name($artist_name_save);
-}
-
-my $new = $schema->resultset('Artist')->create({ name => 'foo' });
-
-is($new->artistid, 2, "Auto-PK worked");
-
-$new = $schema->resultset('Artist')->create({ name => 'bar' });
-
-is($new->artistid, 3, "Auto-PK worked");
-
-
 my $test_type_info = {
     'artistid' => {
         'data_type' => 'integer',
@@ -183,8 +107,7 @@
     },
 };
 
-
-my $type_info = $schema->storage->columns_info_for('testschema.artist');
+my $type_info = $schema->storage->columns_info_for('dbic_t_schema.artist');
 my $artistid_defval = delete $type_info->{artistid}->{default_value};
 like($artistid_defval,
      qr/^nextval\('([^\.]*\.){0,1}artist_artistid_seq'::(?:text|regclass)\)/,
@@ -192,6 +115,26 @@
 is_deeply($type_info, $test_type_info,
           'columns_info_for - column data types');
 
+
+
+
+####### Array tests
+
+BEGIN {
+  package DBICTest::Schema::ArrayTest;
+  push @main::test_classes, __PACKAGE__;
+
+  use strict;
+  use warnings;
+  use base 'DBIx::Class';
+
+  __PACKAGE__->load_components(qw/Core/);
+  __PACKAGE__->table('dbic_t_schema.array_test');
+  __PACKAGE__->add_columns(qw/id arrayfield/);
+  __PACKAGE__->column_info_from_storage(1);
+  __PACKAGE__->set_primary_key('id');
+
+}
 SKIP: {
   skip "Need DBD::Pg 2.9.2 or newer for array tests", 4 if $DBD::Pg::VERSION < 2.009002;
 
@@ -221,6 +164,24 @@
 }
 
 
+
+########## Case check
+
+BEGIN {
+  package DBICTest::Schema::Casecheck;
+  push @main::test_classes, __PACKAGE__;
+
+  use strict;
+  use warnings;
+  use base 'DBIx::Class';
+
+  __PACKAGE__->load_components(qw/Core/);
+  __PACKAGE__->table('dbic_t_schema.casecheck');
+  __PACKAGE__->add_columns(qw/id name NAME uc_name/);
+  __PACKAGE__->column_info_from_storage(1);
+  __PACKAGE__->set_primary_key('id');
+}
+
 my $name_info = $schema->source('Casecheck')->column_info( 'name' );
 is( $name_info->{size}, 1, "Case sensitive matching info for 'name'" );
 
@@ -230,83 +191,72 @@
 my $uc_name_info = $schema->source('Casecheck')->column_info( 'uc_name' );
 is( $uc_name_info->{size}, 3, "Case insensitive matching info for 'uc_name'" );
 
-# Test SELECT ... FOR UPDATE
-my $HaveSysSigAction = eval "require Sys::SigAction" && !$@;
-if ($HaveSysSigAction) {
-    Sys::SigAction->import( 'set_sig_handler' );
-}
 
+
+
+## Test SELECT ... FOR UPDATE
+
 SKIP: {
-    skip "Sys::SigAction is not available", 3 unless $HaveSysSigAction;
-    # create a new schema
-    my $schema2 = DBICTest::Schema->connect($dsn, $user, $pass);
-    $schema2->source("Artist")->name("testschema.artist");
+    if(eval "require Sys::SigAction" && !$@) {
+        Sys::SigAction->import( 'set_sig_handler' );
+    }
+    else {
+      skip "Sys::SigAction is not available", 6;
+    }
 
-    $schema->txn_do( sub {
-        my $artist = $schema->resultset('Artist')->search(
-            {
-                artistid => 1
-            },
-            {
-                for => 'update'
-            }
-        )->first;
-        is($artist->artistid, 1, "select for update returns artistid = 1");
+    my ($timed_out, $artist2);
 
-        my $artist_from_schema2;
-        my $error_ok = 0;
-        eval {
-            my $h = set_sig_handler( 'ALRM', sub { die "DBICTestTimeout" } );
-            alarm(2);
-            $artist_from_schema2 = $schema2->resultset('Artist')->find(1);
-            $artist_from_schema2->name('fooey');
-            $artist_from_schema2->update;
-            alarm(0);
-        };
-        if (my $e = $@) {
-            $error_ok = $e =~ /DBICTestTimeout/;
-        }
-
+    for my $t (
+      {
         # Make sure that an error was raised, and that the update failed
-        ok($error_ok, "update from second schema times out");
-        ok($artist_from_schema2->is_column_changed('name'), "'name' column is still dirty from second schema");
-    });
-}
+        update_lock => 1,
+        test_sub => sub {
+          ok($timed_out, "update from second schema times out");
+          ok($artist2->is_column_changed('name'), "'name' column is still dirty from second schema");
+        },
+      },
+      {
+        # Make sure that an error was NOT raised, and that the update succeeded
+        update_lock => 0,
+        test_sub => sub {
+          ok(! $timed_out, "update from second schema DOES NOT timeout");
+          ok(! $artist2->is_column_changed('name'), "'name' column is NOT dirty from second schema");
+        },
+      },
+    ) {
+      # create a new schema
+      my $schema2 = DBICTest::Schema->connect($dsn, $user, $pass);
+      $schema2->source("Artist")->name("dbic_t_schema.artist");
 
-SKIP: {
-    skip "Sys::SigAction is not available", 3 unless $HaveSysSigAction;
-    # create a new schema
-    my $schema2 = DBICTest::Schema->connect($dsn, $user, $pass);
-    $schema2->source("Artist")->name("testschema.artist");
-
-    $schema->txn_do( sub {
+      $schema->txn_do( sub {
         my $artist = $schema->resultset('Artist')->search(
             {
                 artistid => 1
             },
+            $t->{update_lock} ? { for => 'update' } : {}
         )->first;
-        is($artist->artistid, 1, "select for update returns artistid = 1");
+        is($artist->artistid, 1, "select returns artistid = 1");
 
-        my $artist_from_schema2;
-        my $error_ok = 0;
+        $timed_out = 0;
         eval {
             my $h = set_sig_handler( 'ALRM', sub { die "DBICTestTimeout" } );
             alarm(2);
-            $artist_from_schema2 = $schema2->resultset('Artist')->find(1);
-            $artist_from_schema2->name('fooey');
-            $artist_from_schema2->update;
+            $artist2 = $schema2->resultset('Artist')->find(1);
+            $artist2->name('fooey');
+            $artist2->update;
             alarm(0);
         };
-        if (my $e = $@) {
-            $error_ok = $e =~ /DBICTestTimeout/;
-        }
+        $timed_out = $@ =~ /DBICTestTimeout/;
+      });
 
-        # Make sure that an error was NOT raised, and that the update succeeded
-        ok(! $error_ok, "update from second schema DOES NOT timeout");
-        ok(! $artist_from_schema2->is_column_changed('name'), "'name' column is NOT dirty from second schema");
-    });
+      $t->{test_sub}->();
+    }
 }
 
+
+######## other older Auto-pk tests
+
+$schema->source("SequenceTest")->name("dbic_t_schema.sequence_test");
 for (1..5) {
     my $st = $schema->resultset('SequenceTest')->create({ name => 'foo' });
     is($st->pkid1, $_, "Oracle Auto-PK without trigger: First primary key");
@@ -316,22 +266,404 @@
 my $st = $schema->resultset('SequenceTest')->create({ name => 'foo', pkid1 => 55 });
 is($st->pkid1, 55, "Oracle Auto-PK without trigger: First primary key set manually");
 
-sub _cleanup {
-  my $schema = shift or return;
-  local $SIG{__WARN__} = sub {};
+done_testing;
 
-  for my $stat (
-    'DROP SCHEMA testschema CASCADE',
-    'DROP SCHEMA anothertestschema CASCADE',
-    'DROP SCHEMA yetanothertestschema CASCADE',
-    'DROP SEQUENCE pkid1_seq',
-    'DROP SEQUENCE pkid2_seq',
-    'DROP SEQUENCE nonpkid_seq',
-  ) {
-    eval { $schema->storage->_do_query ($stat) };
-  }
+exit;
+
+END {
+    return unless $schema;
+    drop_test_schema($schema);
+    eapk_drop_all( $schema)
+};
+
+
+######### SUBROUTINES
+
+sub create_test_schema {
+    my $schema = shift;
+    $schema->storage->dbh_do(sub {
+      my (undef,$dbh) = @_;
+
+      local $dbh->{Warn} = 0;
+
+      my $std_artist_table = <<EOS;
+(
+  artistid serial PRIMARY KEY
+  , name VARCHAR(100)
+  , rank INTEGER NOT NULL DEFAULT '13'
+  , charfield CHAR(10)
+  , arrayfield INTEGER[]
+)
+EOS
+
+      $dbh->do("CREATE SCHEMA dbic_t_schema");
+      $dbh->do("CREATE TABLE dbic_t_schema.artist $std_artist_table");
+      $dbh->do(<<EOS);
+CREATE TABLE dbic_t_schema.sequence_test (
+    pkid1 integer
+    , pkid2 integer
+    , nonpkid integer
+    , name VARCHAR(100)
+    , CONSTRAINT pk PRIMARY KEY(pkid1, pkid2)
+)
+EOS
+      $dbh->do("CREATE SEQUENCE pkid1_seq START 1 MAXVALUE 999999 MINVALUE 0");
+      $dbh->do("CREATE SEQUENCE pkid2_seq START 10 MAXVALUE 999999 MINVALUE 0");
+      $dbh->do("CREATE SEQUENCE nonpkid_seq START 20 MAXVALUE 999999 MINVALUE 0");
+      $dbh->do(<<EOS);
+CREATE TABLE dbic_t_schema.casecheck (
+    id serial PRIMARY KEY
+    , "name" VARCHAR(1)
+    , "NAME" VARCHAR(2)
+    , "UC_NAME" VARCHAR(3)
+)
+EOS
+      $dbh->do(<<EOS);
+CREATE TABLE dbic_t_schema.array_test (
+    id serial PRIMARY KEY
+    , arrayfield INTEGER[]
+)
+EOS
+      $dbh->do("CREATE SCHEMA dbic_t_schema_2");
+      $dbh->do("CREATE TABLE dbic_t_schema_2.artist $std_artist_table");
+      $dbh->do("CREATE SCHEMA dbic_t_schema_3");
+      $dbh->do("CREATE TABLE dbic_t_schema_3.artist $std_artist_table");
+      $dbh->do('set search_path=dbic_t_schema,public');
+      $dbh->do("CREATE SCHEMA dbic_t_schema_4");
+      $dbh->do("CREATE SCHEMA dbic_t_schema_5");
+      $dbh->do(<<EOS);
+ CREATE TABLE dbic_t_schema_4.artist
+ (
+   artistid integer not null default nextval('artist_artistid_seq'::regclass) PRIMARY KEY
+   , name VARCHAR(100)
+   , rank INTEGER NOT NULL DEFAULT '13'
+   , charfield CHAR(10)
+   , arrayfield INTEGER[]
+ );
+EOS
+      $dbh->do('set search_path=public,dbic_t_schema,dbic_t_schema_3');
+      $dbh->do('create sequence public.artist_artistid_seq'); #< in the public schema
+      $dbh->do(<<EOS);
+ CREATE TABLE dbic_t_schema_5.artist
+ (
+   artistid integer not null default nextval('public.artist_artistid_seq'::regclass) PRIMARY KEY
+   , name VARCHAR(100)
+   , rank INTEGER NOT NULL DEFAULT '13'
+   , charfield CHAR(10)
+   , arrayfield INTEGER[]
+ );
+EOS
+      $dbh->do('set search_path=dbic_t_schema,public');
+  });
 }
 
-done_testing;
 
-END { _cleanup($schema) }
+
+sub drop_test_schema {
+    my ( $schema, $warn_exceptions ) = @_;
+
+    $schema->storage->dbh_do(sub {
+        my (undef,$dbh) = @_;
+
+        local $dbh->{Warn} = 0;
+
+        for my $stat (
+                      'DROP SCHEMA dbic_t_schema_5 CASCADE',
+                      'DROP SEQUENCE public.artist_artistid_seq',
+                      'DROP SCHEMA dbic_t_schema_4 CASCADE',
+                      'DROP SCHEMA dbic_t_schema CASCADE',
+                      'DROP SEQUENCE pkid1_seq',
+                      'DROP SEQUENCE pkid2_seq',
+                      'DROP SEQUENCE nonpkid_seq',
+                      'DROP SCHEMA dbic_t_schema_2 CASCADE',
+                      'DROP SCHEMA dbic_t_schema_3 CASCADE',
+                     ) {
+            eval { $dbh->do ($stat) };
+            diag $@ if $@ && $warn_exceptions;
+        }
+    });
+}
+
+
+###  auto-pk / last_insert_id / sequence discovery
+sub run_apk_tests {
+    my $schema = shift;
+
+    # This is in Core now, but it's here just to test that it doesn't break
+    $schema->class('Artist')->load_components('PK::Auto');
+    cmp_ok( $schema->resultset('Artist')->count, '==', 0, 'this should start with an empty artist table');
+
+    # test that auto-pk also works with the defined search path by
+    # un-schema-qualifying the table name
+    apk_t_set($schema,'artist');
+
+    my $unq_new;
+    lives_ok {
+        $unq_new = $schema->resultset('Artist')->create({ name => 'baz' });
+    } 'insert into unqualified, shadowed table succeeds';
+
+    is($unq_new && $unq_new->artistid, 1, "and got correct artistid");
+
+    my @test_schemas = ( [qw| dbic_t_schema_2    1  |],
+                         [qw| dbic_t_schema_3    1  |],
+                         [qw| dbic_t_schema_4    2  |],
+                         [qw| dbic_t_schema_5    1  |],
+                       );
+    foreach my $t ( @test_schemas ) {
+        my ($sch_name, $start_num) = @$t;
+        #test with dbic_t_schema_2
+        apk_t_set($schema,"$sch_name.artist");
+        my $another_new;
+        lives_ok {
+            $another_new = $schema->resultset('Artist')->create({ name => 'Tollbooth Willy'});
+            is( $another_new->artistid,$start_num, "got correct artistid for $sch_name")
+                or diag "USED SEQUENCE: ".($schema->source('Artist')->column_info('artistid')->{sequence} || '<none>');
+        } "$sch_name liid 1 did not die"
+            or diag "USED SEQUENCE: ".($schema->source('Artist')->column_info('artistid')->{sequence} || '<none>');
+        lives_ok {
+            $another_new = $schema->resultset('Artist')->create({ name => 'Adam Sandler'});
+            is( $another_new->artistid,$start_num+1, "got correct artistid for $sch_name")
+                or diag "USED SEQUENCE: ".($schema->source('Artist')->column_info('artistid')->{sequence} || '<none>');
+        } "$sch_name liid 2 did not die"
+            or diag "USED SEQUENCE: ".($schema->source('Artist')->column_info('artistid')->{sequence} || '<none>');
+
+    }
+
+    lives_ok {
+        apk_t_set($schema,'dbic_t_schema.artist');
+        my $new = $schema->resultset('Artist')->create({ name => 'foo' });
+        is($new->artistid, 4, "Auto-PK worked");
+        $new = $schema->resultset('Artist')->create({ name => 'bar' });
+        is($new->artistid, 5, "Auto-PK worked");
+    } 'old auto-pk tests did not die either';
+}
+
+# sets the artist table name and clears sequence name cache
+sub apk_t_set {
+    my ( $s, $n ) = @_;
+    $s->source("Artist")->name($n);
+    $s->source('Artist')->column_info('artistid')->{sequence} = undef; #< clear sequence name cache
+}
+
+
+######## EXTENDED AUTO-PK TESTS
+
+my @eapk_id_columns;
+BEGIN {
+  package DBICTest::Schema::ExtAPK;
+  push @main::test_classes, __PACKAGE__;
+
+  use strict;
+  use warnings;
+  use base 'DBIx::Class';
+
+  __PACKAGE__->load_components(qw/Core/);
+  __PACKAGE__->table('apk');
+
+  @eapk_id_columns = qw( id1 id2 id3 id4 );
+  __PACKAGE__->add_columns(
+    map { $_ => { data_type => 'integer', is_auto_increment => 1 } }
+       @eapk_id_columns
+  );
+
+  __PACKAGE__->set_primary_key('id2'); #< note the SECOND column is
+                                       #the primary key
+}
+
+my @eapk_schemas;
+BEGIN{ @eapk_schemas = map "dbic_apk_$_", 0..5 }
+
+sub run_extended_apk_tests {
+  my $schema = shift;
+
+  #save the search path and reset it at the end
+  my $search_path_save = eapk_get_search_path($schema);
+
+  eapk_drop_all($schema);
+
+  # make the test schemas and sequences
+  $schema->storage->dbh_do(sub {
+    my ( undef, $dbh ) = @_;
+
+    $dbh->do("CREATE SCHEMA $_")
+        for @eapk_schemas;
+
+    $dbh->do("CREATE SEQUENCE $eapk_schemas[5].fooseq");
+    $dbh->do("CREATE SEQUENCE $eapk_schemas[4].fooseq");
+    $dbh->do("CREATE SEQUENCE $eapk_schemas[3].fooseq");
+
+    $dbh->do("SET search_path = ".join ',', @eapk_schemas );
+  });
+
+  # clear our search_path cache
+  $schema->storage->{_pg_search_path} = undef;
+
+  eapk_create( $schema,
+               with_search_path => [0,1],
+             );
+  eapk_create( $schema,
+               with_search_path => [1,0,'public'],
+               nextval => "$eapk_schemas[5].fooseq",
+             );
+  eapk_create( $schema,
+               with_search_path => ['public',0,1],
+               qualify_table => 2,
+             );
+  eapk_create( $schema,
+               with_search_path => [3,1,0,'public'],
+               nextval => "$eapk_schemas[4].fooseq",
+             );
+  eapk_create( $schema,
+               with_search_path => [3,1,0,'public'],
+               nextval => "$eapk_schemas[3].fooseq",
+               qualify_table => 4,
+             );
+
+  eapk_poke( $schema, 0 );
+  eapk_poke( $schema, 2 );
+  eapk_poke( $schema, 4 );
+  eapk_poke( $schema, 1 );
+  eapk_poke( $schema, 0 );
+  eapk_poke( $schema, 1 );
+  eapk_poke( $schema, 4 );
+  eapk_poke( $schema, 3 );
+  eapk_poke( $schema, 1 );
+  eapk_poke( $schema, 2 );
+  eapk_poke( $schema, 0 );
+
+  # set our search path back
+  eapk_set_search_path( $schema, @$search_path_save );
+}
+
+# do a DBIC create on the apk table in the given schema number (which is an
+# index of @eapk_schemas)
+
+my %seqs; #< sanity-check hash of schema.table.col => currval of its sequence
+
+sub eapk_poke {
+  my ($s, $schema_num) = @_;
+
+  my $schema_name = defined $schema_num
+      ? $eapk_schemas[$schema_num]
+      : '';
+
+  my $schema_name_actual = $schema_name || eapk_get_search_path($s)->[0];
+
+  $s->source('ExtAPK')->name($schema_name ? $schema_name.'.apk' : 'apk');
+  #< clear sequence name cache
+  $s->source('ExtAPK')->column_info($_)->{sequence} = undef
+      for @eapk_id_columns;
+
+  no warnings 'uninitialized';
+  lives_ok {
+    my $new;
+    for my $inc (1,2,3) {
+      $new = $schema->resultset('ExtAPK')->create({});
+      my $proper_seqval = ++$seqs{"$schema_name_actual.apk.id2"};
+      is( $new->id2, $proper_seqval, "$schema_name_actual.apk.id2 correct inc $inc" )
+          or eapk_seq_diag($s,$schema_name);
+      $new->discard_changes;
+      for my $id (grep $_ ne 'id2', @eapk_id_columns) {
+        my $proper_seqval = ++$seqs{"$schema_name_actual.apk.$id"};
+        is( $new->$id, $proper_seqval, "$schema_name_actual.apk.$id correct inc $inc" )
+            or eapk_seq_diag($s,$schema_name);
+      }
+    }
+  } "create in schema '$schema_name' lives"
+      or eapk_seq_diag($s,$schema_name);
+}
+
+# print diagnostic info on which sequences were found in the ExtAPK
+# class
+sub eapk_seq_diag {
+    my $s = shift;
+    my $schema = shift || eapk_get_search_path($s)->[0];
+
+    diag "$schema.apk sequences: ",
+        join(', ',
+             map "$_:".($s->source('ExtAPK')->column_info($_)->{sequence} || '<none>'),
+             @eapk_id_columns
+            );
+}
+
+# get the postgres search path as an arrayref
+sub eapk_get_search_path {
+    my ( $s ) = @_;
+    # cache the search path as ['schema','schema',...] in the storage
+    # obj
+
+    return $s->storage->dbh_do(sub {
+        my (undef, $dbh) = @_;
+        my @search_path;
+        my ($sp_string) = $dbh->selectrow_array('SHOW search_path');
+        while ( $sp_string =~ s/("[^"]+"|[^,]+),?// ) {
+            unless( defined $1 and length $1 ) {
+                die "search path sanity check failed: '$1'";
+            }
+            push @search_path, $1;
+        }
+        \@search_path
+    });
+}
+sub eapk_set_search_path {
+    my ($s,@sp) = @_;
+    my $sp = join ',',@sp;
+    $s->storage->dbh_do( sub { $_[1]->do("SET search_path = $sp") } );
+}
+
+# create the apk table in the given schema; arguments control whether the table name is schema-qualified and which sequence supplies nextval for the second ID column
+sub eapk_create {
+    my ($schema, %a) = @_;
+
+    $schema->storage->dbh_do(sub {
+        my (undef,$dbh) = @_;
+
+        my $searchpath_save;
+        if ( $a{with_search_path} ) {
+            ($searchpath_save) = $dbh->selectrow_array('SHOW search_path');
+
+            my $search_path = join ',',map {/\D/ ? $_ : $eapk_schemas[$_]} @{$a{with_search_path}};
+
+            $dbh->do("SET search_path = $search_path");
+        }
+
+        my $table_name = $a{qualify_table}
+            ? ($eapk_schemas[$a{qualify_table}] || die). ".apk"
+            : 'apk';
+        local $_[1]->{Warn} = 0;
+
+        my $id_def = $a{nextval}
+            ? "integer primary key not null default nextval('$a{nextval}'::regclass)"
+            : 'serial primary key';
+        $dbh->do(<<EOS);
+CREATE TABLE $table_name (
+  id1 serial
+  , id2 $id_def
+  , id3 serial
+  , id4 serial
+)
+EOS
+
+        if( $searchpath_save ) {
+            $dbh->do("SET search_path = $searchpath_save");
+        }
+    });
+}
+
+sub eapk_drop_all {
+    my ( $schema, $warn_exceptions ) = @_;
+
+    $schema->storage->dbh_do(sub {
+        my (undef,$dbh) = @_;
+
+        local $dbh->{Warn} = 0;
+
+        # drop the test schemas
+        for (@eapk_schemas ) {
+            eval{ $dbh->do("DROP SCHEMA $_ CASCADE") };
+            diag $@ if $@ && $warn_exceptions;
+        }
+
+
+    });
+}
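
For illustration only, a hypothetical result class showing the manual alternative to the sequence auto-discovery exercised above: naming the sequence explicitly in the column info (the class and sequence names are illustrative):

    package My::Schema::Result::Artist;

    use strict;
    use warnings;
    use base 'DBIx::Class';

    __PACKAGE__->load_components(qw/Core/);
    __PACKAGE__->table('dbic_t_schema.artist');
    __PACKAGE__->add_columns(
      artistid => {
        data_type         => 'integer',
        is_auto_increment => 1,
        # skip discovery by naming the sequence outright
        sequence          => 'dbic_t_schema.artist_artistid_seq',
      },
      name => { data_type => 'varchar', size => 100, is_nullable => 1 },
    );
    __PACKAGE__->set_primary_key('artistid');

    1;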

Modified: DBIx-Class/0.08/branches/prefetch/t/746mssql.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/746mssql.t	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/t/746mssql.t	2009-09-07 07:20:44 UTC (rev 7592)
@@ -12,8 +12,6 @@
 plan skip_all => 'Set $ENV{DBICTEST_MSSQL_ODBC_DSN}, _USER and _PASS to run this test'
   unless ($dsn && $user);
 
-plan tests => 39;
-
 DBICTest::Schema->load_classes('ArtistGUID');
 my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
 
@@ -220,6 +218,19 @@
   ]);
 }, 'populate with PKs supplied ok' );
 
+lives_ok (sub {
+  # start a new connection, make sure rebless works
+  # test an insert with a supplied identity, followed by one without
+  my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
+  for (1..2) {
+    my $id = $_ * 20 ;
+    $schema->resultset ('Owners')->create ({ id => $id, name => "troglodoogle $id" });
+    $schema->resultset ('Owners')->create ({ name => "troglodoogle " . ($id + 1) });
+  }
+}, 'create with/without PKs ok' );
+
+is ($schema->resultset ('Owners')->count, 19, 'owner rows really in db' );
+
 lives_ok ( sub {
   # start a new connection, make sure rebless works
   my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
@@ -329,9 +340,10 @@
       ],
     );
   }
-
 }
 
+done_testing;
+
 # clean up our mess
 END {
   if (my $dbh = eval { $schema->storage->_dbh }) {

Modified: DBIx-Class/0.08/branches/prefetch/t/76joins.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/76joins.t	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/t/76joins.t	2009-09-07 07:20:44 UTC (rev 7592)
@@ -4,7 +4,6 @@
 use Test::More;
 use lib qw(t/lib);
 use DBICTest;
-use Data::Dumper;
 use DBIC::SqlMakerTest;
 
 my $schema = DBICTest->init_schema();

Modified: DBIx-Class/0.08/branches/prefetch/t/83cache.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/83cache.t	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/t/83cache.t	2009-09-07 07:20:44 UTC (rev 7592)
@@ -74,8 +74,6 @@
   }
 );
 
-use Data::Dumper; $Data::Dumper::Deparse = 1;
-
 # start test for prefetch SELECT count
 $queries = 0;
 $schema->storage->debug(1);

Deleted: DBIx-Class/0.08/branches/prefetch/t/cdbi/testlib/Binary.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/cdbi/testlib/Binary.pm	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/t/cdbi/testlib/Binary.pm	2009-09-07 07:20:44 UTC (rev 7592)
@@ -1,16 +0,0 @@
-package # hide from PAUSE
-    Binary;
-
-use strict;
-use base 'PgBase';
-
-__PACKAGE__->table(cdbibintest => 'cdbibintest');
-__PACKAGE__->sequence('binseq');
-__PACKAGE__->columns(All => qw(id bin));
-
-# __PACKAGE__->data_type(bin => DBI::SQL_BINARY);
-
-sub schema { "id INTEGER, bin BYTEA" }
-
-1;
-

Deleted: DBIx-Class/0.08/branches/prefetch/t/cdbi/testlib/PgBase.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/cdbi/testlib/PgBase.pm	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/t/cdbi/testlib/PgBase.pm	2009-09-07 07:20:44 UTC (rev 7592)
@@ -1,23 +0,0 @@
-package # hide from PAUSE 
-    PgBase;
-
-use strict;
-use base 'DBIx::Class::CDBICompat';
-
-my $db   = $ENV{DBD_PG_DBNAME} || 'template1';
-my $user = $ENV{DBD_PG_USER}   || 'postgres';
-my $pass = $ENV{DBD_PG_PASSWD} || '';
-
-__PACKAGE__->connection("dbi:Pg:dbname=$db", $user, $pass,
-	{ AutoCommit => 1 });
-
-sub CONSTRUCT {
-	my $class = shift;
-	my ($table, $sequence) = ($class->table, $class->sequence || "");
-	my $schema = $class->schema;
-	$class->db_Main->do("CREATE TEMPORARY SEQUENCE $sequence") if $sequence;
-	$class->db_Main->do("CREATE TEMPORARY TABLE $table ( $schema )");
-}
-
-1;
-

Modified: DBIx-Class/0.08/branches/prefetch/t/count/grouped_pager.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/count/grouped_pager.t	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/t/count/grouped_pager.t	2009-09-07 07:20:44 UTC (rev 7592)
@@ -11,8 +11,6 @@
 
 my $schema = DBICTest->init_schema();
 
-use Data::Dumper;
-
 # add 2 extra artists
 $schema->populate ('Artist', [
     [qw/name/],

Modified: DBIx-Class/0.08/branches/prefetch/t/count/in_subquery.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/count/in_subquery.t	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/t/count/in_subquery.t	2009-09-07 07:20:44 UTC (rev 7592)
@@ -3,8 +3,6 @@
 use strict;
 use warnings;
 
-use Data::Dumper;
-
 use Test::More;
 
 plan ( tests => 1 );

Modified: DBIx-Class/0.08/branches/prefetch/t/inflate/serialize.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/inflate/serialize.t	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/t/inflate/serialize.t	2009-09-07 07:20:44 UTC (rev 7592)
@@ -7,8 +7,6 @@
 
 my $schema = DBICTest->init_schema();
 
-use Data::Dumper;
-
 my @serializers = (
     { module => 'YAML.pm',
       inflater => sub { YAML::Load (shift) },

Modified: DBIx-Class/0.08/branches/prefetch/t/lib/DBICTest/AuthorCheck.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/lib/DBICTest/AuthorCheck.pm	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/t/lib/DBICTest/AuthorCheck.pm	2009-09-07 07:20:44 UTC (rev 7592)
@@ -54,21 +54,17 @@
 We have a number of reasons to believe that this is a development
 checkout and that you, the user, did not run `perl Makefile.PL`
 before using this code. You absolutely _must_ perform this step,
-as not doing so often results in a lot of wasted time for other
-contributors trying to assit you with "it broke!" problems.
+and ensure you have all required dependencies present. Not doing
+so often results in a lot of wasted time for other contributors
+trying to assist you with spurious "it's broken!" problems.
 
 If you are seeing this message unexpectedly (i.e. you are in fact
-attempting a regular installation be it through CPAN or manually,
-set the variable DBICTEST_NO_MAKEFILE_VERIFICATION to a true value
-so you can continue. Also _make_absolutely_sure_ to report this to
-either the mailing list or to the irc channel as described in
+attempting a regular installation be it through CPAN or manually),
+please report the situation to either the mailing list or to the
+irc channel as described in
 
 http://search.cpan.org/dist/DBIx-Class/lib/DBIx/Class.pm#GETTING_HELP/SUPPORT
 
-Failure to do this will make us believe that all these checks are
-indeed foolproof and we will remove the ability to override this
-entirely.
-
 The DBIC team
 
 
@@ -79,6 +75,19 @@
   }
 }
 
+# Mimic $Module::Install::AUTHOR
+sub is_author {
+
+  my $root = _find_co_root()
+    or return undef;
+
+  return (
+    ( not -d $root->subdir ('inc') )
+      or
+    ( -e $root->subdir ('inc')->file ($^O eq 'VMS' ? '_author' : '.author') )
+  );
+}
+
 # Try to determine the root of a checkout/untar if possible
 # or return undef
 sub _find_co_root {

Modified: DBIx-Class/0.08/branches/prefetch/t/lib/DBICTest/Schema/Artist.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/lib/DBICTest/Schema/Artist.pm	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/t/lib/DBICTest/Schema/Artist.pm	2009-09-07 07:20:44 UTC (rev 7592)
@@ -68,4 +68,11 @@
   }
 }
 
+sub store_column {
+  my ($self, $name, $value) = @_;
+  $value = 'X '.$value if ($name eq 'name' && $value && $value =~ /store_column test/);
+  $self->next::method($name, $value);
+}
+
+
 1;
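
For illustration only, the general shape of the hook being exercised: store_column() fires once per column write, including during create(), which is what the new t/60core.t block checks. The normalization rule below is hypothetical:

    sub store_column {
      my ($self, $col, $value) = @_;

      # runs exactly once per column write; a natural place for normalization
      $value = ucfirst $value if $col eq 'name' and defined $value;

      return $self->next::method($col, $value);
    }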

Modified: DBIx-Class/0.08/branches/prefetch/t/prefetch/attrs_untouched.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/prefetch/attrs_untouched.t	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/t/prefetch/attrs_untouched.t	2009-09-07 07:20:44 UTC (rev 7592)
@@ -4,7 +4,9 @@
 use Test::Exception;
 use lib qw(t/lib);
 use DBICTest;
+
 use Data::Dumper;
+$Data::Dumper::Sortkeys = 1;
 
 my $schema = DBICTest->init_schema();
 

Modified: DBIx-Class/0.08/branches/prefetch/t/prefetch/standard.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/prefetch/standard.t	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/t/prefetch/standard.t	2009-09-07 07:20:44 UTC (rev 7592)
@@ -5,7 +5,6 @@
 use Test::Exception;
 use lib qw(t/lib);
 use DBICTest;
-use Data::Dumper;
 use IO::File;
 
 my $schema = DBICTest->init_schema();
@@ -20,8 +19,6 @@
 my $search = { 'artist.name' => 'Caterwauler McCrae' };
 my $attr = { prefetch => [ qw/artist liner_notes/ ],
              order_by => 'me.cdid' };
-my $search_str = Dumper($search);
-my $attr_str = Dumper($attr);
 
 my $rs = $schema->resultset("CD")->search($search, $attr);
 my @cd = $rs->all;

Modified: DBIx-Class/0.08/branches/prefetch/t/resultset/as_query.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/resultset/as_query.t	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/t/resultset/as_query.t	2009-09-07 07:20:44 UTC (rev 7592)
@@ -3,8 +3,6 @@
 use strict;
 use warnings FATAL => 'all';
 
-use Data::Dumper;
-
 use Test::More;
 
 plan ( tests => 5 );

Modified: DBIx-Class/0.08/branches/prefetch/t/search/preserve_original_rs.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/search/preserve_original_rs.t	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/t/search/preserve_original_rs.t	2009-09-07 07:20:44 UTC (rev 7592)
@@ -8,7 +8,10 @@
 use DBIC::SqlMakerTest;
 use DBIC::DebugObj;
 use DBICTest;
+
+# use Data::Dumper comparisons to avoid messing with coderefs
 use Data::Dumper;
+$Data::Dumper::Sortkeys = 1;
 
 my $schema = DBICTest->init_schema();
 

Modified: DBIx-Class/0.08/branches/prefetch/t/search/subquery.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/search/subquery.t	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/t/search/subquery.t	2009-09-07 07:20:44 UTC (rev 7592)
@@ -3,11 +3,8 @@
 use strict;
 use warnings;
 
-use Data::Dumper;
-
 use Test::More;
 
-
 use lib qw(t/lib);
 use DBICTest;
 use DBIC::SqlMakerTest;

Added: DBIx-Class/0.08/branches/prefetch/t/storage/exception.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/storage/exception.t	                        (rev 0)
+++ DBIx-Class/0.08/branches/prefetch/t/storage/exception.t	2009-09-07 07:20:44 UTC (rev 7592)
@@ -0,0 +1,43 @@
+#!/usr/bin/perl
+
+use strict;
+use warnings;
+
+use Test::More;
+use Test::Exception;
+use lib qw(t/lib);
+use DBICTest;
+use DBICTest::Schema;
+
+# make sure nothing eats the exceptions (an unchecked eval in Storage::DESTROY used to be a problem)
+
+{
+  package Dying::Storage;
+
+  use warnings;
+  use strict;
+
+  use base 'DBIx::Class::Storage::DBI';
+
+  sub _populate_dbh {
+    my $self = shift;
+    my $death = $self->_dbi_connect_info->[3]{die};
+
+    die "storage test died: $death" if $death eq 'before_populate';
+    my $ret = $self->next::method (@_);
+    die "storage test died: $death" if $death eq 'after_populate';
+
+    return $ret;
+  }
+}
+
+for (qw/before_populate after_populate/) {
+  dies_ok (sub {
+    my $schema = DBICTest::Schema->clone;
+    $schema->storage_type ('Dying::Storage');
+    $schema->connection (DBICTest->_database, { die => $_ });
+    $schema->storage->ensure_connected;
+  }, "$_ exception found");
+}
+
+done_testing;
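
For illustration only, the Perl behaviour this test (and the 'local $@' added to Storage::DESTROY above) guards against: on perls of this vintage, an eval inside a destructor that runs while an exception is propagating can clobber the in-flight $@:

    use strict;
    use warnings;

    package Clobberer;
    sub new     { bless {}, shift }
    sub DESTROY { eval { 1 } }   # without 'local $@' this may reset $@ (pre-5.14 perls)

    package main;
    eval {
      my $guard = Clobberer->new;
      die "real error\n";        # $guard is destroyed while the die propagates
    };
    print $@ ? "got: $@" : "exception was eaten\n";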

Modified: DBIx-Class/0.08/branches/prefetch/t/storage/ping_count.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/storage/ping_count.t	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/t/storage/ping_count.t	2009-09-07 07:20:44 UTC (rev 7592)
@@ -4,7 +4,6 @@
 use Test::More;
 use lib qw(t/lib);
 use DBICTest;
-use Data::Dumper;
 use DBIC::SqlMakerTest;
 
 my $ping_count = 0;

Modified: DBIx-Class/0.08/branches/prefetch/t/zzzzzzz_perl_perf_bug.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/zzzzzzz_perl_perf_bug.t	2009-09-07 07:14:33 UTC (rev 7591)
+++ DBIx-Class/0.08/branches/prefetch/t/zzzzzzz_perl_perf_bug.t	2009-09-07 07:20:44 UTC (rev 7592)
@@ -1,6 +1,7 @@
 use strict;
 use warnings;
 use Test::More;
+use Benchmark;
 use lib qw(t/lib);
 use DBICTest; # do not remove even though it is not used
 
@@ -25,9 +26,6 @@
 plan skip_all => 'Skipping as AUTOMATED_TESTING is set'
   if ( $ENV{AUTOMATED_TESTING} );
 
-eval "use Benchmark ':all'";
-plan skip_all => 'needs Benchmark for testing' if $@;
-
 plan tests => 3;
 
 ok( 1, 'Dummy - prevents next test timing out' );



