[Bast-commits] r7006 - in DBIx-Class/0.08/branches/table_name_ref: . lib/DBIx lib/DBIx/Class lib/DBIx/Class/Manual lib/DBIx/Class/Relationship lib/DBIx/Class/SQLAHacks lib/DBIx/Class/Storage lib/DBIx/Class/Storage/DBI lib/DBIx/Class/Storage/DBI/ODBC lib/DBIx/Class/Storage/DBI/Oracle lib/DBIx/Class/Storage/DBI/Replicated lib/DBIx/Class/Storage/DBI/Sybase t t/cdbi t/count t/inflate t/lib/DBICTest/Schema t/multi_create t/prefetch t/relationship

caelum at dev.catalyst.perl.org
Thu Jul 9 05:37:24 GMT 2009


Author: caelum
Date: 2009-07-09 05:37:22 +0000 (Thu, 09 Jul 2009)
New Revision: 7006

Added:
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/SQLAHacks/MSSQL.pm
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/AmbiguousGlob.pm
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Sybase/Base.pm
   DBIx-Class/0.08/branches/table_name_ref/t/92storage_on_connect_call.t
   DBIx-Class/0.08/branches/table_name_ref/t/inflate/datetime_oracle.t
   DBIx-Class/0.08/branches/table_name_ref/t/multi_create/multilev_single_PKeqFK.t
   DBIx-Class/0.08/branches/table_name_ref/t/prefetch/count.t
   DBIx-Class/0.08/branches/table_name_ref/t/prefetch/grouped.t
   DBIx-Class/0.08/branches/table_name_ref/t/prefetch/incomplete.t
Removed:
   DBIx-Class/0.08/branches/table_name_ref/t/73oracle_inflate.t
   DBIx-Class/0.08/branches/table_name_ref/t/multi_create/multilev_might_have_PKeqFK.t
Modified:
   DBIx-Class/0.08/branches/table_name_ref/
   DBIx-Class/0.08/branches/table_name_ref/Changes
   DBIx-Class/0.08/branches/table_name_ref/Features_09
   DBIx-Class/0.08/branches/table_name_ref/Makefile.PL
   DBIx-Class/0.08/branches/table_name_ref/TODO
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class.pm
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Manual/Cookbook.pod
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Ordered.pm
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Relationship/Accessor.pm
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Relationship/BelongsTo.pm
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Relationship/CascadeActions.pm
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Relationship/HasMany.pm
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Relationship/HasOne.pm
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Relationship/ManyToMany.pm
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Relationship/ProxyMethods.pm
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/ResultSet.pm
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/ResultSetColumn.pm
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/ResultSource.pm
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Row.pm
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/SQLAHacks.pm
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Schema.pm
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage.pm
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI.pm
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Cursor.pm
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/DB2.pm
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/MSSQL.pm
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/MultiColumnIn.pm
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/NoBindVars.pm
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/ODBC.pm
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/ODBC/ACCESS.pm
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/ODBC/DB2_400_SQL.pm
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/ODBC/Microsoft_SQL_Server.pm
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Oracle.pm
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Oracle/Generic.pm
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Oracle/WhereJoins.pm
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Pg.pm
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Replicated/Types.pm
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/SQLite.pm
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Sybase.pm
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Sybase/MSSQL.pm
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Sybase/Microsoft_SQL_Server.pm
   DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/mysql.pm
   DBIx-Class/0.08/branches/table_name_ref/t/03podcoverage.t
   DBIx-Class/0.08/branches/table_name_ref/t/31stats.t
   DBIx-Class/0.08/branches/table_name_ref/t/42toplimit.t
   DBIx-Class/0.08/branches/table_name_ref/t/60core.t
   DBIx-Class/0.08/branches/table_name_ref/t/71mysql.t
   DBIx-Class/0.08/branches/table_name_ref/t/72pg.t
   DBIx-Class/0.08/branches/table_name_ref/t/73oracle.t
   DBIx-Class/0.08/branches/table_name_ref/t/746mssql.t
   DBIx-Class/0.08/branches/table_name_ref/t/746sybase.t
   DBIx-Class/0.08/branches/table_name_ref/t/74mssql.t
   DBIx-Class/0.08/branches/table_name_ref/t/83cache.t
   DBIx-Class/0.08/branches/table_name_ref/t/86might_have.t
   DBIx-Class/0.08/branches/table_name_ref/t/88result_set_column.t
   DBIx-Class/0.08/branches/table_name_ref/t/90join_torture.t
   DBIx-Class/0.08/branches/table_name_ref/t/95sql_maker_quote.t
   DBIx-Class/0.08/branches/table_name_ref/t/cdbi/02-Film.t
   DBIx-Class/0.08/branches/table_name_ref/t/count/count_rs.t
   DBIx-Class/0.08/branches/table_name_ref/t/count/distinct.t
   DBIx-Class/0.08/branches/table_name_ref/t/count/joined.t
   DBIx-Class/0.08/branches/table_name_ref/t/count/prefetch.t
   DBIx-Class/0.08/branches/table_name_ref/t/from_subquery.t
   DBIx-Class/0.08/branches/table_name_ref/t/lib/DBICTest/Schema/Artwork.pm
   DBIx-Class/0.08/branches/table_name_ref/t/lib/DBICTest/Schema/CD.pm
   DBIx-Class/0.08/branches/table_name_ref/t/multi_create/m2m.t
   DBIx-Class/0.08/branches/table_name_ref/t/prefetch/multiple_hasmany.t
   DBIx-Class/0.08/branches/table_name_ref/t/prefetch/standard.t
   DBIx-Class/0.08/branches/table_name_ref/t/relationship/core.t
   DBIx-Class/0.08/branches/table_name_ref/t/relationship/update_or_create_multi.t
   DBIx-Class/0.08/branches/table_name_ref/t/relationship/update_or_create_single.t
   DBIx-Class/0.08/branches/table_name_ref/t/zzzzzzz_sqlite_deadlock.t
Log:
 r5766 at hlagh (orig r6843):  abraxxa | 2009-06-29 02:02:17 -0700
 fixed typo in test
 
 r5779 at hlagh (orig r6847):  ribasushi | 2009-06-29 10:09:00 -0700
 Minor Ordered optimization (don't use count)
 r5787 at hlagh (orig r6855):  caelum | 2009-06-29 14:42:11 -0700
  r5451 at hlagh (orig r6605):  caelum | 2009-06-10 09:23:44 -0700
  new branch to implement on_connect_call
  r5484 at hlagh (orig r6633):  caelum | 2009-06-11 11:03:10 -0700
  on_connect_call implementation and set_datetime_format support for Oracle
  r5492 at hlagh (orig r6641):  caelum | 2009-06-11 16:39:28 -0700
  connect_call_set_datetime_format for Oracle, I have no idea why this didn't get committed before...
  r5504 at hlagh (orig r6655):  caelum | 2009-06-12 17:28:06 -0700
  finished up on_connect_call stuff
  r5507 at hlagh (orig r6658):  caelum | 2009-06-13 04:03:36 -0700
  fixup _setup_connect_do, other minor cleanups
  r5508 at hlagh (orig r6659):  caelum | 2009-06-13 04:35:33 -0700
  make the on_(dis)?connect_do accessors return the original structure
  r5509 at hlagh (orig r6660):  caelum | 2009-06-13 08:31:52 -0700
  allow undef for _setup_connect_do
  r5522 at hlagh (orig r6679):  caelum | 2009-06-14 09:56:40 -0700
  rename connect_do store
  r5621 at hlagh (orig r6769):  caelum | 2009-06-23 07:38:33 -0700
  minor doc update
  r5628 at hlagh (orig r6777):  caelum | 2009-06-23 16:36:12 -0700
  properly test nanosecond precision with oracle and datetime_setup
  r5669 at hlagh (orig r6784):  caelum | 2009-06-24 10:49:25 -0700
  IC::DT does support timestamp with timezone
  r5768 at hlagh (orig r6846):  caelum | 2009-06-29 08:20:32 -0700
  remove DateTime from 73oracle.t
  r5781 at hlagh (orig r6849):  caelum | 2009-06-29 13:07:43 -0700
  remove the _store stuff for on_connect_do
  r5785 at hlagh (orig r6853):  ribasushi | 2009-06-29 14:38:30 -0700
  Some beautification
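
  A minimal usage sketch (not from the commit itself) of the on_connect_call
  interface merged above; the option names follow the Storage::DBI POD referenced
  in the log, while the schema class and the ALTER SESSION statement are only
  illustrative:

      my $schema = My::Schema->connect(
          $dsn, $user, $pass,
          {
            # run the storage's connect_call_datetime_setup() plus an arbitrary
            # statement (via the generic do_sql call) on every new connection
            on_connect_call => [
              'datetime_setup',
              [ do_sql => q{ALTER SESSION SET NLS_SORT = 'BINARY_CI'} ],
            ],
          },
      );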
 
 r5802 at hlagh (orig r6870):  ribasushi | 2009-06-30 01:09:03 -0700
 Cleanup dependency handling a bit
 r5806 at hlagh (orig r6874):  ribasushi | 2009-06-30 03:39:06 -0700
 Allow broken resultsource-class-derived objects to still work
 r5807 at hlagh (orig r6875):  ribasushi | 2009-06-30 03:40:46 -0700
 clarify
 r5835 at hlagh (orig r6877):  ash | 2009-06-30 04:48:13 -0700
 Update POD on Dynamic sub-classing
 
 r5837 at hlagh (orig r6882):  ribasushi | 2009-06-30 08:36:38 -0700
  r6815 at Thesaurus (orig r6814):  ribasushi | 2009-06-28 10:32:42 +0200
  Branch to explore double joins on search_related
  r6816 at Thesaurus (orig r6815):  ribasushi | 2009-06-28 10:34:16 +0200
  The test case that started it all
  r6817 at Thesaurus (orig r6816):  ribasushi | 2009-06-28 10:35:11 +0200
  The proposed fix (do not add an extra join if it is already present in the topmost join)
  r6818 at Thesaurus (orig r6817):  ribasushi | 2009-06-28 11:04:26 +0200
  Minor omission
  r6819 at Thesaurus (orig r6818):  ribasushi | 2009-06-28 11:07:33 +0200
  Adjust a couple of tests for new behavior (thus all of this might be backwards incompatible to the point of being useless):
  The counts in t/90join_torture.t are now 5*3, not 5*3*3, as a second join is not induced by search_related
  The raw sql scan in t/prefetch/standard.t is just silly, won't even try to understand it
  Just to maintain the TreeLike folding, I add a 3rd children join which was inserted by search_related before the code changes
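
  Roughly the situation addressed by the fix above, as a sketch (resultset and
  relation names are only illustrative, borrowed from the TreeLike test schema):
  when the starting resultset already joins or prefetches a relation,
  search_related on that same relation no longer stacks a second copy of the join:

      my $rs = $schema->resultset('TreeLike')->search({}, { join => 'children' });

      # previously this induced an extra 'children_2' join, inflating counts;
      # now the join already present in the topmost {from} is reused
      my $kids = $rs->search_related('children');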
 
 r5843 at hlagh (orig r6888):  ribasushi | 2009-06-30 10:36:11 -0700
 Todoify test for now
 r5844 at hlagh (orig r6889):  ribasushi | 2009-06-30 10:37:05 -0700
 Todoify test for now (2)
 r5846 at hlagh (orig r6891):  ribasushi | 2009-06-30 10:52:31 -0700
 Todoify test for now (3)
 r5850 at hlagh (orig r6902):  ribasushi | 2009-06-30 23:46:12 -0700
 Fixed deadlock test
 r5851 at hlagh (orig r6903):  ribasushi | 2009-07-01 03:22:00 -0700
 Clarify exception text
 r5854 at hlagh (orig r6906):  ribasushi | 2009-07-01 04:23:46 -0700
  r6821 at Thesaurus (orig r6820):  ribasushi | 2009-06-28 13:09:11 +0200
  Branch for prefetch+group play
  r6823 at Thesaurus (orig r6822):  ribasushi | 2009-06-28 14:38:36 +0200
  Normalize group_by
  r6824 at Thesaurus (orig r6823):  ribasushi | 2009-06-28 14:39:54 +0200
  Proper prefetch+group test
  r6826 at Thesaurus (orig r6825):  ribasushi | 2009-06-28 14:42:48 +0200
  Whoops
  r6828 at Thesaurus (orig r6827):  ribasushi | 2009-06-28 15:06:57 +0200
  Lose the literal sql bits - castaway is right it's silly to support those
  r6833 at Thesaurus (orig r6832):  ribasushi | 2009-06-28 22:38:43 +0200
  Rogue comments
  r6837 at Thesaurus (orig r6836):  ribasushi | 2009-06-29 09:44:25 +0200
  A couple of test fixes
  r6838 at Thesaurus (orig r6837):  ribasushi | 2009-06-29 09:46:13 +0200
  Support for -select/-as in SQLAHacks field selection
  r6839 at Thesaurus (orig r6838):  ribasushi | 2009-06-29 09:49:53 +0200
  This is tested elsewhere
  r6840 at Thesaurus (orig r6839):  ribasushi | 2009-06-29 09:50:43 +0200
  This is tested elsewhere (2)
  r6841 at Thesaurus (orig r6840):  ribasushi | 2009-06-29 10:07:09 +0200
  Test cleanups
  r6842 at Thesaurus (orig r6841):  ribasushi | 2009-06-29 10:11:13 +0200
  Most of the grouped prefetch solution
  r6843 at Thesaurus (orig r6842):  ribasushi | 2009-06-29 10:14:45 +0200
  clearer
  r6845 at Thesaurus (orig r6844):  ribasushi | 2009-06-29 12:05:37 +0200
  And score! (all works)
  r6882 at Thesaurus (orig r6881):  ribasushi | 2009-06-30 16:23:06 +0200
  rs->get_column now properly recognizes prefetch and collapses if at all possible
  r6886 at Thesaurus (orig r6885):  ribasushi | 2009-06-30 17:39:58 +0200
  Whoops
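
  A sketch of what the grouped-prefetch work above enables (hypothetical resultset;
  the Changes entry below describes it as selecting the "top 5 commenters" while
  prefetching all their comments): a has_many prefetch can now be combined with a
  limit without the limit truncating the related rows:

      my $top_artists = $schema->resultset('Artist')->search(
          {},
          {
            prefetch => 'cds',   # has_many prefetch
            rows     => 5,       # limit applies to the artists, via a grouped subquery
          },
      );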
 
 r5857 at hlagh (orig r6909):  ribasushi | 2009-07-01 04:27:15 -0700
 Optimize set_column on uninserted objects
 r5867 at hlagh (orig r6920):  caelum | 2009-07-01 08:40:32 -0700
  r5859 at hlagh (orig r6912):  caelum | 2009-07-01 06:21:30 -0700
  new connected() for dbd::sybase users
  r5860 at hlagh (orig r6913):  caelum | 2009-07-01 06:25:46 -0700
  add a couple of dbd::sybase reconnection tests
  r5861 at hlagh (orig r6914):  caelum | 2009-07-01 06:35:07 -0700
  better connection test
  r5862 at hlagh (orig r6915):  caelum | 2009-07-01 06:45:05 -0700
  use dbh->do for connected instead of prepare_cached
  r5863 at hlagh (orig r6916):  ribasushi | 2009-07-01 06:55:21 -0700
  Segfault
  r5864 at hlagh (orig r6917):  caelum | 2009-07-01 07:03:22 -0700
  use ->do instead of ->prepare_cached in oracle's connected() too
  r5865 at hlagh (orig r6918):  caelum | 2009-07-01 08:20:52 -0700
  fix segfault with old DBD::Sybase
  r5866 at hlagh (orig r6919):  caelum | 2009-07-01 08:39:18 -0700
  move connection tests into _ping()
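
  The shape of the change above, as a rough sketch only (the real code lives in the
  Sybase and Oracle storage subclasses): connected() now delegates the liveness
  check to a per-storage _ping(), which issues a cheap statement via $dbh->do
  instead of prepare_cached:

      sub _ping {
          my $self = shift;

          my $dbh = $self->_dbh or return 0;

          # a trivial round-trip; any failure (or a dead handle) means "not connected"
          local $dbh->{RaiseError} = 1;
          return eval { $dbh->do('select 1'); 1 } ? 1 : 0;
      }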
 
 r5873 at hlagh (orig r6923):  ijw | 2009-07-01 10:34:32 -0700
 Added a test for a resultset to related-resultset join for 0 related records
 r5874 at hlagh (orig r6927):  ijw | 2009-07-01 11:04:16 -0700
 Additional tests on prefetch - illustrates the bug with left-join has_many (NULL row returned) and the one that results from the trivial fix (prefetch gives no artist)
 r5876 at hlagh (orig r6931):  ribasushi | 2009-07-01 23:08:33 -0700
 Another candidate for somethingawful.com (fix left join-ed count)
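
 For context, the left-joined count problem being fixed here is the one documented
 in the comment added to ResultSet.pm further down in this diff - counting through
 a chained relationship used to keep the LEFT JOIN:

     $artist_rs->search_related('cds')->count;
     # generated, roughly:
     #   SELECT COUNT( * ) FROM artist me LEFT JOIN cd cds ON cds.artist = me.artistid
     # i.e. at least one row per artist even when an artist has no cds

 The fix (together with _switch_to_inner_join_if_needed below) makes such counts
 return the number of related rows, and 0 for an empty related resultset.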
 r5877 at hlagh (orig r6933):  ribasushi | 2009-07-02 00:04:13 -0700
 Changelog
 r5878 at hlagh (orig r6934):  ribasushi | 2009-07-02 02:23:48 -0700
 cleanup
 r5879 at hlagh (orig r6935):  ijw | 2009-07-02 03:41:01 -0700
 Check fetched rows == count for related resultsets
 r5880 at hlagh (orig r6936):  ijw | 2009-07-02 03:43:47 -0700
 Confirm prefetch doesn't affect main row fetch, and main row fetch works with and without counting
 r5881 at hlagh (orig r6937):  ribasushi | 2009-07-02 03:52:51 -0700
 More fail (fix is known but needs work)
 r5882 at hlagh (orig r6938):  ribasushi | 2009-07-02 04:07:22 -0700
 And more fail
 r5883 at hlagh (orig r6939):  ribasushi | 2009-07-02 04:16:46 -0700
 These tests are in prefetch/count.t
 r5884 at hlagh (orig r6940):  ribasushi | 2009-07-02 04:38:31 -0700
 cleanup
 r5885 at hlagh (orig r6941):  ribasushi | 2009-07-02 04:38:49 -0700
 Solve more prefetch inflation crap
 r5886 at hlagh (orig r6942):  ribasushi | 2009-07-02 04:47:41 -0700
 Make the code readable
 r5887 at hlagh (orig r6943):  ribasushi | 2009-07-02 06:52:35 -0700
 Everything works, just need to fix join-path chaining over search_related (to guard against obscure db quirks)
 r5889 at hlagh (orig r6945):  caelum | 2009-07-02 12:06:32 -0700
 add sybase reconnect test
 r5891 at hlagh (orig r6947):  ribasushi | 2009-07-02 13:20:21 -0700
 Last part of the join handling puzzle
 r5894 at hlagh (orig r6950):  ribasushi | 2009-07-02 15:14:50 -0700
  r6360 at Thesaurus (orig r6359):  arcanez | 2009-05-21 20:18:52 +0200
  branch to work on prefetch/select
  r6361 at Thesaurus (orig r6360):  arcanez | 2009-05-21 20:32:46 +0200
  failing test
  r6373 at Thesaurus (orig r6372):  ribasushi | 2009-05-22 11:07:26 +0200
  Simplify unresolvable test by arcanez
  r6905 at Thesaurus (orig r6904):  ribasushi | 2009-07-01 12:54:03 +0200
  Extend test
  r6950 at Thesaurus (orig r6949):  ribasushi | 2009-07-03 00:14:09 +0200
  Apparent fix - simply delay the in_storage flagging of the main object until all prefetched objects are inflated. The rest of the changes are just cosmetics, preparing for the collapse_result rewrite
 
 r5896 at hlagh (orig r6952):  ribasushi | 2009-07-02 15:17:22 -0700
 Changes
 r5909 at hlagh (orig r6964):  ribasushi | 2009-07-03 04:19:27 -0700
 Add set_ansi_mode on_connect_call for mysql
 Also switch to _do_query instead of plain dbh->do (shows up in the trace)
 r5910 at hlagh (orig r6965):  ribasushi | 2009-07-03 04:37:06 -0700
 Capitalize mysql commands
 r5911 at hlagh (orig r6966):  ribasushi | 2009-07-03 06:07:49 -0700
 Double an existing might_have test as has_one
 r5912 at hlagh (orig r6967):  ribasushi | 2009-07-03 07:36:32 -0700
 Extra test to demonstrate has_one working, and a POD clarification of multicreate
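
 As a reminder of what "multicreate" refers to in the POD clarification above, a
 hedged example using the usual create-with-related-data syntax (source and
 relationship names here are purely illustrative):

     my $cd = $schema->resultset('CD')->create({
         title       => 'Some Album',
         year        => 2009,
         artist      => { name => 'Some Artist' },    # belongs_to, created inline
         liner_notes => { notes => 'Some notes' },    # might_have/has_one, created inline
         tracks      => [ { title => 'One' }, { title => 'Two' } ],
     });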
 r5917 at hlagh (orig r6972):  ribasushi | 2009-07-03 11:20:42 -0700
  r6554 at Thesaurus (orig r6553):  frew | 2009-06-09 00:06:42 +0200
  branch for mssql top issues
  r6572 at Thesaurus (orig r6571):  frew | 2009-06-09 23:18:46 +0200
  more tests for SQL Server!
  r6573 at Thesaurus (orig r6572):  frew | 2009-06-09 23:49:10 +0200
  Added AmbiguousGlob.pm for silly servers like mssql and mysql.  See docs for more info
  r6574 at Thesaurus (orig r6573):  frew | 2009-06-09 23:55:22 +0200
  fix plan
  r6602 at Thesaurus (orig r6601):  frew | 2009-06-10 17:03:30 +0200
  more failing tests
  r6608 at Thesaurus (orig r6607):  frew | 2009-06-10 20:05:53 +0200
  don't use eval!
  r6610 at Thesaurus (orig r6609):  frew | 2009-06-10 20:07:49 +0200
  beginning of DWIM for IDENTITY_INSERT
  r6628 at Thesaurus (orig r6627):  frew | 2009-06-11 18:13:02 +0200
  still busted :-(
  r6631 at Thesaurus (orig r6630):  frew | 2009-06-11 19:39:00 +0200
  general function to go from column names and ident to result source
  r6632 at Thesaurus (orig r6631):  frew | 2009-06-11 19:40:11 +0200
  Use new _resolve_column_sources method and begin insert_bulk method
  r6635 at Thesaurus (orig r6634):  frew | 2009-06-11 20:12:38 +0200
  updated _resolve_column_source to _resolve_column_info as per ribasushi's suggestion
  r6650 at Thesaurus (orig r6649):  frew | 2009-06-12 17:13:32 +0200
  Now I just need to check if the actual values are set...
  r6651 at Thesaurus (orig r6650):  frew | 2009-06-12 17:26:53 +0200
  Insert Identity works!
  r6652 at Thesaurus (orig r6651):  frew | 2009-06-12 17:34:13 +0200
  silly warns.
  r6684 at Thesaurus (orig r6683):  frew | 2009-06-15 16:49:00 +0200
  failing test
  r6686 at Thesaurus (orig r6685):  ribasushi | 2009-06-15 18:10:26 +0200
  make all resolved attrs visible to sqla
  r6698 at Thesaurus (orig r6697):  ribasushi | 2009-06-17 02:31:37 +0200
  Half way working stuff, needs a LOT of tweaking still
  r6729 at Thesaurus (orig r6728):  ribasushi | 2009-06-19 19:49:27 +0200
  Merge badness
  r6730 at Thesaurus (orig r6729):  ribasushi | 2009-06-19 19:49:40 +0200
  fix eol
  r6731 at Thesaurus (orig r6730):  ribasushi | 2009-06-19 19:55:47 +0200
  augment inheritance
  r6735 at Thesaurus (orig r6734):  ribasushi | 2009-06-20 10:34:42 +0200
  Maybe I've nailed it
  r6746 at Thesaurus (orig r6745):  ribasushi | 2009-06-20 23:53:55 +0200
  Test and merge fixes
  r6747 at Thesaurus (orig r6746):  ribasushi | 2009-06-21 00:01:09 +0200
  Really fix tests
  r6748 at Thesaurus (orig r6747):  ribasushi | 2009-06-21 00:01:54 +0200
  Really fix tests
  r6749 at Thesaurus (orig r6748):  ribasushi | 2009-06-21 00:18:33 +0200
  Now really final
  r6750 at Thesaurus (orig r6749):  ribasushi | 2009-06-21 00:22:23 +0200
  whoops
  r6751 at Thesaurus (orig r6750):  ribasushi | 2009-06-21 00:42:18 +0200
  That should be all
  r6752 at Thesaurus (orig r6751):  ribasushi | 2009-06-21 08:54:00 +0200
  Make sure quoting works
  r6755 at Thesaurus (orig r6754):  ribasushi | 2009-06-21 15:21:23 +0200
  Groundwork for sanification of the toplimit test
  r6863 at Thesaurus (orig r6862):  ribasushi | 2009-06-30 01:13:49 +0200
  Make sure storage classes use c3, just like the rest of dbic (tested on 5.8 as well)
  r6869 at Thesaurus (orig r6868):  ribasushi | 2009-06-30 09:53:27 +0200
  Some fixes after review
  r6874 at Thesaurus (orig r6873):  ribasushi | 2009-06-30 11:54:34 +0200
  Fix borked next invocation
  r6896 at Thesaurus (orig r6895):  frew | 2009-06-30 21:38:26 +0200
  silly misspells and trailing whitespace
  r6955 at Thesaurus (orig r6954):  ribasushi | 2009-07-03 01:21:28 +0200
  Some hack consolidation
  r6962 at Thesaurus (orig r6961):  ribasushi | 2009-07-03 12:06:57 +0200
  Fix some mssql shortcomings when confronted with the new subqueried prefetch sql
  r6963 at Thesaurus (orig r6962):  ribasushi | 2009-07-03 12:47:57 +0200
  Ask for newer DBD::Pg in author mode, suggest the newer version otherwise (proper array support). Make test more resilient as well
  r6964 at Thesaurus (orig r6963):  ribasushi | 2009-07-03 12:49:16 +0200
  Switch to C3 mro throughout the ::Storage hierarchy (DBIx::Class brings in MRO::Compat, and all ::Storage classes are based on it; tested on 5.8)
  r6969 at Thesaurus (orig r6968):  ribasushi | 2009-07-03 19:54:04 +0200
  Duh
  r6970 at Thesaurus (orig r6969):  frew | 2009-07-03 19:59:48 +0200
  fix tests for new codez
  r6971 at Thesaurus (orig r6970):  ribasushi | 2009-07-03 20:18:53 +0200
  detabify
  r6972 at Thesaurus (orig r6971):  ribasushi | 2009-07-03 20:20:07 +0200
  changes
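
  One user-visible piece of the branch merged above is bulk insert - populate() in
  void context goes through insert_bulk, which is where the IDENTITY_INSERT
  handling mentioned in the log has to kick in on MSSQL. The call shape (source and
  column names are only an example) is:

      # void context => insert_bulk; array-of-arrays form, first row is column names
      $schema->resultset('Artist')->populate([
          [ qw/artistid name/ ],
          [ 1, 'Artist One' ],
          [ 2, 'Artist Two' ],
      ]);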
 
 r5920 at hlagh (orig r6979):  ribasushi | 2009-07-04 02:34:08 -0700
 Hide devel documentation from the indexer
 r5921 at hlagh (orig r6980):  ribasushi | 2009-07-04 02:37:25 -0700
 Add set_ansi_mode POD
 r5922 at hlagh (orig r6981):  ribasushi | 2009-07-04 02:45:24 -0700
 Backout mysql changes for further polishing
 r5925 at hlagh (orig r6984):  ribasushi | 2009-07-04 03:08:16 -0700
 Missing newline
 r5926 at hlagh (orig r6985):  ribasushi | 2009-07-04 03:11:18 -0700
 typo
 r5927 at hlagh (orig r6986):  ribasushi | 2009-07-04 03:40:47 -0700
 Fix POD
 r5928 at hlagh (orig r6987):  ribasushi | 2009-07-04 04:09:39 -0700
 todos are shorter now
 r5929 at hlagh (orig r6989):  castaway | 2009-07-05 13:00:55 -0700
 Added Pod::Inherit use to Makefile.PL at author-time; comments/suggestions as to whether it's too "noisy" are welcome.
 
 r5930 at hlagh (orig r6990):  ribasushi | 2009-07-05 15:06:52 -0700
 A couple of Makefile.PL fixes:
 'use' happens at compile time, so use 'require' instead
 'recommends' is for distro maintainers only; push the dependency into the author hash (it is not to be executed by mere mortals)
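
 The pattern being described, paraphrased as a sketch (it mirrors the Makefile.PL
 hunk further down in this diff): author-only helpers are loaded lazily with
 require at run time rather than with use, so a missing module never breaks a
 normal install:

     if ($Module::Install::AUTHOR) {
         # 'use' would fire at compile time for every user of Makefile.PL;
         # 'require' only runs here, only for authors, and failure is non-fatal
         eval { require Module::Install::Pod::Inherit };
         Module::Install::Pod::Inherit::PodInherit() if !$@;
     }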
 
 r5931 at hlagh (orig r6991):  ribasushi | 2009-07-05 15:55:36 -0700
 Forgotten pod exclusions
 r5932 at hlagh (orig r6992):  ribasushi | 2009-07-05 16:07:05 -0700
 Temporarily backout Pod::Inherit changes
 r5933 at hlagh (orig r6993):  ribasushi | 2009-07-05 16:10:22 -0700
 Put Pod::Inherit stuff back after proper copy



Property changes on: DBIx-Class/0.08/branches/table_name_ref
___________________________________________________________________
Name: svk:merge
   - 168d5346-440b-0410-b799-f706be625ff1:/DBIx-Class-current:2207
462d4d0c-b505-0410-bf8e-ce8f877b3390:/local/bast/DBIx-Class:3159
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/resultsetcolumn_custom_columns:5160
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/sqla_1.50_compat:5414
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/trunk:5969
9c88509d-e914-0410-b01c-b9530614cbfe:/local/DBIx-Class:32260
9c88509d-e914-0410-b01c-b9530614cbfe:/local/DBIx-Class-CDBICompat:54993
9c88509d-e914-0410-b01c-b9530614cbfe:/vendor/DBIx-Class:31122
ab17426e-7cd3-4704-a2a2-80b7c0a611bb:/local/dbic_column_attr:10946
ab17426e-7cd3-4704-a2a2-80b7c0a611bb:/local/dbic_trunk:11788
bd5ac9a7-f185-4d95-9186-dbb8b392a572:/local/os/bast/DBIx-Class/0.08/trunk:2798
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/belongs_to_null_col_fix:5244
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/cdbicompat_integration:4160
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/column_attr:5074
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/complex_join_rels:4589
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/count_distinct:6218
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/count_rs:6741
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/diamond_relationships:6310
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/file_column:3920
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/fix-update-and-delete-as_query:6162
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/joined_count:6323
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/multi_stuff:5565
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mystery_join:6589
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/on_disconnect_do:3694
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle-tweaks:6222
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle_sequence:4173
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/order_by_refactor:6475
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/parser_fk_index:4485
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch:5699
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_limit:6724
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/replication_dedux:4600
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/rsrc_in_storage:6577
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/rt_bug_41083:5437
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/savepoints:4223
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sqla_1.50_compat:5321
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/storage-ms-access:4142
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/storage-tweaks:6262
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/subclassed_rsset:5930
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/subquery:5617
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase:5651
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_mssql:6125
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/top_limit_altfix:6429
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/type_aware_update:6619
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/versioned_enhancements:4125
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/versioning:4578
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/views:5585
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/trunk:6819
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-C3:318
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-current:2222
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-joins:173
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-resultset:570
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/datetime:1716
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/find_compat:1855
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/find_unique_query_fixes:2142
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/inflate:1988
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/many_to_many:2025
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/re_refactor_bugfix:1944
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/reorganize_tests:1827
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset-new-refactor:1766
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset_2_electric_boogaloo:2175
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset_cleanup:2102
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/sqlt_tests_refactor:2043
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/trunk/DBIx-Class:3606
fe160bb6-dc1c-0410-9f2b-d64a711b54a5:/local/DBIC-trunk-0.08:10510
   + 168d5346-440b-0410-b799-f706be625ff1:/DBIx-Class-current:2207
462d4d0c-b505-0410-bf8e-ce8f877b3390:/local/bast/DBIx-Class:3159
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/resultsetcolumn_custom_columns:5160
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/sqla_1.50_compat:5414
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/trunk:5969
9c88509d-e914-0410-b01c-b9530614cbfe:/local/DBIx-Class:32260
9c88509d-e914-0410-b01c-b9530614cbfe:/local/DBIx-Class-CDBICompat:54993
9c88509d-e914-0410-b01c-b9530614cbfe:/vendor/DBIx-Class:31122
ab17426e-7cd3-4704-a2a2-80b7c0a611bb:/local/dbic_column_attr:10946
ab17426e-7cd3-4704-a2a2-80b7c0a611bb:/local/dbic_trunk:11788
bd5ac9a7-f185-4d95-9186-dbb8b392a572:/local/os/bast/DBIx-Class/0.08/trunk:2798
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/belongs_to_null_col_fix:5244
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/cdbicompat_integration:4160
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/column_attr:5074
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/complex_join_rels:4589
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/count_distinct:6218
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/count_rs:6741
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/diamond_relationships:6310
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/file_column:3920
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/fix-update-and-delete-as_query:6162
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/grouped_prefetch:6885
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/joined_count:6323
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_top_fixes:6971
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/multi_stuff:5565
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mystery_join:6589
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/on_connect_call:6854
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/on_disconnect_do:3694
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle-tweaks:6222
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle_sequence:4173
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/order_by_refactor:6475
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/parser_fk_index:4485
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch:5699
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_limit:6724
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/replication_dedux:4600
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/rsrc_in_storage:6577
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/rt_bug_41083:5437
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/savepoints:4223
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/search_related_prefetch:6818
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sqla_1.50_compat:5321
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/storage-ms-access:4142
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/storage-tweaks:6262
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/subclassed_rsset:5930
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/subquery:5617
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/syb_connected:6919
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase:5651
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_mssql:6125
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/top_limit_altfix:6429
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/type_aware_update:6619
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/unresolvable_prefetch:6949
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/versioned_enhancements:4125
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/versioning:4578
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/views:5585
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/trunk:6993
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-C3:318
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-current:2222
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-joins:173
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-resultset:570
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/datetime:1716
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/find_compat:1855
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/find_unique_query_fixes:2142
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/inflate:1988
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/many_to_many:2025
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/re_refactor_bugfix:1944
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/reorganize_tests:1827
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset-new-refactor:1766
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset_2_electric_boogaloo:2175
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset_cleanup:2102
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/sqlt_tests_refactor:2043
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/trunk/DBIx-Class:3606
fe160bb6-dc1c-0410-9f2b-d64a711b54a5:/local/DBIC-trunk-0.08:10510

Modified: DBIx-Class/0.08/branches/table_name_ref/Changes
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/Changes	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/Changes	2009-07-09 05:37:22 UTC (rev 7006)
@@ -1,16 +1,43 @@
 Revision history for DBIx::Class
 
+        - Added call to Pod::Inherit in Makefile.PL - 
+          currently at author-time only, so we need to add the produced
+          .pod files to the MANIFEST
+        - Fixed the has_many prefetch with limit/group deficiency -
+          it is now possible to select "top 5 commenters" while
+          prefetching all their comments
+        - New resultset method count_rs, returns a ::ResultSetColumn
+          which in turn returns a single count value
+        - Even better support of count with limit
+        - New on_connect_call/on_disconnect_call functionality (check
+          POD of Storage::DBI)
+        - Automatic datetime handling environment/session setup for
+          Oracle via connect_call_datetime_setup()
+        - count/all on related left-joined empty resultsets now correctly
+          returns 0/()
         - Fixed regression when both page and offset are specified on
           a resultset
         - Fixed HRI returning too many empty results on multilevel
           nonexisting prefetch
-        - Fixed the prefetch with limit bug
-        - New resultsed method count_rs, returns a ::ResultSetColumn
-          which in turn returns a single count value
         - make_column_dirty() now overwrites the deflated value with an
           inflated one if such exists
         - Fixed set_$rel with where restriction deleting rows outside 
           the restriction
+        - populate() returns the created objects or an arrayref of the
+          created objects depending on scalar vs. list context
+        - Fixed find_related on 'single' relationships - the former
+          implementation would overspecify the WHERE condition, reporting
+          no related objects when there in fact is one
+        - SQL::Translator::Parser::DBIx::Class now attaches tables to the
+          central schema object in relationship dependency order
+        - Fixed regression in set_column() preventing sourceless object
+          manipulations
+        - Fixed a bug in search_related doubling a join if the original
+          $rs already joins/prefetches the same relation
+        - Storage::DBI::connected() improvements for Oracle and Sybase
+        - Fixed prefetch+incomplete select regression introduced in
+          0.08100
+        - MSSQL limit (TOP emulation) fixes and improvements
 
 0.08107 2009-06-14 08:21:00 (UTC)
         - Fix serialization regression introduced in 0.08103 (affects
@@ -27,8 +54,8 @@
         - Update of numeric columns now properly uses != to determine
           dirtyness instead of the usual eq
         - Fixes to IC::DT tests
-        - Fixed exception when undef_if_invalid and timezone are both set on 
-          an invalid datetime column
+        - Fixed exception when undef_if_invalid and timezone are both set
+          on an invalid datetime column
 
 0.08104 2009-06-10 13:38:00 (UTC)
         - order_by now can take \[$sql, @bind] as in

Modified: DBIx-Class/0.08/branches/table_name_ref/Features_09
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/Features_09	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/Features_09	2009-07-09 05:37:22 UTC (rev 7006)
@@ -14,12 +14,8 @@
  - "belongs_to" to "contains/refers/something"
 
 Using inflated objects/references as values in searches
- - Goes together with subselects above
  - should deflate then run search
 
-FilterColumn - like Inflate, only for changing scalar values
- - This seems to be vaporware atm..
-
 SQL/API feature complete?
  - UNION
  - proper join conditions!
@@ -27,17 +23,16 @@
 
 Moosification - ouch
 
+Metamodel stuff - introspection
+
 Prefetch improvements
  - slow on mysql, speedup?
  - multi has_many prefetch
- - paging working with prefetch
 
 Magically "discover" needed joins/prefetches and add them
  - eg $books->search({ 'author.name' => 'Fred'}), autoadds: join => 'author'
  - also guess aliases when supplying column names that are on joined/related tables
 
-Metamodel stuff - introspection
-
 Storage API/restructure
  - call update/insert etc on the ResultSource, which then calls to storage
  - handle different storages/db-specific code better
@@ -52,4 +47,3 @@
 Documentation - improvements
  - better indexing for finding of stuff in general
  - more cross-referencing of docs
-

Modified: DBIx-Class/0.08/branches/table_name_ref/Makefile.PL
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/Makefile.PL	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/Makefile.PL	2009-07-09 05:37:22 UTC (rev 7006)
@@ -9,22 +9,13 @@
 perl_version '5.006001';
 all_from 'lib/DBIx/Class.pm';
 
-requires 'DBD::SQLite'              => 1.25;
-requires 'Data::Page'               => 2.00;
-requires 'SQL::Abstract'            => 1.56;
-requires 'SQL::Abstract::Limit'     => 0.13;
-requires 'Class::C3::Componentised' => 1.0005;
-requires 'Carp::Clan'               => 6.0;
-requires 'DBI'                      => 1.605;
-requires 'Module::Find'             => 0.06;
-requires 'Class::Inspector'         => 1.24;
-requires 'Class::Accessor::Grouped' => 0.08003;
-requires 'JSON::Any'                => 1.18;
-requires 'Scope::Guard'             => 0.03;
-requires 'Path::Class'              => 0.16;
-requires 'Sub::Name'                => 0.04;
-requires 'MRO::Compat'              => 0.09;
 
+test_requires 'Test::Builder'       => 0.33;
+test_requires 'Test::Deep'          => 0;
+test_requires 'Test::Exception'     => 0;
+test_requires 'Test::More'          => 0.82;
+test_requires 'Test::Warn'          => 0.11;
+
 # Core
 requires 'List::Util'               => 0;
 requires 'Scalar::Util'             => 0;
@@ -33,11 +24,22 @@
 # Perl 5.8.0 doesn't have utf8::is_utf8()
 requires 'Encode'                   => 0 if ($] <= 5.008000);
 
-test_requires 'Test::More'          => 0.82;
-test_requires 'Test::Builder'       => 0.33;
-test_requires 'Test::Warn'          => 0.11;
-test_requires 'Test::Exception'     => 0;
-test_requires 'Test::Deep'          => 0;
+# Dependencies (keep in alphabetical order)
+requires 'Carp::Clan'               => 6.0;
+requires 'Class::Accessor::Grouped' => 0.08003;
+requires 'Class::C3::Componentised' => 1.0005;
+requires 'Class::Inspector'         => 1.24;
+requires 'Data::Page'               => 2.00;
+requires 'DBD::SQLite'              => 1.25;
+requires 'DBI'                      => 1.605;
+requires 'JSON::Any'                => 1.18;
+requires 'MRO::Compat'              => 0.09;
+requires 'Module::Find'             => 0.06;
+requires 'Path::Class'              => 0.16;
+requires 'Scope::Guard'             => 0.03;
+requires 'SQL::Abstract'            => 1.56;
+requires 'SQL::Abstract::Limit'     => 0.13;
+requires 'Sub::Name'                => 0.04;
 
 recommends 'SQL::Translator'        => 0.09004;
 
@@ -59,6 +61,7 @@
 
 my %force_requires_if_author = (
   'Test::Pod::Coverage'       => 1.04,
+  'Module::Install::Pod::Inherit' => 0.01,
   'SQL::Translator'           => 0.09007,
 
   # CDBI-compat related
@@ -73,30 +76,41 @@
   'Test::Memory::Cycle'       => 0,
   'Devel::Cycle'              => 1.10,
 
-  # t/inflate/datetime*.t
-  # t/72.pg
   # t/36datetime.t
   # t/60core.t
   'DateTime::Format::SQLite'  => 0,
-  'DateTime::Format::MySQL'   => 0,
-  'DateTime::Format::Pg'      => 0,
 
   # t/96_is_deteministic_value.t
-  'DateTime::Format::Strptime' => 0,
+  'DateTime::Format::Strptime'=> 0,
 
-  # t/72pg.t
+  # t/93storage_replication.t
+  'Moose',                    => 0.77,
+  'MooseX::AttributeHelpers'  => 0.12,
+  'MooseX::Types',            => 0.10,
+  'namespace::clean'          => 0.11,
+  'Hash::Merge',              => 0.11,
+
+  # database-dependent reqs
+  #
   $ENV{DBICTEST_PG_DSN}
-    ? ('Sys::SigAction'=> 0)
-    : ()
+    ? (
+      'Sys::SigAction' => 0,
+      'DBD::Pg' => 2.009002,
+      'DateTime::Format::Pg' => 0,
+    ) : ()
   ,
 
-  # t/93storage_replication.t
-  'Moose',                        => 0.77,
-  'MooseX::AttributeHelpers'      => 0.12,
-  'MooseX::Types',                => 0.10,
-  'namespace::clean'              => 0.11,
-  'Hash::Merge',                  => 0.11,
+  $ENV{DBICTEST_MYSQL_DSN}
+    ? (
+      'DateTime::Format::MySQL' => 0,
+    ) : ()
+  ,
 
+  $ENV{DBICTEST_ORACLE_DSN}
+    ? (
+      'DateTime::Format::Oracle' => 0,
+    ) : ()
+  ,
 );
 
 if ($Module::Install::AUTHOR) {
@@ -111,7 +125,7 @@
 
 EOW
 
-  foreach my $module (keys %force_requires_if_author) {
+  foreach my $module (sort keys %force_requires_if_author) {
     build_requires ($module => $force_requires_if_author{$module});
   }
 
@@ -122,6 +136,9 @@
     print "Removing MANIFEST\n";
     unlink 'MANIFEST';
   }
+
+  eval { require Module::Install::Pod::Inherit };
+  Module::Install::Pod::Inherit::PodInherit() if !$@;
 }
 
 auto_install();

Modified: DBIx-Class/0.08/branches/table_name_ref/TODO
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/TODO	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/TODO	2009-07-09 05:37:22 UTC (rev 7006)
@@ -3,12 +3,8 @@
   - ResultSource objects caching ->resultset causes interesting problems
   - find why XSUB dumper kills schema in Catalyst (may be Pg only?)
 
-2006-04-11 by castaway
- - docs of copy() should say that is_auto_increment is essential for auto_incrementing keys
-
 2006-03-25 by mst
   - find a way to un-wantarray search without breaking compat
-  - audit logging component
   - delay relationship setup if done via ->load_classes
   - double-sided relationships
   - make short form of class specifier in relationships work
@@ -21,9 +17,6 @@
    We should still support the old inflate/deflate syntax, but this new 
    way should be recommended. 
 
-2006-02-07 by castaway
- - Extract DBIC::SQL::Abstract into a separate module for CPAN
-
 2006-03-18 by bluefeet
  - Support table locking.
 
@@ -47,18 +40,8 @@
        if you haven't specified one of the others
 
 2008-10-30 by ribasushi
- Leftovers for next dev-release
   - Rewrite the test suite to rely on $schema->deploy, allowing for seamless
     testing of various RDBMS using the same tests
-  - Proper support of default create (i.e. create({}) ), with proper workarounds
-    for different Storage's
   - Automatically infer quote_char/name_sep from $schema->storage
-  - Finally incorporate View support (needs real tests)
   - Fix and properly test chained search attribute merging
-
-2008-11-07 by ribasushi
-  - Be loud when a relationship resolution fails because we did not select/as
-    a neccessary pk
   - Recursive update() (all code seems to be already available)
-  - $rs->populate changes its syntax depending on wantarray context (BAD)
-    Also the interface differs from $schema->populate (not so good)

Modified: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Manual/Cookbook.pod
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Manual/Cookbook.pod	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Manual/Cookbook.pod	2009-07-09 05:37:22 UTC (rev 7006)
@@ -859,6 +859,9 @@
     use strict; 
     use warnings; 
     use base qw/My::Schema::Result::User/; 
+
+    # This line is important
+    __PACKAGE__->table('users');
      
     sub hello 
     { 

Modified: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Ordered.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Ordered.pm	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Ordered.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -272,14 +272,14 @@
     return defined $lsib ? $lsib : 0;
 }
 
-# an optimised method to get the last sibling position without inflating a row object
-sub _last_sibling_pos {
+# an optimized method to get the last sibling position value without inflating a row object
+sub _last_sibling_posval {
     my $self = shift;
     my $position_column = $self->position_column;
 
     my $cursor = $self->next_siblings->search(
         {},
-        { rows => 1, order_by => { '-desc' => $position_column }, columns => $position_column },
+        { rows => 1, order_by => { '-desc' => $position_column }, select => $position_column },
     )->cursor;
 
     my ($pos) = $cursor->next;
@@ -313,7 +313,7 @@
 
 sub move_next {
     my $self = shift;
-    return 0 unless $self->next_siblings->count;
+    return 0 unless defined $self->_last_sibling_posval;  # quick way to check for no more siblings
     return $self->move_to ($self->_position + 1);
 }
 
@@ -341,7 +341,11 @@
 
 sub move_last {
     my $self = shift;
-    return $self->move_to( $self->_group_rs->count );
+    my $last_posval = $self->_last_sibling_posval;
+
+    return 0 unless defined $last_posval;
+
+    return $self->move_to( $self->_position_from_value ($last_posval) );
 }
 
 =head2 move_to
@@ -436,18 +440,21 @@
         $self->move_last;
 
         $self->set_inflated_columns({ %$to_group, $position_column => undef });
-        my $new_group_count = $self->_group_rs->count;
+        my $new_group_last_posval = $self->_last_sibling_posval;
+        my $new_group_last_position = $self->_position_from_value (
+          $new_group_last_posval
+        );
 
-        if ( not defined($to_position) or $to_position > $new_group_count) {
+        if ( not defined($to_position) or $to_position > $new_group_last_position) {
             $self->set_column(
-                $position_column => $new_group_count
-                    ? $self->_next_position_value ( $self->_last_sibling_pos )
+                $position_column => $new_group_last_position
+                    ? $self->_next_position_value ( $new_group_last_posval )
                     : $self->_initial_position_value
             );
         }
         else {
             my $bumped_pos_val = $self->_position_value ($to_position);
-            my @between = ($to_position, $new_group_count);
+            my @between = ($to_position, $new_group_last_position);
             $self->_shift_siblings (1, @between);   #shift right
             $self->set_column( $position_column => $bumped_pos_val );
         }
@@ -473,10 +480,10 @@
     my $position_column = $self->position_column;
 
     unless ($self->get_column($position_column)) {
-        my $lsib_pos = $self->_last_sibling_pos;
+        my $lsib_posval = $self->_last_sibling_posval;
         $self->set_column(
-            $position_column => (defined $lsib_pos
-                ? $self->_next_position_value ( $lsib_pos )
+            $position_column => (defined $lsib_posval
+                ? $self->_next_position_value ( $lsib_posval )
                 : $self->_initial_position_value
             )
         );
@@ -616,6 +623,27 @@
     return $self->get_column ($self->position_column);
 }
 
+=head2 _position_from_value
+
+  my $num_pos = $item->_position_from_value ( $pos_value )
+
+Returns the B<absolute numeric position> of an object with a B<position
+value> set to C<$pos_value>. By default simply returns C<$pos_value>.
+
+=cut
+sub _position_from_value {
+    my ($self, $val) = @_;
+
+    return 0 unless defined $val;
+
+#    #the right way to do this
+#    return $self -> _group_rs
+#                 -> search({ $self->position_column => { '<=', $val } })
+#                 -> count
+
+    return $val;
+}
+
 =head2 _position_value
 
   my $pos_value = $item->_position_value ( $pos )

Modified: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Relationship/Accessor.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Relationship/Accessor.pm	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Relationship/Accessor.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -6,6 +6,11 @@
 use Sub::Name ();
 use Class::Inspector ();
 
+our %_pod_inherit_config = 
+  (
+   class_map => { 'DBIx::Class::Relationship::Accessor' => 'DBIx::Class::Relationship' }
+  );
+
 sub register_relationship {
   my ($class, $rel, $info) = @_;
   if (my $acc_type = $info->{attrs}{accessor}) {

Modified: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Relationship/BelongsTo.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Relationship/BelongsTo.pm	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Relationship/BelongsTo.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -7,6 +7,11 @@
 use strict;
 use warnings;
 
+our %_pod_inherit_config = 
+  (
+   class_map => { 'DBIx::Class::Relationship::BelongsTo' => 'DBIx::Class::Relationship' }
+  );
+
 sub belongs_to {
   my ($class, $rel, $f_class, $cond, $attrs) = @_;
 

Modified: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Relationship/CascadeActions.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Relationship/CascadeActions.pm	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Relationship/CascadeActions.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -4,6 +4,11 @@
 use strict;
 use warnings;
 
+our %_pod_inherit_config = 
+  (
+   class_map => { 'DBIx::Class::Relationship::CascadeActions' => 'DBIx::Class::Relationship' }
+  );
+
 sub delete {
   my ($self, @rest) = @_;
   return $self->next::method(@rest) unless ref $self;

Modified: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Relationship/HasMany.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Relationship/HasMany.pm	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Relationship/HasMany.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -4,6 +4,11 @@
 use strict;
 use warnings;
 
+our %_pod_inherit_config = 
+  (
+   class_map => { 'DBIx::Class::Relationship::HasMany' => 'DBIx::Class::Relationship' }
+  );
+
 sub has_many {
   my ($class, $rel, $f_class, $cond, $attrs) = @_;
 

Modified: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Relationship/HasOne.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Relationship/HasOne.pm	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Relationship/HasOne.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -4,6 +4,11 @@
 use strict;
 use warnings;
 
+our %_pod_inherit_config = 
+  (
+   class_map => { 'DBIx::Class::Relationship::HasOne' => 'DBIx::Class::Relationship' }
+  );
+
 sub might_have {
   shift->_has_one('LEFT' => @_);
 }

Modified: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Relationship/ManyToMany.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Relationship/ManyToMany.pm	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Relationship/ManyToMany.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -7,6 +7,11 @@
 use Carp::Clan qw/^DBIx::Class/;
 use Sub::Name ();
 
+our %_pod_inherit_config = 
+  (
+   class_map => { 'DBIx::Class::Relationship::ManyToMany' => 'DBIx::Class::Relationship' }
+  );
+
 sub many_to_many {
   my ($class, $meth, $rel, $f_rel, $rel_attrs) = @_;
 

Modified: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Relationship/ProxyMethods.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Relationship/ProxyMethods.pm	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Relationship/ProxyMethods.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -6,6 +6,11 @@
 use Sub::Name ();
 use base qw/DBIx::Class/;
 
+our %_pod_inherit_config = 
+  (
+   class_map => { 'DBIx::Class::Relationship::ProxyMethods' => 'DBIx::Class::Relationship' }
+  );
+
 sub register_relationship {
   my ($class, $rel, $info) = @_;
   if (my $proxy_list = $info->{attrs}{proxy}) {

Modified: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/ResultSet.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/ResultSet.pm	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/ResultSet.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -957,7 +957,9 @@
 
 sub _construct_object {
   my ($self, @row) = @_;
-  my $info = $self->_collapse_result($self->{_attrs}{as}, \@row);
+
+  my $info = $self->_collapse_result($self->{_attrs}{as}, \@row)
+    or return ();
   my @new = $self->result_class->inflate_result($self->result_source, @$info);
   @new = $self->{_attrs}{record_filter}->(@new)
     if exists $self->{_attrs}{record_filter};
@@ -967,6 +969,19 @@
 sub _collapse_result {
   my ($self, $as_proto, $row) = @_;
 
+  # if the first row that ever came in is totally empty - this means we got
+  # hit by a smooth^Wempty left-joined resultset. Just noop in that case
+  # instead of producing a {}
+  #
+  my $has_def;
+  for (@$row) {
+    if (defined $_) {
+      $has_def++;
+      last;
+    }
+  }
+  return undef unless $has_def;
+
   my @copy = @$row;
 
   # 'foo'         => [ undef, 'foo' ]
@@ -1227,6 +1242,11 @@
   $tmp_attrs->{select} = $rsrc->storage->_count_select ($rsrc, $tmp_attrs);
   $tmp_attrs->{as} = 'count';
 
+  # read the comment on top of the actual function to see what this does
+  $tmp_attrs->{from} = $self->_switch_to_inner_join_if_needed (
+    $tmp_attrs->{from}, $tmp_attrs->{alias}
+  );
+
   my $tmp_rs = $rsrc->resultset_class->new($rsrc, $tmp_attrs)->get_column ('count');
 
   return $tmp_rs;
@@ -1243,8 +1263,8 @@
 
   my $sub_attrs = { %$attrs };
 
-  # these can not go in the subquery, and there is no point of ordering it
-  delete $sub_attrs->{$_} for qw/collapse select as order_by/;
+  # extra selectors do not go in the subquery and there is no point of ordering it
+  delete $sub_attrs->{$_} for qw/collapse prefetch_select select as order_by/;
 
   # if we prefetch, we group_by primary keys only as this is what we would get out of the rs via ->next/->all
   # clobber old group_by regardless
@@ -1254,8 +1274,20 @@
 
   $sub_attrs->{select} = $rsrc->storage->_subq_count_select ($rsrc, $sub_attrs);
 
+  # read the comment on top of the actual function to see what this does
+  $sub_attrs->{from} = $self->_switch_to_inner_join_if_needed (
+    $sub_attrs->{from}, $sub_attrs->{alias}
+  );
+
+  # this is so that ordering can be thrown away in things like Top limit
+  $sub_attrs->{-for_count_only} = 1;
+
+  my $sub_rs = $rsrc->resultset_class->new ($rsrc, $sub_attrs);
+
   $attrs->{from} = [{
-    count_subq => $rsrc->resultset_class->new ($rsrc, $sub_attrs )->as_query
+    -alias => 'count_subq',
+    -source_handle => $rsrc->handle,
+    count_subq => $sub_rs->as_query,
   }];
 
   # the subquery replaces this
@@ -1265,6 +1297,77 @@
 }
 
 
+# The DBIC relationship chaining implementation is pretty simple - every
+# new related_relationship is pushed onto the {from} stack, and the {select}
+# window simply slides further in. This means that when we count somewhere
+# in the middle, we have to make sure that everything in the join chain is an
+# actual inner join, otherwise the count will come back with unpredictable
+# results (a resultset may be generated with _some_ rows regardless of whether
+# the relation which the $rs currently selects has rows or not). E.g.
+# $artist_rs->cds->count - normally generates:
+# SELECT COUNT( * ) FROM artist me LEFT JOIN cd cds ON cds.artist = me.artistid
+# which actually returns the number of artists * (number of cds || 1)
+#
+# So what we do here is crawl {from}, determine if the current alias is at
+# the top of the stack, and if not - make sure the chain is inner-joined down
+# to the root.
+#
+sub _switch_to_inner_join_if_needed {
+  my ($self, $from, $alias) = @_;
+
+  return $from if (
+    ref $from ne 'ARRAY'
+      ||
+    ref $from->[0] ne 'HASH'
+      ||
+    ! $from->[0]{-alias}
+      ||
+    $from->[0]{-alias} eq $alias
+  );
+
+  # this would be the case with a subquery - we'll never find
+  # the target as it is not in the parseable part of {from}
+  return $from if @$from == 1;
+
+  my $switch_branch;
+  JOINSCAN:
+  for my $j (@{$from}[1 .. $#$from]) {
+    if ($j->[0]{-alias} eq $alias) {
+      $switch_branch = $j->[0]{-join_path};
+      last JOINSCAN;
+    }
+  }
+
+  # something else went wrong
+  return $from unless $switch_branch;
+
+  # So it looks like we will have to switch some stuff around.
+  # local() is useless here as we will be leaving the scope
+  # anyway, and deep cloning is just too fucking expensive
+  # So replace the inner hashref manually
+  my @new_from = ($from->[0]);
+  my $sw_idx = { map { $_ => 1 } @$switch_branch };
+
+  for my $j (@{$from}[1 .. $#$from]) {
+    my $jalias = $j->[0]{-alias};
+
+    if ($sw_idx->{$jalias}) {
+      my %attrs = %{$j->[0]};
+      delete $attrs{-join_type};
+      push @new_from, [
+        \%attrs,
+        @{$j}[ 1 .. $#$j ],
+      ];
+    }
+    else {
+      push @new_from, $j;
+    }
+  }
+
+  return \@new_from;
+}
+
+
 sub _bool {
   return 1;
 }
@@ -1311,13 +1414,12 @@
 
   my @obj;
 
-  # TODO: don't call resolve here
   if (keys %{$self->_resolved_attrs->{collapse}}) {
-#  if ($self->{attrs}{prefetch}) {
-      # Using $self->cursor->all is really just an optimisation.
-      # If we're collapsing has_many prefetches it probably makes
-      # very little difference, and this is cleaner than hacking
-      # _construct_object to survive the approach
+    # Using $self->cursor->all is really just an optimisation.
+    # If we're collapsing has_many prefetches it probably makes
+    # very little difference, and this is cleaner than hacking
+    # _construct_object to survive the approach
+    $self->cursor->reset;
     my @row = $self->cursor->next;
     while (@row) {
       push(@obj, $self->_construct_object(@row));
@@ -1330,6 +1432,7 @@
   }
 
   $self->set_cache(\@obj) if $self->{attrs}{cache};
+
   return @obj;
 }
 
@@ -1344,6 +1447,8 @@
 =back
 
 Resets the resultset's cursor, so you can iterate through the elements again.
+Implicitly resets the storage cursor, so a subsequent L</next> will trigger
+another query.
 
 =cut
 
@@ -1925,16 +2030,25 @@
 # of the attributes supplied
 #
 # used to determine if a subquery is necessary
+#
+# supports some virtual attributes:
+#   -join
+#     This will scan for any joins being present on the resultset.
+#     It is not a mere key-search but a deep inspection of {from}
+#
 
 sub _has_resolved_attr {
   my ($self, @attr_names) = @_;
 
   my $attrs = $self->_resolved_attrs;
 
-  my $join_check_req;
+  my %extra_checks;
 
   for my $n (@attr_names) {
-    ++$join_check_req if $n eq '-join';
+    if (grep { $n eq $_ } (qw/-join/) ) {
+      $extra_checks{$n}++;
+      next;
+    }
 
     my $attr =  $attrs->{$n};
 
@@ -1953,7 +2067,7 @@
 
   # a resolved join is expressed as a multi-level from
   return 1 if (
-    $join_check_req
+    $extra_checks{-join}
       and
     ref $attrs->{from} eq 'ARRAY'
       and
@@ -2119,13 +2233,16 @@
 can also be passed an object representing the foreign row, and the
 value will be set to its primary key.
 
-To create related objects, pass a hashref for the value if the related
-item is a foreign key relationship (L<DBIx::Class::Relationship/belongs_to>),
-and use the name of the relationship as the key. (NOT the name of the field,
-necessarily). For C<has_many> and C<has_one> relationships, pass an arrayref
-of hashrefs containing the data for each of the rows to create in the foreign
-tables, again using the relationship name as the key.
+To create related objects, pass a hashref of related-object column values
+B<keyed on the relationship name>. If the relationship is of type C<multi>
+(L<DBIx::Class::Relationship/has_many>) - pass an arrayref of hashrefs.
+The process will correctly identify columns holding foreign keys, and will
+transparently populate them from the keys of the corresponding relation.
+This can be applied recursively, and will work correctly for a structure
+with an arbitrary depth and width, as long as the relationships actually
+exist and the correct column data has been supplied.
 
+
 Instead of hashrefs of plain related data (key/value pairs), you may
 also pass new or inserted objects. New objects (not inserted yet, see
 L</new>), will be inserted into their appropriate tables.
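
For a concrete picture of the recursive create described above, here is a
minimal sketch, assuming an Artist/CD/Track layout like the DBICTest schema
used by the test suite (all data is made up):

  my $artist = $schema->resultset('Artist')->create({
    name => 'Some New Artist',          # plain column on Artist
    cds  => [                           # has_many: arrayref of hashrefs
      {
        title  => 'First Album',
        year   => 2009,
        tracks => [                     # nested has_many, resolved recursively
          { title => 'Opening Track' },
        ],
      },
    ],
  });
  # cd.artist and track.cd foreign keys are filled in automatically
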
@@ -2450,7 +2567,7 @@
         "' has no such relationship $rel")
       unless $rel_info;
 
-    my ($from,$seen) = $self->_resolve_from($rel);
+    my ($from,$seen) = $self->_chain_relationship($rel);
 
     my $join_count = $seen->{$rel};
     my $alias = ($join_count > 1 ? join('_', $rel, $join_count) : $rel);
@@ -2548,7 +2665,12 @@
 # in order to properly resolve prefetch aliases (any alias
 # with a relation_chain_depth less than the depth of the
 # current prefetch is not considered)
-sub _resolve_from {
+#
+# The increments happen in 1/2s to make it easier to correlate the
+# join depth with the join path. An integer means a relationship
+# specified via a search_related, whereas a fraction means an added
+# join/prefetch via attributes
+sub _chain_relationship {
   my ($self, $rel) = @_;
   my $source = $self->result_source;
   my $attrs = $self->{attrs};
@@ -2564,19 +2686,63 @@
   }];
 
   my $seen = { %{$attrs->{seen_join} || {} } };
+  my $jpath = ($attrs->{seen_join} && keys %{$attrs->{seen_join}}) 
+    ? $from->[-1][0]{-join_path} 
+    : [];
 
+
   # we need to take the prefetch the attrs into account before we
   # ->_resolve_join as otherwise they get lost - captainL
   my $merged = $self->_merge_attr( $attrs->{join}, $attrs->{prefetch} );
 
-  push @$from, $source->_resolve_join($merged, $attrs->{alias}, $seen) if ($merged);
+  my @requested_joins = $source->_resolve_join(
+    $merged,
+    $attrs->{alias},
+    $seen,
+    $jpath,
+  );
 
-  ++$seen->{-relation_chain_depth};
+  push @$from, @requested_joins;
 
-  push @$from, $source->_resolve_join($rel, $attrs->{alias}, $seen);
+  $seen->{-relation_chain_depth} += 0.5;
 
-  ++$seen->{-relation_chain_depth};
+  # if $self already had a join/prefetch specified on it, the requested
+  # $rel might very well be already included. What we do in this case
+  # is effectively a no-op (except that we bump up the chain_depth on
+  # the join in question so we could tell it *is* the search_related)
+  my $already_joined;
 
+
+  # we consider the last one, hence the reverse scan
+  for my $j (reverse @requested_joins) {
+    if ($rel eq $j->[0]{-join_path}[-1]) {
+      $j->[0]{-relation_chain_depth} += 0.5;
+      $already_joined++;
+      last;
+    }
+  }
+
+# alternative way to scan the entire chain - not backwards compatible
+#  for my $j (reverse @$from) {
+#    next unless ref $j eq 'ARRAY';
+#    if ($j->[0]{-join_path} && $j->[0]{-join_path}[-1] eq $rel) {
+#      $j->[0]{-relation_chain_depth} += 0.5;
+#      $already_joined++;
+#      last;
+#    }
+#  }
+
+  unless ($already_joined) {
+    push @$from, $source->_resolve_join(
+      $rel,
+      $attrs->{alias},
+      $seen,
+      $jpath,
+    );
+  }
+
+  $seen->{-relation_chain_depth} += 0.5;
+
   return ($from,$seen);
 }
 
@@ -2687,30 +2853,29 @@
       [
         @{ $attrs->{from} },
         $source->_resolve_join(
-          $join, $alias, { %{ $attrs->{seen_join} || {} } }
+          $join,
+          $alias,
+          { %{ $attrs->{seen_join} || {} } },
+          ($attrs->{seen_join} && keys %{$attrs->{seen_join}})
+            ? $attrs->{from}[-1][0]{-join_path}
+            : []
+          ,
         )
       ];
   }
 
-  if ( $attrs->{order_by} ) {
+  if ( defined $attrs->{order_by} ) {
     $attrs->{order_by} = (
       ref( $attrs->{order_by} ) eq 'ARRAY'
       ? [ @{ $attrs->{order_by} } ]
-      : [ $attrs->{order_by} ]
+      : [ $attrs->{order_by} || () ]
     );
   }
-  else {
-    $attrs->{order_by} = [];
+
+  if ($attrs->{group_by} and ! ref $attrs->{group_by}) {
+    $attrs->{group_by} = [ $attrs->{group_by} ];
   }
 
-  # If the order_by is otherwise empty - we will use this for TOP limit
-  # emulation and the like.
-  # Although this is needed only if the order_by is not defined, it is
-  # actually cheaper to just populate this rather than properly examining
-  # order_by (stuf like [ {} ] and the like)
-  $attrs->{_virtual_order_by} = [ $self->result_source->primary_columns ];
-
-
   $attrs->{collapse} ||= {};
   if ( my $prefetch = delete $attrs->{prefetch} ) {
     $prefetch = $self->_merge_attr( {}, $prefetch );
@@ -2722,10 +2887,11 @@
     my @prefetch =
       $source->_resolve_prefetch( $prefetch, $alias, $join_map, $prefetch_ordering, $attrs->{collapse} );
 
-    push( @{ $attrs->{select} }, map { $_->[0] } @prefetch );
-    push( @{ $attrs->{as} },     map { $_->[1] } @prefetch );
+    $attrs->{prefetch_select} = [ map { $_->[0] } @prefetch ];
+    push @{ $attrs->{select} }, @{$attrs->{prefetch_select}};
+    push @{ $attrs->{as} }, (map { $_->[1] } @prefetch);
 
-    push( @{ $attrs->{order_by} }, @$prefetch_ordering );
+    push( @{$attrs->{order_by}}, @$prefetch_ordering );
     $attrs->{_collapse_order_by} = \@$prefetch_ordering;
   }
 
@@ -2738,8 +2904,11 @@
   # even though it doesn't make much sense, this is what pre 081xx has
   # been doing
   if (my $page = delete $attrs->{page}) {
-    $attrs->{offset} = ($attrs->{rows} * ($page - 1)) +
-      ($attrs->{offset} || 0);
+    $attrs->{offset} = 
+      ($attrs->{rows} * ($page - 1))
+            +
+      ($attrs->{offset} || 0)
+    ;
   }
 
   return $self->{_attrs} = $attrs;
@@ -2751,13 +2920,21 @@
   my $paths = {};
   return $paths unless ref $fromspec eq 'ARRAY';
 
+  my $cur_depth = $seen->{-relation_chain_depth} || 0;
+
+  if (int ($cur_depth) != $cur_depth) {
+    $self->throw_exception ("-relation_chain_depth is not an integer, something went horribly wrong ($cur_depth)");
+  }
+
   for my $j (@$fromspec) {
 
     next if ref $j ne 'ARRAY';
-    next if $j->[0]{-relation_chain_depth} < ( $seen->{-relation_chain_depth} || 0);
+    next if $j->[0]{-relation_chain_depth} < $cur_depth;
 
+    my $jpath = $j->[0]{-join_path};
+
     my $p = $paths;
-    $p = $p->{$_} ||= {} for @{$j->[0]{-join_path}};
+    $p = $p->{$_} ||= {} for @{$jpath}[$cur_depth .. $#$jpath];
     push @{$p->{-join_aliases} }, $j->[0]{-alias};
   }
 

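To illustrate the inner-join switch and the count changes in the ResultSet.pm
hunks above, a sketch assuming the usual Artist/CD test schema (names are
illustrative only):

  my $cds_rs = $schema->resultset('Artist')
                      ->search({ name => 'Artist Without CDs' })
                      ->search_related('cds');

  # Previously the chained LEFT JOIN leaked into the count:
  #   SELECT COUNT( * ) FROM artist me
  #     LEFT JOIN cd cds ON cds.artist = me.artistid
  # which returns 1 even when the artist has no CDs at all.
  # With _switch_to_inner_join_if_needed the join is downgraded to an
  # INNER JOIN for counting purposes, so this now correctly returns 0.
  my $count = $cds_rs->count;
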
Modified: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/ResultSetColumn.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/ResultSetColumn.pm	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/ResultSetColumn.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -39,27 +39,48 @@
 
   $rs->throw_exception("column must be supplied") unless $column;
 
-  my $new_parent_rs = $rs->search_rs; # we don't want to mess up the original, so clone it
+  my $orig_attrs = $rs->_resolved_attrs;
+  my $new_parent_rs = $rs->search_rs;
 
   # prefetch causes additional columns to be fetched, but we can not just make a new
   # rs via the _resolved_attrs trick - we need to retain the separation between
   # +select/+as and select/as. At the same time we want to preserve any joins that the
   # prefetch would otherwise generate.
-  my $init_attrs = $new_parent_rs->{attrs} ||= {};
-  delete $init_attrs->{collapse};
-  $init_attrs->{join} = $rs->_merge_attr( delete $init_attrs->{join}, delete $init_attrs->{prefetch} );
 
+  my $new_attrs = $new_parent_rs->{attrs} ||= {};
+  $new_attrs->{join} = $rs->_merge_attr( delete $new_attrs->{join}, delete $new_attrs->{prefetch} );
+
   # If $column can be found in the 'as' list of the parent resultset, use the
   # corresponding element of its 'select' list (to keep any custom column
   # definition set up with 'select' or '+select' attrs), otherwise use $column
   # (to create a new column definition on-the-fly).
-  my $attrs = $new_parent_rs->_resolved_attrs;
 
-  my $as_list = $attrs->{as} || [];
-  my $select_list = $attrs->{select} || [];
+  my $as_list = $orig_attrs->{as} || [];
+  my $select_list = $orig_attrs->{select} || [];
   my $as_index = List::Util::first { ($as_list->[$_] || "") eq $column } 0..$#$as_list;
   my $select = defined $as_index ? $select_list->[$as_index] : $column;
 
+  # {collapse} would mean a has_many join was injected, which in turn means
+  # we need to group IF WE CAN (only if the column in question is unique)
+  if (!$new_attrs->{group_by} && keys %{$orig_attrs->{collapse}}) {
+
+    # scan for a constraint that would contain our column only - that'd be proof
+    # enough it is unique
+    my $constraints = { $rs->result_source->unique_constraints };
+    for my $constraint_columns ( values %$constraints ) {
+
+      next unless @$constraint_columns == 1;
+
+      my $col = $constraint_columns->[0];
+      my $fqcol = join ('.', $new_attrs->{alias}, $col);
+
+      if ($col eq $select or $fqcol eq $select) {
+        $new_attrs->{group_by} = [ $select ];
+        last;
+      }
+    }
+  }
+
   my $new = bless { _select => $select, _as => $column, _parent_resultset => $new_parent_rs }, $class;
   return $new;
 }

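The group_by injection above only fires when a has_many prefetch forced a
collapse; a sketch with the assumed Artist/CD schema:

  # prefetch => 'cds' adds a has_many join, so every artist row is
  # repeated once per CD. Since 'artistid' is covered by a single-column
  # unique constraint, the new code adds group_by => [ 'artistid' ]
  # (or its alias-qualified form), keeping aggregates over the column correct.
  my $rs = $schema->resultset('Artist')->search({}, { prefetch => 'cds' });
  my $max_id = $rs->get_column('artistid')->max;
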
Modified: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/ResultSource.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/ResultSource.pm	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/ResultSource.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -896,7 +896,7 @@
   }
   return unless $f_source; # Can't test rel without f_source
 
-  eval { $self->_resolve_join($rel, 'me') };
+  eval { $self->_resolve_join($rel, 'me', {}, []) };
 
   if ($@) { # If the resolve failed, back out and re-throw the error
     delete $rels{$rel}; #
@@ -1090,19 +1090,17 @@
 
   # we need a supplied one, because we do in-place modifications, no returns
   $self->throw_exception ('You must supply a seen hashref as the 3rd argument to _resolve_join')
-    unless $seen;
+    unless ref $seen eq 'HASH';
 
-  # This isn't quite right, we should actually dive into $seen and reconstruct
-  # the entire path (the reference entry point would be the join conditional
-  # with depth == current_depth - 1. At this point however nothing depends on
-  # having the entire path, transcending related_resultset, so just leave it
-  # as is, hairy enough already.
-  $jpath ||= [];
+  $self->throw_exception ('You must supply a joinpath arrayref as the 4th argument to _resolve_join')
+    unless ref $jpath eq 'ARRAY';
 
+  $jpath = [@$jpath];
+
   if (ref $join eq 'ARRAY') {
     return
       map {
-        $self->_resolve_join($_, $alias, $seen, [@$jpath], $force_left);
+        $self->_resolve_join($_, $alias, $seen, $jpath, $force_left);
       } @$join;
   } elsif (ref $join eq 'HASH') {
     return
@@ -1120,6 +1118,8 @@
     $self->throw_exception("No idea how to resolve join reftype ".ref $join);
   } else {
 
+    return() unless defined $join;
+
     my $count = ++$seen->{$join};
     my $as = ($count > 1 ? "${join}_${count}" : $join);
 
@@ -1196,7 +1196,6 @@
 
 sub _resolve_condition {
   my ($self, $cond, $as, $for) = @_;
-  #warn %$cond;
   if (ref $cond eq 'HASH') {
     my %ret;
     foreach my $k (keys %{$cond}) {
@@ -1237,7 +1236,7 @@
   } elsif (ref $cond eq 'ARRAY') {
     return [ map { $self->_resolve_condition($_, $as, $for) } @$cond ];
   } else {
-   die("Can't handle this yet :(");
+   die("Can't handle condition $cond yet :(");
   }
 }
 
@@ -1342,15 +1341,14 @@
       "don't know how to resolve prefetch reftype ".ref($pre));
   }
   else {
-
     my $p = $alias_map;
     $p = $p->{$_} for (@$pref_path, $pre);
 
     $self->throw_exception (
-      "Unable to resolve prefetch $pre - join alias map does not contain an entry for path "
+      "Unable to resolve prefetch $pre - join alias map does not contain an entry for path: "
       . join (' -> ', @$pref_path, $pre)
     ) if (ref $p->{-join_aliases} ne 'ARRAY' or not @{$p->{-join_aliases}} );
-    
+
     my $as = shift @{$p->{-join_aliases}};
 
     my $rel_info = $self->relationship_info( $pre );

Modified: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Row.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Row.pm	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Row.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -162,7 +162,7 @@
   if ($attrs) {
     $new->throw_exception("attrs must be a hashref")
       unless ref($attrs) eq 'HASH';
-    
+
     my ($related,$inflated);
     ## Pretend all the rels are actual objects, unset below if not, for insert() to fix
     $new->{_rel_in_storage} = 1;
@@ -235,7 +235,7 @@
       }
       $new->throw_exception("No such column $key on $class")
         unless $class->has_column($key);
-      $new->store_column($key => $attrs->{$key});          
+      $new->store_column($key => $attrs->{$key});
     }
 
     $new->{_relationship_data} = $related if $related;
@@ -283,7 +283,7 @@
   my $rollback_guard;
 
   # Check if we stored uninserted relobjs here in new()
-  my %related_stuff = (%{$self->{_relationship_data} || {}}, 
+  my %related_stuff = (%{$self->{_relationship_data} || {}},
                        %{$self->{_inflated_column} || {}});
 
   if(!$self->{_rel_in_storage}) {
@@ -332,7 +332,7 @@
 
   ## PK::Auto
   my @auto_pri = grep {
-                   !defined $self->get_column($_) || 
+                   !defined $self->get_column($_) ||
                    ref($self->get_column($_)) eq 'SCALAR'
                  } $self->primary_columns;
 
@@ -413,7 +413,7 @@
 Indicates whether the object exists as a row in the database or
 not. This is set to true when L<DBIx::Class::ResultSet/find>,
 L<DBIx::Class::ResultSet/create> or L<DBIx::Class::ResultSet/insert>
-are used. 
+are used.
 
 Creating a row object using L<DBIx::Class::ResultSet/new>, or calling
 L</delete> on one, sets it to false.
@@ -519,14 +519,14 @@
 
 The object is still perfectly usable, but L</in_storage> will
 now return 0 and the object must be reinserted using L</insert>
-before it can be used to L</update> the row again. 
+before it can be used to L</update> the row again.
 
 If you delete an object in a class with a C<has_many> relationship, an
 attempt is made to delete all the related objects as well. To turn
 this behaviour off, pass C<< cascade_delete => 0 >> in the C<$attr>
 hashref of the relationship, see L<DBIx::Class::Relationship>. Any
 database-level cascade or restrict will take precedence over a
-DBIx-Class-based cascading delete. 
+DBIx-Class-based cascading delete.
 
 If you delete an object within a txn_do() (see L<DBIx::Class::Storage/txn_do>)
 and the transaction subsequently fails, the row object will remain marked as
@@ -600,7 +600,7 @@
   return $self->{_column_data}{$column} if exists $self->{_column_data}{$column};
   if (exists $self->{_inflated_column}{$column}) {
     return $self->store_column($column,
-      $self->_deflated_column($column, $self->{_inflated_column}{$column}));   
+      $self->_deflated_column($column, $self->{_inflated_column}{$column}));
   }
   $self->throw_exception( "No such column '${column}'" ) unless $self->has_column($column);
   return undef;
@@ -702,7 +702,7 @@
 Throws an exception if the column does not exist.
 
 Marks a column as having been changed regardless of whether it has
-really changed.  
+really changed.
 
 =cut
 sub make_column_dirty {
@@ -711,7 +711,7 @@
   $self->throw_exception( "No such column '${column}'" )
     unless exists $self->{_column_data}{$column} || $self->has_column($column);
 
-  # the entire clean/dirty code relieas on exists, not on true/false
+  # the entire clean/dirty code relies on exists, not on true/false
   return 1 if exists $self->{_dirty_columns}{$column};
 
   $self->{_dirty_columns}{$column} = 1;
@@ -787,9 +787,12 @@
   $self->store_column($column, $new_value);
 
   my $dirty;
-  if (defined $old_value xor defined $new_value) {
+  if (!$self->in_storage) { # no point tracking dirtiness on uninserted data
     $dirty = 1;
   }
+  elsif (defined $old_value xor defined $new_value) {
+    $dirty = 1;
+  }
   elsif (not defined $old_value) {  # both undef
     $dirty = 0;
   }
@@ -799,8 +802,8 @@
   else {  # do a numeric comparison if datatype allows it
     my $colinfo = $self->column_info ($column);
 
-    # cache for speed
-    if (not defined $colinfo->{is_numeric}) {
+    # cache for speed (the object may *not* have a resultsource instance)
+    if (!defined $colinfo->{is_numeric} && $self->_source_handle) {
       $colinfo->{is_numeric} =
         $self->result_source->schema->storage->is_datatype_numeric ($colinfo->{data_type})
           ? 1
@@ -829,7 +832,7 @@
 
   $row->set_columns({ $col => $val, ... });
 
-=over 
+=over
 
 =item Arguments: \%columndata
 
@@ -864,7 +867,7 @@
 =back
 
 Sets more than one column value at once. Any inflated values are
-deflated and the raw values stored. 
+deflated and the raw values stored.
 
 Any related values passed as Row objects, using the relation name as a
 key, are reduced to the appropriate foreign key values and stored. If
@@ -908,7 +911,7 @@
       }
     }
   }
-  $self->set_columns($upd);    
+  $self->set_columns($upd);
 }
 
 =head2 copy
@@ -954,7 +957,7 @@
   $new->set_inflated_columns($changes);
   $new->insert;
 
-  # Its possible we'll have 2 relations to the same Source. We need to make 
+  # It's possible we'll have 2 relations to the same Source. We need to make
   # sure we don't try to insert the same row twice esle we'll violate unique
   # constraints
   my $rels_copied = {};
@@ -963,7 +966,7 @@
     my $rel_info = $self->result_source->relationship_info($rel);
 
     next unless $rel_info->{attrs}{cascade_copy};
-  
+
     my $resolved = $self->result_source->_resolve_condition(
       $rel_info->{cond}, $rel, $new
     );
@@ -975,7 +978,7 @@
       $copied->{$id_str} = 1;
       my $rel_copy = $related->copy($resolved);
     }
- 
+
   }
   return $new;
 }
@@ -1050,7 +1053,6 @@
   my $new = {
     _source_handle => $source_handle,
     _column_data => $me,
-    _in_storage => 1
   };
   bless $new, (ref $class || $class);
 
@@ -1062,14 +1064,25 @@
       unless $pre_source;
     if (ref($pre_val->[0]) eq 'ARRAY') { # multi
       my @pre_objects;
-      foreach my $pre_rec (@$pre_val) {
-        unless ($pre_source->primary_columns == grep { exists $pre_rec->[0]{$_}
-           and defined $pre_rec->[0]{$_} } $pre_source->primary_columns) {
-          next;
+
+      for my $me_pref (@$pre_val) {
+
+        # the collapser currently *could* return bogus elements with all
+        # columns set to undef
+        my $has_def;
+        for (values %{$me_pref->[0]}) {
+          if (defined $_) {
+            $has_def++;
+            last;
+          }
         }
-        push(@pre_objects, $pre_source->result_class->inflate_result(
-                             $pre_source, @{$pre_rec}));
+        next unless $has_def;
+
+        push @pre_objects, $pre_source->result_class->inflate_result(
+          $pre_source, @$me_pref
+        );
       }
+
       $new->related_resultset($pre)->set_cache(\@pre_objects);
     } elsif (defined $pre_val->[0]) {
       my $fetched;
@@ -1087,11 +1100,13 @@
       } elsif ($accessor eq 'filter') {
         $new->{_inflated_column}{$pre} = $fetched;
       } else {
-       $class->throw_exception("Prefetch not supported with accessor '$accessor'");
+       $class->throw_exception("Implicit prefetch (via select/columns) not supported with accessor '$accessor'");
       }
       $new->related_resultset($pre)->set_cache([ $fetched ]);
     }
   }
+
+  $new->in_storage (1);
   return $new;
 }
 
@@ -1260,11 +1275,11 @@
     my $self = shift @_;
     my $attrs = shift @_;
     my $resultset = $self->result_source->resultset;
-    
+
     if(defined $attrs) {
     	$resultset = $resultset->search(undef, $attrs);
     }
-    
+
     return $resultset->find($self->{_orig_ident} || $self->ident_condition);
 }
 

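A short sketch of the new dirtiness rules in the Row.pm hunks above (schema
and values assumed for illustration):

  my $artist = $schema->resultset('Artist')->new({ name => 'New Guy' });

  # not yet in storage: any set_column marks the column dirty without
  # attempting a string/numeric comparison (the object may not even
  # have a result source handle yet, hence the _source_handle guard)
  $artist->name('New Guy');
  $artist->insert;

  # in storage: re-setting the identical value is recognised as clean
  $artist->name('New Guy');
  print $artist->is_column_changed('name') ? "dirty\n" : "clean\n";   # clean
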
Added: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/SQLAHacks/MSSQL.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/SQLAHacks/MSSQL.pm	                        (rev 0)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/SQLAHacks/MSSQL.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -0,0 +1,33 @@
+package # Hide from PAUSE
+  DBIx::Class::SQLAHacks::MSSQL;
+
+use base qw( DBIx::Class::SQLAHacks );
+use Carp::Clan qw/^DBIx::Class|^SQL::Abstract/;
+
+#
+# MSSQL is awkward wrt TOP (its crude LIMIT substitute) and ordering.
+# One needs to add a TOP to *all* ordered subqueries, if
+# TOP has been used in the statement at least once.
+# Do it here.
+#
+sub select {
+  my $self = shift;
+
+  my ($sql, @bind) = $self->SUPER::select (@_);
+
+  # ordering was requested and there are at least 2 SELECT/FROM pairs
+  # (thus subquery), and there is no TOP specified
+  if (
+    $sql =~ /\bSELECT\b .+? \bFROM\b .+? \bSELECT\b .+? \bFROM\b/isx
+      &&
+    $sql !~ /^ \s* SELECT \s+ TOP \s+ \d+ /xi
+      &&
+    scalar $self->_order_by_chunks ($_[3]->{order_by})
+  ) {
+    $sql =~ s/^ \s* SELECT \s/SELECT TOP 100 PERCENT /xi;
+  }
+
+  return wantarray ? ($sql, @bind) : $sql;
+}
+
+1;

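The new MSSQL-specific SQL maker above only alters statements that are both
limited and ordered; a hedged usage sketch (storage selection and data
assumed):

  # On a Microsoft SQL Server connection the TOP emulation wraps the
  # real query in outer SELECTs; an ordered SELECT that ends up as a
  # subquery then gets 'TOP 100 PERCENT' prepended so the server
  # accepts the inner ORDER BY.
  my @first_ten = $schema->resultset('CD')->search(
    {},
    { order_by => 'title', rows => 10 },
  )->all;
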
Modified: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/SQLAHacks.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/SQLAHacks.pm	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/SQLAHacks.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -1,6 +1,10 @@
 package # Hide from PAUSE
   DBIx::Class::SQLAHacks;
 
+# This module is a subclass of SQL::Abstract::Limit and includes a number
+# of DBIC-specific workarounds, not yet suitable for inclusion into the
+# SQLA core
+
 use base qw/SQL::Abstract::Limit/;
 use strict;
 use warnings;
@@ -12,12 +16,13 @@
   no warnings qw/redefine/;
   no strict qw/refs/;
   for my $f (qw/carp croak/) {
+
     my $orig = \&{"SQL::Abstract::$f"};
     *{"SQL::Abstract::$f"} = sub {
 
       local $Carp::CarpLevel = 1;   # even though Carp::Clan ignores this, $orig will not
 
-      if (Carp::longmess() =~ /DBIx::Class::SQLAHacks::[\w]+\(\) called/) {
+      if (Carp::longmess() =~ /DBIx::Class::SQLAHacks::[\w]+ .+? called \s at/x) {
         __PACKAGE__->can($f)->(@_);
       }
       else {
@@ -27,6 +32,9 @@
   }
 }
 
+
+# Tries to determine limit dialect.
+#
 sub new {
   my $self = shift->SUPER::new(@_);
 
@@ -39,14 +47,12 @@
   $self;
 }
 
-
 # Some databases (sqlite) do not handle multiple parenthesis
-# around in/between arguments. A tentative x IN ( ( 1, 2 ,3) )
+# around in/between arguments. A tentative x IN ( (1, 2 ,3) )
 # is interpreted as x IN 1 or something similar.
 #
 # Since we currently do not have access to the SQLA AST, resort
 # to barbaric mutilation of any SQL supplied in literal form
-
 sub _strip_outer_paren {
   my ($self, $arg) = @_;
 
@@ -116,41 +122,194 @@
 sub _Top {
   my ( $self, $sql, $order, $rows, $offset ) = @_;
 
+  # mangle the input sql so it can be properly aliased in the outer queries
+  $sql =~ s/^ \s* SELECT \s+ (.+?) \s+ (?=FROM)//ix
+    or croak "Unrecognizable SELECT: $sql";
+  my $sql_select = $1;
+  my @sql_select = split (/\s*,\s*/, $sql_select);
+
+  # we can't support subqueries (in fact MSSQL can't) - croak
+  if (@sql_select != @{$self->{_dbic_rs_attrs}{select}}) {
+    croak (sprintf (
+      'SQL SELECT did not parse cleanly - retrieved %d comma-separated elements, while '
+    . 'the resultset select attribute contains %d elements: %s',
+      scalar @sql_select,
+      scalar @{$self->{_dbic_rs_attrs}{select}},
+      $sql_select,
+    ));
+  }
+
+  my $name_sep = $self->name_sep || '.';
+  my $esc_name_sep = "\Q$name_sep\E";
+  my $col_re = qr/ ^ (?: (.+) $esc_name_sep )? ([^$esc_name_sep]+) $ /x;
+
+  my $rs_alias = $self->{_dbic_rs_attrs}{alias};
+  my $quoted_rs_alias = $self->_quote ($rs_alias);
+
+  # construct the new select lists, rename(alias) some columns if necessary
+  my (@outer_select, @inner_select, %seen_names, %col_aliases, %outer_col_aliases);
+
+  for (@{$self->{_dbic_rs_attrs}{select}}) {
+    next if ref $_;
+    my ($table, $orig_colname) = ( $_ =~ $col_re );
+    next unless $table;
+    $seen_names{$orig_colname}++;
+  }
+
+  for my $i (0 .. $#sql_select) {
+
+    my $colsel_arg = $self->{_dbic_rs_attrs}{select}[$i];
+    my $colsel_sql = $sql_select[$i];
+
+    # this may or may not work (in case of a scalarref or something)
+    my ($table, $orig_colname) = ( $colsel_arg =~ $col_re );
+
+    my $quoted_alias;
+    # do not attempt to understand non-scalar selects - alias numerically
+    if (ref $colsel_arg) {
+      $quoted_alias = $self->_quote ('column_' . (@inner_select + 1) );
+    }
+    # column name seen more than once - alias it
+    elsif ($orig_colname && ($seen_names{$orig_colname} > 1) ) {
+      $quoted_alias = $self->_quote ("${table}__${orig_colname}");
+    }
+
+    # we did rename - make a record and adjust
+    if ($quoted_alias) {
+      # alias inner
+      push @inner_select, "$colsel_sql AS $quoted_alias";
+
+      # push alias to outer
+      push @outer_select, $quoted_alias;
+
+      # Any aliasing accumulated here will be considered
+      # both for inner and outer adjustments of ORDER BY
+      $self->__record_alias (
+        \%col_aliases,
+        $quoted_alias,
+        $colsel_arg,
+        $table ? $orig_colname : undef,
+      );
+    }
+
+    # otherwise just leave things intact inside, and use the abbreviated one outside
+    # (as we do not have table names anymore)
+    else {
+      push @inner_select, $colsel_sql;
+
+      my $outer_quoted = $self->_quote ($orig_colname);  # it was not a duplicate so should just work
+      push @outer_select, $outer_quoted;
+      $self->__record_alias (
+        \%outer_col_aliases,
+        $outer_quoted,
+        $colsel_arg,
+        $table ? $orig_colname : undef,
+      );
+    }
+  }
+
+  my $outer_select = join (', ', @outer_select );
+  my $inner_select = join (', ', @inner_select );
+
+  %outer_col_aliases = (%outer_col_aliases, %col_aliases);
+
+  # deal with order
   croak '$order supplied to SQLAHacks limit emulators must be a hash'
     if (ref $order ne 'HASH');
 
   $order = { %$order }; #copy
 
-  my $last = $rows + $offset;
+  my $req_order = $order->{order_by};
 
-  my $req_order = $self->_order_by ($order->{order_by});
+  # examine normalized version, collapses nesting
+  my $limit_order;
+  if (scalar $self->_order_by_chunks ($req_order)) {
+    $limit_order = $req_order;
+  }
+  else {
+    $limit_order = [ map
+      { join ('', $rs_alias, $name_sep, $_ ) }
+      ( $self->{_dbic_rs_attrs}{_source_handle}->resolve->primary_columns )
+    ];
+  }
 
-  my $limit_order = $req_order ? $order->{order_by} : $order->{_virtual_order_by};
+  my ( $order_by_inner, $order_by_outer ) = $self->_order_directions($limit_order);
+  my $order_by_requested = $self->_order_by ($req_order);
 
-  delete $order->{$_} for qw/order_by _virtual_order_by/;
+  # generate the rest
+  delete $order->{order_by};
   my $grpby_having = $self->_order_by ($order);
 
-  my ( $order_by_inner, $order_by_outer ) = $self->_order_directions($limit_order);
+  # short circuit for counts - the ordering complexity is needless
+  if ($self->{_dbic_rs_attrs}{-for_count_only}) {
+    return "SELECT TOP $rows $inner_select $sql $grpby_having $order_by_outer";
+  }
 
-  $sql =~ s/^\s*(SELECT|select)//;
+  # we can't really adjust the order_by columns, as introspection is lacking
+  # resort to simple substitution
+  for my $col (keys %outer_col_aliases) {
+    for ($order_by_requested, $order_by_outer) {
+      $_ =~ s/\s+$col\s+/ $outer_col_aliases{$col} /g;
+    }
+  }
+  for my $col (keys %col_aliases) {
+    $order_by_inner =~ s/\s+$col\s+/ $col_aliases{$col} /g;
+  }
 
-  $sql = <<"SQL";
-  SELECT * FROM
-  (
-    SELECT TOP $rows * FROM
+
+  my $inner_lim = $rows + $offset;
+
+  $sql = "SELECT TOP $inner_lim $inner_select $sql $grpby_having $order_by_inner";
+
+  if ($offset) {
+    $sql = <<"SQL";
+
+    SELECT TOP $rows $outer_select FROM
     (
-        SELECT TOP $last $sql $grpby_having $order_by_inner
-    ) AS foo
+      $sql
+    ) $quoted_rs_alias
     $order_by_outer
-  ) AS bar
-  $req_order
+SQL
 
+  }
+
+  if ($order_by_requested) {
+    $sql = <<"SQL";
+
+    SELECT $outer_select FROM
+      ( $sql ) $quoted_rs_alias
+    $order_by_requested
 SQL
-    return $sql;
+
+  }
+
+  $sql =~ s/\s*\n\s*/ /g; # parsing out multiline statements is harder than a single line
+  return $sql;
 }
 
+# action at a distance to shorten Top code above
+sub __record_alias {
+  my ($self, $register, $alias, $fqcol, $col) = @_;
 
+  # record qualified name
+  $register->{$fqcol} = $alias;
+  $register->{$self->_quote($fqcol)} = $alias;
 
+  return unless $col;
+
+  # record unqualified name, undef (no adjustment) if a duplicate is found
+  if (exists $register->{$col}) {
+    $register->{$col} = undef;
+  }
+  else {
+    $register->{$col} = $alias;
+  }
+
+  $register->{$self->_quote($col)} = $register->{$col};
+}
+
+
+
 # While we're at it, this should make LIMIT queries more efficient,
 #  without digging into things too deeply
 sub _find_syntax {
@@ -158,6 +317,12 @@
   return $self->{_cached_syntax} ||= $self->SUPER::_find_syntax($syntax);
 }
 
+my $for_syntax = {
+  update => 'FOR UPDATE',
+  shared => 'FOR SHARE',
+};
+# Quotes table names, handles "limit" dialects (e.g. where rownum between x and
+# y), supports SELECT ... FOR UPDATE and SELECT ... FOR SHARE.
 sub select {
   my ($self, $table, $fields, $where, $order, @rest) = @_;
 
@@ -175,18 +340,14 @@
   my ($sql, @where_bind) = $self->SUPER::select(
     $table, $self->_recurse_fields($fields), $where, $order, @rest
   );
-  $sql .= 
-    $self->{for} ?
-    (
-      $self->{for} eq 'update' ? ' FOR UPDATE' :
-      $self->{for} eq 'shared' ? ' FOR SHARE'  :
-      ''
-    ) :
-    ''
-  ;
+  if (my $for = delete $self->{_dbic_rs_attrs}{for}) {
+    $sql .= " $for_syntax->{$for}" if $for_syntax->{$for};
+  }
+
   return wantarray ? ($sql, @{$self->{from_bind}}, @where_bind, @{$self->{having_bind}}, @{$self->{order_bind}} ) : $sql;
 }
 
+# Quotes table names, and handles default inserts
 sub insert {
   my $self = shift;
   my $table = shift;
@@ -202,6 +363,7 @@
   $self->SUPER::insert($table, @_);
 }
 
+# Just quotes table names.
 sub update {
   my $self = shift;
   my $table = shift;
@@ -209,6 +371,7 @@
   $self->SUPER::update($table, @_);
 }
 
+# Just quotes table names.
 sub delete {
   my $self = shift;
   my $table = shift;
@@ -238,28 +401,39 @@
           ? ' AS col'.$self->{rownum_hack_count}++
           : '')
       } @$fields);
-  } elsif ($ref eq 'HASH') {
-    foreach my $func (keys %$fields) {
-      if ($func eq 'distinct') {
-        my $_fields = $fields->{$func};
-        if (ref $_fields eq 'ARRAY' && @{$_fields} > 1) {
-          croak (
-            'The select => { distinct => ... } syntax is not supported for multiple columns.'
-           .' Instead please use { group_by => [ qw/' . (join ' ', @$_fields) . '/ ] }'
-           .' or { select => [ qw/' . (join ' ', @$_fields) . '/ ], distinct => 1 }'
-          );
-        }
-        else {
-          $_fields = @{$_fields}[0] if ref $_fields eq 'ARRAY';
-          carp (
-            'The select => { distinct => ... } syntax will be deprecated in DBIC version 0.09,'
-           ." please use { group_by => '${_fields}' } or { select => '${_fields}', distinct => 1 }"
-          );
-        }
+  }
+  elsif ($ref eq 'HASH') {
+    my %hash = %$fields;
+    my ($select, $as);
+
+    if ($hash{-select}) {
+      $select = $self->_recurse_fields (delete $hash{-select});
+      $as = $self->_quote (delete $hash{-as});
+    }
+    else {
+      my ($func, $args) = each %hash;
+      delete $hash{$func};
+
+      if (lc ($func) eq 'distinct' && ref $args eq 'ARRAY' && @$args > 1) {
+        croak (
+          'The select => { distinct => ... } syntax is not supported for multiple columns.'
+         .' Instead please use { group_by => [ qw/' . (join ' ', @$args) . '/ ] }'
+         .' or { select => [ qw/' . (join ' ', @$args) . '/ ], distinct => 1 }'
+        );
       }
-      return $self->_sqlcase($func)
-        .'( '.$self->_recurse_fields($fields->{$func}).' )';
+      $select = sprintf ('%s( %s )',
+        $self->_sqlcase($func),
+        $self->_recurse_fields($args)
+      );
     }
+
+    # there should be nothing left
+    if (keys %hash) {
+      croak "Malformed select argument - too many keys in hash: " . join (',', keys %$fields );
+    }
+
+    $select .= " AS $as" if $as;
+    return $select;
   }
   # Is the second check absolutely necessary?
   elsif ( $ref eq 'REF' and ref($$fields) eq 'ARRAY' ) {
@@ -277,9 +451,8 @@
 
     my $ret = '';
 
-    if (defined $arg->{group_by}) {
-      $ret = $self->_sqlcase(' group by ')
-        .$self->_recurse_fields($arg->{group_by}, { no_rownum_hack => 1 });
+    if (my $g = $self->_recurse_fields($arg->{group_by}, { no_rownum_hack => 1 }) ) {
+      $ret = $self->_sqlcase(' group by ') . $g;
     }
 
     if (defined $arg->{having}) {
@@ -428,12 +601,15 @@
     return $self->{limit_dialect};
 }
 
+# Set to an array-ref to specify separate left and right quotes for table names.
+# A single scalar is equivalent to [ $char, $char ]
 sub quote_char {
     my $self = shift;
     $self->{quote_char} = shift if @_;
     return $self->{quote_char};
 }
 
+# Character separating quoted table names.
 sub name_sep {
     my $self = shift;
     $self->{name_sep} = shift if @_;
@@ -441,50 +617,3 @@
 }
 
 1;
-
-__END__
-
-=pod
-
-=head1 NAME
-
-DBIx::Class::SQLAHacks - This module is a subclass of SQL::Abstract::Limit
-and includes a number of DBIC-specific workarounds, not yet suitable for
-inclusion into SQLA proper.
-
-=head1 METHODS
-
-=head2 new
-
-Tries to determine limit dialect.
-
-=head2 select
-
-Quotes table names, handles "limit" dialects (e.g. where rownum between x and
-y), supports SELECT ... FOR UPDATE and SELECT ... FOR SHARE.
-
-=head2 insert update delete
-
-Just quotes table names.
-
-=head2 limit_dialect
-
-Specifies the dialect of used for implementing an SQL "limit" clause for
-restricting the number of query results returned.  Valid values are: RowNum.
-
-See L<DBIx::Class::Storage::DBI/connect_info> for details.
-
-=head2 name_sep
-
-Character separating quoted table names.
-
-See L<DBIx::Class::Storage::DBI/connect_info> for details.
-
-=head2 quote_char
-
-Set to an array-ref to specify separate left and right quotes for table names.
-
-See L<DBIx::Class::Storage::DBI/connect_info> for details.
-
-=cut
-

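For orientation, this is roughly what the reworked _Top emulation produces
for a paged, ordered resultset (the SQL in the comment is schematic, not the
literal output):

  my $rs = $schema->resultset('CD')->search(
    {},
    { order_by => { -desc => 'year' }, rows => 5, offset => 10 },
  );

  # roughly:
  #   SELECT <outer select> FROM (
  #     SELECT TOP 5 <outer select> FROM (
  #       SELECT TOP 15 <inner select> FROM cd me ... ORDER BY year DESC
  #     ) me ORDER BY year ASC          -- reversed, to peel off the offset
  #   ) me ORDER BY year DESC           -- restore the requested order
  my @page_three = $rs->all;
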
Modified: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Schema.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Schema.pm	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Schema.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -7,9 +7,8 @@
 use Carp::Clan qw/^DBIx::Class/;
 use Scalar::Util qw/weaken/;
 use File::Spec;
-use MRO::Compat;
 use Sub::Name ();
-require Module::Find;
+use Module::Find();
 
 use base qw/DBIx::Class/;
 

Added: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/AmbiguousGlob.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/AmbiguousGlob.pm	                        (rev 0)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/AmbiguousGlob.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -0,0 +1,44 @@
+package DBIx::Class::Storage::DBI::AmbiguousGlob;
+
+use strict;
+use warnings;
+
+use base 'DBIx::Class::Storage::DBI';
+use mro 'c3';
+
+=head1 NAME
+
+DBIx::Class::Storage::DBI::AmbiguousGlob - Storage component for RDBMS choking on ambiguous columns in subselect counts
+
+=head1 DESCRIPTION
+
+Some servers choke on things like:
+
+  COUNT(*) FROM (SELECT tab1.col, tab2.col FROM tab1 JOIN tab2 ... )
+
+claiming that col is a duplicate column (it loses the table specifiers by
+the time it gets to the *). Thus for any subquery count we select only the
+primary keys of the main table in the inner query. This hopefully still
+hits the indexes and keeps the server happy.
+
+At this point the only overridden method is C<_subq_count_select()>
+
+=cut
+
+sub _subq_count_select {
+  my ($self, $source, $rs_attrs) = @_;
+  my @pcols = map { join '.', $rs_attrs->{alias}, $_ } ($source->primary_columns);
+  return @pcols ? \@pcols : [ 1 ];
+}
+
+=head1 AUTHORS
+
+See L<DBIx::Class/CONTRIBUTORS>
+
+=head1 LICENSE
+
+You may distribute this code under the same terms as Perl itself.
+
+=cut
+
+1;


Property changes on: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/AmbiguousGlob.pm
___________________________________________________________________
Name: svn:eol-style
   + native

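The scenario AmbiguousGlob guards against, sketched with the assumed
Artist/CD schema:

  my $rs = $schema->resultset('Artist')->search({}, { prefetch => 'cds' });

  # count() needs a subquery here (the prefetch implies a collapse).
  # Without this component the inner select may carry columns from both
  # 'me' and 'cds' that share a name, which some servers reject as
  # duplicates once the table qualifiers are lost; with it the inner
  # query selects only the primary key(s) of the main table, roughly:
  #   SELECT COUNT( * ) FROM (
  #     SELECT me.artistid FROM artist me
  #       LEFT JOIN cd cds ON cds.artist = me.artistid
  #     GROUP BY me.artistid
  #   ) count_subq
  my $artist_count = $rs->count;
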
Modified: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Cursor.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Cursor.pm	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Cursor.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -1,10 +1,10 @@
 package DBIx::Class::Storage::DBI::Cursor;
 
-use base qw/DBIx::Class::Cursor/;
-
 use strict;
 use warnings;
 
+use base qw/DBIx::Class::Cursor/;
+
 =head1 NAME
 
 DBIx::Class::Storage::DBI::Cursor - Object representing a query cursor on a
@@ -68,7 +68,11 @@
   my ($storage, $dbh, $self) = @_;
 
   $self->_check_dbh_gen;
-  if ($self->{attrs}{rows} && $self->{pos} >= $self->{attrs}{rows}) {
+  if (
+    $self->{attrs}{software_limit}
+      && $self->{attrs}{rows}
+        && $self->{pos} >= $self->{attrs}{rows}
+  ) {
     $self->{sth}->finish if $self->{sth}->{Active};
     delete $self->{sth};
     $self->{done} = 1;
@@ -128,6 +132,7 @@
         && ($self->{attrs}{offset} || $self->{attrs}{rows})) {
     return $self->next::method;
   }
+
   $self->{storage}->dbh_do($self->can('_dbh_all'), $self);
 }
 

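The cursor change above only matters when software-side limiting is
explicitly requested:

  # software_limit => 1 asks DBIC to fetch rows from the driver and
  # enforce rows/offset in Perl; only in that mode does the cursor now
  # finish the statement handle once enough rows have been read.
  # With ordinary SQL-side limits the handle is left alone.
  my @few = $schema->resultset('CD')->search(
    {},
    { software_limit => 1, rows => 3 },
  )->all;
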
Modified: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/DB2.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/DB2.pm	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/DB2.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -4,9 +4,8 @@
 use warnings;
 
 use base qw/DBIx::Class::Storage::DBI/;
+use mro 'c3';
 
-# __PACKAGE__->load_components(qw/PK::Auto/);
-
 sub _dbh_last_insert_id {
     my ($self, $dbh, $source, $col) = @_;
 

Modified: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/MSSQL.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/MSSQL.pm	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/MSSQL.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -3,8 +3,11 @@
 use strict;
 use warnings;
 
-use base qw/DBIx::Class::Storage::DBI/;
+use base qw/DBIx::Class::Storage::DBI::AmbiguousGlob DBIx::Class::Storage::DBI/;
+use mro 'c3';
 
+__PACKAGE__->sql_maker_class('DBIx::Class::SQLAHacks::MSSQL');
+
 sub _dbh_last_insert_id {
   my ($self, $dbh, $source, $col) = @_;
   my ($id) = $dbh->selectrow_array('SELECT SCOPE_IDENTITY()');

Modified: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/MultiColumnIn.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/MultiColumnIn.pm	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/MultiColumnIn.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -4,6 +4,7 @@
 use warnings;
 
 use base 'DBIx::Class::Storage::DBI';
+use mro 'c3';
 
 =head1 NAME 
 

Modified: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/NoBindVars.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/NoBindVars.pm	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/NoBindVars.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -4,6 +4,7 @@
 use warnings;
 
 use base 'DBIx::Class::Storage::DBI';
+use mro 'c3';
 
 =head1 NAME 
 

Modified: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/ODBC/ACCESS.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/ODBC/ACCESS.pm	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/ODBC/ACCESS.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -2,9 +2,11 @@
 use strict;
 use warnings;
 
-use DBI;
 use base qw/DBIx::Class::Storage::DBI/;
+use mro 'c3';
 
+use DBI;
+
 my $ERR_MSG_START = __PACKAGE__ . ' failed: ';
 
 sub insert {

Modified: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/ODBC/DB2_400_SQL.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/ODBC/DB2_400_SQL.pm	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/ODBC/DB2_400_SQL.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -3,6 +3,7 @@
 use warnings;
 
 use base qw/DBIx::Class::Storage::DBI::ODBC/;
+use mro 'c3';
 
 sub _dbh_last_insert_id {
     my ($self, $dbh, $source, $col) = @_;

Modified: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/ODBC/Microsoft_SQL_Server.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/ODBC/Microsoft_SQL_Server.pm	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/ODBC/Microsoft_SQL_Server.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -3,15 +3,57 @@
 use warnings;
 
 use base qw/DBIx::Class::Storage::DBI::MSSQL/;
+use mro 'c3';
 
+use List::Util();
+
+sub insert_bulk {
+  my $self = shift;
+  my ($source, $cols, $data) = @_;
+
+  my $identity_insert = 0;
+
+  COLUMNS:
+  foreach my $col (@{$cols}) {
+    if ($source->column_info($col)->{is_auto_increment}) {
+      $identity_insert = 1;
+      last COLUMNS;
+    }
+  }
+
+  if ($identity_insert) {
+    my $table = $source->from;
+    $self->dbh->do("SET IDENTITY_INSERT $table ON");
+  }
+
+  $self->next::method(@_);
+
+  if ($identity_insert) {
+    my $table = $source->from;
+    $self->dbh->do("SET IDENTITY_INSERT $table OFF");
+  }
+}
+
 sub _prep_for_execute {
-    my $self = shift;
-    my ($op, $extra_bind, $ident, $args) = @_;
+  my $self = shift;
+  my ($op, $extra_bind, $ident, $args) = @_;
 
-    my ($sql, $bind) = $self->next::method (@_);
-    $sql .= ';SELECT SCOPE_IDENTITY()' if $op eq 'insert';
+  my ($sql, $bind) = $self->next::method (@_);
 
-    return ($sql, $bind);
+  if ($op eq 'insert') {
+    $sql .= ';SELECT SCOPE_IDENTITY()';
+
+    my $col_info = $self->_resolve_column_info($ident, [map $_->[0], @{$bind}]);
+    if (List::Util::first { $_->{is_auto_increment} } (values %$col_info) ) {
+
+      my $table = $ident->from;
+      my $identity_insert_on = "SET IDENTITY_INSERT $table ON";
+      my $identity_insert_off = "SET IDENTITY_INSERT $table OFF";
+      $sql = "$identity_insert_on; $sql; $identity_insert_off";
+    }
+  }
+
+  return ($sql, $bind);
 }
 
 sub _execute {
@@ -64,3 +106,4 @@
 You may distribute this code under the same terms as Perl itself.
 
 =cut
+# vim: sw=2 sts=2

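With the IDENTITY_INSERT handling above, supplying explicit values for an
auto-increment column now works over ODBC/MSSQL; a hedged example assuming
artistid is the identity column:

  # both the bulk path (insert_bulk) and the single-row path wrap the
  # INSERT in SET IDENTITY_INSERT <table> ON / OFF when one of the
  # supplied columns is marked is_auto_increment
  $schema->resultset('Artist')->populate([
    [ qw/artistid name/ ],
    [ 7, 'Seventh Artist' ],
    [ 8, 'Eighth Artist'  ],
  ]);
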
Modified: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/ODBC.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/ODBC.pm	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/ODBC.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -3,6 +3,7 @@
 use warnings;
 
 use base qw/DBIx::Class::Storage::DBI/;
+use mro 'c3';
 
 sub _rebless {
     my ($self) = @_;
@@ -11,9 +12,11 @@
     unless ( $@ ) {
         # Translate the backend name into a perl identifier
         $dbtype =~ s/\W/_/gi;
-        my $class = "DBIx::Class::Storage::DBI::ODBC::${dbtype}";
-        eval "require $class";
-        bless $self, $class unless $@;
+        my $subclass = "DBIx::Class::Storage::DBI::ODBC::${dbtype}";
+        if ($self->load_optional_class($subclass) && !$self->isa($subclass)) {
+            bless $self, $subclass;
+            $self->_rebless;
+        }
     }
 }
 

Modified: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Oracle/Generic.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Oracle/Generic.pm	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Oracle/Generic.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -24,7 +24,7 @@
 =cut
 
 use base qw/DBIx::Class::Storage::DBI/;
-use Carp::Clan qw/^DBIx::Class/;
+use mro 'c3';
 
 # For ORA_BLOB => 113, ORA_CLOB => 112
 use DBD::Oracle qw( :ora_types );
@@ -83,36 +83,18 @@
   return $id;
 }
 
-=head2 connected
-
-Returns true if we have an open (and working) database connection, false if it is not (yet)
-open (or does not work). (Executes a simple SELECT to make sure it works.)
-
-The reason this is needed is that L<DBD::Oracle>'s ping() does not do a real
-OCIPing but just gets the server version, which doesn't help if someone killed
-your session.
-
-=cut
-
-sub connected {
+sub _ping {
   my $self = shift;
 
-  if (not $self->next::method(@_)) {
-    return 0;
-  }
-  else {
-    my $dbh = $self->_dbh;
+  my $dbh = $self->_dbh or return 0;
 
-    local $dbh->{RaiseError} = 1;
+  local $dbh->{RaiseError} = 1;
 
-    eval {
-      my $ping_sth = $dbh->prepare_cached("select 1 from dual");
-      $ping_sth->execute;
-      $ping_sth->finish;
-    };
+  eval {
+    $dbh->do("select 1 from dual");
+  };
 
-    return $@ ? 0 : 1;
-  }
+  return $@ ? 0 : 1;
 }
 
 sub _dbh_execute {
@@ -183,6 +165,49 @@
 
 sub datetime_parser_type { return "DateTime::Format::Oracle"; }
 
+=head2 connect_call_datetime_setup
+
+Used as:
+
+    on_connect_call => 'datetime_setup'
+
+In L<DBIx::Class::Storage::DBI/connect_info> to set the session NLS date and
+timestamp formats for use with L<DBIx::Class::InflateColumn::DateTime>, and the
+necessary environment variables for L<DateTime::Format::Oracle>, which is used
+by it.
+
+Maximum allowable precision is used, unless the environment variables have
+already been set.
+
+These are the defaults used:
+
+  $ENV{NLS_DATE_FORMAT}         ||= 'YYYY-MM-DD HH24:MI:SS';
+  $ENV{NLS_TIMESTAMP_FORMAT}    ||= 'YYYY-MM-DD HH24:MI:SS.FF';
+  $ENV{NLS_TIMESTAMP_TZ_FORMAT} ||= 'YYYY-MM-DD HH24:MI:SS.FF TZHTZM';
+
+To get more than second precision with L<DBIx::Class::InflateColumn::DateTime>
+for your timestamps, use something like this:
+
+  use Time::HiRes 'time';
+  my $ts = DateTime->from_epoch(epoch => time);
+
+=cut
+
+sub connect_call_datetime_setup {
+  my $self = shift;
+  my $dbh  = $self->dbh;
+
+  my $date_format = $ENV{NLS_DATE_FORMAT} ||= 'YYYY-MM-DD HH24:MI:SS';
+  my $timestamp_format = $ENV{NLS_TIMESTAMP_FORMAT} ||=
+    'YYYY-MM-DD HH24:MI:SS.FF';
+  my $timestamp_tz_format = $ENV{NLS_TIMESTAMP_TZ_FORMAT} ||=
+    'YYYY-MM-DD HH24:MI:SS.FF TZHTZM';
+
+  $dbh->do("alter session set nls_date_format = '$date_format'");
+  $dbh->do("alter session set nls_timestamp_format = '$timestamp_format'");
+  $dbh->do("alter session set nls_timestamp_tz_format='$timestamp_tz_format'");
+}
+
 sub _svp_begin {
     my ($self, $name) = @_;
  

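Wiring the new Oracle connect call into connect_info, as the POD above
describes (DSN and credentials are placeholders):

  my $schema = My::Schema->connect(
    'dbi:Oracle:orcl', $user, $pass,
    { on_connect_call => 'datetime_setup' },   # runs connect_call_datetime_setup
  );
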
Modified: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Oracle/WhereJoins.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Oracle/WhereJoins.pm	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Oracle/WhereJoins.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -1,10 +1,11 @@
 package DBIx::Class::Storage::DBI::Oracle::WhereJoins;
 
-use base qw( DBIx::Class::Storage::DBI::Oracle::Generic );
-
 use strict;
 use warnings;
 
+use base qw( DBIx::Class::Storage::DBI::Oracle::Generic );
+use mro 'c3';
+
 __PACKAGE__->sql_maker_class('DBIx::Class::SQLAHacks::OracleJoins');
 
 1;

Modified: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Oracle.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Oracle.pm	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Oracle.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -4,6 +4,7 @@
 use warnings;
 
 use base qw/DBIx::Class::Storage::DBI/;
+use mro 'c3';
 
 sub _rebless {
     my ($self) = @_;

Modified: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Pg.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Pg.pm	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Pg.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -3,15 +3,14 @@
 use strict;
 use warnings;
 
-use DBD::Pg qw(:pg_types);
-
 use base qw/DBIx::Class::Storage::DBI::MultiColumnIn/;
+use mro 'c3';
 
-# __PACKAGE__->load_components(qw/PK::Auto/);
+use DBD::Pg qw(:pg_types);
 
-# Warn about problematic versions of DBD::Pg
-warn "DBD::Pg 1.49 is strongly recommended"
-  if ($DBD::Pg::VERSION < 1.49);
+# Ask for a DBD::Pg with array support
+warn "DBD::Pg 2.9.2 or greater is strongly recommended\n"
+  if ($DBD::Pg::VERSION < 2.009002);  # pg uses (used?) version::qv()
 
 sub with_deferred_fk_checks {
   my ($self, $sub) = @_;

Modified: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Replicated/Types.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Replicated/Types.pm	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Replicated/Types.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -1,13 +1,9 @@
 package # hide from PAUSE
   DBIx::Class::Storage::DBI::Replicated::Types;
 
-=head1 NAME
+# DBIx::Class::Storage::DBI::Replicated::Types - Types used internally by
+# L<DBIx::Class::Storage::DBI::Replicated>
 
-DBIx::Class::Storage::DBI::Replicated::Types - Types used internally by
-L<DBIx::Class::Storage::DBI::Replicated>
-
-=cut
-
 use MooseX::Types
   -declare => [qw/BalancerClassNamePart Weight/];
 use MooseX::Types::Moose qw/ClassName Str Num/;
@@ -17,16 +13,16 @@
 
 subtype BalancerClassNamePart,
   as ClassName;
-    
+
 coerce BalancerClassNamePart,
   from Str,
   via {
     my $type = $_;
     if($type=~m/^::/) {
       $type = 'DBIx::Class::Storage::DBI::Replicated::Balancer'.$type;
-    }  
-    Class::MOP::load_class($type);  
-    $type;  	
+    }
+    Class::MOP::load_class($type);
+    $type;
   };
 
 subtype Weight,
@@ -34,14 +30,12 @@
   where { $_ >= 0 },
   message { 'weight must be a decimal greater than 0' };
 
-=head1 AUTHOR
+# AUTHOR
+#
+#  John Napiorkowski <john.napiorkowski at takkle.com>
+#
+# LICENSE
+#
+#  You may distribute this code under the same terms as Perl itself.
 
-  John Napiorkowski <john.napiorkowski at takkle.com>
-
-=head1 LICENSE
-
-You may distribute this code under the same terms as Perl itself.
-
-=cut
-
 1;

Modified: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/SQLite.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/SQLite.pm	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/SQLite.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -2,12 +2,14 @@
 
 use strict;
 use warnings;
+
+use base qw/DBIx::Class::Storage::DBI/;
+use mro 'c3';
+
 use POSIX 'strftime';
 use File::Copy;
 use File::Spec;
 
-use base qw/DBIx::Class::Storage::DBI/;
-
 sub _dbh_last_insert_id {
   my ($self, $dbh, $source, $col) = @_;
   $dbh->func('last_insert_rowid');

Added: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Sybase/Base.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Sybase/Base.pm	                        (rev 0)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Sybase/Base.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -0,0 +1,40 @@
+package # hide from PAUSE
+    DBIx::Class::Storage::DBI::Sybase::Base;
+
+use strict;
+use warnings;
+
+use base qw/DBIx::Class::Storage::DBI/;
+use mro 'c3';
+
+=head1 NAME
+
+DBIx::Class::Storage::DBI::Sybase::Base - Common functionality for drivers using
+DBD::Sybase
+
+=cut
+
+sub _ping {
+  my $self = shift;
+
+  my $dbh = $self->_dbh or return 0;
+
+  local $dbh->{RaiseError} = 1;
+  eval {
+    $dbh->do('select 1');
+  };
+
+  return $@ ? 0 : 1;
+}
+
+1;
+
+=head1 AUTHORS
+
+See L<DBIx::Class/CONTRIBUTORS>.
+
+=head1 LICENSE
+
+You may distribute this code under the same terms as Perl itself.
+
+=cut

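The C<_ping> above overrides the default added to Storage/DBI.pm later in this commit (which simply calls C<< $dbh->ping >>), and C<connected> now consults it; that is what the new reconnect checks in t/746sybase.t and t/74mssql.t exercise. A sketch of the same override pattern for some other driver with an unreliable C<ping> (the package name is hypothetical):

  package My::Storage::FlakyDriver;

  use strict;
  use warnings;

  use base 'DBIx::Class::Storage::DBI';
  use mro 'c3';

  sub _ping {
    my $self = shift;

    my $dbh = $self->_dbh or return 0;

    # cheap round-trip instead of relying on the driver's ping()
    local $dbh->{RaiseError} = 1;
    eval { $dbh->do('select 1') };

    return $@ ? 0 : 1;
  }

  1;
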
Modified: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Sybase/MSSQL.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Sybase/MSSQL.pm	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Sybase/MSSQL.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -15,6 +15,7 @@
 
 
 use base qw/DBIx::Class::Storage::DBI::Sybase::Microsoft_SQL_Server/;
+use mro 'c3';
 
 1;
 

Modified: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Sybase/Microsoft_SQL_Server.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Sybase/Microsoft_SQL_Server.pm	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Sybase/Microsoft_SQL_Server.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -4,9 +4,10 @@
 use warnings;
 
 use base qw/
+  DBIx::Class::Storage::DBI::Sybase::Base
   DBIx::Class::Storage::DBI::ODBC::Microsoft_SQL_Server
-  DBIx::Class::Storage::DBI::Sybase
 /;
+use mro 'c3';
 
 1;
 

Modified: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Sybase.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Sybase.pm	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/Sybase.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -3,7 +3,11 @@
 use strict;
 use warnings;
 
-use base qw/DBIx::Class::Storage::DBI::NoBindVars/;
+use base qw/
+    DBIx::Class::Storage::DBI::Sybase::Base
+    DBIx::Class::Storage::DBI::NoBindVars
+/;
+use mro 'c3';
 
 sub _rebless {
     my $self = shift;

Modified: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/mysql.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/mysql.pm	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI/mysql.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -3,16 +3,21 @@
 use strict;
 use warnings;
 
-use base qw/DBIx::Class::Storage::DBI::MultiColumnIn/;
+use base qw/
+  DBIx::Class::Storage::DBI::MultiColumnIn
+  DBIx::Class::Storage::DBI::AmbiguousGlob
+  DBIx::Class::Storage::DBI
+/;
+use mro 'c3';
 
 __PACKAGE__->sql_maker_class('DBIx::Class::SQLAHacks::MySQL');
 
 sub with_deferred_fk_checks {
   my ($self, $sub) = @_;
 
-  $self->dbh->do('SET foreign_key_checks=0');
+  $self->_do_query('SET FOREIGN_KEY_CHECKS = 0');
   $sub->();
-  $self->dbh->do('SET foreign_key_checks=1');
+  $self->_do_query('SET FOREIGN_KEY_CHECKS = 1');
 }
 
 sub _dbh_last_insert_id {
@@ -41,7 +46,7 @@
 
     $self->dbh->do("ROLLBACK TO SAVEPOINT $name")
 }
- 
+
 sub is_replicating {
     my $status = shift->dbh->selectrow_hashref('show slave status');
     return ($status->{Slave_IO_Running} eq 'Yes') && ($status->{Slave_SQL_Running} eq 'Yes');
@@ -57,38 +62,26 @@
   return shift->_per_row_update_delete (@_);
 }
 
-# MySql chokes on things like:
-# COUNT(*) FROM (SELECT tab1.col, tab2.col FROM tab1 JOIN tab2 ... )
-# claiming that col is a duplicate column (it loses the table specifiers by
-# the time it gets to the *). Thus for any subquery count we select only the
-# primary keys of the main table in the inner query. This hopefully still
-# hits the indexes and keeps mysql happy.
-# (mysql does not care if the SELECT and the GROUP BY match)
-sub _subq_count_select {
-  my ($self, $source, $rs_attrs) = @_;
-  my @pcols = map { join '.', $rs_attrs->{alias}, $_ } ($source->primary_columns);
-  return @pcols ? \@pcols : [ 1 ];
-}
-
 1;
 
 =head1 NAME
 
-DBIx::Class::Storage::DBI::mysql - Automatic primary key class for MySQL
+DBIx::Class::Storage::DBI::mysql - Storage::DBI class implementing MySQL specifics
 
 =head1 SYNOPSIS
 
-  # In your table classes
-  __PACKAGE__->load_components(qw/PK::Auto Core/);
-  __PACKAGE__->set_primary_key('id');
+Storage::DBI autodetects the underlying MySQL database, and re-blesses the
+C<$storage> object into this class.
 
+  my $schema = MyDb::Schema->connect( $dsn, $user, $pass );
+
 =head1 DESCRIPTION
 
-This class implements autoincrements for MySQL.
+This class implements MySQL-specific bits of L<DBIx::Class::Storage::DBI>.
 
 =head1 AUTHORS
 
-Matt S. Trout <mst at shadowcatsystems.co.uk>
+See L<DBIx::Class/CONTRIBUTORS>
 
 =head1 LICENSE
 

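Routing C<with_deferred_fk_checks> through C<_do_query> above sends the SET FOREIGN_KEY_CHECKS statements down the same code path as other storage-issued SQL instead of a raw C<< $dbh->do >>. Usage is unchanged; a sketch with illustrative result classes and values:

  $schema->storage->with_deferred_fk_checks(sub {
    # the track references a CD that does not exist yet; the surrounding
    # SET FOREIGN_KEY_CHECKS = 0/1 pair makes this insert order legal
    $schema->resultset('Track')->create({
      trackid => 100, cd => 999, position => 1, title => 'Briefly an orphan',
    });
    $schema->resultset('CD')->create({
      cdid => 999, artist => 1, title => 'Parent CD', year => 2009,
    });
  });
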
Modified: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI.pm	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage/DBI.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -1,10 +1,12 @@
 package DBIx::Class::Storage::DBI;
 # -*- mode: cperl; cperl-indent-level: 2 -*-
 
+use strict;
+use warnings;
+
 use base 'DBIx::Class::Storage';
+use mro 'c3';
 
-use strict;    
-use warnings;
 use Carp::Clan qw/^DBIx::Class/;
 use DBI;
 use DBIx::Class::Storage::DBI::Cursor;
@@ -13,14 +15,15 @@
 use List::Util();
 
 __PACKAGE__->mk_group_accessors('simple' =>
-    qw/_connect_info _dbi_connect_info _dbh _sql_maker _sql_maker_opts
-       _conn_pid _conn_tid transaction_depth _dbh_autocommit savepoints/
+  qw/_connect_info _dbi_connect_info _dbh _sql_maker _sql_maker_opts
+     _conn_pid _conn_tid transaction_depth _dbh_autocommit savepoints/
 );
 
 # the values for these accessors are picked out (and deleted) from
 # the attribute hashref passed to connect_info
 my @storage_options = qw/
-  on_connect_do on_disconnect_do disable_sth_caching unsafe auto_savepoint
+  on_connect_call on_disconnect_call on_connect_do on_disconnect_do
+  disable_sth_caching unsafe auto_savepoint
 /;
 __PACKAGE__->mk_group_accessors('simple' => @storage_options);
 
@@ -89,8 +92,8 @@
 
 =item *
 
-A single code reference which returns a connected 
-L<DBI database handle|DBI/connect> optionally followed by 
+A single code reference which returns a connected
+L<DBI database handle|DBI/connect> optionally followed by
 L<extra attributes|/DBIx::Class specific connection attributes> recognized
 by DBIx::Class:
 
@@ -109,7 +112,7 @@
     %extra_attributes,
   }];
 
-This is particularly useful for L<Catalyst> based applications, allowing the 
+This is particularly useful for L<Catalyst> based applications, allowing the
 following config (L<Config::General> style):
 
   <Model::DB>
@@ -128,7 +131,7 @@
 set C<AutoCommit> to either I<0> or I<1>.  L<DBIx::Class> further
 recommends that it be set to I<1>, and that you perform transactions
 via our L<DBIx::Class::Schema/txn_do> method.  L<DBIx::Class> will set it
-to I<1> if you do not do explicitly set it to zero.  This is the default 
+to I<1> if you do not explicitly set it to zero.  This is the default
 for most DBDs. See L</DBIx::Class and AutoCommit> for details.
 
 =head3 DBIx::Class specific connection attributes
@@ -177,12 +180,97 @@
 Note, this only runs if you explicitly call L</disconnect> on the
 storage object.
 
+=item on_connect_call
+
+A more generalized form of L</on_connect_do> that calls the specified
+C<connect_call_METHOD> methods in your storage driver.
+
+  on_connect_do => 'select 1'
+
+is equivalent to:
+
+  on_connect_call => [ [ do_sql => 'select 1' ] ]
+
+Its values may contain:
+
+=over
+
+=item a scalar
+
+Will call the C<connect_call_METHOD> method.
+
+=item a code reference
+
+Will execute C<< $code->($storage) >>
+
+=item an array reference
+
+Each value can be a method name or code reference.
+
+=item an array of arrays
+
+For each array, the first item is taken to be the C<connect_call_> method name
+or code reference, and the rest are parameters to it.
+
+=back
+
+Some predefined storage methods you may use:
+
+=over
+
+=item do_sql
+
+Executes a SQL string or a code reference that returns a SQL string. This is
+what L</on_connect_do> and L</on_disconnect_do> use.
+
+It can take:
+
+=over
+
+=item a scalar
+
+Will execute the scalar as SQL.
+
+=item an arrayref
+
+Taken to be arguments to L<DBI/do>, the SQL string optionally followed by the
+attributes hashref and bind values.
+
+=item a code reference
+
+Will execute C<< $code->($storage) >> and execute the returned array refs as
+above.
+
+=back
+
+=item datetime_setup
+
+Execute any statements necessary to initialize the database session to return
+and accept datetime/timestamp values used with
+L<DBIx::Class::InflateColumn::DateTime>.
+
+Only necessary for some databases, see your specific storage driver for
+implementation details.
+
+=back
+
+=item on_disconnect_call
+
+Takes arguments in the same form as L</on_connect_call> and executes them
+immediately before disconnecting from the database.
+
+Calls the C<disconnect_call_METHOD> methods as opposed to the
+C<connect_call_METHOD> methods called by L</on_connect_call>.
+
+Note, this only runs if you explicitly call L</disconnect> on the
+storage object.
+
 =item disable_sth_caching
 
 If set to a true value, this option will disable the caching of
 statement handles via L<DBI/prepare_cached>.
 
-=item limit_dialect 
+=item limit_dialect
 
 Sets the limit dialect. This is useful for JDBC-bridge among others
 where the remote SQL-dialect cannot be determined by the name of the
@@ -190,7 +278,7 @@
 
 =item quote_char
 
-Specifies what characters to use to quote table and column names. If 
+Specifies what characters to use to quote table and column names. If
 you use this you will want to specify L</name_sep> as well.
 
 C<quote_char> expects either a single character, in which case is it
@@ -202,8 +290,8 @@
 
 =item name_sep
 
-This only needs to be used in conjunction with C<quote_char>, and is used to 
-specify the charecter that seperates elements (schemas, tables, columns) from 
+This only needs to be used in conjunction with C<quote_char>, and is used to
+specify the character that separates elements (schemas, tables, columns) from
 each other. In most cases this is simply a C<.>.
 
 The consequences of not supplying this value is that L<SQL::Abstract>
@@ -359,7 +447,35 @@
 
 This method is deprecated in favour of setting via L</connect_info>.
 
+=cut
 
+=head2 on_disconnect_do
+
+This method is deprecated in favour of setting via L</connect_info>.
+
+=cut
+
+sub _parse_connect_do {
+  my ($self, $type) = @_;
+
+  my $val = $self->$type;
+  return () if not defined $val;
+
+  my @res;
+
+  if (not ref($val)) {
+    push @res, [ 'do_sql', $val ];
+  } elsif (ref($val) eq 'CODE') {
+    push @res, $val;
+  } elsif (ref($val) eq 'ARRAY') {
+    push @res, map { [ 'do_sql', $_ ] } @$val;
+  } else {
+    $self->throw_exception("Invalid type for $type: ".ref($val));
+  }
+
+  return \@res;
+}
+
 =head2 dbh_do
 
 Arguments: ($subref | $method_name), @extra_coderef_args?
@@ -506,9 +622,13 @@
   my ($self) = @_;
 
   if( $self->connected ) {
-    my $connection_do = $self->on_disconnect_do;
-    $self->_do_connection_actions($connection_do) if ref($connection_do);
+    my @actions;
 
+    push @actions, ( $self->on_disconnect_call || () );
+    push @actions, $self->_parse_connect_do ('on_disconnect_do');
+
+    $self->_do_connection_actions(disconnect_call_ => $_) for @actions;
+
     $self->_dbh->rollback unless $self->_dbh_autocommit;
     $self->_dbh->disconnect;
     $self->_dbh(undef);
@@ -551,12 +671,20 @@
           $self->_verify_pid;
           return 0 if !$self->_dbh;
       }
-      return ($dbh->FETCH('Active') && $dbh->ping);
+      return ($dbh->FETCH('Active') && $self->_ping);
   }
 
   return 0;
 }
 
+sub _ping {
+  my $self = shift;
+
+  my $dbh = $self->_dbh or return 0;
+
+  return $dbh->ping;
+}
+
 # handle pid changes correctly
 #  NOTE: assumes $self->_dbh is a valid $dbh
 sub _verify_pid {
@@ -594,7 +722,7 @@
 
 sub _sql_maker_args {
     my ($self) = @_;
-    
+
     return ( bindtype=>'columns', array_datatypes => 1, limit_dialect => $self->dbh, %{$self->_sql_maker_opts} );
 }
 
@@ -624,8 +752,12 @@
   #  there is no transaction in progress by definition
   $self->{transaction_depth} = $self->_dbh_autocommit ? 0 : 1;
 
-  my $connection_do = $self->on_connect_do;
-  $self->_do_connection_actions($connection_do) if $connection_do;
+  my @actions;
+
+  push @actions, ( $self->on_connect_call || () );
+  push @actions, $self->_parse_connect_do ('on_connect_do');
+
+  $self->_do_connection_actions(connect_call_ => $_) for @actions;
 }
 
 sub _determine_driver {
@@ -642,33 +774,51 @@
       ($driver) = $self->_dbi_connect_info->[0] =~ /dbi:([^:]+):/i;
     }
 
-    if ($self->load_optional_class("DBIx::Class::Storage::DBI::${driver}")) {
-      bless $self, "DBIx::Class::Storage::DBI::${driver}";
+    my $storage_class = "DBIx::Class::Storage::DBI::${driver}";
+    if ($self->load_optional_class($storage_class)) {
+      mro::set_mro($storage_class, 'c3');
+      bless $self, $storage_class;
       $self->_rebless();
     }
   }
 }
 
 sub _do_connection_actions {
-  my $self = shift;
-  my $connection_do = shift;
+  my $self          = shift;
+  my $method_prefix = shift;
+  my $call          = shift;
 
-  if (!ref $connection_do) {
-    $self->_do_query($connection_do);
+  if (not ref($call)) {
+    my $method = $method_prefix . $call;
+    $self->$method(@_);
+  } elsif (ref($call) eq 'CODE') {
+    $self->$call(@_);
+  } elsif (ref($call) eq 'ARRAY') {
+    if (ref($call->[0]) ne 'ARRAY') {
+      $self->_do_connection_actions($method_prefix, $_) for @$call;
+    } else {
+      $self->_do_connection_actions($method_prefix, @$_) for @$call;
+    }
+  } else {
+    $self->throw_exception (sprintf ("Don't know how to process connection actions of type '%s'", ref($call)) );
   }
-  elsif (ref $connection_do eq 'ARRAY') {
-    $self->_do_query($_) foreach @$connection_do;
-  }
-  elsif (ref $connection_do eq 'CODE') {
-    $connection_do->($self);
-  }
-  else {
-    $self->throw_exception (sprintf ("Don't know how to process conection actions of type '%s'", ref $connection_do) );
-  }
 
   return $self;
 }
 
+sub connect_call_do_sql {
+  my $self = shift;
+  $self->_do_query(@_);
+}
+
+sub disconnect_call_do_sql {
+  my $self = shift;
+  $self->_do_query(@_);
+}
+
+# override in db-specific backend when necessary
+sub connect_call_datetime_setup { 1 }
+
 sub _do_query {
   my ($self, $action) = @_;
 
@@ -753,11 +903,11 @@
 
   $self->throw_exception ("Your Storage implementation doesn't support savepoints")
     unless $self->can('_svp_begin');
-  
+
   push @{ $self->{savepoints} }, $name;
 
   $self->debugobj->svp_begin($name) if $self->debug;
-  
+
   return $self->_svp_begin($name);
 }
 
@@ -817,7 +967,7 @@
   }
 
   $self->debugobj->svp_rollback($name) if $self->debug;
-  
+
   return $self->_svp_rollback($name);
 }
 
@@ -955,7 +1105,7 @@
 
   my $sth = $self->sth($sql,$op);
 
-  my $placeholder_index = 1; 
+  my $placeholder_index = 1;
 
   foreach my $bound (@$bind) {
     my $attributes = {};
@@ -1014,7 +1164,7 @@
 }
 
 ## Still not quite perfect, and EXPERIMENTAL
-## Currently it is assumed that all values passed will be "normal", i.e. not 
+## Currently it is assumed that all values passed will be "normal", i.e. not
 ## scalar refs, or at least, all the same type as the first set, the statement is
 ## only prepped once.
 sub insert_bulk {
@@ -1023,7 +1173,7 @@
   my $table = $source->from;
   @colvalues{@$cols} = (0..$#$cols);
   my ($sql, @bind) = $self->sql_maker->insert($table, \%colvalues);
-  
+
   $self->_query_start( $sql, @bind );
   my $sth = $self->sth($sql);
 
@@ -1036,7 +1186,7 @@
   my $bind_attributes = $self->source_bind_attributes($source);
 
   ## Bind the values and execute
-  my $placeholder_index = 1; 
+  my $placeholder_index = 1;
 
   foreach my $bound (@bind) {
 
@@ -1084,7 +1234,7 @@
   my $self = shift @_;
   my $source = shift @_;
   my $bind_attributes = $self->source_bind_attributes($source);
-  
+
   return $self->_execute('update' => [], $source, $bind_attributes, @_);
 }
 
@@ -1092,9 +1242,9 @@
 sub delete {
   my $self = shift @_;
   my $source = shift @_;
-  
+
   my $bind_attrs = $self->source_bind_attributes($source);
-  
+
   return $self->_execute('delete' => [], $source, $bind_attrs, @_);
 }
 
@@ -1193,10 +1343,10 @@
   my $self = shift;
 
   # localization is neccessary as
-  # 1) there is no infrastructure to pass this around (easy to do, but will wait)
+  # 1) there is no infrastructure to pass this around before SQLA2
   # 2) _select_args sets it and _prep_for_execute consumes it
   my $sql_maker = $self->sql_maker;
-  local $sql_maker->{for};
+  local $sql_maker->{_dbic_rs_attrs};
 
   return $self->_execute($self->_select_args(@_));
 }
@@ -1205,10 +1355,10 @@
   my $self = shift;
 
   # localization is neccessary as
-  # 1) there is no infrastructure to pass this around (easy to do, but will wait)
+  # 1) there is no infrastructure to pass this around before SQLA2
   # 2) _select_args sets it and _prep_for_execute consumes it
   my $sql_maker = $self->sql_maker;
-  local $sql_maker->{for};
+  local $sql_maker->{_dbic_rs_attrs};
 
   # my ($op, $bind, $ident, $bind_attrs, $select, $cond, $order, $rows, $offset)
   #  = $self->_select_args($ident, $select, $cond, $attrs);
@@ -1228,8 +1378,19 @@
 sub _select_args {
   my ($self, $ident, $select, $where, $attrs) = @_;
 
+  my ($alias2source, $rs_alias) = $self->_resolve_ident_sources ($ident);
+
   my $sql_maker = $self->sql_maker;
-  my $alias2source = $self->_resolve_ident_sources ($ident);
+  $sql_maker->{_dbic_rs_attrs} = {
+    %$attrs,
+    select => $select,
+    from => $ident,
+    where => $where,
+    $rs_alias
+      ? ( _source_handle => $alias2source->{$rs_alias}->handle )
+      : ()
+    ,
+  };
 
   # calculate bind_attrs before possible $ident mangling
   my $bind_attrs = {};
@@ -1241,29 +1402,42 @@
       $bind_attrs->{$fqcn} = $bindtypes->{$col} if $bindtypes->{$col};
 
       # so that unqualified searches can be bound too
-      $bind_attrs->{$col} = $bind_attrs->{$fqcn} if $alias eq 'me';
+      $bind_attrs->{$col} = $bind_attrs->{$fqcn} if $alias eq $rs_alias;
     }
   }
 
-  my @limit;
-  if ($attrs->{software_limit} ||
-      $sql_maker->_default_limit_syntax eq "GenericSubQ") {
-        $attrs->{software_limit} = 1;
-  } else {
+  # adjust limits
+  if (
+    $attrs->{software_limit}
+      ||
+    $sql_maker->_default_limit_syntax eq "GenericSubQ"
+  ) {
+    $attrs->{software_limit} = 1;
+  }
+  else {
     $self->throw_exception("rows attribute must be positive if present")
       if (defined($attrs->{rows}) && !($attrs->{rows} > 0));
 
     # MySQL actually recommends this approach.  I cringe.
     $attrs->{rows} = 2**48 if not defined $attrs->{rows} and defined $attrs->{offset};
+  }
 
-    if ($attrs->{rows} && keys %{$attrs->{collapse}}) {
-      ($ident, $select, $where, $attrs)
-        = $self->_adjust_select_args_for_limited_prefetch ($ident, $select, $where, $attrs);
-    }
-    else {
-      push @limit, $attrs->{rows}, $attrs->{offset};
-    }
+  my @limit;
+
+  # see if we need to tear the prefetch apart (either limited has_many or grouped prefetch)
+  # otherwise delegate the limiting to the storage, unless software limit was requested
+  if (
+    ( $attrs->{rows} && keys %{$attrs->{collapse}} )
+       ||
+    ( $attrs->{group_by} && @{$attrs->{group_by}} &&
+      $attrs->{prefetch_select} && @{$attrs->{prefetch_select}} )
+  ) {
+    ($ident, $select, $where, $attrs)
+      = $self->_adjust_select_args_for_complex_prefetch ($ident, $select, $where, $attrs);
   }
+  elsif (! $attrs->{software_limit} ) {
+    push @limit, $attrs->{rows}, $attrs->{offset};
+  }
 
 ###
   # This would be the point to deflate anything found in $where
@@ -1277,47 +1451,61 @@
 
   my $order = { map
     { $attrs->{$_} ? ( $_ => $attrs->{$_} ) : ()  }
-    (qw/order_by group_by having _virtual_order_by/ )
+    (qw/order_by group_by having/ )
   };
 
-
-  $sql_maker->{for} = delete $attrs->{for};
-
   return ('select', $attrs->{bind}, $ident, $bind_attrs, $select, $where, $order, @limit);
 }
 
-sub _adjust_select_args_for_limited_prefetch {
+#
+# This is the code producing joined subqueries like:
+# SELECT me.*, other.* FROM ( SELECT me.* FROM ... ) JOIN other ON ... 
+#
+sub _adjust_select_args_for_complex_prefetch {
   my ($self, $from, $select, $where, $attrs) = @_;
 
-  if ($attrs->{group_by} and @{$attrs->{group_by}}) {
-    $self->throw_exception ('Prefetch with limit (rows/offset) is not supported on resultsets with a group_by attribute');
-  }
-
-  $self->throw_exception ('Prefetch with limit (rows/offset) is not supported on resultsets with a custom from attribute')
+  $self->throw_exception ('Complex prefetches are not supported on resultsets with a custom from attribute')
     if (ref $from ne 'ARRAY');
 
+  # copies for mangling
+  $from = [ @$from ];
+  $select = [ @$select ];
+  $attrs = { %$attrs };
 
   # separate attributes
   my $sub_attrs = { %$attrs };
-  delete $attrs->{$_} for qw/where bind rows offset/;
-  delete $sub_attrs->{$_} for qw/for collapse select order_by/;
+  delete $attrs->{$_} for qw/where bind rows offset group_by having/;
+  delete $sub_attrs->{$_} for qw/for collapse prefetch_select _collapse_order_by select as/;
 
   my $alias = $attrs->{alias};
+  my $sql_maker = $self->sql_maker;
 
-  # create subquery select list
-  my $sub_select = [ grep { $_ =~ /^$alias\./ } @{$attrs->{select}} ];
+  # create subquery select list - loop only over primary columns
+  my $sub_select = [];
+  for my $i (0 .. @{$attrs->{select}} - @{$attrs->{prefetch_select}} - 1) {
+    my $sel = $attrs->{select}[$i];
 
+    # alias any functions to the dbic-side 'as' label
+    # adjust the outer select accordingly
+    if (ref $sel eq 'HASH' && !$sel->{-select}) {
+      $sel = { -select => $sel, -as => $attrs->{as}[$i] };
+      $select->[$i] = join ('.', $attrs->{alias}, $attrs->{as}[$i]);
+    }
+
+    push @$sub_select, $sel;
+  }
+
   # bring over all non-collapse-induced order_by into the inner query (if any)
   # the outer one will have to keep them all
+  delete $sub_attrs->{order_by};
   if (my $ord_cnt = @{$attrs->{order_by}} - @{$attrs->{_collapse_order_by}} ) {
     $sub_attrs->{order_by} = [
-      @{$attrs->{order_by}}[ 0 .. ($#{$attrs->{order_by}} - $ord_cnt - 1) ]
+      @{$attrs->{order_by}}[ 0 .. $ord_cnt - 1]
     ];
   }
 
   # mangle {from}
-  $from = [ @$from ];
-  my $select_root = shift @$from;
+  my $join_root = shift @$from;
   my @outer_from = @$from;
 
   my %inner_joins;
@@ -1327,7 +1515,7 @@
   # so always include it in the inner join, and also shift away
   # from the outer stack, so that the two datasets actually do
   # meet
-  if ($select_root->{-alias} ne $alias) {
+  if ($join_root->{-alias} ne $alias) {
     $inner_joins{$alias} = 1;
 
     while (@outer_from && $outer_from[0][0]{-alias} ne $alias) {
@@ -1348,7 +1536,7 @@
   # away _any_ branches of the join tree that are:
   # 1) not mentioned in the condition/order
   # 2) left-join leaves (or left-join leaf chains)
-  # Most of the join ocnditions will not satisfy this, but for real
+  # Most of the join conditions will not satisfy this, but for real
   # complex queries some might, and we might make some RDBMS happy.
   #
   #
@@ -1358,7 +1546,6 @@
   # It may not be very efficient, but it's a reasonable stop-gap
   {
     # produce stuff unquoted, so it can be scanned
-    my $sql_maker = $self->sql_maker;
     local $sql_maker->{quote_char};
 
     my @order_by = (map
@@ -1398,20 +1585,19 @@
   }
 
   # construct the inner $from for the subquery
-  my $inner_from = [ $select_root ];
+  my $inner_from = [ $join_root ];
   for my $j (@$from) {
     push @$inner_from, $j if $inner_joins{$j->[0]{-alias}};
   }
 
   # if a multi-type join was needed in the subquery ("multi" is indicated by
   # presence in {collapse}) - add a group_by to simulate the collapse in the subq
-
   for my $alias (keys %inner_joins) {
 
     # the dot comes from some weirdness in collapse
     # remove after the rewrite
     if ($attrs->{collapse}{".$alias"}) {
-      $sub_attrs->{group_by} = $sub_select;
+      $sub_attrs->{group_by} ||= $sub_select;
       last;
     }
   }
@@ -1425,10 +1611,17 @@
   );
 
   # put it in the new {from}
-  unshift @outer_from, { $alias => $subq };
+  unshift @outer_from, {
+    -alias => $alias,
+    -source_handle => $join_root->{-source_handle},
+    $alias => $subq,
+  };
 
   # This is totally horrific - the $where ends up in both the inner and outer query
-  # Unfortunately not much can be done until SQLA2 introspection arrives
+  # Unfortunately not much can be done until SQLA2 introspection arrives, and even
+  # then if where conditions apply to the *right* side of the prefetch, you may have
+  # to both filter the inner select (e.g. to apply a limit) and then have to re-filter
+  # the outer select to exclude joins you didn't want in the first place
   #
   # OTOH it can be seen as a plus: <ash> (notes that this query would make a DBA cry ;)
   return (\@outer_from, $select, $where, $attrs);
@@ -1438,12 +1631,14 @@
   my ($self, $ident) = @_;
 
   my $alias2source = {};
+  my $rs_alias;
 
   # the reason this is so contrived is that $ident may be a {from}
   # structure, specifying multiple tables to join
   if ( Scalar::Util::blessed($ident) && $ident->isa("DBIx::Class::ResultSource") ) {
     # this is compat mode for insert/update/delete which do not deal with aliases
     $alias2source->{me} = $ident;
+    $rs_alias = 'me';
   }
   elsif (ref $ident eq 'ARRAY') {
 
@@ -1451,6 +1646,7 @@
       my $tabinfo;
       if (ref $_ eq 'HASH') {
         $tabinfo = $_;
+        $rs_alias = $tabinfo->{-alias};
       }
       if (ref $_ eq 'ARRAY' and ref $_->[0] eq 'HASH') {
         $tabinfo = $_->[0];
@@ -1461,9 +1657,37 @@
     }
   }
 
-  return $alias2source;
+  return ($alias2source, $rs_alias);
 }
 
+# Takes $ident, \@column_names
+#
+# returns { $column_name => \%column_info, ... }
+# also note: this adds -result_source => $rsrc to the column info
+#
+# usage:
+#   my $col_sources = $self->_resolve_column_info($ident, [map $_->[0], @{$bind}]);
+sub _resolve_column_info {
+  my ($self, $ident, $colnames) = @_;
+  my ($alias2src, $root_alias) = $self->_resolve_ident_sources($ident);
+
+  my $sep = $self->_sql_maker_opts->{name_sep} || '.';
+  $sep = "\Q$sep\E";
+
+  my (%return, %converted);
+  foreach my $col (@$colnames) {
+    my ($alias, $colname) = $col =~ m/^ (?: ([^$sep]+) $sep)? (.+) $/x;
+
+    # deal with unqualified cols - we assume the main alias for all
+    # unqualified ones, ugly but can't think of anything better right now
+    $alias ||= $root_alias;
+
+    my $rsrc = $alias2src->{$alias};
+    $return{$col} = $rsrc && { %{$rsrc->column_info($colname)}, -result_source => $rsrc };
+  }
+  return \%return;
+}
+
 # Returns a counting SELECT for a simple count
 # query. Abstracted so that a storage could override
 # this to { count => 'firstcol' } or whatever makes
@@ -1743,13 +1967,13 @@
 
  { add_drop_table => 1, ignore_constraint_names => 1, ignore_index_names => 1 }
 
-merged with the hash passed in. To disable any of those features, pass in a 
+merged with the hash passed in. To disable any of those features, pass in a
 hashref like the following
 
  { ignore_constraint_names => 0, # ... other options }
 
 
-Note that this feature is currently EXPERIMENTAL and may not work correctly 
+Note that this feature is currently EXPERIMENTAL and may not work correctly
 across all databases, or fully handle complex relationships.
 
 WARNING: Please check all SQL files created, before applying them.
@@ -1770,7 +1994,7 @@
   $version ||= $schema_version;
 
   $sqltargs = {
-    add_drop_table => 1, 
+    add_drop_table => 1,
     ignore_constraint_names => 1,
     ignore_index_names => 1,
     %{$sqltargs || {}}
@@ -1810,7 +2034,7 @@
     }
     print $file $output;
     close($file);
-  
+
     next unless ($preversion);
 
     require SQL::Translator::Diff;
@@ -1826,7 +2050,7 @@
       carp("Overwriting existing diff file - $difffile");
       unlink($difffile);
     }
-    
+
     my $source_schema;
     {
       my $t = SQL::Translator->new($sqltargs);
@@ -1845,7 +2069,7 @@
         unless ( $source_schema->name );
     }
 
-    # The "new" style of producers have sane normalization and can support 
+    # The "new" style of producers have sane normalization and can support
     # diffing a SQL file against a DBIC->SQLT schema. Old style ones don't
     # And we have to diff parsed SQL against parsed SQL.
     my $dest_schema = $sqlt_schema;
@@ -1866,12 +2090,12 @@
       $dest_schema->name( $filename )
         unless $dest_schema->name;
     }
-    
+
     my $diff = SQL::Translator::Diff::schema_diff($source_schema, $db,
                                                   $dest_schema,   $db,
                                                   $sqltargs
                                                  );
-    if(!open $file, ">$difffile") { 
+    if(!open $file, ">$difffile") {
       $self->throw_exception("Can't write to $difffile ($!)");
       next;
     }
@@ -1915,7 +2139,7 @@
   if(-f $filename)
   {
       my $file;
-      open($file, "<$filename") 
+      open($file, "<$filename")
         or $self->throw_exception("Can't open $filename ($!)");
       my @rows = <$file>;
       close($file);
@@ -1930,7 +2154,7 @@
   eval qq{use SQL::Translator::Producer::${type}};
   $self->throw_exception($@) if $@;
 
-  # sources needs to be a parser arg, but for simplicty allow at top level 
+  # sources needs to be a parser arg, but for simplicity allow at top level
   # coming in
   $sqltargs->{parser_args}{sources} = delete $sqltargs->{sources}
       if exists $sqltargs->{sources};
@@ -2035,7 +2259,7 @@
 
 sub is_replicating {
     return;
-    
+
 }
 
 =head2 lag_behind_master

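For reference, a sketch of C<connect_info> using the hooks documented in the POD added above (the schema class, DSN and SQL statements are placeholders):

  my $schema = My::Schema->connect(
    $dsn, $user, $pass,
    {
      # a flat arrayref may mix connect_call_* method names and coderefs
      on_connect_call => [
        'datetime_setup',
        sub { my $storage = shift; $storage->_do_query('select 1') },
      ],

      # use the array-of-arrays form when a method needs arguments
      on_disconnect_call => [
        [ do_sql => 'select 1' ],
      ],
    },
  );
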
Modified: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage.pm	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class/Storage.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -4,6 +4,7 @@
 use warnings;
 
 use base qw/DBIx::Class/;
+use mro 'c3';
 
 use Scalar::Util qw/weaken/;
 use Carp::Clan qw/^DBIx::Class/;

Modified: DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class.pm	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/lib/DBIx/Class.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -3,11 +3,12 @@
 use strict;
 use warnings;
 
+use MRO::Compat;
+
 use vars qw($VERSION);
 use base qw/DBIx::Class::Componentised Class::Accessor::Grouped/;
 use DBIx::Class::StartupCheck;
 
-
 sub mk_classdata {
   shift->mk_classaccessor(@_);
 }

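Loading MRO::Compat here is what allows every storage class touched above to say C<use mro 'c3'> on perls older than 5.10. A toy illustration of why the C3 order matters for the diamond-shaped storage hierarchy (the package names are made up):

  use strict;
  use warnings;

  package Base;  sub _ping { 'Base' }
  package Left;  use base 'Base';
  package Right; use base 'Base'; sub _ping { 'Right' }

  package Child;
  use base qw/Left Right/;
  use mro 'c3';

  package main;
  # depth-first resolution would find Base::_ping via Left and stop there;
  # C3 linearizes Child, Left, Right, Base, so the Right override wins --
  # the same reason Sybase::Base::_ping beats the Storage::DBI default
  print Child->_ping, "\n";   # prints 'Right'
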
Modified: DBIx-Class/0.08/branches/table_name_ref/t/03podcoverage.t
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/t/03podcoverage.t	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/t/03podcoverage.t	2009-07-09 05:37:22 UTC (rev 7006)
@@ -106,6 +106,7 @@
     'DBIx::Class::ResultSetManager'                     => { skip => 1 },
     'DBIx::Class::ResultSourceProxy'                    => { skip => 1 },
     'DBIx::Class::Storage::DBI'                         => { skip => 1 },
+    'DBIx::Class::Storage::DBI::Replicated::Types'      => { skip => 1 },
     'DBIx::Class::Storage::DBI::DB2'                    => { skip => 1 },
     'DBIx::Class::Storage::DBI::MSSQL'                  => { skip => 1 },
     'DBIx::Class::Storage::DBI::Sybase::MSSQL'          => { skip => 1 },
@@ -116,7 +117,9 @@
     'DBIx::Class::Storage::DBI::Pg'                     => { skip => 1 },
     'DBIx::Class::Storage::DBI::SQLite'                 => { skip => 1 },
     'DBIx::Class::Storage::DBI::mysql'                  => { skip => 1 },
+    'DBIx::Class::SQLAHacks'                            => { skip => 1 },
     'DBIx::Class::SQLAHacks::MySQL'                     => { skip => 1 },
+    'DBIx::Class::SQLAHacks::MSSQL'                     => { skip => 1 },
     'SQL::Translator::Parser::DBIx::Class'              => { skip => 1 },
     'SQL::Translator::Producer::DBIx::Class::File'      => { skip => 1 },
 

Modified: DBIx-Class/0.08/branches/table_name_ref/t/31stats.t
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/t/31stats.t	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/t/31stats.t	2009-07-09 05:37:22 UTC (rev 7006)
@@ -4,12 +4,7 @@
 use warnings;
 use Test::More;
 
-BEGIN {
-    eval "use DBD::SQLite";
-    plan $@
-        ? ( skip_all => 'needs DBD::SQLite for testing' )
-        : ( tests => 12 );
-}
+plan tests => 12;
 
 use lib qw(t/lib);
 

Modified: DBIx-Class/0.08/branches/table_name_ref/t/42toplimit.t
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/t/42toplimit.t	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/t/42toplimit.t	2009-07-09 05:37:22 UTC (rev 7006)
@@ -10,12 +10,31 @@
 
 # Trick the sqlite DB to use Top limit emulation
 # We could test all of this via $sq->$op directly,
-# but some conditions needs a $rsrc
+# but some conditions need a $rsrc
 delete $schema->storage->_sql_maker->{_cached_syntax};
 $schema->storage->_sql_maker->limit_dialect ('Top');
 
-my $rs = $schema->resultset ('FourKeys')->search ({}, { rows => 1, offset => 3 });
+my $rs = $schema->resultset ('BooksInLibrary')->search ({}, { prefetch => 'owner', rows => 1, offset => 3 });
 
+sub default_test_order {
+   my $order_by = shift;
+   is_same_sql_bind(
+      $rs->search ({}, {order_by => $order_by})->as_query,
+      "(SELECT
+        TOP 1 me__id, source, owner, title, price, owner__id, name FROM
+         (SELECT
+           TOP 4 me.id AS me__id, me.source, me.owner, me.title, me.price, owner.id AS owner__id, owner.name
+           FROM books me
+           JOIN owners owner ON
+           owner.id = me.owner
+           WHERE ( source = ? )
+           ORDER BY me__id ASC
+         ) me ORDER BY me__id DESC
+       )",
+    [ [ source => 'Library' ] ],
+  );
+}
+
 sub test_order {
   my $args = shift;
 
@@ -26,24 +45,29 @@
 
   is_same_sql_bind(
     $rs->search ({}, {order_by => $args->{order_by}})->as_query,
-    "(
-      SELECT * FROM (
-        SELECT TOP 1 * FROM (
-          SELECT TOP 4 me.foo, me.bar, me.hello, me.goodbye, me.sensors, me.read_count FROM fourkeys me ORDER BY $args->{order_inner}
-        ) foo ORDER BY $args->{order_outer}
-      ) bar
-      $req_order
+    "(SELECT
+      me__id, source, owner, title, price, owner__id, name FROM
+      (SELECT
+        TOP 1 me__id, source, owner, title, price, owner__id, name FROM
+         (SELECT
+           TOP 4 me.id AS me__id, me.source, me.owner, me.title, me.price, owner.id AS owner__id, owner.name FROM
+           books me
+           JOIN owners owner ON owner.id = me.owner
+           WHERE ( source = ? )
+           ORDER BY $args->{order_inner}
+         ) me ORDER BY $args->{order_outer}
+      ) me $req_order
     )",
-    [],
+    [ [ source => 'Library' ] ],
   );
 }
 
 my @tests = (
   {
-    order_by => \ 'foo DESC',
+    order_by => \'foo DESC',
     order_req => 'foo DESC',
     order_inner => 'foo DESC',
-    order_outer => 'foo ASC' 
+    order_outer => 'foo ASC'
   },
   {
     order_by => { -asc => 'foo'  },
@@ -91,48 +115,38 @@
     order_inner => 'foo ASC, bar DESC, hello ASC, sensors ASC',
     order_outer => 'foo DESC, bar ASC, hello DESC, sensors DESC',
   },
-  {
-    order_by => undef,
-    order_req => undef,
-    order_inner => 'foo ASC, bar ASC, hello ASC, goodbye ASC',
-    order_outer => 'foo DESC, bar DESC, hello DESC, goodbye DESC',
-  },
-  {
-    order_by => '',
-    order_req => undef,
-    order_inner => 'foo ASC, bar ASC, hello ASC, goodbye ASC',
-    order_outer => 'foo DESC, bar DESC, hello DESC, goodbye DESC',
-  },
-  {
-    order_by => {},
-    order_req => undef,
-    order_inner => 'foo ASC, bar ASC, hello ASC, goodbye ASC',
-    order_outer => 'foo DESC, bar DESC, hello DESC, goodbye DESC',
-  },
-  {
-    order_by => [],
-    order_req => undef,
-    order_inner => 'foo ASC, bar ASC, hello ASC, goodbye ASC',
-    order_outer => 'foo DESC, bar DESC, hello DESC, goodbye DESC',
-  },
 );
 
-plan (tests => scalar @tests + 1);
+my @default_tests = ( undef, '', {}, [] );
 
+plan (tests => scalar @tests + scalar @default_tests + 1);
+
 test_order ($_) for @tests;
+default_test_order ($_) for @default_tests;
 
+
 is_same_sql_bind (
-  $rs->search ({}, { group_by => 'bar', order_by => 'bar' })->as_query,
-  '(
-    SELECT * FROM
-    (
-      SELECT TOP 1 * FROM
-      (
-        SELECT TOP 4  me.foo, me.bar, me.hello, me.goodbye, me.sensors, me.read_count FROM fourkeys me GROUP BY bar ORDER BY bar ASC
-      ) AS foo
-      ORDER BY bar DESC
-    ) AS bar
-    ORDER BY bar
-  )',
-  [],
+  $rs->search ({}, { group_by => 'title', order_by => 'title' })->as_query,
+'(SELECT
+me.id, me.source, me.owner, me.title, me.price, owner.id, owner.name FROM
+   ( SELECT
+      id, source, owner, title, price FROM
+      ( SELECT
+         TOP 1 id, source, owner, title, price FROM
+         ( SELECT
+            TOP 4 me.id, me.source, me.owner, me.title, me.price FROM
+            books me  JOIN
+            owners owner ON owner.id = me.owner
+            WHERE ( source = ? )
+            GROUP BY title
+            ORDER BY title ASC
+         ) me
+         ORDER BY title DESC
+      ) me
+      ORDER BY title
+   ) me  JOIN
+   owners owner ON owner.id = me.owner WHERE
+   ( source = ? )
+   ORDER BY title)' ,
+  [ [ source => 'Library' ], [ source => 'Library' ] ],
 );

Modified: DBIx-Class/0.08/branches/table_name_ref/t/60core.t
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/t/60core.t	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/t/60core.t	2009-07-09 05:37:22 UTC (rev 7006)
@@ -9,7 +9,7 @@
 
 my $schema = DBICTest->init_schema();
 
-plan tests => 106;
+plan tests => 103;
 
 eval { require DateTime::Format::SQLite };
 my $NO_DTFM = $@ ? 1 : 0;
@@ -229,20 +229,6 @@
 is ($collapsed_or_rs->all, 4, 'Collapsed joined search with OR returned correct number of rows');
 is ($collapsed_or_rs->count, 4, 'Collapsed search count with OR ok');
 
-my $pref_or_rs = $collapsed_or_rs->search ({}, { prefetch => [qw/tags/] });
-is_same_sql_bind (
-  $pref_or_rs->as_query,
-  '(SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track, tags.tagid, tags.cd, tags.tag FROM cd me LEFT JOIN tags tags ON tags.cd = me.cdid WHERE ( ( tags.tag = ? OR tags.tag = ? ) ) GROUP BY me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track, tags.tagid, tags.cd, tags.tag ORDER BY cdid, tags.cd, tags.tag)',
-  [
-    [ 'tags.tag' => 'Cheesy' ],
-    [ 'tags.tag' => 'Blue' ],
-  ],
-  'Prefetch + distinct resulted in correct group_by',
-);
-is ($pref_or_rs->all, 4, 'Prefetched grouped search with OR returned correct number of rows');
-is ($pref_or_rs->count, 4, 'Prefetched grouped count with OR ok');
-
-
 {
   my $tcount = $schema->resultset('Track')->search(
     {},

Modified: DBIx-Class/0.08/branches/table_name_ref/t/71mysql.t
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/t/71mysql.t	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/t/71mysql.t	2009-07-09 05:37:22 UTC (rev 7006)
@@ -155,41 +155,36 @@
     is_deeply($type_info, $test_type_info, 'columns_info_for - column data types');
 }
 
+my $cd = $schema->resultset ('CD')->create ({});
+my $producer = $schema->resultset ('Producer')->create ({});
+lives_ok { $cd->set_producers ([ $producer ]) } 'set_relationship doesnt die';
+
+
 ## Can we properly deal with the null search problem?
 ##
 ## Only way is to do a SET SQL_AUTO_IS_NULL = 0; on connect
 ## But I'm not sure if we should do this or not (Ash, 2008/06/03)
 
 NULLINSEARCH: {
-    
-    ok my $artist1_rs = $schema->resultset('Artist')->search({artistid=>6666})
-    => 'Created an artist resultset of 6666';
-    
+    local $TODO = 'Fix pending in branches/mysql_ansi';
+    my $ansi_schema = DBICTest::Schema->connect ($dsn, $user, $pass);
+
+    $ansi_schema->resultset('Artist')->create ({ name => 'last created artist' });
+
+    ok my $artist1_rs = $ansi_schema->resultset('Artist')->search({artistid=>6666})
+      => 'Created an artist resultset of 6666';
+
     is $artist1_rs->count, 0
-    => 'Got no returned rows';
-    
-    ok my $artist2_rs = $schema->resultset('Artist')->search({artistid=>undef})
-    => 'Created an artist resultset of undef';
-    
-    TODO: {
-    	local $TODO = "need to fix the row count =1 when select * from table where pk IS NULL problem";
-	    is $artist2_rs->count, 0
-	    => 'got no rows';    	
-    }
+      => 'Got no returned rows';
 
-    my $artist = $artist2_rs->single;
-    
-    is $artist => undef
-    => 'Nothing Found!';
-}
-    
-my $cd = $schema->resultset ('CD')->create ({});
+    ok my $artist2_rs = $ansi_schema->resultset('Artist')->search({artistid=>undef})
+      => 'Created an artist resultset of undef';
 
-my $producer = $schema->resultset ('Producer')->create ({});
+    is $artist2_rs->count, 0
+      => 'got no rows';
 
-lives_ok { $cd->set_producers ([ $producer ]) } 'set_relationship doesnt die';
+    my $artist = $artist2_rs->single;
 
-# clean up our mess
-END {
-    #$dbh->do("DROP TABLE artist") if $dbh;
+    is $artist => undef
+      => 'Nothing Found!';
 }

Modified: DBIx-Class/0.08/branches/table_name_ref/t/72pg.t
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/t/72pg.t	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/t/72pg.t	2009-07-09 05:37:22 UTC (rev 7006)
@@ -144,7 +144,9 @@
 is_deeply($type_info, $test_type_info,
           'columns_info_for - column data types');
 
-{
+SKIP: {
+  skip "Need DBD::Pg 2.9.2 or newer for array tests", 4 if $DBD::Pg::VERSION < 2.009002;
+
   lives_ok {
     $schema->resultset('ArrayTest')->create({
       arrayfield => [1, 2],

Modified: DBIx-Class/0.08/branches/table_name_ref/t/73oracle.t
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/t/73oracle.t	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/t/73oracle.t	2009-07-09 05:37:22 UTC (rev 7006)
@@ -40,7 +40,7 @@
   ' as well as following sequences: \'pkid1_seq\', \'pkid2_seq\' and \'nonpkid_seq\''
   unless ($dsn && $user && $pass);
 
-plan tests => 34;
+plan tests => 35;
 
 DBICTest::Schema->load_classes('ArtistFQN');
 my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
@@ -113,15 +113,19 @@
 is( $new->artistid, 2, "Oracle Auto-PK worked with fully-qualified tablename" );
 
 # test join with row count ambiguity
+
 my $cd = $schema->resultset('CD')->create({ cdid => 1, artist => 1, title => 'EP C', year => '2003' });
-my $track = $schema->resultset('Track')->create({ trackid => 1, cd => 1, position => 1, title => 'Track1' });
+my $track = $schema->resultset('Track')->create({ trackid => 1, cd => 1,
+    position => 1, title => 'Track1' });
 my $tjoin = $schema->resultset('Track')->search({ 'me.title' => 'Track1'},
         { join => 'cd',
           rows => 2 }
 );
 
-is($tjoin->next->title, 'Track1', "ambiguous column ok");
+ok(my $row = $tjoin->next);
 
+is($row->title, 'Track1', "ambiguous column ok");
+
 # check count distinct with multiple columns
 my $other_track = $schema->resultset('Track')->create({ trackid => 2, cd => 1, position => 1, title => 'Track2' });
 

Deleted: DBIx-Class/0.08/branches/table_name_ref/t/73oracle_inflate.t
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/t/73oracle_inflate.t	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/t/73oracle_inflate.t	2009-07-09 05:37:22 UTC (rev 7006)
@@ -1,76 +0,0 @@
-use strict;
-use warnings;  
-
-use Test::More;
-use lib qw(t/lib);
-use DBICTest;
-
-my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_ORA_${_}" } qw/DSN USER PASS/};
-
-if (not ($dsn && $user && $pass)) {
-    plan skip_all => 'Set $ENV{DBICTEST_ORA_DSN}, _USER and _PASS to run this test. ' .
-         'Warning: This test drops and creates a table called \'track\'';
-}
-else {
-    eval "use DateTime; use DateTime::Format::Oracle;";
-    if ($@) {
-        plan skip_all => 'needs DateTime and DateTime::Format::Oracle for testing';
-    }
-    else {
-        plan tests => 7;
-    }
-}
-
-# DateTime::Format::Oracle needs this set
-$ENV{NLS_DATE_FORMAT} = 'DD-MON-YY';
-$ENV{NLS_TIMESTAMP_FORMAT} = 'YYYY-MM-DD HH24:MI:SSXFF';
-$ENV{NLS_LANG} = 'AMERICAN_AMERICA.WE8ISO8859P1';
-
-my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
-
-# Need to redefine the last_updated_on column
-my $col_metadata = $schema->class('Track')->column_info('last_updated_on');
-$schema->class('Track')->add_column( 'last_updated_on' => {
-    data_type => 'date' });
-$schema->class('Track')->add_column( 'last_updated_at' => {
-    data_type => 'timestamp' });
-
-my $dbh = $schema->storage->dbh;
-
-#$dbh->do("alter session set nls_timestamp_format = 'YYYY-MM-DD HH24:MI:SSXFF'");
-
-eval {
-  $dbh->do("DROP TABLE track");
-};
-$dbh->do("CREATE TABLE track (trackid NUMBER(12), cd NUMBER(12), position NUMBER(12), title VARCHAR(255), last_updated_on DATE, last_updated_at TIMESTAMP)");
-
-# insert a row to play with
-my $new = $schema->resultset('Track')->create({ trackid => 1, cd => 1, position => 1, title => 'Track1', last_updated_on => '06-MAY-07', last_updated_at => '2009-05-03 21:17:18.5' });
-is($new->trackid, 1, "insert sucessful");
-
-my $track = $schema->resultset('Track')->find( 1 );
-
-is( ref($track->last_updated_on), 'DateTime', "last_updated_on inflated ok");
-
-is( $track->last_updated_on->month, 5, "DateTime methods work on inflated column");
-
-#note '$track->last_updated_at => ', $track->last_updated_at;
-is( ref($track->last_updated_at), 'DateTime', "last_updated_at inflated ok");
-
-is( $track->last_updated_at->nanosecond, 500_000_000, "DateTime methods work with nanosecond precision");
-
-my $dt = DateTime->now();
-$track->last_updated_on($dt);
-$track->last_updated_at($dt);
-$track->update;
-
-is( $track->last_updated_on->month, $dt->month, "deflate ok");
-is( int $track->last_updated_at->nanosecond, int $dt->nanosecond, "deflate ok with nanosecond precision");
-
-# clean up our mess
-END {
-    if($dbh) {
-        $dbh->do("DROP TABLE track");
-    }
-}
-

Modified: DBIx-Class/0.08/branches/table_name_ref/t/746mssql.t
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/t/746mssql.t	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/t/746mssql.t	2009-07-09 05:37:22 UTC (rev 7006)
@@ -1,18 +1,20 @@
 use strict;
-use warnings;  
+use warnings;
 
 use Test::More;
+use Test::Exception;
 use lib qw(t/lib);
 use DBICTest;
+use DBIC::SqlMakerTest;
 
 my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_MSSQL_ODBC_${_}" } qw/DSN USER PASS/};
 
 plan skip_all => 'Set $ENV{DBICTEST_MSSQL_ODBC_DSN}, _USER and _PASS to run this test'
   unless ($dsn && $user);
 
-plan tests => 13;
+plan tests => 27;
 
-my $schema = DBICTest::Schema->connect($dsn, $user, $pass, {AutoCommit => 1});
+my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
 
 {
   no warnings 'redefine';
@@ -47,7 +49,7 @@
 my %seen_id;
 
 # fresh $schema so we start unconnected
-$schema = DBICTest::Schema->connect($dsn, $user, $pass, {AutoCommit => 1});
+$schema = DBICTest::Schema->connect($dsn, $user, $pass);
 
 # test primary key handling
 my $new = $schema->resultset('Artist')->create({ name => 'foo' });
@@ -73,10 +75,164 @@
 is( $it->next->name, "Artist 2", "iterator->next ok" );
 is( $it->next, undef, "next past end of resultset ok" );
 
+$schema->storage->dbh_do (sub {
+    my ($storage, $dbh) = @_;
+    eval { $dbh->do("DROP TABLE Owners") };
+    eval { $dbh->do("DROP TABLE Books") };
+    $dbh->do(<<'SQL');
 
+
+CREATE TABLE Books (
+   id INT IDENTITY (1, 1) NOT NULL,
+   source VARCHAR(100),
+   owner INT,
+   title VARCHAR(10),
+   price INT NULL
+)
+
+CREATE TABLE Owners (
+   id INT IDENTITY (1, 1) NOT NULL,
+   name VARCHAR(100),
+)
+
+SQL
+
+});
+
+lives_ok ( sub {
+  $schema->populate ('Owners', [
+    [qw/id  name  /],
+    [qw/1   wiggle/],
+    [qw/2   woggle/],
+    [qw/3   boggle/],
+    [qw/4   fREW/],
+    [qw/5   fRIOUX/],
+    [qw/6   fROOH/],
+    [qw/7   fRUE/],
+    [qw/8   fISMBoC/],
+    [qw/9   station/],
+    [qw/10   mirror/],
+    [qw/11   dimly/],
+    [qw/12   face_to_face/],
+    [qw/13   icarus/],
+    [qw/14   dream/],
+    [qw/15   dyrstyggyr/],
+  ]);
+}, 'populate with PKs supplied ok' );
+
+lives_ok ( sub {
+  $schema->populate ('BooksInLibrary', [
+    [qw/source  owner title   /],
+    [qw/Library 1     secrets0/],
+    [qw/Library 1     secrets1/],
+    [qw/Eatery  1     secrets2/],
+    [qw/Library 2     secrets3/],
+    [qw/Library 3     secrets4/],
+    [qw/Eatery  3     secrets5/],
+    [qw/Library 4     secrets6/],
+    [qw/Library 5     secrets7/],
+    [qw/Eatery  5     secrets8/],
+    [qw/Library 6     secrets9/],
+    [qw/Library 7     secrets10/],
+    [qw/Eatery  7     secrets11/],
+    [qw/Library 8     secrets12/],
+  ]);
+}, 'populate without PKs supplied ok' );
+
+#
+# try a prefetch on tables with identically named columns
+#
+
+# set quote char - make sure things work while quoted
+$schema->storage->_sql_maker->{quote_char} = [qw/[ ]/];
+$schema->storage->_sql_maker->{name_sep} = '.';
+
+{
+  # try a ->has_many direction
+  my $owners = $schema->resultset ('Owners')->search ({
+      'books.id' => { '!=', undef }
+    }, {
+      prefetch => 'books',
+      order_by => 'name',
+      rows     => 3,  # 8 results total
+    });
+
+  is ($owners->page(1)->all, 3, 'has_many prefetch returns correct number of rows');
+  is ($owners->page(1)->count, 3, 'has-many prefetch returns correct count');
+
+  TODO: {
+    local $TODO = 'limit past end of resultset problem';
+    is ($owners->page(3)->all, 2, 'has_many prefetch returns correct number of rows');
+    is ($owners->page(3)->count, 2, 'has-many prefetch returns correct count');
+    is ($owners->page(3)->count_rs->next, 2, 'has-many prefetch returns correct count_rs');
+
+    # make sure count does not become overly complex FIXME
+    is_same_sql_bind (
+      $owners->page(3)->count_rs->as_query,
+      '(
+        SELECT COUNT( * )
+          FROM (
+            SELECT TOP 3 [me].[id]
+              FROM [owners] [me]
+              LEFT JOIN [books] [books] ON [books].[owner] = [me].[id]
+            WHERE ( [books].[id] IS NOT NULL )
+            GROUP BY [me].[id]
+            ORDER BY [me].[id] DESC
+          ) [count_subq]
+      )',
+      [],
+    );
+  }
+
+  # try a ->belongs_to direction (no select collapse, group_by should work)
+  my $books = $schema->resultset ('BooksInLibrary')->search ({
+      'owner.name' => [qw/wiggle woggle/],
+    }, {
+      distinct => 1,
+      prefetch => 'owner',
+      rows     => 2,  # 3 results total
+      order_by => { -desc => 'owner' },
+      # there is no sane way to order by the right side of a grouped prefetch currently :(
+      #order_by => { -desc => 'owner.name' },
+    });
+
+
+  is ($books->page(1)->all, 2, 'Prefetched grouped search returns correct number of rows');
+  is ($books->page(1)->count, 2, 'Prefetched grouped search returns correct count');
+
+  TODO: {
+    local $TODO = 'limit past end of resultset problem';
+    is ($books->page(2)->all, 1, 'Prefetched grouped search returns correct number of rows');
+    is ($books->page(2)->count, 1, 'Prefetched grouped search returns correct count');
+    is ($books->page(2)->count_rs->next, 1, 'Prefetched grouped search returns correct count_rs');
+
+    # make sure count does not become overly complex FIXME
+    is_same_sql_bind (
+      $books->page(2)->count_rs->as_query,
+      '(
+        SELECT COUNT( * )
+          FROM (
+            SELECT TOP 2 [me].[id]
+              FROM [books] [me]
+              JOIN [owners] [owner] ON [owner].[id] = [me].[owner]
+            WHERE ( ( ( [owner].[name] = ? OR [owner].[name] = ? ) AND [source] = ? ) )
+            GROUP BY [me].[id], [me].[source], [me].[owner], [me].[title], [me].[price], [owner].[id], [owner].[name]
+            ORDER BY [me].[id] DESC
+          ) [count_subq]
+      )',
+      [
+        [ 'owner.name' => 'wiggle' ],
+        [ 'owner.name' => 'woggle' ],
+        [ 'source' => 'Library' ],
+      ],
+    );
+  }
+
+}
+
 # clean up our mess
 END {
     my $dbh = eval { $schema->storage->_dbh };
     $dbh->do('DROP TABLE artist') if $dbh;
 }
-
+# vim:sw=2 sts=2

Modified: DBIx-Class/0.08/branches/table_name_ref/t/746sybase.t
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/t/746sybase.t	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/t/746sybase.t	2009-07-09 05:37:22 UTC (rev 7006)
@@ -2,6 +2,7 @@
 use warnings;  
 
 use Test::More;
+use Test::Exception;
 use lib qw(t/lib);
 use DBICTest;
 
@@ -10,13 +11,21 @@
 plan skip_all => 'Set $ENV{DBICTEST_SYBASE_DSN}, _USER and _PASS to run this test'
   unless ($dsn && $user);
 
-plan tests => 12;
+plan tests => 13;
 
 my $schema = DBICTest::Schema->connect($dsn, $user, $pass, {AutoCommit => 1});
 
+# start disconnected to test reconnection
 $schema->storage->ensure_connected;
+$schema->storage->_dbh->disconnect;
+
 isa_ok( $schema->storage, 'DBIx::Class::Storage::DBI::Sybase' );
 
+my $dbh;
+lives_ok (sub {
+  $dbh = $schema->storage->dbh;
+}, 'reconnect works');
+
 $schema->storage->dbh_do (sub {
     my ($storage, $dbh) = @_;
     eval { $dbh->do("DROP TABLE artist") };

Modified: DBIx-Class/0.08/branches/table_name_ref/t/74mssql.t
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/t/74mssql.t	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/t/74mssql.t	2009-07-09 05:37:22 UTC (rev 7006)
@@ -9,6 +9,7 @@
 }
 
 use Test::More;
+use Test::Exception;
 use lib qw(t/lib);
 use DBICTest;
 
@@ -17,15 +18,22 @@
 plan skip_all => 'Set $ENV{DBICTEST_MSSQL_DSN}, _USER and _PASS to run this test'
   unless ($dsn);
 
-plan tests => 6;
+plan tests => 7;
 
 my $schema = DBICTest::Schema->clone;
 $schema->connection($dsn, $user, $pass);
 
-my $dbh = $schema->storage->dbh;
+# start disconnected to test reconnection
+$schema->storage->ensure_connected;
+$schema->storage->_dbh->disconnect;
 
 isa_ok($schema->storage, 'DBIx::Class::Storage::DBI::Sybase::Microsoft_SQL_Server');
 
+my $dbh;
+lives_ok (sub {
+  $dbh = $schema->storage->dbh;
+}, 'reconnect works');
+
 $dbh->do("IF OBJECT_ID('artist', 'U') IS NOT NULL
     DROP TABLE artist");
 $dbh->do("IF OBJECT_ID('cd', 'U') IS NOT NULL

Modified: DBIx-Class/0.08/branches/table_name_ref/t/83cache.t
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/t/83cache.t	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/t/83cache.t	2009-07-09 05:37:22 UTC (rev 7006)
@@ -8,10 +8,9 @@
 my $schema = DBICTest->init_schema();
 
 my $queries;
-$schema->storage->debugcb( sub{ $queries++ } );
+my $debugcb = sub{ $queries++ };
+my $sdebug = $schema->storage->debug;
 
-eval "use DBD::SQLite";
-plan skip_all => 'needs DBD::SQLite for testing' if $@;
 plan tests => 23;
 
 my $rs = $schema->resultset("Artist")->search(
@@ -46,6 +45,7 @@
 
 $queries = 0;
 $schema->storage->debug(1);
+$schema->storage->debugcb ($debugcb);
 
 $rs = $schema->resultset('Artist')->search( undef, { cache => 1 } );
 while( $artist = $rs->next ) {}
@@ -53,7 +53,8 @@
 
 is( $queries, 1, 'revisiting a row does not issue a query when cache => 1' );
 
-$schema->storage->debug(0);
+$schema->storage->debug($sdebug);
+$schema->storage->debugcb (undef);
 
 my @a = $schema->resultset("Artist")->search(
   { },
@@ -78,6 +79,7 @@
 # start test for prefetch SELECT count
 $queries = 0;
 $schema->storage->debug(1);
+$schema->storage->debugcb ($debugcb);
 
 $artist = $rs->first;
 $rs->reset();
@@ -99,7 +101,8 @@
 
 is($queries, 1, 'only one SQL statement executed');
 
-$schema->storage->debug(0);
+$schema->storage->debug($sdebug);
+$schema->storage->debugcb (undef);
 
 # make sure related_resultset is deleted after object is updated
 $artist->set_column('name', 'New Name');
@@ -131,18 +134,21 @@
 # SELECT count for nested has_many prefetch
 $queries = 0;
 $schema->storage->debug(1);
+$schema->storage->debugcb ($debugcb);
 
 $artist = ($rs->all)[0];
 
 is($queries, 1, 'only one SQL statement executed');
 
-$schema->storage->debug(0);
+$schema->storage->debug($sdebug);
+$schema->storage->debugcb (undef);
 
 my @objs;
 #$artist = $rs->find(1);
 
 $queries = 0;
 $schema->storage->debug(1);
+$schema->storage->debugcb ($debugcb);
 
 my $cds = $artist->cds;
 my $tags = $cds->next->tags;
@@ -185,5 +191,5 @@
 
 is( $queries, 1, 'only one select statement on find with has_many prefetch on resultset' );
 
-$schema->storage->debug(0);
-
+$schema->storage->debug($sdebug);
+$schema->storage->debugcb (undef);

Modified: DBIx-Class/0.08/branches/table_name_ref/t/86might_have.t
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/t/86might_have.t	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/t/86might_have.t	2009-07-09 05:37:22 UTC (rev 7006)
@@ -8,14 +8,11 @@
 my $schema = DBICTest->init_schema();
 
 my $queries;
-#$schema->storage->debugfh(IO::File->new('t/var/temp.trace', 'w'));
 $schema->storage->debugcb( sub{ $queries++ } );
+my $sdebug = $schema->storage->debug;
 
-eval "use DBD::SQLite";
-plan skip_all => 'needs DBD::SQLite for testing' if $@;
 plan tests => 2;
 
-
 my $cd = $schema->resultset("CD")->find(1);
 $cd->title('test');
 
@@ -28,7 +25,7 @@
 is($queries, 1, 'liner_notes (might_have) not prefetched - do not load 
 liner_notes on update');
 
-$schema->storage->debug(0);
+$schema->storage->debug($sdebug);
 
 
 my $cd2 = $schema->resultset("CD")->find(2, {prefetch => 'liner_notes'});
@@ -43,5 +40,4 @@
 is($queries, 1, 'liner_notes (might_have) prefetched - do not load 
 liner_notes on update');
 
-$schema->storage->debug(0);
-
+$schema->storage->debug($sdebug);

Modified: DBIx-Class/0.08/branches/table_name_ref/t/88result_set_column.t
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/t/88result_set_column.t	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/t/88result_set_column.t	2009-07-09 05:37:22 UTC (rev 7006)
@@ -8,10 +8,9 @@
 
 my $schema = DBICTest->init_schema();
 
-plan tests => 18;
+plan tests => 20;
 
-my $cd;
-my $rs = $cd = $schema->resultset("CD")->search({}, { order_by => 'cdid' });
+my $rs = $schema->resultset("CD")->search({}, { order_by => 'cdid' });
 
 my $rs_title = $rs->get_column('title');
 my $rs_year = $rs->get_column('year');
@@ -76,3 +75,22 @@
 my $owner = $schema->resultset('Owners')->find ({ name => 'Newton' });
 ok ($owner->books->count > 1, 'Owner Newton has multiple books');
 is ($owner->search_related ('books')->get_column ('price')->sum, 60, 'Correctly calculated price of all owned books');
+
+
+# make sure joined/prefetched get_column of a PK dtrt
+
+$rs->reset;
+my $j_rs = $rs->search ({}, { join => 'tracks' })->get_column ('cdid');
+is_deeply (
+  [ $j_rs->all ],
+  [ map { my $c = $rs->next; ( ($c->id) x $c->tracks->count ) } (1 .. $rs->count) ],
+  'join properly explodes the number of rows from get_column',
+);
+
+$rs->reset;
+my $p_rs = $rs->search ({}, { prefetch => 'tracks' })->get_column ('cdid');
+is_deeply (
+  [ $p_rs->all ],
+  [ $rs->get_column ('cdid')->all ],
+  'prefetch properly collapses the number of rows from get_column',
+);

Modified: DBIx-Class/0.08/branches/table_name_ref/t/90join_torture.t
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/t/90join_torture.t	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/t/90join_torture.t	2009-07-09 05:37:22 UTC (rev 7006)
@@ -46,9 +46,9 @@
 
 my $rs3 = $rs2->search_related('cds');
 
-cmp_ok(scalar($rs3->all), '==', 45, "All cds for artist returned");
+cmp_ok(scalar($rs3->all), '==', 15, "All cds for artist returned");
 
-cmp_ok($rs3->count, '==', 45, "All cds for artist returned via count");
+cmp_ok($rs3->count, '==', 15, "All cds for artist returned via count");
 
 my $rs4 = $schema->resultset("CD")->search({ 'artist.artistid' => '1' }, { join => ['tracks', 'artist'], prefetch => 'artist' });
 my @rs4_results = $rs4->all;

Added: DBIx-Class/0.08/branches/table_name_ref/t/92storage_on_connect_call.t
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/t/92storage_on_connect_call.t	                        (rev 0)
+++ DBIx-Class/0.08/branches/table_name_ref/t/92storage_on_connect_call.t	2009-07-09 05:37:22 UTC (rev 7006)
@@ -0,0 +1,68 @@
+use strict;
+use warnings;
+no warnings qw/once redefine/;
+
+use lib qw(t/lib);
+use DBICTest;
+
+use Test::More tests => 9;
+
+my $schema = DBICTest->init_schema(
+  no_connect  => 1,
+  no_deploy   => 1,
+);
+
+local *DBIx::Class::Storage::DBI::connect_call_foo = sub {
+  isa_ok $_[0], 'DBIx::Class::Storage::DBI',
+    'got storage in connect_call method';
+  is $_[1], 'bar', 'got param in connect_call method';
+};
+
+local *DBIx::Class::Storage::DBI::disconnect_call_foo = sub {
+  isa_ok $_[0], 'DBIx::Class::Storage::DBI',
+    'got storage in disconnect_call method';
+};
+
+ok $schema->connection(
+  DBICTest->_database,
+  {
+    on_connect_call => [
+        [ do_sql => 'create table test1 (id integer)' ],
+        [ do_sql => [ 'insert into test1 values (?)', {}, 1 ] ],
+        [ do_sql => sub { ['insert into test1 values (2)'] } ],
+        [ sub { $_[0]->dbh->do($_[1]) }, 'insert into test1 values (3)' ],
+        # this invokes $storage->connect_call_foo('bar') (above)
+        [ foo => 'bar' ],
+    ],
+    on_connect_do => 'insert into test1 values (4)',
+    on_disconnect_call => 'foo',
+  },
+), 'connection()';
+
+is_deeply (
+  $schema->storage->dbh->selectall_arrayref('select * from test1'),
+  [ [ 1 ], [ 2 ], [ 3 ], [ 4 ] ],
+  'on_connect_call/do actions worked'
+);
+
+local *DBIx::Class::Storage::DBI::connect_call_foo = sub {
+  isa_ok $_[0], 'DBIx::Class::Storage::DBI',
+    'got storage in connect_call method';
+};
+
+local *DBIx::Class::Storage::DBI::connect_call_bar = sub {
+  isa_ok $_[0], 'DBIx::Class::Storage::DBI',
+    'got storage in connect_call method';
+};
+
+$schema->storage->disconnect;
+
+ok $schema->connection(
+  DBICTest->_database,
+  {
+    # method list form
+    on_connect_call => [ 'foo', sub { ok 1, "coderef in list form" }, 'bar' ],
+  },
+), 'connection()';
+
+$schema->storage->ensure_connected;

Modified: DBIx-Class/0.08/branches/table_name_ref/t/95sql_maker_quote.t
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/t/95sql_maker_quote.t	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/t/95sql_maker_quote.t	2009-07-09 05:37:22 UTC (rev 7006)
@@ -35,12 +35,23 @@
               {
                 'artist.artistid' => 'me.artist'
               }
-            ]
+            ],
+            [
+              {
+                'tracks' => 'tracks',
+                '-join_type' => 'left'
+              },
+              {
+                'tracks.cd' => 'me.cdid'
+              }
+            ],
           ],
           [
-            {
-              'count' => '*'
-            }
+            'me.cdid',
+            { count => 'tracks.cd' },
+            { -select => 'me.artist' },
+            { -select => 'me.title', -as => 'name' },
+            { -select => { min => 'me.year' }, -as => 'me.minyear' },
           ],
           {
             'artist.name' => 'Caterwauler McCrae',
@@ -53,8 +64,15 @@
 
 is_same_sql_bind(
   $sql, \@bind,
-  q/SELECT COUNT( * ) FROM `cd` `me`  JOIN `artist` `artist` ON ( `artist`.`artistid` = `me`.`artist` ) WHERE ( `artist`.`name` = ? AND `me`.`year` = ? )/, [ ['artist.name' => 'Caterwauler McCrae'], ['me.year' => 2001] ],
-  'got correct SQL and bind parameters for count query with quoting'
+  q/
+    SELECT `me`.`cdid`, COUNT( `tracks`.`cd` ), `me`.`artist`, `me`.`title` AS `name`, MIN( `me`.`year` ) AS `me`.`minyear`
+      FROM `cd` `me`
+      JOIN `artist` `artist` ON ( `artist`.`artistid` = `me`.`artist` )
+      LEFT JOIN `tracks` `tracks` ON ( `tracks`.`cd` = `me`.`cdid` )
+    WHERE ( `artist`.`name` = ? AND `me`.`year` = ? )
+  /,
+  [ ['artist.name' => 'Caterwauler McCrae'], ['me.year' => 2001] ],
+  'got correct SQL and bind parameters for complex select query with quoting'
 );
 
 

Modified: DBIx-Class/0.08/branches/table_name_ref/t/cdbi/02-Film.t
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/t/cdbi/02-Film.t	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/t/cdbi/02-Film.t	2009-07-09 05:37:22 UTC (rev 7006)
@@ -8,8 +8,7 @@
     plan (skip_all => 'Class::Trigger and DBIx::ContextualFetch required');
     next;
   }
-  eval "use DBD::SQLite";
-  plan $@ ? (skip_all => 'needs DBD::SQLite for testing') : (tests => 98);
+  plan tests => 98;
 }
 
 INIT {
@@ -187,17 +186,14 @@
 	ok(!Film->retrieve('Ishtar'), 'Ishtar no longer there');
 	{
 		my $deprecated = 0;
-		#local $SIG{__WARN__} = sub { $deprecated++ if $_[0] =~ /deprecated/ };
+		local $SIG{__WARN__} = sub { $deprecated++ if $_[0] =~ /deprecated/ };
 		ok(
 			Film->delete(Director => 'Elaine May'),
 			"In fact, delete all films by Elaine May"
 		);
 		cmp_ok(Film->search(Director => 'Elaine May'), '==',
 			0, "0 Films by Elaine May");
-                SKIP: {
-                    skip "No deprecated warnings from compat layer", 1;
-		    is $deprecated, 1, "Got a deprecated warning";
-                }
+		is $deprecated, 0, "No deprecated warnings from compat layer";
 	}
 };
 is $@, '', "No problems with deletes";

Modified: DBIx-Class/0.08/branches/table_name_ref/t/count/count_rs.t
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/t/count/count_rs.t	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/t/count/count_rs.t	2009-07-09 05:37:22 UTC (rev 7006)
@@ -33,7 +33,7 @@
     \@bind,
     'SELECT COUNT( * )
       FROM cd me
-      LEFT JOIN track tracks ON tracks.cd = me.cdid
+      JOIN track tracks ON tracks.cd = me.cdid
       JOIN cd disc ON disc.cdid = tracks.cd
       LEFT JOIN lyrics lyrics ON lyrics.track_id = tracks.trackid 
      WHERE ( ( position = ? OR position = ? ) )
@@ -51,7 +51,7 @@
        FROM (
         SELECT tracks.trackid
           FROM cd me
-          LEFT JOIN track tracks ON tracks.cd = me.cdid
+          JOIN track tracks ON tracks.cd = me.cdid
           JOIN cd disc ON disc.cdid = tracks.cd
           LEFT JOIN lyrics lyrics ON lyrics.track_id = tracks.trackid 
         WHERE ( ( position = ? OR position = ? ) )
@@ -85,7 +85,7 @@
       FROM (
         SELECT cds.cdid
           FROM artist me
-          LEFT JOIN cd cds ON cds.artist = me.artistid
+          JOIN cd cds ON cds.artist = me.artistid
           LEFT JOIN track tracks ON tracks.cd = cds.cdid
           JOIN artist artist ON artist.artistid = cds.artist
         WHERE tracks.position = ? OR tracks.position = ?
@@ -105,7 +105,7 @@
       FROM (
         SELECT cds.cdid
           FROM artist me
-          LEFT JOIN cd cds ON cds.artist = me.artistid
+          JOIN cd cds ON cds.artist = me.artistid
           LEFT JOIN track tracks ON tracks.cd = cds.cdid
           JOIN artist artist ON artist.artistid = cds.artist
         WHERE tracks.position = ? OR tracks.position = ?

Modified: DBIx-Class/0.08/branches/table_name_ref/t/count/distinct.t
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/t/count/distinct.t	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/t/count/distinct.t	2009-07-09 05:37:22 UTC (rev 7006)
@@ -11,7 +11,7 @@
 
 my $schema = DBICTest->init_schema();
 
-plan tests => 58;
+plan tests => 56;
 
 # The tag Blue is assigned to cds 1 2 3 and 5
 # The tag Cheesy is assigned to cds 2 4 and 5
@@ -80,17 +80,6 @@
   is($get_count->($rs), 3, 'Count by distinct function result as select literal');
 }
 
-eval {
-  my @warnings;
-  local $SIG{__WARN__} = sub { $_[0] =~ /The select => { distinct => ... } syntax will be deprecated/ 
-    ? push @warnings, @_
-    : warn @_
-  };
-  my $row = $schema->resultset('Tag')->search({}, { select => { distinct => 'tag' } })->first;
-  is (@warnings, 1, 'Warned about deprecated distinct') if $DBIx::Class::VERSION < 0.09;
-};
-ok ($@, 'Exception on deprecated distinct usage thrown') if $DBIx::Class::VERSION >= 0.09;
-
 throws_ok(
   sub { my $row = $schema->resultset('Tag')->search({}, { select => { distinct => [qw/tag cd/] } })->first },
   qr/select => { distinct => \.\.\. } syntax is not supported for multiple columns/,
@@ -99,4 +88,3 @@
 
 # These two rely on the database to throw an exception. This might not be the case one day. Please revise.
 dies_ok(sub { my $count = $schema->resultset('Tag')->search({}, { '+select' => \'tagid AS tag_id', distinct => 1 })->count }, 'expecting to die');
-dies_ok(sub { my $count = $schema->resultset('Tag')->search({}, { select => { length => 'tag' }, distinct => 1 })->count }, 'expecting to die');

Modified: DBIx-Class/0.08/branches/table_name_ref/t/count/joined.t
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/t/count/joined.t	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/t/count/joined.t	2009-07-09 05:37:22 UTC (rev 7006)
@@ -7,7 +7,7 @@
 
 use DBICTest;
 
-plan tests => 3;
+plan tests => 7;
 
 my $schema = DBICTest->init_schema();
 
@@ -26,6 +26,12 @@
   "Count correct with requested distinct collapse of main table"
 );
 
+# JOIN and LEFT JOIN issues mean that we've seen problems where counted rows and fetched rows are sometimes 1 higher than they should
+# be in the related resultset.
+my $artist=$schema->resultset('Artist')->create({name => 'xxx'});
+is($artist->related_resultset('cds')->count(), 0, "No CDs found for a shiny new artist");
+is(scalar($artist->related_resultset('cds')->all()), 0, "No CDs fetched for a shiny new artist");
 
-
-
+my $artist_rs = $schema->resultset('Artist')->search({artistid => $artist->id});
+is($artist_rs->related_resultset('cds')->count(), 0, "No CDs counted for a shiny new artist using a resultset search");
+is(scalar($artist_rs->related_resultset('cds')->all), 0, "No CDs fetched for a shiny new artist using a resultset search");

Modified: DBIx-Class/0.08/branches/table_name_ref/t/count/prefetch.t
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/t/count/prefetch.t	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/t/count/prefetch.t	2009-07-09 05:37:22 UTC (rev 7006)
@@ -32,7 +32,7 @@
   is_same_sql_bind (
     $sql,
     \@bind,
-    'SELECT COUNT( * ) FROM (SELECT cds.cdid FROM artist me LEFT JOIN cd cds ON cds.artist = me.artistid LEFT JOIN track tracks ON tracks.cd = cds.cdid JOIN artist artist ON artist.artistid = cds.artist WHERE tracks.position = ? OR tracks.position = ? GROUP BY cds.cdid) count_subq',
+    'SELECT COUNT( * ) FROM (SELECT cds.cdid FROM artist me JOIN cd cds ON cds.artist = me.artistid LEFT JOIN track tracks ON tracks.cd = cds.cdid JOIN artist artist ON artist.artistid = cds.artist WHERE tracks.position = ? OR tracks.position = ? GROUP BY cds.cdid) count_subq',
     [ qw/'1' '2'/ ],
   );
 }
@@ -57,7 +57,7 @@
   is_same_sql_bind (
     $sql,
     \@bind,
-    'SELECT COUNT( * ) FROM cd me LEFT JOIN track tracks ON tracks.cd = me.cdid JOIN cd disc ON disc.cdid = tracks.cd LEFT JOIN lyrics lyrics ON lyrics.track_id = tracks.trackid WHERE ( ( position = ? OR position = ? ) )',
+    'SELECT COUNT( * ) FROM cd me JOIN track tracks ON tracks.cd = me.cdid JOIN cd disc ON disc.cdid = tracks.cd LEFT JOIN lyrics lyrics ON lyrics.track_id = tracks.trackid WHERE ( ( position = ? OR position = ? ) )',
     [ qw/'1' '2'/ ],
   );
 }

Modified: DBIx-Class/0.08/branches/table_name_ref/t/from_subquery.t
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/t/from_subquery.t	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/t/from_subquery.t	2009-07-09 05:37:22 UTC (rev 7006)
@@ -3,17 +3,12 @@
 
 use Test::More;
 
-BEGIN {
-    eval "use SQL::Abstract 1.49";
-    plan $@
-        ? ( skip_all => "Needs SQLA 1.49+" )
-        : ( tests => 8 );
-}
-
 use lib qw(t/lib);
 use DBICTest;
 use DBIC::SqlMakerTest;
 
+plan tests => 8;
+
 my $schema = DBICTest->init_schema();
 my $art_rs = $schema->resultset('Artist');
 my $cdrs = $schema->resultset('CD');

Copied: DBIx-Class/0.08/branches/table_name_ref/t/inflate/datetime_oracle.t (from rev 6821, DBIx-Class/0.08/branches/table_name_ref/t/73oracle_inflate.t)
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/t/inflate/datetime_oracle.t	                        (rev 0)
+++ DBIx-Class/0.08/branches/table_name_ref/t/inflate/datetime_oracle.t	2009-07-09 05:37:22 UTC (rev 7006)
@@ -0,0 +1,103 @@
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+
+my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_ORA_${_}" } qw/DSN USER PASS/};
+
+if (not ($dsn && $user && $pass)) {
+    plan skip_all => 'Set $ENV{DBICTEST_ORA_DSN}, _USER and _PASS to run this test. ' .
+         'Warning: This test drops and creates a table called \'track\'';
+}
+else {
+    eval "use DateTime; use DateTime::Format::Oracle;";
+    if ($@) {
+        plan skip_all => 'needs DateTime and DateTime::Format::Oracle for testing';
+    }
+    else {
+        plan tests => 10;
+    }
+}
+
+# DateTime::Format::Oracle needs this set
+$ENV{NLS_DATE_FORMAT} = 'DD-MON-YY';
+$ENV{NLS_TIMESTAMP_FORMAT} = 'YYYY-MM-DD HH24:MI:SSXFF';
+$ENV{NLS_LANG} = 'AMERICAN_AMERICA.WE8ISO8859P1';
+
+my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
+
+# Need to redefine the last_updated_on column
+my $col_metadata = $schema->class('Track')->column_info('last_updated_on');
+$schema->class('Track')->add_column( 'last_updated_on' => {
+    data_type => 'date' });
+$schema->class('Track')->add_column( 'last_updated_at' => {
+    data_type => 'timestamp' });
+
+my $dbh = $schema->storage->dbh;
+
+#$dbh->do("alter session set nls_timestamp_format = 'YYYY-MM-DD HH24:MI:SSXFF'");
+
+eval {
+  $dbh->do("DROP TABLE track");
+};
+$dbh->do("CREATE TABLE track (trackid NUMBER(12), cd NUMBER(12), position NUMBER(12), title VARCHAR(255), last_updated_on DATE, last_updated_at TIMESTAMP)");
+
+# insert a row to play with
+my $new = $schema->resultset('Track')->create({ trackid => 1, cd => 1, position => 1, title => 'Track1', last_updated_on => '06-MAY-07', last_updated_at => '2009-05-03 21:17:18.5' });
+is($new->trackid, 1, "insert successful");
+
+my $track = $schema->resultset('Track')->find( 1 );
+
+is( ref($track->last_updated_on), 'DateTime', "last_updated_on inflated ok");
+
+is( $track->last_updated_on->month, 5, "DateTime methods work on inflated column");
+
+#note '$track->last_updated_at => ', $track->last_updated_at;
+is( ref($track->last_updated_at), 'DateTime', "last_updated_at inflated ok");
+
+is( $track->last_updated_at->nanosecond, 500_000_000, "DateTime methods work with nanosecond precision");
+
+my $dt = DateTime->now();
+$track->last_updated_on($dt);
+$track->last_updated_at($dt);
+$track->update;
+
+is( $track->last_updated_on->month, $dt->month, "deflate ok");
+is( int $track->last_updated_at->nanosecond, int $dt->nanosecond, "deflate ok with nanosecond precision");
+
+# test datetime_setup
+
+$schema->storage->disconnect;
+
+delete $ENV{NLS_DATE_FORMAT};
+delete $ENV{NLS_TIMESTAMP_FORMAT};
+
+$schema->connection($dsn, $user, $pass, {
+    on_connect_call => 'datetime_setup'
+});
+
+$dt = DateTime->now();
+
+my $timestamp = $dt->clone;
+$timestamp->set_nanosecond( int 500_000_000 );
+
+$track = $schema->resultset('Track')->find( 1 );
+$track->update({ last_updated_on => $dt, last_updated_at => $timestamp });
+
+$track = $schema->resultset('Track')->find(1);
+
+is( $track->last_updated_on, $dt, 'DateTime round-trip as DATE' );
+is( $track->last_updated_at, $timestamp, 'DateTime round-trip as TIMESTAMP' );
+
+is( int $track->last_updated_at->nanosecond, int 500_000_000,
+  'TIMESTAMP nanoseconds survived' );
+
+# clean up our mess
+END {
+    if($schema && ($dbh = $schema->storage->dbh)) {
+        $dbh->do("DROP TABLE track");
+    }
+}
+

Modified: DBIx-Class/0.08/branches/table_name_ref/t/lib/DBICTest/Schema/Artwork.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/t/lib/DBICTest/Schema/Artwork.pm	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/t/lib/DBICTest/Schema/Artwork.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -7,6 +7,7 @@
 __PACKAGE__->add_columns(
   'cd_id' => {
     data_type => 'integer',
+    is_nullable => 0,
   },
 );
 __PACKAGE__->set_primary_key('cd_id');

Modified: DBIx-Class/0.08/branches/table_name_ref/t/lib/DBICTest/Schema/CD.pm
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/t/lib/DBICTest/Schema/CD.pm	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/t/lib/DBICTest/Schema/CD.pm	2009-07-09 05:37:22 UTC (rev 7006)
@@ -59,6 +59,7 @@
     { proxy => [ qw/notes/ ] },
 );
 __PACKAGE__->might_have(artwork => 'DBICTest::Schema::Artwork', 'cd_id');
+__PACKAGE__->has_one(mandatory_artwork => 'DBICTest::Schema::Artwork', 'cd_id');
 
 __PACKAGE__->many_to_many( producers => cd_to_producer => 'producer' );
 __PACKAGE__->many_to_many(

Modified: DBIx-Class/0.08/branches/table_name_ref/t/multi_create/m2m.t
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/t/multi_create/m2m.t	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/t/multi_create/m2m.t	2009-07-09 05:37:22 UTC (rev 7006)
@@ -23,7 +23,7 @@
 
   my $cd2 = $schema->resultset('CD')->search ( { cdid => { '!=', $cd->cdid } }, {rows => 1} )->single;  # retrieve a cd different from the first
   $cd2->add_to_producers ({name => 'new m2m producer'});                                                # attach to an existing producer
-  ok ($cd2->producers->find ({name => 'new m2m producer'}), 'Exsiting producer attached to existing cd');
+  ok ($cd2->producers->find ({name => 'new m2m producer'}), 'Existing producer attached to existing cd');
 
 }, 'Test far-end find_or_create over many_to_many');
 

Deleted: DBIx-Class/0.08/branches/table_name_ref/t/multi_create/multilev_might_have_PKeqFK.t
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/t/multi_create/multilev_might_have_PKeqFK.t	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/t/multi_create/multilev_might_have_PKeqFK.t	2009-07-09 05:37:22 UTC (rev 7006)
@@ -1,65 +0,0 @@
-use strict;
-use warnings;
-
-use Test::More;
-use Test::Exception;
-use lib qw(t/lib);
-use DBICTest;
-
-sub mc_diag { diag (@_) if $ENV{DBIC_MULTICREATE_DEBUG} };
-
-plan tests => 8;
-
-my $schema = DBICTest->init_schema();
-
-mc_diag (<<'DG');
-* Test a multilevel might-have with a PK == FK in the might_have/has_many table
-
-CD -> might have -> Artwork
-                       \
-                        \-> has_many \
-                                      --> Artwork_to_Artist
-                        /-> has_many /
-                       /
-                     Artist
-DG
-
-lives_ok (sub {
-  my $someartist = $schema->resultset('Artist')->first;
-  my $cd = $schema->resultset('CD')->create ({
-    artist => $someartist,
-    title => 'Music to code by until the cows come home',
-    year => 2008,
-    artwork => {
-      artwork_to_artist => [
-        { artist => { name => 'cowboy joe' } },
-        { artist => { name => 'billy the kid' } },
-      ],
-    },
-  });
-
-  isa_ok ($cd, 'DBICTest::CD', 'Main CD object created');
-  is ($cd->title, 'Music to code by until the cows come home', 'Correct CD title');
-
-  my $art_obj = $cd->artwork;
-  ok ($art_obj->has_column_loaded ('cd_id'), 'PK/FK present on artwork object');
-  is ($art_obj->artists->count, 2, 'Correct artwork creator count via the new object');
-  is_deeply (
-    [ sort $art_obj->artists->get_column ('name')->all ],
-    [ 'billy the kid', 'cowboy joe' ],
-    'Artists named correctly when queried via object',
-  );
-
-  my $artwork = $schema->resultset('Artwork')->search (
-    { 'cd.title' => 'Music to code by until the cows come home' },
-    { join => 'cd' },
-  )->single;
-  is ($artwork->artists->count, 2, 'Correct artwork creator count via a new search');
-  is_deeply (
-    [ sort $artwork->artists->get_column ('name')->all ],
-    [ 'billy the kid', 'cowboy joe' ],
-    'Artists named correctly queried via a new search',
-  );
-}, 'multilevel might-have with a PK == FK in the might_have/has_many table ok');
-
-1;

Copied: DBIx-Class/0.08/branches/table_name_ref/t/multi_create/multilev_single_PKeqFK.t (from rev 6821, DBIx-Class/0.08/branches/table_name_ref/t/multi_create/multilev_might_have_PKeqFK.t)
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/t/multi_create/multilev_single_PKeqFK.t	                        (rev 0)
+++ DBIx-Class/0.08/branches/table_name_ref/t/multi_create/multilev_single_PKeqFK.t	2009-07-09 05:37:22 UTC (rev 7006)
@@ -0,0 +1,105 @@
+use strict;
+use warnings;
+
+use Test::More;
+use Test::Exception;
+use lib qw(t/lib);
+use DBICTest;
+
+sub mc_diag { diag (@_) if $ENV{DBIC_MULTICREATE_DEBUG} };
+
+plan tests => 26;
+
+my $schema = DBICTest->init_schema();
+
+mc_diag (<<'DG');
+* Test a multilevel might-have/has_one with a PK == FK in the mid-table
+
+CD -> might have -> Artwork
+    \- has_one -/     \
+                       \
+                        \-> has_many \
+                                      --> Artwork_to_Artist
+                        /-> has_many /
+                       /
+                     Artist
+DG
+
+my $rels = {
+  has_one => 'mandatory_artwork',
+  might_have => 'artwork',
+};
+
+for my $type (qw/has_one might_have/) {
+
+  lives_ok (sub {
+
+    my $rel = $rels->{$type};
+    my $cd_title = "Simple test $type cd";
+
+    my $cd = $schema->resultset('CD')->create ({
+      artist => 1,
+      title => $cd_title,
+      year => 2008,
+      $rel => {},
+    });
+
+    isa_ok ($cd, 'DBICTest::CD', 'Main CD object created');
+    is ($cd->title, $cd_title, 'Correct CD title');
+
+    isa_ok ($cd->$rel, 'DBICTest::Artwork', 'Related artwork present');
+    ok ($cd->$rel->in_storage, 'And in storage');
+
+  }, "Simple $type creation");
+}
+
+my $artist_rs = $schema->resultset('Artist');
+for my $type (qw/has_one might_have/) {
+
+  my $rel = $rels->{$type};
+
+  my $cd_title = "Test $type cd";
+  my $artist_names = [ map { "Artist via $type $_" } (1, 2) ];
+
+  my $someartist = $artist_rs->next;
+
+  lives_ok (sub {
+    my $cd = $schema->resultset('CD')->create ({
+      artist => $someartist,
+      title => $cd_title,
+      year => 2008,
+      $rel => {
+        artwork_to_artist => [ map {
+            { artist => { name => $_ } }
+          } (@$artist_names)
+        ]
+      },
+    });
+
+
+    isa_ok ($cd, 'DBICTest::CD', 'Main CD object created');
+    is ($cd->title, $cd_title, 'Correct CD title');
+
+    my $art_obj = $cd->$rel;
+    ok ($art_obj->has_column_loaded ('cd_id'), 'PK/FK present on artwork object');
+    is ($art_obj->artists->count, 2, 'Correct artwork creator count via the new object');
+    is_deeply (
+      [ sort $art_obj->artists->get_column ('name')->all ],
+      $artist_names,
+      'Artists named correctly when queried via object',
+    );
+
+    my $artwork = $schema->resultset('Artwork')->search (
+      { 'cd.title' => $cd_title },
+      { join => 'cd' },
+    )->single;
+    is ($artwork->artists->count, 2, 'Correct artwork creator count via a new search');
+    is_deeply (
+      [ sort $artwork->artists->get_column ('name')->all ],
+      $artist_names,
+      'Artists named correctly queried via a new search',
+    );
+  }, "multilevel $type with a PK == FK in the $type/has_many table ok");
+}
+
+1;

Added: DBIx-Class/0.08/branches/table_name_ref/t/prefetch/count.t
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/t/prefetch/count.t	                        (rev 0)
+++ DBIx-Class/0.08/branches/table_name_ref/t/prefetch/count.t	2009-07-09 05:37:22 UTC (rev 7006)
@@ -0,0 +1,101 @@
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+use DBIC::SqlMakerTest;
+
+plan tests => 23;
+
+my $schema = DBICTest->init_schema();
+
+my $cd_rs = $schema->resultset('CD')->search (
+  { 'tracks.cd' => { '!=', undef } },
+  { prefetch => ['tracks', 'artist'] },
+);
+
+
+is($cd_rs->count, 5, 'CDs with tracks count');
+is($cd_rs->search_related('tracks')->count, 15, 'Tracks associated with CDs count (before SELECT()ing)');
+
+is($cd_rs->all, 5, 'Number of CD objects with tracks');
+is($cd_rs->search_related('tracks')->count, 15, 'Tracks associated with CDs count (after SELECT()ing)');
+
+is($cd_rs->search_related ('tracks')->all, 15, 'Track objects associated with CDs (after SELECT()ing)');
+
+my $artist = $schema->resultset('Artist')->create({name => 'xxx'});
+
+my $artist_rs = $schema->resultset('Artist')->search(
+  {artistid => $artist->id},
+  {prefetch=>'cds', join => 'twokeys' }
+);
+
+is($artist_rs->count, 1, "New artist found with prefetch turned on");
+is(scalar($artist_rs->all), 1, "New artist fetched with prefetch turned on");
+is($artist_rs->related_resultset('cds')->count, 0, "No CDs counted on a brand new artist");
+is(scalar($artist_rs->related_resultset('cds')->all), 0, "No CDs fetched on a brand new artist (count == fetch)");
+
+# create a cd, and make sure the non-existing join does not skew the count
+$artist->create_related ('cds', { title => 'yyy', year => '1999' });
+is($artist_rs->related_resultset('cds')->count, 1, "1 CDs counted on a brand new artist");
+is(scalar($artist_rs->related_resultset('cds')->all), 1, "1 CDs prefetched on a brand new artist (count == fetch)");
+
+# Really fuck shit up with one more cd and some insanity
+# this doesn't quite work, as the prefetch gets lost
+# on search_related. This however is too esoteric to fix right
+# now
+
+my $cd2 = $artist->create_related ('cds', {
+    title => 'zzz',
+    year => '1999',
+    tracks => [{ title => 'ping' }, { title => 'pong' }],
+});
+
+my $cds = $cd2->search_related ('artist', {}, { join => 'twokeys' })
+                  ->search_related ('cds');
+my $tracks = $cds->search_related ('tracks');
+
+is($tracks->count, 2, "2 Tracks counted on cd via artist via one of the cds");
+is(scalar($tracks->all), 2, "2 Track objects on cd via artist via one of the cds");
+
+is($cds->count, 2, "2 CDs counted on artist via one of the cds");
+is(scalar($cds->all), 2, "2 CD objects on artist via one of the cds");
+
+# make sure the join collapses all the way
+is_same_sql_bind (
+  $tracks->count_rs->as_query,
+  '(
+    SELECT COUNT( * )
+      FROM artist me
+      LEFT JOIN twokeys twokeys ON twokeys.artist = me.artistid
+      JOIN cd cds ON cds.artist = me.artistid
+      JOIN track tracks ON tracks.cd = cds.cdid
+    WHERE ( me.artistid = ? )
+  )',
+  [ [ 'me.artistid' => 4 ] ],
+);
+
+
+TODO: {
+  local $TODO = "Chaining with prefetch is fundamentally broken";
+
+  my $queries;
+  $schema->storage->debugcb ( sub { $queries++ } );
+  $schema->storage->debug (1);
+
+  my $cds = $cd2->search_related ('artist', {}, { prefetch => { cds => 'tracks' }, join => 'twokeys' })
+                  ->search_related ('cds');
+
+  my $tracks = $cds->search_related ('tracks');
+
+  is($tracks->count, 2, "2 Tracks counted on cd via artist via one of the cds");
+  is(scalar($tracks->all), 2, "2 Tracks prefetched on cd via artist via one of the cds");
+  is($tracks->count, 2, "Cached 2 Tracks counted on cd via artist via one of the cds");
+
+  is($cds->count, 2, "2 CDs counted on artist via one of the cds");
+  is(scalar($cds->all), 2, "2 CDs prefetched on artist via one of the cds");
+  is($cds->count, 2, "Cached 2 CDs counted on artist via one of the cds");
+
+  is ($queries, 3, '2 counts + 1 prefetch?');
+}

Added: DBIx-Class/0.08/branches/table_name_ref/t/prefetch/grouped.t
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/t/prefetch/grouped.t	                        (rev 0)
+++ DBIx-Class/0.08/branches/table_name_ref/t/prefetch/grouped.t	2009-07-09 05:37:22 UTC (rev 7006)
@@ -0,0 +1,203 @@
+use strict;
+use warnings;
+use Test::More;
+
+use lib qw(t/lib);
+use DBICTest;
+use DBIC::SqlMakerTest;
+
+#plan tests => 6;
+plan 'no_plan';
+
+my $schema = DBICTest->init_schema();
+my $sdebug = $schema->storage->debug;
+
+my $cd_rs = $schema->resultset('CD')->search (
+  { 'tracks.cd' => { '!=', undef } },
+  { prefetch => 'tracks' },
+);
+
+# Database sanity check
+is($cd_rs->count, 5, 'CDs with tracks count');
+for ($cd_rs->all) {
+  is ($_->tracks->count, 3, '3 tracks for CD' . $_->id );
+}
+
+# Test a belongs_to prefetch of a has_many
+{
+  my $track_rs = $schema->resultset ('Track')->search (
+    { 'me.cd' => { -in => [ $cd_rs->get_column ('cdid')->all ] } },
+    {
+      # the select/as is deliberately silly to test both funcs and refs below
+      select => [
+        'me.cd',
+        { count => 'me.trackid' },
+      ],
+      as => [qw/
+        cd
+        track_count
+      /],
+      group_by => [qw/me.cd/],
+      prefetch => 'cd',
+    },
+  );
+
+  # this used to fuck up ->all, do not remove!
+  ok ($track_rs->first, 'There is stuff in the rs');
+
+  is($track_rs->count, 5, 'Prefetched count with groupby');
+  is($track_rs->all, 5, 'Prefetched objects with groupby');
+
+  {
+    my $query_cnt = 0;
+    $schema->storage->debugcb ( sub { $query_cnt++ } );
+    $schema->storage->debug (1);
+
+    while (my $collapsed_track = $track_rs->next) {
+      my $cdid = $collapsed_track->get_column('cd');
+      is($collapsed_track->get_column('track_count'), 3, "Correct count of tracks for CD $cdid" );
+      ok($collapsed_track->cd->title, "Prefetched title for CD $cdid" );
+    }
+
+    is ($query_cnt, 1, 'Single query on prefetched titles');
+    $schema->storage->debugcb (undef);
+    $schema->storage->debug ($sdebug);
+  }
+
+  # Test sql by hand, as the sqlite db will simply paper over
+  # improper group/select combinations
+  #
+  # the exploded IN needs fixing below, coming in another branch
+  #
+  is_same_sql_bind (
+    $track_rs->count_rs->as_query,
+    '(
+      SELECT COUNT( * )
+        FROM (
+          SELECT me.cd
+            FROM track me
+            JOIN cd cd ON cd.cdid = me.cd
+          WHERE ( me.cd IN ( ?, ?, ?, ?, ? ) )
+          GROUP BY me.cd
+        )
+      count_subq
+    )',
+    [ map { [ 'me.cd' => $_] } ($cd_rs->get_column ('cdid')->all) ],
+    'count() query generated expected SQL',
+  );
+
+  is_same_sql_bind (
+    $track_rs->as_query,
+    '(
+      SELECT me.cd, me.track_count, cd.cdid, cd.artist, cd.title, cd.year, cd.genreid, cd.single_track
+        FROM (
+          SELECT me.cd, COUNT (me.trackid) AS track_count,
+            FROM track me
+            JOIN cd cd ON cd.cdid = me.cd
+          WHERE ( me.cd IN ( ?, ?, ?, ?, ? ) )
+          GROUP BY me.cd
+          ) as me
+        JOIN cd cd ON cd.cdid = me.cd
+      WHERE ( me.cd IN ( ?, ?, ?, ?, ? ) )
+    )',
+    [ map { [ 'me.cd' => $_] } ( ($cd_rs->get_column ('cdid')->all) x 2 ) ],
+    'next() query generated expected SQL',
+  );
+
+
+  # add an extra track to one of the cds, and then make sure we can get it on top
+  # (check if limit works)
+  my $top_cd = $cd_rs->slice (1,1)->next;
+  $top_cd->create_related ('tracks', {
+    title => 'over the top',
+  });
+
+  my $top_cd_collapsed_track = $track_rs->search ({}, {
+    rows => 2,
+    order_by => [
+      { -desc => 'track_count' },
+    ],
+  });
+
+  is ($top_cd_collapsed_track->count, 2);
+
+  is (
+    $top_cd->title,
+    $top_cd_collapsed_track->first->cd->title,
+    'Correct collapsed track with prefetched CD returned on top'
+  );
+}
+
+# test a has_many/might_have prefetch at the same level
+# Note that one of the CDs now has 4 tracks instead of 3
+{
+  my $most_tracks_rs = $cd_rs->search ({}, {
+    prefetch => 'liner_notes',  # tracks are already prefetched
+    select => ['me.cdid', { count => 'tracks.trackid' } ],
+    as => [qw/cdid track_count/],
+    group_by => 'me.cdid',
+    order_by => { -desc => 'track_count' },
+    rows => 2,
+  });
+
+  is_same_sql_bind (
+    $most_tracks_rs->count_rs->as_query,
+    '(
+      SELECT COUNT( * )
+        FROM (
+          SELECT me.cdid
+            FROM cd me
+            LEFT JOIN track tracks ON tracks.cd = me.cdid
+            LEFT JOIN liner_notes liner_notes ON liner_notes.liner_id = me.cdid
+          WHERE ( tracks.cd IS NOT NULL )
+          GROUP BY me.cdid
+          LIMIT 2
+        ) count_subq
+    )',
+    [],
+    'count() query generated expected SQL',
+  );
+
+  is_same_sql_bind (
+    $most_tracks_rs->as_query,
+    '(
+      SELECT me.cdid, me.track_count, tracks.trackid, tracks.cd, tracks.position, tracks.title, tracks.last_updated_on, tracks.last_updated_at, liner_notes.liner_id, liner_notes.notes
+        FROM (
+          SELECT me.cdid, COUNT( tracks.trackid ) AS track_count
+            FROM cd me
+            LEFT JOIN track tracks ON tracks.cd = me.cdid
+          WHERE ( tracks.cd IS NOT NULL )
+          GROUP BY me.cdid
+          ORDER BY track_count DESC
+          LIMIT 2
+        ) me
+        LEFT JOIN track tracks ON tracks.cd = me.cdid
+        LEFT JOIN liner_notes liner_notes ON liner_notes.liner_id = me.cdid
+      WHERE ( tracks.cd IS NOT NULL )
+      ORDER BY track_count DESC, tracks.cd
+    )',
+    [],
+    'next() query generated expected SQL',
+  );
+
+  is ($most_tracks_rs->count, 2, 'Limit works');
+  my $top_cd = $most_tracks_rs->first;
+  is ($top_cd->id, 2, 'Correct cd fetched on top'); # 2 because of the slice(1,1) earlier
+
+  my $query_cnt = 0;
+  $schema->storage->debugcb ( sub { $query_cnt++ } );
+  $schema->storage->debug (1);
+
+  is ($top_cd->get_column ('track_count'), 4, 'Track count fetched correctly');
+  is ($top_cd->tracks->count, 4, 'Count of prefetched tracks rs still correct');
+  is ($top_cd->tracks->all, 4, 'Number of prefetched track objects still correct');
+  is (
+    $top_cd->liner_notes->notes,
+    'Buy Whiskey!',
+    'Correct liner pre-fetched with top cd',
+  );
+
+  is ($query_cnt, 0, 'No queries executed during prefetched data access');
+  $schema->storage->debugcb (undef);
+  $schema->storage->debug ($sdebug);
+}

Added: DBIx-Class/0.08/branches/table_name_ref/t/prefetch/incomplete.t
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/t/prefetch/incomplete.t	                        (rev 0)
+++ DBIx-Class/0.08/branches/table_name_ref/t/prefetch/incomplete.t	2009-07-09 05:37:22 UTC (rev 7006)
@@ -0,0 +1,53 @@
+use strict;
+use warnings;
+
+use Test::More;
+use Test::Exception;
+use lib qw(t/lib);
+use DBICTest;
+
+plan tests => 9;
+
+my $schema = DBICTest->init_schema();
+
+lives_ok(sub {
+  # while cds.* will be selected anyway (prefetch currently forces the result of _resolve_prefetch)
+  # only the requested me.name column will be fetched.
+
+  # reference sql with select => [...]
+  #   SELECT me.name, cds.title, cds.cdid, cds.artist, cds.title, cds.year, cds.genreid, cds.single_track FROM ...
+
+  my $rs = $schema->resultset('Artist')->search(
+    { 'cds.title' => { '!=', 'Generic Manufactured Singles' } },
+    {
+      prefetch => [ qw/ cds / ],
+      order_by => [ { -desc => 'me.name' }, 'cds.title' ],
+      select => [qw/ me.name  cds.title / ],
+    }
+  );
+
+  is ($rs->count, 2, 'Correct number of collapsed artists');
+  my $we_are_goth = $rs->first;
+  is ($we_are_goth->name, 'We Are Goth', 'Correct first artist');
+  is ($we_are_goth->cds->count, 1, 'Correct number of CDs for first artist');
+  is ($we_are_goth->cds->first->title, 'Come Be Depressed With Us', 'Correct cd for artist');
+}, 'explicit prefetch on a keyless object works');
+
+
+lives_ok(sub {
+  # test implicit prefetch as well
+
+  my $rs = $schema->resultset('CD')->search(
+    { title => 'Generic Manufactured Singles' },
+    {
+      join=> 'artist',
+      select => [qw/ me.title artist.name / ],
+    }
+  );
+
+  my $cd = $rs->next;
+  is ($cd->title, 'Generic Manufactured Singles', 'CD title prefetched correctly');
+  isa_ok ($cd->artist, 'DBICTest::Artist');
+  is ($cd->artist->name, 'Random Boy Band', 'Artist object has correct name');
+
+}, 'implicit keyless prefetch works');

Modified: DBIx-Class/0.08/branches/table_name_ref/t/prefetch/multiple_hasmany.t
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/t/prefetch/multiple_hasmany.t	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/t/prefetch/multiple_hasmany.t	2009-07-09 05:37:22 UTC (rev 7006)
@@ -5,13 +5,13 @@
 use Test::Exception;
 use lib qw(t/lib);
 use DBICTest;
-use Data::Dumper;
+use IO::File;
 
 plan tests => 10;
 
 my $schema = DBICTest->init_schema();
+my $sdebug = $schema->storage->debug;
 
-use IO::File;
 
 # once the following TODO is complete, remove the 2 warning tests immediately
 # after the TODO block
@@ -44,6 +44,9 @@
     ok(! $o_mm_warn, 'no warning on attempt to prefetch several same level has_many\'s (1 -> M + M)');
 
     is($queries, 1, 'prefetch one->(has_many,has_many) ran exactly 1 query');
+    $schema->storage->debugcb (undef);
+    $schema->storage->debug ($sdebug);
+
     is($pr_tracks_count, $tracks_count, 'equal count of prefetched relations over several same level has_many\'s (1 -> M + M)');
 
     for ($pr_tracks_rs, $tracks_rs) {
@@ -79,6 +82,8 @@
     ok(! $m_o_mm_warn, 'no warning on attempt to prefetch several same level has_many\'s (M -> 1 -> M + M)');
 
     is($queries, 1, 'prefetch one->(has_many,has_many) ran exactly 1 query');
+    $schema->storage->debugcb (undef);
+    $schema->storage->debug ($sdebug);
 
     is($pr_tags_count, $tags_count, 'equal count of prefetched relations over several same level has_many\'s (M -> 1 -> M + M)');
 

Modified: DBIx-Class/0.08/branches/table_name_ref/t/prefetch/standard.t
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/t/prefetch/standard.t	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/t/prefetch/standard.t	2009-07-09 05:37:22 UTC (rev 7006)
@@ -1,25 +1,18 @@
 use strict;
-use warnings;  
+use warnings;
 
 use Test::More;
 use Test::Exception;
 use lib qw(t/lib);
 use DBICTest;
 use Data::Dumper;
+use IO::File;
 
 my $schema = DBICTest->init_schema();
-
 my $orig_debug = $schema->storage->debug;
 
-use IO::File;
+plan tests => 44;
 
-BEGIN {
-    eval "use DBD::SQLite";
-    plan $@
-        ? ( skip_all => 'needs DBD::SQLite for testing' )
-        : ( tests => 45 );
-}
-
 my $queries = 0;
 $schema->storage->debugcb(sub { $queries++; });
 $schema->storage->debug(1);
@@ -227,29 +220,11 @@
 
 $tree_like = eval { $schema->resultset('TreeLike')->search(
     { 'children.id' => 3, 'children_2.id' => 6 }, 
-    { join => [qw/children children/] }
+    { join => [qw/children children children/] }
   )->search_related('children', { 'children_4.id' => 7 }, { prefetch => 'children' }
   )->first->children->first; };
 is(eval { $tree_like->name }, 'fong', 'Tree with multiple has_many joins ok');
 
-# test that collapsed joins don't get a _2 appended to the alias
-
-my $sql = '';
-$schema->storage->debugcb(sub { $sql = $_[1] });
-$schema->storage->debug(1);
-
-eval {
-  my $row = $schema->resultset('Artist')->search_related('cds', undef, {
-    join => 'tracks',
-    prefetch => 'tracks',
-  })->search_related('tracks')->first;
-};
-
-like( $sql, qr/^SELECT tracks_2\.trackid/, "join not collapsed for search_related" );
-
-$schema->storage->debug($orig_debug);
-$schema->storage->debugobj->callback(undef);
-
 $rs = $schema->resultset('Artist');
 $rs->create({ artistid => 4, name => 'Unknown singer-songwriter' });
 $rs->create({ artistid => 5, name => 'Emo 4ever' });
@@ -314,3 +289,5 @@
 
 is($queries, 0, 'chained search_related after has_many->has_many prefetch ran no queries');
 
+$schema->storage->debug($orig_debug);
+$schema->storage->debugobj->callback(undef);

Modified: DBIx-Class/0.08/branches/table_name_ref/t/relationship/core.t
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/t/relationship/core.t	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/t/relationship/core.t	2009-07-09 05:37:22 UTC (rev 7006)
@@ -7,8 +7,9 @@
 use DBICTest;
 
 my $schema = DBICTest->init_schema();
+my $sdebug = $schema->storage->debug;
 
-plan tests => 78;
+plan tests => 79;
 
 # has_a test
 my $cd = $schema->resultset("CD")->find(4);
@@ -57,7 +58,7 @@
   is($queries, 0, 'No SELECT made for belongs_to if key IS NULL');
   $big_flop_cd->genre_inefficient; #should trigger a select query
   is($queries, 1, 'SELECT made for belongs_to if key IS NULL when undef_on_null_fk disabled');
-  $schema->storage->debug(0);
+  $schema->storage->debug($sdebug);
   $schema->storage->debugcb(undef);
 }
 
@@ -275,11 +276,11 @@
 
 cmp_ok($searched->count, '==', 2, "Both artist returned from map after adding another condition");
 
-# check join through cascaded has_many relationships
+# check join through cascaded has_many relationships (also empty has_many rels)
 $artist = $schema->resultset("Artist")->find(1);
 my $trackset = $artist->cds->search_related('tracks');
-# LEFT join means we also see the trackless additional album...
-cmp_ok($trackset->count, '==', 11, "Correct number of tracks for artist");
+is($trackset->count, 10, "Correct number of tracks for artist");
+is($trackset->all, 10, "Correct number of track objects for artist");
 
 # now see about updating eveything that belongs to artist 2 to artist 3
 $artist = $schema->resultset("Artist")->find(2);

Modified: DBIx-Class/0.08/branches/table_name_ref/t/relationship/update_or_create_multi.t
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/t/relationship/update_or_create_multi.t	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/t/relationship/update_or_create_multi.t	2009-07-09 05:37:22 UTC (rev 7006)
@@ -8,9 +8,9 @@
 use DBIC::SqlMakerTest;
 
 my $schema = DBICTest->init_schema();
+my $sdebug = $schema->storage->debug;
 
-#plan tests => 4;
-plan 'no_plan';
+plan tests => 6;
 
 my $artist = $schema->resultset ('Artist')->first;
 
@@ -74,6 +74,7 @@
 });
 
 $schema->storage->debugcb(undef);
+$schema->storage->debug ($sdebug);
 
 is_same_sql (
   $sql[0],

Modified: DBIx-Class/0.08/branches/table_name_ref/t/relationship/update_or_create_single.t
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/t/relationship/update_or_create_single.t	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/t/relationship/update_or_create_single.t	2009-07-09 05:37:22 UTC (rev 7006)
@@ -7,8 +7,7 @@
 
 my $schema = DBICTest->init_schema();
 
-#plan tests => 4;
-plan 'no_plan';
+plan tests => 9;
 
 my $artist = $schema->resultset ('Artist')->first;
 

Modified: DBIx-Class/0.08/branches/table_name_ref/t/zzzzzzz_sqlite_deadlock.t
===================================================================
--- DBIx-Class/0.08/branches/table_name_ref/t/zzzzzzz_sqlite_deadlock.t	2009-07-09 05:35:56 UTC (rev 7005)
+++ DBIx-Class/0.08/branches/table_name_ref/t/zzzzzzz_sqlite_deadlock.t	2009-07-09 05:37:22 UTC (rev 7006)
@@ -14,7 +14,13 @@
 
 for my $close (0,1) {
 
-  my $tmp = File::Temp->new( UNLINK => 1, TMPDIR => 1, SUFFIX => '.sqlite' );
+  my $tmp = File::Temp->new(
+    UNLINK => 1,
+    TMPDIR => 1,
+    SUFFIX => '.sqlite',
+    EXLOCK => 0,  # important for BSD and derivatives
+  );
+
   my $tmp_fn = $tmp->filename;
   close $tmp if $close;
 
@@ -28,7 +34,7 @@
   lives_ok (sub {
     my $schema = DBICTest::Schema->connect ("DBI:SQLite:$tmp_fn");
     DBICTest->deploy_schema ($schema);
-    DBICTest->populate_schema ($schema);
+    #DBICTest->populate_schema ($schema);
   });
 
   alarm 0;



