[Bast-commits] r8321 - in DBIx-Class/0.08/branches/prefetch_pager: . lib/DBIx lib/DBIx/Class lib/DBIx/Class/Manual lib/DBIx/Class/Relationship lib/DBIx/Class/Storage lib/DBIx/Class/Storage/DBI lib/DBIx/Class/Storage/DBI/ADO lib/DBIx/Class/Storage/DBI/Oracle lib/DBIx/Class/Storage/DBI/Replicated lib/DBIx/Class/Storage/DBI/Sybase lib/SQL/Translator/Parser/DBIx t t/inflate t/lib t/lib/DBICTest/Schema t/prefetch t/relationship t/resultset t/schema t/search

ribasushi at dev.catalyst.perl.org
Fri Jan 15 02:16:32 GMT 2010


Author: ribasushi
Date: 2010-01-15 02:16:31 +0000 (Fri, 15 Jan 2010)
New Revision: 8321

Added:
   DBIx-Class/0.08/branches/prefetch_pager/t/resultset/nulls_only.t
   DBIx-Class/0.08/branches/prefetch_pager/t/schema/anon.t
   DBIx-Class/0.08/branches/prefetch_pager/t/search/related_strip_prefetch.t
Modified:
   DBIx-Class/0.08/branches/prefetch_pager/
   DBIx-Class/0.08/branches/prefetch_pager/Changes
   DBIx-Class/0.08/branches/prefetch_pager/Makefile.PL
   DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class.pm
   DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Componentised.pm
   DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Manual/Cookbook.pod
   DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Manual/Troubleshooting.pod
   DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Relationship/HasOne.pm
   DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/ResultSet.pm
   DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/ResultSetColumn.pm
   DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/ResultSource.pm
   DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Row.pm
   DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/SQLAHacks.pm
   DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Schema.pm
   DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI.pm
   DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI/ADO/Microsoft_SQL_Server.pm
   DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI/AmbiguousGlob.pm
   DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI/MSSQL.pm
   DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI/Oracle/Generic.pm
   DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI/Replicated.pm
   DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI/Replicated/Introduction.pod
   DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI/Replicated/Pool.pm
   DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI/Sybase/Microsoft_SQL_Server.pm
   DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI/mysql.pm
   DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/UTF8Columns.pm
   DBIx-Class/0.08/branches/prefetch_pager/lib/SQL/Translator/Parser/DBIx/Class.pm
   DBIx-Class/0.08/branches/prefetch_pager/t/51threads.t
   DBIx-Class/0.08/branches/prefetch_pager/t/51threadtxn.t
   DBIx-Class/0.08/branches/prefetch_pager/t/52cycle.t
   DBIx-Class/0.08/branches/prefetch_pager/t/60core.t
   DBIx-Class/0.08/branches/prefetch_pager/t/71mysql.t
   DBIx-Class/0.08/branches/prefetch_pager/t/73oracle.t
   DBIx-Class/0.08/branches/prefetch_pager/t/746mssql.t
   DBIx-Class/0.08/branches/prefetch_pager/t/74mssql.t
   DBIx-Class/0.08/branches/prefetch_pager/t/85utf8.t
   DBIx-Class/0.08/branches/prefetch_pager/t/86sqlt.t
   DBIx-Class/0.08/branches/prefetch_pager/t/88result_set_column.t
   DBIx-Class/0.08/branches/prefetch_pager/t/99dbic_sqlt_parser.t
   DBIx-Class/0.08/branches/prefetch_pager/t/inflate/hri.t
   DBIx-Class/0.08/branches/prefetch_pager/t/lib/DBICTest.pm
   DBIx-Class/0.08/branches/prefetch_pager/t/lib/DBICTest/Schema/Artist.pm
   DBIx-Class/0.08/branches/prefetch_pager/t/prefetch/double_prefetch.t
   DBIx-Class/0.08/branches/prefetch_pager/t/relationship/core.t
Log:
 r8162 at Thesaurus (orig r8150):  abraxxa | 2009-12-18 15:59:58 +0100
 Schema POD improvement for dclone
 
 r8163 at Thesaurus (orig r8151):  abraxxa | 2009-12-18 16:07:27 +0100
 link to DBIx::Class::Row
 
 r8164 at Thesaurus (orig r8152):  abraxxa | 2009-12-18 16:08:56 +0100
 fixed typo in Changes
 
 r8165 at Thesaurus (orig r8153):  abraxxa | 2009-12-18 16:14:47 +0100
 dclone pod take #2
 
 r8169 at Thesaurus (orig r8157):  ribasushi | 2009-12-19 18:47:42 +0100
 detabify
 r8170 at Thesaurus (orig r8158):  ribasushi | 2009-12-19 19:41:42 +0100
 Fix RT52812
 r8171 at Thesaurus (orig r8159):  caelum | 2009-12-23 07:16:29 +0100
 minor POD fixes
 r8175 at Thesaurus (orig r8163):  ribasushi | 2009-12-24 09:59:52 +0100
 Fix deployment_statements context sensitivity regression
 r8176 at Thesaurus (orig r8164):  ribasushi | 2009-12-24 10:13:37 +0100
 Don't call the PK setter if no PK
 r8204 at Thesaurus (orig r8192):  caelum | 2009-12-30 22:58:47 +0100
 bump CAG dep
 r8231 at Thesaurus (orig r8219):  matthewt | 2010-01-02 01:41:12 +0100
 fix typo in variable name
 r8238 at Thesaurus (orig r8226):  rafl | 2010-01-02 18:46:40 +0100
 Merge branch 'native_traits'
 
 * native_traits:
   Port replicated storage from MXAH to native traits.
   Create branch native_traits
 r8244 at Thesaurus (orig r8232):  caelum | 2010-01-04 00:30:51 +0100
 fix _rebless into sybase/mssql/nobindvars
 r8247 at Thesaurus (orig r8235):  caelum | 2010-01-05 13:54:56 +0100
  r22328 at hlagh (orig r8201):  caelum | 2009-12-31 12:29:51 -0500
  new branch to fix table aliases in queries over the 30char limit
  r22329 at hlagh (orig r8202):  caelum | 2009-12-31 12:55:50 -0500
  failing test
  r22330 at hlagh (orig r8203):  caelum | 2009-12-31 13:00:35 -0500
  switch oracle tests to done_testing()
  r22331 at hlagh (orig r8204):  caelum | 2009-12-31 15:02:50 -0500
  got something working
  r22332 at hlagh (orig r8205):  caelum | 2009-12-31 15:08:30 -0500
  POD touchups
  r22343 at hlagh (orig r8216):  caelum | 2010-01-01 07:42:03 -0500
  fix uninitialized warning and a bug in ResultSet
  r22419 at hlagh (orig r8234):  caelum | 2010-01-05 07:53:18 -0500
  append half of a base64 MD5 to shortened table aliases for Oracle
 
 r8249 at Thesaurus (orig r8237):  caelum | 2010-01-05 15:27:40 +0100
 minor change: use more of the hash if possible for oracle table alias shortening
 r8251 at Thesaurus (orig r8239):  caelum | 2010-01-06 02:20:17 +0100
 bump perl_version to 5.8.1
 r8252 at Thesaurus (orig r8240):  caelum | 2010-01-06 02:21:41 +0100
 remove alignment mark on base64 md5
 r8260 at Thesaurus (orig r8248):  ribasushi | 2010-01-07 11:21:55 +0100
 5.8.1 is minimum required perl
 r8261 at Thesaurus (orig r8249):  ribasushi | 2010-01-07 11:22:42 +0100
 Minor optimization
 r8262 at Thesaurus (orig r8250):  ribasushi | 2010-01-07 11:23:35 +0100
 Wrong title
 r8265 at Thesaurus (orig r8253):  ribasushi | 2010-01-08 17:48:50 +0100
 Resolve problem reported by http://lists.scsys.co.uk/pipermail/dbix-class/2009-December/008699.html
 r8266 at Thesaurus (orig r8254):  ribasushi | 2010-01-08 17:52:01 +0100
 Put utf8columns in line with the store_column fix
 r8267 at Thesaurus (orig r8255):  ribasushi | 2010-01-08 19:03:26 +0100
 Tests while hunting for something else
 r8268 at Thesaurus (orig r8256):  ribasushi | 2010-01-08 19:14:42 +0100
 Make test look even more like http://lists.scsys.co.uk/pipermail/dbix-class/2009-November/008599.html
 r8277 at Thesaurus (orig r8265):  ribasushi | 2010-01-09 02:16:14 +0100
  r8263 at Thesaurus (orig r8251):  ribasushi | 2010-01-08 15:43:38 +0100
  New branch to find a leak
  r8264 at Thesaurus (orig r8252):  ribasushi | 2010-01-08 15:52:46 +0100
  Weird test failures
  r8272 at Thesaurus (orig r8260):  ribasushi | 2010-01-09 01:24:56 +0100
  Proper invocation
  r8273 at Thesaurus (orig r8261):  ribasushi | 2010-01-09 01:35:34 +0100
  Test for the real leak reason
  r8274 at Thesaurus (orig r8262):  ribasushi | 2010-01-09 01:37:33 +0100
  Void ctx as it should be
  r8275 at Thesaurus (orig r8263):  ribasushi | 2010-01-09 02:10:13 +0100
  A "fix" for sqlt-related schema leaks
  r8276 at Thesaurus (orig r8264):  ribasushi | 2010-01-09 02:15:53 +0100
  Changes
 
 r8287 at Thesaurus (orig r8275):  caelum | 2010-01-10 11:29:06 +0100
  r22483 at hlagh (orig r8272):  caelum | 2010-01-09 05:52:15 -0500
  new branch to add "normalize_connect_info" class method to Storage::DBI
  r22495 at hlagh (orig r8274):  caelum | 2010-01-10 05:27:42 -0500
  split connect_info parser out into private _normalize_connect_info
 
 r8289 at Thesaurus (orig r8277):  caelum | 2010-01-10 12:04:52 +0100
 fix connection details in ::DBI::Replicated docs
 r8291 at Thesaurus (orig r8279):  ribasushi | 2010-01-11 09:50:21 +0100
  r8077 at Thesaurus (orig r8065):  ribasushi | 2009-12-12 14:24:30 +0100
  Branch for yet another mssql ordered prefetch problem
  r8079 at Thesaurus (orig r8067):  ribasushi | 2009-12-12 14:37:48 +0100
  prefetch does not get disassembled properly
  r8112 at Thesaurus (orig r8100):  ribasushi | 2009-12-13 00:07:00 +0100
  Extra test to highlight search_related inefficiency
  r8113 at Thesaurus (orig r8101):  ribasushi | 2009-12-13 00:17:44 +0100
  Real test for search_related and prefetch
  r8114 at Thesaurus (orig r8102):  ribasushi | 2009-12-13 00:19:57 +0100
  Fix corner case regression on search_related on a prefetching rs
  r8115 at Thesaurus (orig r8103):  ribasushi | 2009-12-13 00:21:05 +0100
  Isolate prefetch heads using RNO with a subquery
  r8116 at Thesaurus (orig r8104):  ribasushi | 2009-12-13 00:23:46 +0100
  Changes
  r8125 at Thesaurus (orig r8113):  ribasushi | 2009-12-15 13:06:26 +0100
  Extend mssql limited prefetch tests
  r8126 at Thesaurus (orig r8114):  ribasushi | 2009-12-15 13:08:56 +0100
  Add extra test to prove Alan wrong :)
  r8132 at Thesaurus (orig r8120):  ribasushi | 2009-12-16 00:38:04 +0100
  Do not realias tables in the RNO subqueries
  r8133 at Thesaurus (orig r8121):  ribasushi | 2009-12-16 00:50:52 +0100
  Deliberately disturb alphabetical order
  r8134 at Thesaurus (orig r8122):  ribasushi | 2009-12-16 10:26:43 +0100
  Got a failing test
  r8135 at Thesaurus (orig r8123):  ribasushi | 2009-12-16 10:49:10 +0100
  Cleanup
  r8136 at Thesaurus (orig r8124):  ribasushi | 2009-12-16 10:51:58 +0100
  More moving around
  r8137 at Thesaurus (orig r8125):  ribasushi | 2009-12-16 11:25:37 +0100
  The real mssql problem - it's... bad
  r8138 at Thesaurus (orig r8126):  ribasushi | 2009-12-16 11:29:20 +0100
  Clearer debug
  r8139 at Thesaurus (orig r8127):  ribasushi | 2009-12-16 11:47:48 +0100
  This is horrific but the tests pass... maybe someone will figure out something better
  r8140 at Thesaurus (orig r8128):  ribasushi | 2009-12-16 16:45:47 +0100
  cleanup tests
  r8187 at Thesaurus (orig r8175):  ribasushi | 2009-12-24 16:22:30 +0100
  Ordered subqueries do not work in mssql after all
  r8271 at Thesaurus (orig r8259):  ribasushi | 2010-01-08 23:58:13 +0100
  Cleaner RNO sql
  r8279 at Thesaurus (orig r8267):  ribasushi | 2010-01-09 10:13:16 +0100
  Subqueries no longer experimental
  r8280 at Thesaurus (orig r8268):  ribasushi | 2010-01-09 11:26:46 +0100
  Close the book on mssql ordered subqueries
  r8281 at Thesaurus (orig r8269):  ribasushi | 2010-01-09 11:36:36 +0100
  Changes and typos
  r8283 at Thesaurus (orig r8271):  ribasushi | 2010-01-09 11:42:21 +0100
  Highlight the real problem
  r8285 at Thesaurus (orig r8273):  ribasushi | 2010-01-10 10:07:10 +0100
  Rename subquery to subselect and rewrite POD (per castaway)
  r8290 at Thesaurus (orig r8278):  ribasushi | 2010-01-10 17:01:24 +0100
  rename as per mst
 
 r8295 at Thesaurus (orig r8283):  caelum | 2010-01-11 23:42:30 +0100
 make a public ::Schema::unregister_source
 r8298 at Thesaurus (orig r8286):  abraxxa | 2010-01-12 18:04:18 +0100
 fixed a typo in Changes
 more detailed explanation for the warning about has_one/might_have rels on nullable columns
 
 r8307 at Thesaurus (orig r8295):  abraxxa | 2010-01-13 17:28:05 +0100
 added the sources parser arg to the example code
 
 r8327 at Thesaurus (orig r8315):  ribasushi | 2010-01-15 01:25:39 +0100
  r8167 at Thesaurus (orig r8155):  ribasushi | 2009-12-19 12:50:13 +0100
  New branch for null-only-result fix
  r8168 at Thesaurus (orig r8156):  ribasushi | 2009-12-19 12:51:21 +0100
  Failing test
  r8322 at Thesaurus (orig r8310):  ribasushi | 2010-01-15 00:48:09 +0100
  Correct test order
  r8323 at Thesaurus (orig r8311):  ribasushi | 2010-01-15 01:15:33 +0100
  Generalize the to-node inner-join-er to apply to all related_resultset calls, not just counts
  r8324 at Thesaurus (orig r8312):  ribasushi | 2010-01-15 01:16:05 +0100
  Adjust sql-emitter tests
  r8326 at Thesaurus (orig r8314):  ribasushi | 2010-01-15 01:25:10 +0100
  One more sql-test fix and changes
 
 r8328 at Thesaurus (orig r8316):  ribasushi | 2010-01-15 01:31:58 +0100
 Strict mysql bugfix
 r8329 at Thesaurus (orig r8317):  ribasushi | 2010-01-15 01:38:53 +0100
 Better description of mysql strict option
 r8331 at Thesaurus (orig r8319):  ribasushi | 2010-01-15 03:12:13 +0100
 Update troubleshooting doc



Property changes on: DBIx-Class/0.08/branches/prefetch_pager
___________________________________________________________________
Name: svk:merge
   - 168d5346-440b-0410-b799-f706be625ff1:/DBIx-Class-current:2207
462d4d0c-b505-0410-bf8e-ce8f877b3390:/local/bast/DBIx-Class:3159
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/cookbook_fixes:7657
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/prefetch_bug-unqualified_column_in_search_related_cond:7959
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/resultsetcolumn_custom_columns:5160
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/sqla_1.50_compat:5414
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/void_populate_resultset_cond:7935
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/trunk:7982
9c88509d-e914-0410-b01c-b9530614cbfe:/local/DBIx-Class:32260
9c88509d-e914-0410-b01c-b9530614cbfe:/local/DBIx-Class-CDBICompat:54993
9c88509d-e914-0410-b01c-b9530614cbfe:/vendor/DBIx-Class:31122
ab17426e-7cd3-4704-a2a2-80b7c0a611bb:/local/dbic_column_attr:10946
ab17426e-7cd3-4704-a2a2-80b7c0a611bb:/local/dbic_trunk:11788
bd5ac9a7-f185-4d95-9186-dbb8b392a572:/local/os/bast/DBIx-Class/0.08/trunk:2798
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/_abandoned_but_possibly_useful/table_name_ref:7266
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/ado_mssql:7886
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/autocast:7418
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/belongs_to_null_col_fix:5244
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/cdbicompat_integration:4160
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/column_attr:5074
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/complex_join_rels:4589
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/connect_info_hash:7435
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/cookbook_fixes:7479
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/count_distinct:6218
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/count_rs:6741
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/create_scalarref_rt51559:8027
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/diamond_relationships:6310
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/discard_changes_replication_fix:7252
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/file_column:3920
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/fix-update-and-delete-as_query:6162
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/get_inflated_columns_rt46953:7964
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/grouped_has_many_join:7382
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/grouped_prefetch:6885
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/is_resultset_paginated:7769
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/joined_count:6323
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mc_fixes:6645
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_money_type:7096
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_rno_pagination:8054
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_storage_minor_refactor:7210
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_top_fixes:6971
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/multi_stuff:5565
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/multicreate_fixes:7275
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mysql_ansi:7175
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mystery_join:6589
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/new_replication_transaction_fixup:7058
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/on_connect_call:6854
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/on_disconnect_do:3694
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle-tweaks:6222
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle_sequence:4173
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/order_by_refactor:6475
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/parser_fk_index:4485
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/pg_unqualified_schema:7842
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch:5699
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch-group_by:7917
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_bug-unqualified_column_in_search_related_cond:7900
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_limit:6724
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_redux:7206
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/reduce_pings:7261
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/replication_dedux:4600
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/rsrc_in_storage:6577
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/rt_bug_41083:5437
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/savepoints:4223
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/search_related_prefetch:6818
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sqla_1.50_compat:5321
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sqlt_parser_view:8145
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/storage-ms-access:4142
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/storage-tweaks:6262
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/subclassed_rsset:5930
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/subquery:5617
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/syb_connected:6919
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase:7682
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_bulk_insert:7679
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_bulkinsert_support:7796
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_mssql:6125
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_refactor:7940
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_support:7797
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/table_name_ref:7132
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/top_limit_altfix:6429
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/type_aware_update:6619
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/unresolvable_prefetch:6949
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/versioned_enhancements:4125
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/versioning:4578
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/view_rels:7908
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/views:5585
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/void_populate_resultset_cond:7944
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/tags/0.08108_prerelease_please_do_not_pull_into_it:7008
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/tags/pre_0.08109_please_do_not_merge:7336
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-C3:318
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-current:2222
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-joins:173
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-resultset:570
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/datetime:1716
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/find_compat:1855
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/find_unique_query_fixes:2142
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/inflate:1988
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/many_to_many:2025
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/re_refactor_bugfix:1944
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/reorganize_tests:1827
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset-new-refactor:1766
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset_2_electric_boogaloo:2175
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset_cleanup:2102
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/sqlt_tests_refactor:2043
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/trunk/DBIx-Class:3606
fe160bb6-dc1c-0410-9f2b-d64a711b54a5:/local/DBIC-trunk-0.08:10510
   + 168d5346-440b-0410-b799-f706be625ff1:/DBIx-Class-current:2207
462d4d0c-b505-0410-bf8e-ce8f877b3390:/local/bast/DBIx-Class:3159
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/cookbook_fixes:7657
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/prefetch_bug-unqualified_column_in_search_related_cond:7959
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/resultsetcolumn_custom_columns:5160
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/sqla_1.50_compat:5414
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/void_populate_resultset_cond:7935
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/trunk:7982
9c88509d-e914-0410-b01c-b9530614cbfe:/local/DBIx-Class:32260
9c88509d-e914-0410-b01c-b9530614cbfe:/local/DBIx-Class-CDBICompat:54993
9c88509d-e914-0410-b01c-b9530614cbfe:/vendor/DBIx-Class:31122
ab17426e-7cd3-4704-a2a2-80b7c0a611bb:/local/dbic_column_attr:10946
ab17426e-7cd3-4704-a2a2-80b7c0a611bb:/local/dbic_trunk:11788
bd5ac9a7-f185-4d95-9186-dbb8b392a572:/local/os/bast/DBIx-Class/0.08/trunk:2798
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/_abandoned_but_possibly_useful/table_name_ref:7266
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/ado_mssql:7886
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/autocast:7418
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/belongs_to_null_col_fix:5244
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/cdbicompat_integration:4160
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/column_attr:5074
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/complex_join_rels:4589
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/connect_info_hash:7435
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/connected_schema_leak:8264
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/cookbook_fixes:7479
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/count_distinct:6218
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/count_rs:6741
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/create_scalarref_rt51559:8027
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/diamond_relationships:6310
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/discard_changes_replication_fix:7252
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/file_column:3920
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/fix-update-and-delete-as_query:6162
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/get_inflated_columns_rt46953:7964
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/grouped_has_many_join:7382
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/grouped_prefetch:6885
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/is_resultset_paginated:7769
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/joined_count:6323
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mc_fixes:6645
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_limit_regression:8278
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_money_type:7096
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_rno_pagination:8054
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_storage_minor_refactor:7210
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_top_fixes:6971
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/multi_stuff:5565
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/multicreate_fixes:7275
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mysql_ansi:7175
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mystery_join:6589
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/new_replication_transaction_fixup:7058
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/normalize_connect_info:8274
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/null_column_regression:8314
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/on_connect_call:6854
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/on_disconnect_do:3694
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle-tweaks:6222
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle_sequence:4173
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle_shorten_aliases:8234
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/order_by_refactor:6475
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/parser_fk_index:4485
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/pg_unqualified_schema:7842
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch:5699
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch-group_by:7917
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_bug-unqualified_column_in_search_related_cond:7900
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_limit:6724
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_redux:7206
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/reduce_pings:7261
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/replication_dedux:4600
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/rsrc_in_storage:6577
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/rt_bug_41083:5437
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/savepoints:4223
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/search_related_prefetch:6818
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sqla_1.50_compat:5321
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sqlt_parser_view:8145
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/storage-ms-access:4142
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/storage-tweaks:6262
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/subclassed_rsset:5930
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/subquery:5617
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/syb_connected:6919
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase:7682
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_bulk_insert:7679
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_bulkinsert_support:7796
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_mssql:6125
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_refactor:7940
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_support:7797
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/table_name_ref:7132
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/top_limit_altfix:6429
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/type_aware_update:6619
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/unresolvable_prefetch:6949
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/versioned_enhancements:4125
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/versioning:4578
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/view_rels:7908
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/views:5585
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/void_populate_resultset_cond:7944
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/tags/0.08108_prerelease_please_do_not_pull_into_it:7008
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/tags/pre_0.08109_please_do_not_merge:7336
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/trunk:8319
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-C3:318
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-current:2222
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-joins:173
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-resultset:570
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/datetime:1716
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/find_compat:1855
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/find_unique_query_fixes:2142
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/inflate:1988
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/many_to_many:2025
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/re_refactor_bugfix:1944
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/reorganize_tests:1827
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset-new-refactor:1766
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset_2_electric_boogaloo:2175
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset_cleanup:2102
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/sqlt_tests_refactor:2043
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/trunk/DBIx-Class:3606
fe160bb6-dc1c-0410-9f2b-d64a711b54a5:/local/DBIC-trunk-0.08:10510

Modified: DBIx-Class/0.08/branches/prefetch_pager/Changes
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/Changes	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/Changes	2010-01-15 02:16:31 UTC (rev 8321)
@@ -1,10 +1,27 @@
 Revision history for DBIx::Class
 
+        - Perl 5.8.1 is now the minimum supported version
+        - Subqueries no longer marked experimental
         - might_have/has_one now warn if the calling class's column
           has is_nullable set to true.
+        - Fixed regression in deploy() with a {sources} table limit applied
+          (RT#52812)
         - Cookbook POD fix for add_drop_table instead of add_drop_tables
         - Views without a view_definition will throw an exception when
           parsed by SQL::Translator::Parser::DBIx::Class
+        - Schema POD improvement for dclone
+        - Fix regression in context sensitivity of deployment_statements
+        - Fix regression resulting in an overcomplicated query on
+          search_related from prefetching resultsets
+        - Fix regression on all-null returning searches (properly switch
+          LEFT JOIN to JOIN in order to distinguish between the two cases)
+        - Fix regression in grouped resultset count() used on strict-mode
+          MySQL connections
+        - Better isolation of RNO-limited queries from the rest of a
+          prefetching resultset
+        - New MSSQL-specific resultset attribute to allow hacky ordered
+          subquery support
+        - Fix nasty schema/dbhandle leak due to SQL::Translator
 
 0.08115 2009-12-10 09:02:00 (CST)
         - Real limit/offset support for MSSQL server (via Row_Number)

Modified: DBIx-Class/0.08/branches/prefetch_pager/Makefile.PL
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/Makefile.PL	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/Makefile.PL	2010-01-15 02:16:31 UTC (rev 8321)
@@ -3,12 +3,12 @@
 use warnings;
 use POSIX ();
 
-use 5.006001; # delete this line if you want to send patches for earlier.
+use 5.008001;
 
 # ****** DO NOT ADD OPTIONAL DEPENDENCIES. EVER. --mst ******
 
 name     'DBIx-Class';
-perl_version '5.006001';
+perl_version '5.008001';
 all_from 'lib/DBIx/Class.pm';
 
 
@@ -26,17 +26,14 @@
 requires 'Scalar::Util'             => '0';
 requires 'Storable'                 => '0';
 
-# Perl 5.8.0 doesn't have utf8::is_utf8()
-requires 'Encode'                   => '0' if ($] <= 5.008000);
-
 # Dependencies (keep in alphabetical order)
 requires 'Carp::Clan'               => '6.0';
-requires 'Class::Accessor::Grouped' => '0.09000';
+requires 'Class::Accessor::Grouped' => '0.09002';
 requires 'Class::C3::Componentised' => '1.0005';
 requires 'Class::Inspector'         => '1.24';
 requires 'Data::Page'               => '2.00';
 requires 'DBD::SQLite'              => '1.25';
-requires 'DBI'                      => '1.605';
+requires 'DBI'                      => '1.609';
 requires 'JSON::Any'                => '1.18';
 requires 'MRO::Compat'              => '0.09';
 requires 'Module::Find'             => '0.06';
@@ -48,8 +45,7 @@
 requires 'Data::Dumper::Concise'    => '1.000';
 
 my %replication_requires = (
-  'Moose',                    => '0.87',
-  'MooseX::AttributeHelpers'  => '0.21',
+  'Moose',                    => '0.90',
   'MooseX::Types',            => '0.16',
   'namespace::clean'          => '0.11',
   'Hash::Merge',              => '0.11',

Modified: DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Componentised.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Componentised.pm	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Componentised.pm	2010-01-15 02:16:31 UTC (rev 8321)
@@ -4,10 +4,34 @@
 use strict;
 use warnings;
 
-###
-# Keep this class for backwards compatibility
-###
-
 use base 'Class::C3::Componentised';
+use Carp::Clan qw/^DBIx::Class|^Class::C3::Componentised/;
+use mro 'c3';
 
+# this warns of subtle bugs introduced by UTF8Columns hacky handling of store_column
+sub inject_base {
+  my $class = shift;
+  my $target = shift;
+
+  my @present_components = (@{mro::get_linear_isa ($target)||[]});
+
+  no strict 'refs';
+  for my $comp (reverse @_) {
+    if (
+      $comp->isa ('DBIx::Class::UTF8Columns')
+        and
+      my @broken = grep { $_ ne 'DBIx::Class::Row' and defined ${"${_}::"}{store_column} } (@present_components)
+    ) {
+      carp "Incorrect loading order of $comp by ${target} will affect other components overriding store_column ("
+          . join (', ', @broken)
+          .'). Refer to the documentation of DBIx::Class::UTF8Columns for more info';
+    }
+    else {
+      unshift @present_components, $comp;
+    }
+  }
+
+  $class->next::method($target, @_);
+}
+
 1;
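
For illustration, the kind of store_column override the new inject_base() warning is designed to protect might look like the sketch below (the component name and trimming behaviour are assumptions, not taken from this changeset). Note that an override should return the value it actually stored, since set_column() now uses that return value (see the Row.pm hunk further down).

  # Hypothetical user component overriding store_column
  package My::Component::TrimColumns;

  use strict;
  use warnings;
  use base 'DBIx::Class';

  sub store_column {
    my ($self, $column, $value) = @_;

    # strip surrounding whitespace before the value is stored
    $value =~ s/^\s+|\s+$//g if defined $value;

    # hand back whatever was stored - set_column() relies on this
    return $self->next::method($column, $value);
  }

  1;

Such a component would be pulled in with __PACKAGE__->load_components(qw/+My::Component::TrimColumns Core/); the warning above fires when DBIx::Class::UTF8Columns is loaded in an order that would bypass overrides like this one.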

Modified: DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Manual/Cookbook.pod
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Manual/Cookbook.pod	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Manual/Cookbook.pod	2010-01-15 02:16:31 UTC (rev 8321)
@@ -317,7 +317,7 @@
 are in any way unsure about the use of the attributes above (C< join
 >, C< select >, C< as > and C< group_by >).
 
-=head2 Subqueries (EXPERIMENTAL)
+=head2 Subqueries
 
 You can write subqueries relatively easily in DBIC.
 
@@ -365,10 +365,6 @@
        WHERE artist_id = me.artist_id
       )
 
-=head3 EXPERIMENTAL
-
-Please note that subqueries are considered an experimental feature.
-
 =head2 Predefined searches
 
 You can write your own L<DBIx::Class::ResultSet> class by inheriting from it
@@ -390,9 +386,14 @@
 
   1;
 
-To use your resultset, first tell DBIx::Class to create an instance of it
-for you, in your My::DBIC::Schema::CD class:
+If you're using L<DBIx::Class::Schema/load_namespaces>, simply place the file
+into the C<ResultSet> directory next to your C<Result> directory, and it will
+be automatically loaded.
 
+If however you are still using L<DBIx::Class::Schema/load_classes>, first tell
+DBIx::Class to create an instance of the ResultSet class for you, in your
+My::DBIC::Schema::CD class:
+
   # class definition as normal
   use base 'DBIx::Class::Core';
   __PACKAGE__->table('cd');
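
As a point of reference for the reworked paragraph above, a minimal custom ResultSet class picked up by load_namespaces could look like this sketch (the package name, file location and released_since() helper are illustrative assumptions):

  # lib/MyApp/Schema/ResultSet/CD.pm - found automatically by load_namespaces
  package MyApp::Schema::ResultSet::CD;

  use strict;
  use warnings;
  use base 'DBIx::Class::ResultSet';

  # a predefined search: CDs released in or after a given year
  sub released_since {
    my ($self, $year) = @_;
    return $self->search({ year => { '>=' => $year } });
  }

  1;

It is then available as $schema->resultset('CD')->released_since(2005) without any registration in the Result class.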

Modified: DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Manual/Troubleshooting.pod
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Manual/Troubleshooting.pod	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Manual/Troubleshooting.pod	2010-01-15 02:16:31 UTC (rev 8321)
@@ -100,29 +100,21 @@
 L<DBIx::Class::Manual::Cookbook/Setting_quoting_for_the_generated_SQL> for
 details.
 
-Note that quoting may lead to problems with C<order_by> clauses, see
-L<... column "foo DESC" does not exist ...> for info on avoiding those.
-
 =head2 column "foo DESC" does not exist ...
 
-This can happen if you've turned on quoting and then done something like
-this:
+This can happen if you are still using the obsolete order hack, and also
+happen to turn on sql-quoting.
 
   $rs->search( {}, { order_by => [ 'name DESC' ] } );
 
-This results in SQL like this:
+Since L<DBIx::Class> >= 0.08100 and L<SQL::Abstract> >= 1.50 the above
+should be written as:
 
-  ... ORDER BY "name DESC"
+  $rs->search( {}, { order_by => { -desc => 'name' } } );
 
-The solution is to pass your order_by items as scalar references to avoid
-quoting:
+For more ways to express order clauses refer to
+L<SQL::Abstract/ORDER_BY_CLAUSES>
 
-  $rs->search( {}, { order_by => [ \'name DESC' ] } );
-
-Now you'll get SQL like this:
-
-  ... ORDER BY name DESC
-
 =head2 Perl Performance Issues on Red Hat Systems
 
 There is a problem with slow performance of certain DBIx::Class

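As a further illustration of the order_by syntax the rewritten entry above recommends (column names are assumed), several ordering criteria can be combined and remain correct when SQL quoting is enabled:

  # mixed descending/ascending ordering, SQL::Abstract >= 1.50 style
  my $ordered_rs = $rs->search(
    {},
    { order_by => [ { -desc => 'year' }, { -asc => 'name' } ] },
  );
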
Modified: DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Relationship/HasOne.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Relationship/HasOne.pm	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Relationship/HasOne.pm	2010-01-15 02:16:31 UTC (rev 8321)
@@ -82,7 +82,7 @@
     my $key = $1;
     my $column_info = $class->column_info($key);
     if ( $column_info->{is_nullable} ) {
-      carp(qq'"might_have/has_one" must not be on columns with is_nullable set to true ($class/$key) ');
+      carp(qq'"might_have/has_one" must not be on columns with is_nullable set to true ($class/$key). This might indicate an incorrect use of those relationship helpers instead of belongs_to.');
     }
   }
 }

Modified: DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/ResultSet.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/ResultSet.pm	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/ResultSet.pm	2010-01-15 02:16:31 UTC (rev 8321)
@@ -974,19 +974,6 @@
 sub _collapse_result {
   my ($self, $as_proto, $row) = @_;
 
-  # if the first row that ever came in is totally empty - this means we got
-  # hit by a smooth^Wempty left-joined resultset. Just noop in that case
-  # instead of producing a {}
-  #
-  my $has_def;
-  for (@$row) {
-    if (defined $_) {
-      $has_def++;
-      last;
-    }
-  }
-  return undef unless $has_def;
-
   my @copy = @$row;
 
   # 'foo'         => [ undef, 'foo' ]
@@ -1247,11 +1234,6 @@
   $tmp_attrs->{select} = $rsrc->storage->_count_select ($rsrc, $tmp_attrs);
   $tmp_attrs->{as} = 'count';
 
-  # read the comment on top of the actual function to see what this does
-  $tmp_attrs->{from} = $self->result_source->schema->storage->_straight_join_to_node (
-    $tmp_attrs->{from}, $tmp_attrs->{alias}
-  );
-
   my $tmp_rs = $rsrc->resultset_class->new($rsrc, $tmp_attrs)->get_column ('count');
 
   return $tmp_rs;
@@ -1279,11 +1261,6 @@
 
   $sub_attrs->{select} = $rsrc->storage->_subq_count_select ($rsrc, $sub_attrs);
 
-  # read the comment on top of the actual function to see what this does
-  $sub_attrs->{from} = $self->result_source->schema->storage->_straight_join_to_node (
-    $sub_attrs->{from}, $sub_attrs->{alias}
-  );
-
   # this is so that the query can be simplified e.g.
   # * non-limiting joins can be pruned
   # * ordering can be thrown away in things like Top limit
@@ -1431,7 +1408,7 @@
   my $cond = $rsrc->schema->storage->_strip_cond_qualifiers ($self->{cond});
 
   my $needs_group_by_subq = $self->_has_resolved_attr (qw/collapse group_by -join/);
-  my $needs_subq = (not defined $cond) || $self->_has_resolved_attr(qw/row offset/);
+  my $needs_subq = $needs_group_by_subq || (not defined $cond) || $self->_has_resolved_attr(qw/row offset/);
 
   if ($needs_group_by_subq or $needs_subq) {
 
@@ -2047,7 +2024,7 @@
   return \%unaliased;
 }
 
-=head2 as_query (EXPERIMENTAL)
+=head2 as_query
 
 =over 4
 
@@ -2061,8 +2038,6 @@
 
 This is generally used as the RHS for a subquery.
 
-B<NOTE>: This feature is still experimental.
-
 =cut
 
 sub as_query {
@@ -2512,18 +2487,28 @@
 
   $self->{related_resultsets} ||= {};
   return $self->{related_resultsets}{$rel} ||= do {
-    my $rel_info = $self->result_source->relationship_info($rel);
+    my $rsrc = $self->result_source;
+    my $rel_info = $rsrc->relationship_info($rel);
 
     $self->throw_exception(
-      "search_related: result source '" . $self->result_source->source_name .
+      "search_related: result source '" . $rsrc->source_name .
         "' has no such relationship $rel")
       unless $rel_info;
 
     my $attrs = $self->_chain_relationship($rel);
 
     my $join_count = $attrs->{seen_join}{$rel};
-    my $alias = ($join_count > 1 ? join('_', $rel, $join_count) : $rel);
 
+    my $alias = $self->result_source->storage
+        ->relname_to_table_alias($rel, $join_count);
+
+    # since this is search_related, and we already slid the select window inwards
+    # (the select/as attrs were deleted in the beginning), we need to flip all 
+    # left joins to inner, so we get the expected results
+    # read the comment on top of the actual function to see what this does
+    $attrs->{from} = $rsrc->schema->storage->_straight_join_to_node ($attrs->{from}, $alias);
+
+
     #XXX - temp fix for result_class bug. There likely is a more elegant fix -groditi
     delete @{$attrs}{qw(result_class alias)};
 
@@ -2536,7 +2521,7 @@
       }
     }
 
-    my $rel_source = $self->result_source->related_source($rel);
+    my $rel_source = $rsrc->related_source($rel);
 
     my $new = do {
 
@@ -2638,10 +2623,19 @@
       ||
     $self->_has_resolved_attr (@force_subq_attrs)
   ) {
+    # Nuke the prefetch (if any) before the new $rs attrs
+    # are resolved (prefetch is useless - we are wrapping
+    # a subquery anyway).
+    my $rs_copy = $self->search;
+    $rs_copy->{attrs}{join} = $self->_merge_attr (
+      $rs_copy->{attrs}{join},
+      delete $rs_copy->{attrs}{prefetch},
+    );
+
     $from = [{
       -source_handle => $source->handle,
       -alias => $attrs->{alias},
-      $attrs->{alias} => $self->as_query,
+      $attrs->{alias} => $rs_copy->as_query,
     }];
     delete @{$attrs}{@force_subq_attrs, 'where'};
     $seen->{-relation_chain_depth} = 0;
@@ -2678,7 +2672,6 @@
   # the join in question so we could tell it *is* the search_related)
   my $already_joined;
 
-
   # we consider the last one thus reverse
   for my $j (reverse @requested_joins) {
     if ($rel eq $j->[0]{-join_path}[-1]) {
@@ -2687,7 +2680,6 @@
       last;
     }
   }
-
 # alternative way to scan the entire chain - not backwards compatible
 #  for my $j (reverse @$from) {
 #    next unless ref $j eq 'ARRAY';

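A sketch of the call pattern the search_related changes above are aimed at (schema and relationship names are assumptions): when search_related is invoked on a resultset that carries a prefetch, the prefetch is now folded into a plain join before any wrapping subselect is built, and joins on the path to the target relation are flipped to inner joins via _straight_join_to_node.

  # assumed schema: Artist has_many cds, CD has_many tracks
  my $artists = $schema->resultset('Artist')->search(
    {},
    { prefetch => 'cds' },
  );

  # previously this could emit an overcomplicated query dragging the
  # now-useless prefetch along; see the Changes entry for this fix
  my $tracks = $artists->search_related('cds')->search_related('tracks');
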
Modified: DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/ResultSetColumn.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/ResultSetColumn.pm	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/ResultSetColumn.pm	2010-01-15 02:16:31 UTC (rev 8321)
@@ -98,7 +98,7 @@
   return $new;
 }
 
-=head2 as_query (EXPERIMENTAL)
+=head2 as_query
 
 =over 4
 
@@ -112,8 +112,6 @@
 
 This is generally used as the RHS for a subquery.
 
-B<NOTE>: This feature is still experimental.
-
 =cut
 
 sub as_query { return shift->_resultset->as_query(@_) }

Modified: DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/ResultSource.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/ResultSource.pm	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/ResultSource.pm	2010-01-15 02:16:31 UTC (rev 8321)
@@ -1228,7 +1228,9 @@
       $force_left ||= lc($rel_info->{attrs}{join_type}||'') eq 'left';
 
       # the actual seen value will be incremented by the recursion
-      my $as = ($seen->{$rel} ? join ('_', $rel, $seen->{$rel} + 1) : $rel);
+      my $as = $self->storage->relname_to_table_alias(
+        $rel, ($seen->{$rel} && $seen->{$rel} + 1)
+      );
 
       push @ret, (
         $self->_resolve_join($rel, $alias, $seen, [@$jpath], $force_left),
@@ -1245,7 +1247,9 @@
   }
   else {
     my $count = ++$seen->{$join};
-    my $as = ($count > 1 ? "${join}_${count}" : $join);
+    my $as = $self->storage->relname_to_table_alias(
+      $join, ($count > 1 && $count)
+    );
 
     my $rel_info = $self->relationship_info($join)
       or $self->throw_exception("No such relationship ${join}");
@@ -1579,7 +1583,7 @@
 =cut
 
 sub handle {
-    return new DBIx::Class::ResultSourceHandle({
+    return DBIx::Class::ResultSourceHandle->new({
         schema         => $_[0]->schema,
         source_moniker => $_[0]->source_name
     });

Modified: DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Row.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Row.pm	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Row.pm	2010-01-15 02:16:31 UTC (rev 8321)
@@ -802,7 +802,7 @@
   $self->{_orig_ident} ||= $self->ident_condition;
   my $old_value = $self->get_column($column);
 
-  $self->store_column($column, $new_value);
+  $new_value = $self->store_column($column, $new_value);
 
   my $dirty;
   if (!$self->in_storage) { # no point tracking dirtyness on uninserted data

Modified: DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/SQLAHacks.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/SQLAHacks.pm	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/SQLAHacks.pm	2010-01-15 02:16:31 UTC (rev 8321)
@@ -51,20 +51,28 @@
 sub _RowNumberOver {
   my ($self, $sql, $order, $rows, $offset ) = @_;
 
+  # get the select to make the final amount of columns equal the original one
+  my ($select) = $sql =~ /^ \s* SELECT \s+ (.+?) \s+ FROM/ix
+    or croak "Unrecognizable SELECT: $sql";
+
   # get the order_by only (or make up an order if none exists)
   my $order_by = $self->_order_by(
     (delete $order->{order_by}) || $self->_rno_default_order
   );
 
-  # whatever is left
+  # whatever is left of the order_by
   my $group_having = $self->_order_by($order);
 
-  $sql = sprintf (<<'EOS', $order_by, $sql, $group_having, $offset + 1, $offset + $rows, );
+  my $qalias = $self->_quote ($self->{_dbic_rs_attrs}{alias});
 
-SELECT * FROM (
-  SELECT orig_query.*, ROW_NUMBER() OVER(%s ) AS rno__row__index FROM (%s%s) orig_query
-) rno_subq WHERE rno__row__index BETWEEN %d AND %d
+  $sql = sprintf (<<EOS, $offset + 1, $offset + $rows, );
 
+SELECT $select FROM (
+  SELECT $qalias.*, ROW_NUMBER() OVER($order_by ) AS rno__row__index FROM (
+    ${sql}${group_having}
+  ) $qalias
+) $qalias WHERE rno__row__index BETWEEN %d AND %d
+
 EOS
 
   $sql =~ s/\s*\n\s*/ /g;   # easier to read in the debugger
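
A paging call of the following shape (result source and column name assumed) is what exercises the reworked _RowNumberOver on RNO-capable storages such as MSSQL 2005+; the emitted SQL now re-selects only the originally requested columns instead of *:

  my $page = $schema->resultset('CD')->search(
    {},
    { order_by => { -asc => 'title' }, rows => 10, page => 3 },
  );
  my @cds = $page->all;   # rendered via ROW_NUMBER() OVER (ORDER BY title ...)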

Modified: DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Schema.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Schema.pm	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Schema.pm	2010-01-15 02:16:31 UTC (rev 8321)
@@ -5,7 +5,7 @@
 
 use DBIx::Class::Exception;
 use Carp::Clan qw/^DBIx::Class/;
-use Scalar::Util qw/weaken/;
+use Scalar::Util ();
 use File::Spec;
 use Sub::Name ();
 use Module::Find();
@@ -1083,7 +1083,7 @@
   $self->storage->deployment_statements($self, @_);
 }
 
-=head2 create_ddl_dir (EXPERIMENTAL)
+=head2 create_ddl_dir
 
 =over 4
 
@@ -1178,9 +1178,18 @@
 
 =head2 dclone
 
-Recommeneded way of dcloning objects. This is needed to properly maintain
-references to the schema object (which itself is B<not> cloned.)
+=over 4
 
+=item Arguments: $object
+
+=item Return Value: dcloned $object
+
+=back
+
+Recommended way of dcloning L<DBIx::Class::Row> and L<DBIx::Class::ResultSet>
+objects so their references to the schema object
+(which itself is B<not> cloned) are properly maintained.
+
 =cut
 
 sub dclone {
@@ -1260,6 +1269,24 @@
   $self->_register_source(@_);
 }
 
+=head2 unregister_source
+
+=over 4
+
+=item Arguments: $moniker
+
+=back
+
+Removes the L<DBIx::Class::ResultSource> from the schema for the given moniker.
+
+=cut
+
+sub unregister_source {
+  my $self = shift;
+
+  $self->_unregister_source(@_);
+}
+
 =head2 register_extra_source
 
 =over 4
@@ -1286,7 +1313,7 @@
 
   $source = $source->new({ %$source, source_name => $moniker });
   $source->schema($self);
-  weaken($source->{schema}) if ref($self);
+  Scalar::Util::weaken($source->{schema}) if ref($self);
 
   my $rs_class = $source->result_class;
 

Modified: DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI/ADO/Microsoft_SQL_Server.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI/ADO/Microsoft_SQL_Server.pm	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI/ADO/Microsoft_SQL_Server.pm	2010-01-15 02:16:31 UTC (rev 8321)
@@ -121,7 +121,7 @@
 =head3 truncation bug
 
 There is a bug with MSSQL ADO providers where data gets truncated based on the
-size on the bind sizes in the first prepare:
+sizes of the binds set in the first prepare call:
 
 L<https://rt.cpan.org/Ticket/Display.html?id=52048>
 

Modified: DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI/AmbiguousGlob.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI/AmbiguousGlob.pm	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI/AmbiguousGlob.pm	2010-01-15 02:16:31 UTC (rev 8321)
@@ -8,7 +8,7 @@
 
 =head1 NAME
 
-DBIx::Class::Storage::DBI::AmbiguousGlob - Storage component for RDBMS supporting multicolumn in clauses
+DBIx::Class::Storage::DBI::AmbiguousGlob - Storage component for RDBMS choking on count(*)
 
 =head1 DESCRIPTION
 
@@ -27,6 +27,9 @@
 
 sub _subq_count_select {
   my ($self, $source, $rs_attrs) = @_;
+
+  return $rs_attrs->{group_by} if $rs_attrs->{group_by};
+
   my @pcols = map { join '.', $rs_attrs->{alias}, $_ } ($source->primary_columns);
   return @pcols ? \@pcols : [ 1 ];
 }

Modified: DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI/MSSQL.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI/MSSQL.pm	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI/MSSQL.pm	2010-01-15 02:16:31 UTC (rev 8321)
@@ -179,8 +179,9 @@
 sub last_insert_id { shift->_identity }
 
 #
-# MSSQL is retarded wrt ordered subselects. One needs to add a TOP 100%
-# to *all* subqueries, do it here.
+# MSSQL is retarded wrt ordered subselects. One needs to add a TOP
+# to *all* subqueries, but one also can't use TOP 100 PERCENT
+# http://sqladvice.com/forums/permalink/18496/22931/ShowThread.aspx#22931
 #
 sub _select_args_to_query {
   my $self = shift;
@@ -190,7 +191,11 @@
   # see if this is an ordered subquery
   my $attrs = $_[3];
   if ( scalar $self->sql_maker->_order_by_chunks ($attrs->{order_by}) ) {
-    $sql =~ s/^ \s* SELECT \s/SELECT TOP 100 PERCENT /xi;
+    $self->throw_exception(
+      'An ordered subselect encountered - this is not safe! Please see "Ordered Subselects" in DBIx::Class::Storage::DBI::MSSQL
+    ') unless $attrs->{unsafe_subselect_ok};
+    my $max = 2 ** 32;
+    $sql =~ s/^ \s* SELECT \s/SELECT TOP $max /xi;
   }
 
   return wantarray
@@ -303,6 +308,54 @@
 C<db_ddladmin> privilege, which is normally not included in the standard
 write-permissions.
 
+=head2 Ordered Subselects
+
+If you attempted the following query (among many others) in Microsoft SQL
+Server
+
+ $rs->search ({}, {
+  prefetch => 'relation',
+  rows => 2,
+  offset => 3,
+ });
+
+You may be surprised to receive an exception. The reason for this is a quirk
+in the MSSQL engine itself, one that sadly doesn't have a sensible workaround due
+to the way DBIC is built. DBIC can do truly wonderful things with the aid of
+subselects, and does so automatically when necessary. The list of situations
+when a subselect is necessary is long and still changes often, so it can not
+be exhaustively enumerated here. The general rule of thumb is a joined
+L<has_many|DBIx::Class::Relationship/has_many> relationship with limit/group
+applied to the left part of the join.
+
+In its "pursuit of standards" Microsoft SQL Server goes to great lengths to
+forbid the use of ordered subselects. This breaks a very useful group of
+searches like "Give me things number 4 to 6 (ordered by name), and prefetch
+all their relations, no matter how many". While there is a hack which fools
+the syntax checker, the optimizer may B<still elect to break the subselect>.
+Testing has determined that while such breakage does occur (the test suite
+contains an explicit test which demonstrates the problem), it is relatively
+rare. The benefits of ordered subselects are, on the other hand, too great to be
+outright disabled for MSSQL.
+
+Thus the compromise between usability and perfection is the MSSQL-specific
+L<resultset attribute|DBIx::Class::ResultSet/ATTRIBUTES> C<unsafe_subselect_ok>.
+It is deliberately not possible to set this on the Storage level, as the user
+should inspect (and preferably regression-test) the return of every such
+ResultSet individually. The example above would work if written like:
+
+ $rs->search ({}, {
+  unsafe_subselect_ok => 1,
+  prefetch => 'relation',
+  rows => 2,
+  offset => 3,
+ });
+
+If it is possible to rewrite the search() in a way that will avoid the need
+for this flag - you are urged to do so. If DBIC internals insist that an
+ordered subselect is necessary for an operation, and you believe there is a
+different/better way to get the same result - please file a bug report.
+
 =head1 AUTHOR
 
 See L<DBIx::Class/CONTRIBUTORS>.

Modified: DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI/Oracle/Generic.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI/Oracle/Generic.pm	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI/Oracle/Generic.pm	2010-01-15 02:16:31 UTC (rev 8321)
@@ -17,7 +17,9 @@
 
 =head1 DESCRIPTION
 
-This class implements autoincrements for Oracle.
+This class implements base Oracle support. The subclass
+L<DBIx::Class::Storage::DBI::Oracle::WhereJoins> is for C<(+)> joins in Oracle
+versions before 9.
 
 =head1 METHODS
 
@@ -274,6 +276,46 @@
     $self->_get_dbh->do("ROLLBACK TO SAVEPOINT $name")
 }
 
+=head2 relname_to_table_alias
+
+L<DBIx::Class> uses L<DBIx::Class::Relationship> names as table aliases in
+queries.
+
+Unfortunately, Oracle doesn't support identifiers over 30 chars in length, so
+the L<DBIx::Class::Relationship> name is shortened and appended with half of an
+MD5 hash.
+
+See L<DBIx::Class::Storage/"relname_to_table_alias">.
+
+=cut
+
+sub relname_to_table_alias {
+  my $self = shift;
+  my ($relname, $join_count) = @_;
+
+  my $alias = $self->next::method(@_);
+
+  return $alias if length($alias) <= 30;
+
+  # get a base64 md5 of the alias with join_count
+  require Digest::MD5;
+  my $ctx = Digest::MD5->new;
+  $ctx->add($alias);
+  my $md5 = $ctx->b64digest;
+
+  # remove base64 padding just in case
+  $md5 =~ s/=*\z//;
+
+  # truncate and prepend to truncated relname without vowels
+  (my $devoweled = $relname) =~ s/[aeiou]//g;
+  my $shortened = substr($devoweled, 0, 18);
+
+  my $new_alias =
+    $shortened . '_' . substr($md5, 0, 30 - length($shortened) - 1);
+
+  return $new_alias;
+}
+
 =head1 AUTHOR
 
 See L<DBIx::Class/CONTRIBUTORS>.

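A standalone sketch of the shortening scheme introduced above, using a made-up
relationship name that (together with its join count) exceeds Oracle's 30
character identifier limit:

  use strict;
  use warnings;
  use Digest::MD5 ();

  my $relname    = 'cds_very_very_very_long_relationship_name';  # hypothetical rel
  my $join_count = 2;
  my $alias      = "${relname}_${join_count}";                   # the default alias

  # hash the full alias, then strip any base64 padding
  my $md5 = Digest::MD5->new->add($alias)->b64digest;
  $md5 =~ s/=*\z//;

  # devowel the relationship name and truncate it
  (my $devoweled = $relname) =~ s/[aeiou]//g;
  my $shortened = substr($devoweled, 0, 18);

  my $new_alias = $shortened . '_' . substr($md5, 0, 30 - length($shortened) - 1);

  print length($new_alias), ": $new_alias\n";  # always 30 characters or fewer
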
Modified: DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI/Replicated/Introduction.pod
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI/Replicated/Introduction.pod	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI/Replicated/Introduction.pod	2010-01-15 02:16:31 UTC (rev 8321)
@@ -89,26 +89,25 @@
 you use (or upgrade to) the latest L<Catalyst::Model::DBIC::Schema>, which makes
 this job even easier.
 
-First, you need to connect your L<DBIx::Class::Schema>.  Let's assume you have
-such a schema called, "MyApp::Schema".
+First, you need to get a C<$schema> object and set the storage_type:
 
-	use MyApp::Schema;
-	my $schema = MyApp::Schema->connect($dsn, $user, $pass);
+  my $schema = MyApp::Schema->clone;
+  $schema->storage_type([
+    '::DBI::Replicated' => {
+      balancer_type => '::Random',
+      balancer_args => {
+        auto_validate_every => 5,
+        master_read_weight => 1
+      },
+      pool_args => {
+        maximum_lag =>2,
+      },
+    }
+  ]);
 
-Next, you need to set the storage_type.
+Then, you need to connect your L<DBIx::Class::Schema>.
 
-	$schema->storage_type(
-		::DBI::Replicated' => {
-			balancer_type => '::Random',
-            balancer_args => {
-				auto_validate_every => 5,
-				master_read_weight => 1
-			},
-			pool_args => {
-				maximum_lag =>2,
-			},
-		}
-	);
+  $schema->connection($dsn, $user, $pass);
 
 Let's break down the settings.  The method L<DBIx::Class::Schema/storage_type>
 takes one mandatory parameter, a scalar value, and an option second value which
@@ -160,11 +159,11 @@
 After you've configured the replicated storage, you need to add the connection
 information for the replicants:
 
-	$schema->storage->connect_replicants(
-		[$dsn1, $user, $pass, \%opts],
- 		[$dsn2, $user, $pass, \%opts],
- 		[$dsn3, $user, $pass, \%opts],
- 	);
+  $schema->storage->connect_replicants(
+    [$dsn1, $user, $pass, \%opts],
+    [$dsn2, $user, $pass, \%opts],
+    [$dsn3, $user, $pass, \%opts],
+  );
 
 These replicants should be configured as slaves to the master using the
 instructions for MySQL native replication, or if you are just learning, you

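Putting the reordered steps above together, a minimal end-to-end sketch (the
schema class, DSNs and credentials are placeholders):

  use strict;
  use warnings;
  use MyApp::Schema;  # hypothetical schema class

  my ($dsn, $user, $pass) = ('dbi:mysql:database=myapp;host=master', 'user', 'pass');
  my @replicant_dsns = map { "dbi:mysql:database=myapp;host=slave$_" } 1 .. 3;

  my $schema = MyApp::Schema->clone;

  # the storage type must be set before the connection is made
  $schema->storage_type([
    '::DBI::Replicated' => {
      balancer_type => '::Random',
      balancer_args => { auto_validate_every => 5, master_read_weight => 1 },
      pool_args     => { maximum_lag => 2 },
    }
  ]);

  $schema->connection($dsn, $user, $pass);

  $schema->storage->connect_replicants(
    map { [ $_, $user, $pass, {} ] } @replicant_dsns
  );
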
Modified: DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI/Replicated/Pool.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI/Replicated/Pool.pm	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI/Replicated/Pool.pm	2010-01-15 02:16:31 UTC (rev 8321)
@@ -1,7 +1,6 @@
 package DBIx::Class::Storage::DBI::Replicated::Pool;
 
 use Moose;
-use MooseX::AttributeHelpers;
 use DBIx::Class::Storage::DBI::Replicated::Replicant;
 use List::Util 'sum';
 use Scalar::Util 'reftype';
@@ -125,26 +124,31 @@
 
 has 'replicants' => (
   is=>'rw',
-  metaclass => 'Collection::Hash',
+  traits => ['Hash'],
   isa=>HashRef['Object'],
   default=>sub {{}},
-  provides  => {
-    'set' => 'set_replicant',
-    'get' => 'get_replicant',
-    'empty' => 'has_replicants',
-    'count' => 'num_replicants',
-    'delete' => 'delete_replicant',
-    'values' => 'all_replicant_storages',
+  handles  => {
+    'set_replicant' => 'set',
+    'get_replicant' => 'get',
+    'has_replicants' => 'is_empty',
+    'num_replicants' => 'count',
+    'delete_replicant' => 'delete',
+    'all_replicant_storages' => 'values',
   },
 );
 
+around has_replicants => sub {
+    my ($orig, $self) = @_;
+    return !$self->$orig;
+};
+
 has next_unknown_replicant_id => (
   is => 'rw',
-  metaclass => 'Counter',
+  traits => ['Counter'],
   isa => Int,
   default => 1,
-  provides => {
-    inc => 'inc_unknown_replicant_id'
+  handles => {
+    'inc_unknown_replicant_id' => 'inc',
   },
 );
 

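The attribute changes above follow the stock MooseX::AttributeHelpers to Moose
native traits migration (metaclass/provides become traits/handles, and the old
'empty' provider maps onto the inverted 'is_empty'). A self-contained sketch of
the same pattern, with illustrative class and method names:

  package My::Pool;
  use Moose;

  has 'replicants' => (
    is      => 'rw',
    traits  => ['Hash'],            # replaces metaclass => 'Collection::Hash'
    isa     => 'HashRef[Object]',
    default => sub { {} },
    handles => {
      set_replicant  => 'set',
      get_replicant  => 'get',
      has_replicants => 'is_empty', # inverted meaning, hence the wrapper below
      num_replicants => 'count',
    },
  );

  # 'is_empty' is the logical opposite of the old 'empty' provider, so flip it
  around has_replicants => sub {
    my ($orig, $self) = @_;
    return !$self->$orig;
  };

  __PACKAGE__->meta->make_immutable;
  1;
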
Modified: DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI/Replicated.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI/Replicated.pm	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI/Replicated.pm	2010-01-15 02:16:31 UTC (rev 8321)
@@ -7,8 +7,7 @@
   ## use, so we explicitly test for these.
 
   my %replication_required = (
-    'Moose' => '0.87',
-    'MooseX::AttributeHelpers' => '0.21',
+    'Moose' => '0.90',
     'MooseX::Types' => '0.16',
     'namespace::clean' => '0.11',
     'Hash::Merge' => '0.11'
@@ -51,7 +50,9 @@
 also define your arguments, such as which balancer you want and any arguments
 that the Pool object should get.
 
+  my $schema = Schema::Class->clone;
   $schema->storage_type( ['::DBI::Replicated', {balancer=>'::Random'}] );
+  $schema->connection(...);
 
 Next, you need to add in the Replicants.  Basically this is an array of 
 arrayrefs, where each arrayref is database connect information.  Think of these
@@ -119,8 +120,7 @@
 
 Replicated Storage has additional requirements not currently part of L<DBIx::Class>
 
-  Moose => '0.87',
-  MooseX::AttributeHelpers => '0.20',
+  Moose => '0.90',
   MooseX::Types => '0.16',
   namespace::clean => '0.11',
   Hash::Merge => '0.11'

Modified: DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI/Sybase/Microsoft_SQL_Server.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI/Sybase/Microsoft_SQL_Server.pm	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI/Sybase/Microsoft_SQL_Server.pm	2010-01-15 02:16:31 UTC (rev 8321)
@@ -13,7 +13,11 @@
   my $self = shift;
   my $dbh  = $self->_get_dbh;
 
+  return if ref $self ne __PACKAGE__;
+
   if (not $self->_typeless_placeholders_supported) {
+    require
+      DBIx::Class::Storage::DBI::Sybase::Microsoft_SQL_Server::NoBindVars;
     bless $self,
       'DBIx::Class::Storage::DBI::Sybase::Microsoft_SQL_Server::NoBindVars';
     $self->_rebless;

Modified: DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI/mysql.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI/mysql.pm	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI/mysql.pm	2010-01-15 02:16:31 UTC (rev 8321)
@@ -106,6 +106,19 @@
 session variables such that MySQL behaves more predictably as far as the
 SQL standard is concerned.
 
+=head1 STORAGE OPTIONS
+
+=head2 set_strict_mode
+
+Enables session-wide strict options upon connecting. Equivalent to:
+
+  ->connect ( ... , {
+    on_connect_do => [
+      q|SET SQL_MODE = CONCAT('ANSI,TRADITIONAL,ONLY_FULL_GROUP_BY,', @@sql_mode)|,
+      q|SET SQL_AUTO_IS_NULL = 0|,
+    ]
+  });
+
 =head1 AUTHORS
 
 See L<DBIx::Class/CONTRIBUTORS>

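A short usage sketch of the set_strict_mode connect-time call documented above
(DSN and credentials are placeholders; any DBIx::Class::Schema subclass will
do, here a hypothetical MyApp::Schema):

  use strict;
  use warnings;
  use MyApp::Schema;  # hypothetical schema class

  my $schema = MyApp::Schema->connect(
    'dbi:mysql:database=myapp', 'user', 'pass',
    { on_connect_call => 'set_strict_mode' },
  );
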
Modified: DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI.pm	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/Storage/DBI.pm	2010-01-15 02:16:31 UTC (rev 8321)
@@ -451,14 +451,51 @@
 =cut
 
 sub connect_info {
-  my ($self, $info_arg) = @_;
+  my ($self, $info) = @_;
 
-  return $self->_connect_info if !$info_arg;
+  return $self->_connect_info if !$info;
 
+  $self->_connect_info($info); # copy for _connect_info
+
+  $info = $self->_normalize_connect_info($info)
+    if ref $info eq 'ARRAY';
+
+  for my $storage_opt (keys %{ $info->{storage_options} }) {
+    my $value = $info->{storage_options}{$storage_opt};
+
+    $self->$storage_opt($value);
+  }
+
+  # Kill sql_maker/_sql_maker_opts, so we get a fresh one with only
+  #  the new set of options
+  $self->_sql_maker(undef);
+  $self->_sql_maker_opts({});
+
+  for my $sql_maker_opt (keys %{ $info->{sql_maker_options} }) {
+    my $value = $info->{sql_maker_options}{$sql_maker_opt};
+
+    $self->_sql_maker_opts->{$sql_maker_opt} = $value;
+  }
+
+  my %attrs = (
+    %{ $self->_default_dbi_connect_attributes || {} },
+    %{ $info->{attributes} || {} },
+  );
+
+  my @args = @{ $info->{arguments} };
+
+  $self->_dbi_connect_info([@args,
+    %attrs && !(ref $args[0] eq 'CODE') ? \%attrs : ()]);
+
+  return $self->_connect_info;
+}
+
+sub _normalize_connect_info {
+  my ($self, $info_arg) = @_;
+  my %info;
+ 
   my @args = @$info_arg;  # take a shallow copy for further mutilation
-  $self->_connect_info([@args]); # copy for _connect_info
 
-
   # combine/pre-parse arguments depending on invocation style
 
   my %attrs;
@@ -494,36 +531,23 @@
     @args = @args[0,1,2];
   }
 
-  # Kill sql_maker/_sql_maker_opts, so we get a fresh one with only
-  #  the new set of options
-  $self->_sql_maker(undef);
-  $self->_sql_maker_opts({});
+  $info{arguments} = \@args; 
 
-  if(keys %attrs) {
-    for my $storage_opt (@storage_options, 'cursor_class') {    # @storage_options is declared at the top of the module
-      if(my $value = delete $attrs{$storage_opt}) {
-        $self->$storage_opt($value);
-      }
-    }
-    for my $sql_maker_opt (qw/limit_dialect quote_char name_sep/) {
-      if(my $opt_val = delete $attrs{$sql_maker_opt}) {
-        $self->_sql_maker_opts->{$sql_maker_opt} = $opt_val;
-      }
-    }
-  }
+  my @storage_opts = grep exists $attrs{$_},
+    @storage_options, 'cursor_class';
 
-  if (ref $args[0] eq 'CODE') {
-    # _connect() never looks past $args[0] in this case
-    %attrs = ()
-  } else {
-    %attrs = (
-      %{ $self->_default_dbi_connect_attributes || {} },
-      %attrs,
-    );
-  }
+  @{ $info{storage_options} }{@storage_opts} =
+    delete @attrs{@storage_opts} if @storage_opts;
 
-  $self->_dbi_connect_info([@args, keys %attrs ? \%attrs : ()]);
-  $self->_connect_info;
+  my @sql_maker_opts = grep exists $attrs{$_},
+    qw/limit_dialect quote_char name_sep/;
+
+  @{ $info{sql_maker_options} }{@sql_maker_opts} =
+    delete @attrs{@sql_maker_opts} if @sql_maker_opts;
+
+  $info{attributes} = \%attrs if %attrs;
+
+  return \%info;
 }
 
 sub _default_dbi_connect_attributes {
@@ -1360,7 +1384,6 @@
   return $updated_cols;
 }
 
-## Still not quite perfect, and EXPERIMENTAL
 ## Currently it is assumed that all values passed will be "normal", i.e. not
 ## scalar refs, or at least, all the same type as the first set, the statement is
 ## only prepped once.
@@ -1768,11 +1791,24 @@
 
   my @limit;
 
-  # see if we need to tear the prefetch apart (either limited has_many or grouped prefetch)
-  # otherwise delegate the limiting to the storage, unless software limit was requested
+  # see if we need to tear the prefetch apart, otherwise delegate the limiting
+  # to the storage, unless a software limit was requested
   if (
+    #limited has_many
     ( $attrs->{rows} && keys %{$attrs->{collapse}} )
        ||
+    # limited prefetch with RNO subqueries
+    (
+      $attrs->{rows}
+        &&
+      $sql_maker->limit_dialect eq 'RowNumberOver'
+        &&
+      $attrs->{_prefetch_select}
+        &&
+      @{$attrs->{_prefetch_select}}
+    )
+      ||
+    # grouped prefetch
     ( $attrs->{group_by}
         &&
       @{$attrs->{group_by}}
@@ -1782,7 +1818,6 @@
       @{$attrs->{_prefetch_select}}
     )
   ) {
-
     ($ident, $select, $where, $attrs)
       = $self->_adjust_select_args_for_complex_prefetch ($ident, $select, $where, $attrs);
   }
@@ -2136,7 +2171,7 @@
 }
 
 
-=head2 create_ddl_dir (EXPERIMENTAL)
+=head2 create_ddl_dir
 
 =over 4
 
@@ -2188,11 +2223,9 @@
  { ignore_constraint_names => 0, # ... other options }
 
 
-Note that this feature is currently EXPERIMENTAL and may not work correctly
-across all databases, or fully handle complex relationships.
+WARNING: You are strongly advised to check all SQL files created before
+applying them.
 
-WARNING: Please check all SQL files created, before applying them.
-
 =cut
 
 sub create_ddl_dir {
@@ -2373,10 +2406,19 @@
     data => $schema,
   );
 
-  my $ret = $tr->translate
-    or $self->throw_exception( 'Unable to produce deployment statements: ' . $tr->error);
+  my @ret;
+  my $wa = wantarray;
+  if ($wa) {
+    @ret = $tr->translate;
+  }
+  else {
+    $ret[0] = $tr->translate;
+  }
 
-  return $ret;
+  $self->throw_exception( 'Unable to produce deployment statements: ' . $tr->error)
+    unless (@ret && defined $ret[0]);
+
+  return $wa ? @ret : $ret[0];
 }
 
 sub deploy {
@@ -2501,6 +2543,34 @@
   sub _sqlt_minimum_version { $minimum_sqlt_version };
 }
 
+=head2 relname_to_table_alias
+
+=over 4
+
+=item Arguments: $relname, $join_count
+
+=back
+
+L<DBIx::Class> uses L<DBIx::Class::Relationship> names as table aliases in
+queries.
+
+This hook is to allow specific L<DBIx::Class::Storage> drivers to change the
+way these aliases are named.
+
+The default behavior is C<< "${relname}_${join_count}" >> if
+C<< $join_count > 1 >>, otherwise C<$relname>.
+
+=cut
+
+sub relname_to_table_alias {
+  my ($self, $relname, $join_count) = @_;
+
+  my $alias = ($join_count && $join_count > 1 ?
+    join('_', $relname, $join_count) : $relname);
+
+  return $alias;
+}
+
 sub DESTROY {
   my $self = shift;
 

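The refactored connect_info above now funnels every invocation style through
_normalize_connect_info, which hands back a hash with four well-known keys. A
rough sketch of the expected shape for a plain DSN-style call (the values are
illustrative only):

  use strict;
  use warnings;

  # for a call such as:
  #   $schema->connection('dbi:SQLite:db.sqlite', '', '',
  #     { AutoCommit => 1, quote_char => '"' });
  # the normalized structure would look roughly like:
  my $normalized = {
    arguments         => [ 'dbi:SQLite:db.sqlite', '', '' ],
    storage_options   => {},                     # cursor_class, on_connect_do, ...
    sql_maker_options => { quote_char => '"' },  # limit_dialect / quote_char / name_sep
    attributes        => { AutoCommit => 1 },    # whatever is left goes to DBI->connect
  };
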
Modified: DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/UTF8Columns.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/UTF8Columns.pm	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class/UTF8Columns.pm	2010-01-15 02:16:31 UTC (rev 8321)
@@ -2,18 +2,8 @@
 use strict;
 use warnings;
 use base qw/DBIx::Class/;
+use utf8;
 
-BEGIN {
-
-    # Perl 5.8.0 doesn't have utf8::is_utf8()
-    # Yes, 5.8.0 support for Unicode is suboptimal, but things like RHEL3 ship with it.
-    if ($] <= 5.008000) {
-        require Encode;
-    } else {
-        require utf8;
-    }
-}
-
 __PACKAGE__->mk_classdata( '_utf8_columns' );
 
 =head1 NAME
@@ -36,6 +26,15 @@
 
 This module allows you to get columns data that have utf8 (Unicode) flag.
 
+=head2 Warning
+
+Note that this module overloads L<DBIx::Class::Row/store_column> in a way
+that may prevent other components overloading the same method from working
+correctly. This component must be the last one before L<DBIx::Class::Row>
+(which is provided by L<DBIx::Class::Core>). DBIx::Class will detect such
+incorrect component order and issue an appropriate warning, advising which
+components need to be loaded differently.
+
 =head1 SEE ALSO
 
 L<Template::Stash::ForceUTF8>, L<DBIx::Class::UUIDColumns>.
@@ -52,7 +51,7 @@
         foreach my $col (@_) {
             $self->throw_exception("column $col doesn't exist")
                 unless $self->has_column($col);
-        }        
+        }
         return $self->_utf8_columns({ map { $_ => 1 } @_ });
     } else {
         return $self->_utf8_columns;
@@ -69,17 +68,11 @@
     my ( $self, $column ) = @_;
     my $value = $self->next::method($column);
 
-    my $cols = $self->_utf8_columns;
-    if ( $cols and defined $value and $cols->{$column} ) {
+    utf8::decode($value) if (
+      defined $value and $self->_is_utf8_column($column) and ! utf8::is_utf8($value)
+    );
 
-        if ($] <= 5.008000) {
-            Encode::_utf8_on($value) unless Encode::is_utf8($value);
-        } else {
-            utf8::decode($value) unless utf8::is_utf8($value);
-        }
-    }
-
-    $value;
+    return $value;
 }
 
 =head2 get_columns
@@ -90,16 +83,13 @@
     my $self = shift;
     my %data = $self->next::method(@_);
 
-    foreach my $col (grep { defined $data{$_} } keys %{ $self->_utf8_columns || {} }) {
-
-        if ($] <= 5.008000) {
-            Encode::_utf8_on($data{$col}) unless Encode::is_utf8($data{$col});
-        } else {
-            utf8::decode($data{$col}) unless utf8::is_utf8($data{$col});
-        }
+    foreach my $col (keys %data) {
+      utf8::decode($data{$col}) if (
+        exists $data{$col} and defined $data{$col} and $self->_is_utf8_column($col) and ! utf8::is_utf8($data{$col})
+      );
     }
 
-    %data;
+    return %data;
 }
 
 =head2 store_column
@@ -109,32 +99,32 @@
 sub store_column {
     my ( $self, $column, $value ) = @_;
 
-    my $cols = $self->_utf8_columns;
-    if ( $cols and defined $value and $cols->{$column} ) {
+    # the dirtiness comparison must happen on the non-encoded value
+    my $copy;
 
-        if ($] <= 5.008000) {
-            Encode::_utf8_off($value) if Encode::is_utf8($value);
-        } else {
-            utf8::encode($value) if utf8::is_utf8($value);
-        }
+    if ( defined $value and $self->_is_utf8_column($column) and utf8::is_utf8($value) ) {
+      $copy = $value;
+      utf8::encode($value);
     }
 
     $self->next::method( $column, $value );
+
+    return $copy || $value;
 }
 
-=head1 AUTHOR
+# override this if you want to force everything to be encoded/decoded
+sub _is_utf8_column {
+  return (shift->utf8_columns || {})->{+shift};
+}
 
-Daisuke Murase <typester at cpan.org>
+=head1 AUTHORS
 
-=head1 COPYRIGHT
+See L<DBIx::Class/CONTRIBUTORS>.
 
-This program is free software; you can redistribute
-it and/or modify it under the same terms as Perl itself.
+=head1 LICENSE
 
-The full text of the license can be found in the
-LICENSE file included with this module.
+You may distribute this code under the same terms as Perl itself.
 
 =cut
 
 1;
-

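As a quick illustration of the component-order warning documented above, a
minimal result class using the component, with the ordering caveat noted
inline (the class and column names are made up):

  package MyApp::Schema::Result::CD;

  use strict;
  use warnings;
  use base 'DBIx::Class::Core';

  # when combining with other store_column-overloading components, list
  # UTF8Columns last so it ends up closest to DBIx::Class::Row in the MRO
  __PACKAGE__->load_components(qw/ UTF8Columns /);

  __PACKAGE__->table('cd');
  __PACKAGE__->add_columns(qw/ cdid title /);
  __PACKAGE__->set_primary_key('cdid');
  __PACKAGE__->utf8_columns('title');

  1;
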
Modified: DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class.pm	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/lib/DBIx/Class.pm	2010-01-15 02:16:31 UTC (rev 8321)
@@ -4,9 +4,10 @@
 use warnings;
 
 use MRO::Compat;
+use mro 'c3';
 
 use vars qw($VERSION);
-use base qw/Class::C3::Componentised Class::Accessor::Grouped/;
+use base qw/DBIx::Class::Componentised Class::Accessor::Grouped/;
 use DBIx::Class::StartupCheck;
 
 sub mk_classdata {
@@ -116,7 +117,7 @@
   # Output all artists names
   # $artist here is a DBIx::Class::Row, which has accessors
   # for all its columns. Rows are also subclasses of your Result class.
-  foreach $artist (@artists) {
+  foreach $artist (@all_artists) {
     print $artist->name, "\n";
   }
 

Modified: DBIx-Class/0.08/branches/prefetch_pager/lib/SQL/Translator/Parser/DBIx/Class.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/lib/SQL/Translator/Parser/DBIx/Class.pm	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/lib/SQL/Translator/Parser/DBIx/Class.pm	2010-01-15 02:16:31 UTC (rev 8321)
@@ -15,6 +15,7 @@
 use Exporter;
 use SQL::Translator::Utils qw(debug normalize_name);
 use Carp::Clan qw/^SQL::Translator|^DBIx::Class/;
+use Scalar::Util ();
 
 use base qw(Exporter);
 
@@ -30,6 +31,10 @@
 # We're working with DBIx::Class Schemas, not data streams.
 # -------------------------------------------------------------------
 sub parse {
+    # this is a hack to prevent schema leaks due to a retarded SQLT implementation
+    # DO NOT REMOVE (until SQLT2 is out, all of this will be rewritten anyway)
+    Scalar::Util::weaken ($_[1]);
+
     my ($tr, $data)   = @_;
     my $args          = $tr->parser_args;
     my $dbicschema    = $args->{'DBIx::Class::Schema'} ||  $args->{"DBIx::Schema"} ||$data;
@@ -65,19 +70,19 @@
     }
 
 
-    my(@table_monikers, @view_monikers);
+    my(%table_monikers, %view_monikers);
     for my $moniker (@monikers){
       my $source = $dbicschema->source($moniker);
        if ( $source->isa('DBIx::Class::ResultSource::Table') ) {
-         push(@table_monikers, $moniker);
+         $table_monikers{$moniker}++;
       } elsif( $source->isa('DBIx::Class::ResultSource::View') ){
           next if $source->is_virtual;
-         push(@view_monikers, $moniker);
+         $view_monikers{$moniker}++;
       }
     }
 
     my %tables;
-    foreach my $moniker (sort @table_monikers)
+    foreach my $moniker (sort keys %table_monikers)
     {
         my $source = $dbicschema->source($moniker);
         my $table_name = $source->name;
@@ -112,9 +117,11 @@
             my $f = $table->add_field(%colinfo)
               || $dbicschema->throw_exception ($table->error);
         }
-        $table->primary_key($source->primary_columns);
 
         my @primary = $source->primary_columns;
+
+        $table->primary_key(@primary) if @primary;
+
         my %unique_constraints = $source->unique_constraints;
         foreach my $uniq (sort keys %unique_constraints) {
             if (!$source->_compare_relationship_keys($unique_constraints{$uniq}, \@primary)) {
@@ -131,19 +138,23 @@
         my %created_FK_rels;
 
         # global add_fk_index set in parser_args
-        my $add_fk_index = (exists $args->{add_fk_index} && ($args->{add_fk_index} == 0)) ? 0 : 1;
+        my $add_fk_index = (exists $args->{add_fk_index} && ! $args->{add_fk_index}) ? 0 : 1;
 
         foreach my $rel (sort @rels)
         {
+
             my $rel_info = $source->relationship_info($rel);
 
             # Ignore any rel cond that isn't a straight hash
             next unless ref $rel_info->{cond} eq 'HASH';
 
-            my $othertable = $source->related_source($rel);
-            next if $othertable->isa('DBIx::Class::ResultSource::View');  # can't define constraints referencing a view
-            my $rel_table = $othertable->name;
+            my $relsource = $source->related_source($rel);
 
+            # related sources might be excluded via a {sources} filter or might be views
+            next unless exists $table_monikers{$relsource->source_name};
+
+            my $rel_table = $relsource->name;
+
             # FIXME - this isn't the right way to do it, but sqlt does not
             # support quoting properly to be signaled about this
             $rel_table = $$rel_table if ref $rel_table eq 'SCALAR';
@@ -153,7 +164,7 @@
 
             # Force the order of @cond to match the order of ->add_columns
             my $idx;
-            my %other_columns_idx = map {'foreign.'.$_ => ++$idx } $othertable->columns;            
+            my %other_columns_idx = map {'foreign.'.$_ => ++$idx } $relsource->columns;
             my @cond = sort { $other_columns_idx{$a} cmp $other_columns_idx{$b} } keys(%{$rel_info->{cond}}); 
 
             # Get the key information, mapping off the foreign/self markers
@@ -210,11 +221,12 @@
 
                   my $is_deferrable = $rel_info->{attrs}{is_deferrable};
 
-                  # do not consider deferrable constraints and self-references
-                  # for dependency calculations
+                  # calculate dependencies: do not consider deferrable constraints and
+                  # self-references for dependency calculations
                   if (! $is_deferrable and $rel_table ne $table_name) {
                     $tables{$table_name}{foreign_table_deps}{$rel_table}++;
                   }
+
                   $table->add_constraint(
                                     type             => 'foreign_key',
                                     name             => join('_', $table_name, 'fk', @keys),
@@ -274,7 +286,7 @@
     }
 
     my %views;
-    foreach my $moniker (sort @view_monikers)
+    foreach my $moniker (sort keys %view_monikers)
     {
         my $source = $dbicschema->source($moniker);
         my $view_name = $source->name;
@@ -367,7 +379,14 @@
  my $schema = MyApp::Schema->connect;
  my $trans  = SQL::Translator->new (
       parser      => 'SQL::Translator::Parser::DBIx::Class',
-      parser_args => { package => $schema },
+      parser_args => {
+          package => $schema,
+          # to explicitly specify which ResultSources are to be parsed
+          sources => [qw/
+            Artist
+            CD
+          /],
+      },
       producer    => 'SQLite',
      ) or die SQL::Translator->error;
  my $out = $trans->translate() or die $trans->error;

Modified: DBIx-Class/0.08/branches/prefetch_pager/t/51threads.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/t/51threads.t	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/t/51threads.t	2010-01-15 02:16:31 UTC (rev 8321)
@@ -8,7 +8,7 @@
 
 BEGIN {
     plan skip_all => 'Your perl does not support ithreads'
-        if !$Config{useithreads} || $] < 5.008;
+        if !$Config{useithreads};
 }
 
 use threads;

Modified: DBIx-Class/0.08/branches/prefetch_pager/t/51threadtxn.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/t/51threadtxn.t	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/t/51threadtxn.t	2010-01-15 02:16:31 UTC (rev 8321)
@@ -8,7 +8,7 @@
 
 BEGIN {
     plan skip_all => 'Your perl does not support ithreads'
-        if !$Config{useithreads} || $] < 5.008;
+        if !$Config{useithreads};
 }
 
 use threads;

Modified: DBIx-Class/0.08/branches/prefetch_pager/t/52cycle.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/t/52cycle.t	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/t/52cycle.t	2010-01-15 02:16:31 UTC (rev 8321)
@@ -8,16 +8,36 @@
   eval { require Test::Memory::Cycle; require Devel::Cycle };
   if ($@ or Devel::Cycle->VERSION < 1.10) {
     plan skip_all => "leak test needs Test::Memory::Cycle and Devel::Cycle >= 1.10";
-  } else {
-    plan tests => 1;
-  }
+  };
 }
 
 use DBICTest;
 use DBICTest::Schema;
+use Scalar::Util ();
 
 import Test::Memory::Cycle;
 
-my $s = DBICTest::Schema->clone;
+my $weak;
 
-memory_cycle_ok($s, 'No cycles in schema');
+{
+  my $s = $weak->{schema} = DBICTest->init_schema;
+  memory_cycle_ok($s, 'No cycles in schema');
+
+  my $rs = $weak->{resultset} = $s->resultset ('Artist');
+  memory_cycle_ok($rs, 'No cycles in resultset');
+
+  my $rsrc = $weak->{resultsource} = $rs->result_source;
+  memory_cycle_ok($rsrc, 'No cycles in resultsource');
+
+  my $row = $weak->{row} = $rs->first;
+  memory_cycle_ok($row, 'No cycles in row');
+
+  Scalar::Util::weaken ($_) for values %$weak;
+  memory_cycle_ok($weak, 'No cycles in weak object collection');
+}
+
+for (keys %$weak) {
+  ok (! $weak->{$_}, "No $_ leaks");
+}
+
+done_testing;

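The extended test above pairs Test::Memory::Cycle checks with a
weakened-reference leak check. The general idiom, sketched with a throwaway
structure instead of DBIC objects:

  use strict;
  use warnings;
  use Scalar::Util 'weaken';

  my $leak_watch;

  {
    my $obj = $leak_watch->{thing} = { some => 'data' };
    # ... exercise $obj here ...
    weaken($_) for values %$leak_watch;
  }

  # anything still strongly referenced keeps its weakened slot defined
  for (keys %$leak_watch) {
    warn "$_ leaked\n" if defined $leak_watch->{$_};
  }
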
Modified: DBIx-Class/0.08/branches/prefetch_pager/t/60core.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/t/60core.t	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/t/60core.t	2010-01-15 02:16:31 UTC (rev 8321)
@@ -109,10 +109,12 @@
 {
   ok(my $artist = $schema->resultset('Artist')->create({name => 'store_column test'}));
   is($artist->name, 'X store_column test'); # used to be 'X X store...'
-  
+
   # call store_column even though the column doesn't seem to be dirty
-  ok($artist->update({name => 'X store_column test'}));
+  $artist->name($artist->name);
   is($artist->name, 'X X store_column test');
+  ok($artist->is_column_changed('name'), 'changed column marked as dirty');
+
   $artist->delete;
 }
 

Modified: DBIx-Class/0.08/branches/prefetch_pager/t/71mysql.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/t/71mysql.t	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/t/71mysql.t	2010-01-15 02:16:31 UTC (rev 8321)
@@ -225,6 +225,23 @@
       => 'Nothing Found!';
 }
 
+# check for proper grouped counts
+{
+  my $ansi_schema = DBICTest::Schema->connect ($dsn, $user, $pass, { on_connect_call => 'set_strict_mode' });
+  my $rs = $ansi_schema->resultset('CD');
+
+  my $years;
+  $years->{$_->year || scalar keys %$years}++ for $rs->all;  # NULL != NULL, thus the keys eval
+
+  lives_ok ( sub {
+    is (
+      $rs->search ({}, { group_by => 'year'})->count,
+      scalar keys %$years,
+      'grouped count correct',
+    );
+  }, 'Grouped count does not throw');
+}
+
 ZEROINSEARCH: {
   my $cds_per_year = {
     2001 => 2,
@@ -243,11 +260,11 @@
   is ($rs->count, 6, 'CDs created successfully');
 
   $rs = $rs->search ({}, {
-    select => [ {year => 'year'} ], as => ['y'], distinct => 1, order_by => 'year',
+    select => [ \ 'YEAR(year)' ], as => ['y'], distinct => 1,
   });
 
   is_deeply (
-    [ $rs->get_column ('y')->all ],
+    [ sort ($rs->get_column ('y')->all) ],
     [ sort keys %$cds_per_year ],
     'Years group successfully',
   );
@@ -255,7 +272,7 @@
   $rs->create ({ artist => 1, year => '0-1-1', title => 'Jesus Rap' });
 
   is_deeply (
-    [ $rs->get_column ('y')->all ],
+    [ sort $rs->get_column ('y')->all ],
     [ 0, sort keys %$cds_per_year ],
     'Zero-year groups successfully',
   );

Modified: DBIx-Class/0.08/branches/prefetch_pager/t/73oracle.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/t/73oracle.t	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/t/73oracle.t	2010-01-15 02:16:31 UTC (rev 8321)
@@ -40,8 +40,6 @@
   ' as well as following sequences: \'pkid1_seq\', \'pkid2_seq\' and \'nonpkid_seq\''
   unless ($dsn && $user && $pass);
 
-plan tests => 36;
-
 DBICTest::Schema->load_classes('ArtistFQN');
 my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
 
@@ -65,7 +63,7 @@
 $dbh->do("CREATE SEQUENCE nonpkid_seq START WITH 20 MAXVALUE 999999 MINVALUE 0");
 $dbh->do("CREATE TABLE artist (artistid NUMBER(12), name VARCHAR(255), rank NUMBER(38), charfield VARCHAR2(10))");
 $dbh->do("CREATE TABLE sequence_test (pkid1 NUMBER(12), pkid2 NUMBER(12), nonpkid NUMBER(12), name VARCHAR(255))");
-$dbh->do("CREATE TABLE cd (cdid NUMBER(12), artist NUMBER(12), title VARCHAR(255), year VARCHAR(4))");
+$dbh->do("CREATE TABLE cd (cdid NUMBER(12), artist NUMBER(12), title VARCHAR(255), year VARCHAR(4), genreid NUMBER(12), single_track NUMBER(12))");
 $dbh->do("CREATE TABLE track (trackid NUMBER(12), cd NUMBER(12), position NUMBER(12), title VARCHAR(255), last_updated_on DATE, last_updated_at DATE, small_dt DATE)");
 
 $dbh->do("ALTER TABLE artist ADD (CONSTRAINT artist_pk PRIMARY KEY (artistid))");
@@ -124,12 +122,39 @@
 is($new->artistid, 1, "Oracle Auto-PK worked");
 
 my $cd = $schema->resultset('CD')->create({ artist => 1, title => 'EP C', year => '2003' });
-is($new->artistid, 1, "Oracle Auto-PK worked - using scalar ref as table name");
+is($cd->cdid, 1, "Oracle Auto-PK worked - using scalar ref as table name");
 
 # test again with fully-qualified table name
 $new = $schema->resultset('ArtistFQN')->create( { name => 'bar' } );
 is( $new->artistid, 2, "Oracle Auto-PK worked with fully-qualified tablename" );
 
+# test rel names over the 30 char limit
+my $query = $schema->resultset('Artist')->search({
+  artistid => 1 
+}, {
+  prefetch => 'cds_very_very_very_long_relationship_name'
+});
+
+lives_and {
+  is $query->first->cds_very_very_very_long_relationship_name->first->cdid, 1
+} 'query with rel name over 30 chars survived and worked';
+
+# rel name over 30 char limit with user condition
+# This requires walking the SQLA data structure.
+{
+  local $TODO = 'user condition on rel longer than 30 chars';
+
+  $query = $schema->resultset('Artist')->search({
+    'cds_very_very_very_long_relationship_name.title' => 'EP C'
+  }, {
+    prefetch => 'cds_very_very_very_long_relationship_name'
+  });
+
+  lives_and {
+    is $query->first->cds_very_very_very_long_relationship_name->first->cdid, 1
+  } 'query with rel name over 30 chars and user condition survived and worked';
+}
+
 # test join with row count ambiguity
 
 my $track = $schema->resultset('Track')->create({ trackid => 1, cd => 1,
@@ -228,6 +253,8 @@
 	}
 }
 
+done_testing;
+
 # clean up our mess
 END {
     if($schema && ($dbh = $schema->storage->dbh)) {

Modified: DBIx-Class/0.08/branches/prefetch_pager/t/746mssql.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/t/746mssql.t	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/t/746mssql.t	2010-01-15 02:16:31 UTC (rev 8321)
@@ -178,10 +178,10 @@
 
 $schema->storage->dbh_do (sub {
     my ($storage, $dbh) = @_;
-    eval { $dbh->do("DROP TABLE Owners") };
-    eval { $dbh->do("DROP TABLE Books") };
+    eval { $dbh->do("DROP TABLE owners") };
+    eval { $dbh->do("DROP TABLE books") };
     $dbh->do(<<'SQL');
-CREATE TABLE Books (
+CREATE TABLE books (
    id INT IDENTITY (1, 1) NOT NULL,
    source VARCHAR(100),
    owner INT,
@@ -189,7 +189,7 @@
    price INT NULL
 )
 
-CREATE TABLE Owners (
+CREATE TABLE owners (
    id INT IDENTITY (1, 1) NOT NULL,
    name VARCHAR(100),
 )
@@ -205,10 +205,10 @@
     [qw/1   wiggle/],
     [qw/2   woggle/],
     [qw/3   boggle/],
-    [qw/4   fREW/],
-    [qw/5   fRIOUX/],
-    [qw/6   fROOH/],
-    [qw/7   fRUE/],
+    [qw/4   fRIOUX/],
+    [qw/5   fRUE/],
+    [qw/6   fREW/],
+    [qw/7   fROOH/],
     [qw/8   fISMBoC/],
     [qw/9   station/],
     [qw/10   mirror/],
@@ -220,11 +220,12 @@
   ]);
 }, 'populate with PKs supplied ok' );
 
+
 lives_ok (sub {
   # start a new connection, make sure rebless works
   # test an insert with a supplied identity, followed by one without
   my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
-  for (1..2) {
+  for (2, 1) {
     my $id = $_ * 20 ;
     $schema->resultset ('Owners')->create ({ id => $id, name => "troglodoogle $id" });
     $schema->resultset ('Owners')->create ({ name => "troglodoogle " . ($id + 1) });
@@ -254,20 +255,129 @@
   ]);
 }, 'populate without PKs supplied ok' );
 
-# make sure ordered subselects work
+# plain ordered subqueries throw
+throws_ok (sub {
+  $schema->resultset('Owners')->search ({}, { order_by => 'name' })->as_query
+}, qr/ordered subselect encountered/, 'Ordered Subselect detection throws ok');
+
+# make sure ordered subselects *somewhat* work
 {
+  my $owners = $schema->resultset ('Owners')->search ({}, { order_by => 'name', offset => 2, rows => 3, unsafe_subselect_ok => 1 });
+
+  my $al = $owners->current_source_alias;
+  my $sealed_owners = $owners->result_source->resultset->search (
+    {},
+    {
+      alias => $al,
+      from => [{
+        -alias => $al,
+        -source_handle => $owners->result_source->handle,
+        $al => $owners->as_query,
+      }],
+    },
+  );
+
+  is_deeply (
+    [ map { $_->name } ($sealed_owners->all) ],
+    [ map { $_->name } ($owners->all) ],
+    'Sort preserved from within a subquery',
+  );
+}
+
+TODO: {
+  local $TODO = "This probably will never work, but it isn't critical either afaik";
+
   my $book_owner_ids = $schema->resultset ('BooksInLibrary')
-                               ->search ({}, { join => 'owner', distinct => 1, order_by => { -desc => 'owner'} })
+                               ->search ({}, { join => 'owner', distinct => 1, order_by => 'owner.name', unsafe_subselect_ok => 1 })
                                 ->get_column ('owner');
 
-  my $owners = $schema->resultset ('Owners')->search ({
+  my $book_owners = $schema->resultset ('Owners')->search ({
     id => { -in => $book_owner_ids->as_query }
   });
 
-  is ($owners->count, 8, 'Correct amount of book owners');
-  is ($owners->all, 8, 'Correct amount of book owner objects');
+  is_deeply (
+    [ map { $_->id } ($book_owners->all) ],
+    [ $book_owner_ids->all ],
+    'Sort is preserved across IN subqueries',
+  );
 }
 
+# This is known not to work - thus the negative test
+{
+  my $owners = $schema->resultset ('Owners')->search ({}, { order_by => 'name', offset => 2, rows => 3, unsafe_subselect_ok => 1 });
+  my $corelated_owners = $owners->result_source->resultset->search (
+    {
+      id => { -in => $owners->get_column('id')->as_query },
+    },
+    {
+      order_by => 'name' #reorder because of what is shown above
+    },
+  );
+
+  cmp_ok (
+    join ("\x00", map { $_->name } ($corelated_owners->all) ),
+      'ne',
+    join ("\x00", map { $_->name } ($owners->all) ),
+    'Sadly sort not preserved from within a correlated subquery',
+  );
+
+  cmp_ok (
+    join ("\x00", sort map { $_->name } ($corelated_owners->all) ),
+      'ne',
+    join ("\x00", sort map { $_->name } ($owners->all) ),
+    'Which in fact gives a completely wrong dataset',
+  );
+}
+
+
+# make sure right-join-side single-prefetch ordering limit works
+{
+  my $rs = $schema->resultset ('BooksInLibrary')->search (
+    {
+      'owner.name' => { '!=', 'woggle' },
+    },
+    {
+      prefetch => 'owner',
+      order_by => 'owner.name',
+    }
+  );
+  # this is the order in which they should come from the above query
+  my @owner_names = qw/boggle fISMBoC fREW fRIOUX fROOH fRUE wiggle wiggle/;
+
+  is ($rs->all, 8, 'Correct amount of objects from right-sorted joined resultset');
+  is_deeply (
+    [map { $_->owner->name } ($rs->all) ],
+    \@owner_names,
+    'Rows were properly ordered'
+  );
+
+  my $limited_rs = $rs->search ({}, {rows => 7, offset => 2, unsafe_subselect_ok => 1});
+  is ($limited_rs->count, 6, 'Correct count of limited right-sorted joined resultset');
+  is ($limited_rs->count_rs->next, 6, 'Correct count_rs of limited right-sorted joined resultset');
+
+  my $queries;
+  $schema->storage->debugcb(sub { $queries++; });
+  $schema->storage->debug(1);
+
+  is_deeply (
+    [map { $_->owner->name } ($limited_rs->all) ],
+    [@owner_names[2 .. 7]],
+    'Limited rows were properly ordered'
+  );
+  is ($queries, 1, 'Only one query with prefetch');
+
+  $schema->storage->debugcb(undef);
+  $schema->storage->debug(0);
+
+
+  is_deeply (
+    [map { $_->name } ($limited_rs->search_related ('owner')->all) ],
+    [@owner_names[2 .. 7]],
+    'Rows are still properly ordered after search_related'
+  );
+}
+
+
 #
 # try a prefetch on tables with identically named columns
 #
@@ -287,6 +397,7 @@
       prefetch => 'books',
       order_by => { -asc => \['name + ?', [ test => 'xxx' ]] }, # test bindvar propagation
       rows     => 3,  # 8 results total
+      unsafe_subselect_ok => 1,
     },
   );
 
@@ -315,6 +426,7 @@
       prefetch => 'owner',
       rows     => 2,  # 3 results total
       order_by => { -desc => 'owner' },
+      unsafe_subselect_ok => 1,
     },
   );
 
@@ -337,38 +449,13 @@
   is ($books->page(2)->count_rs->next, 1, 'Prefetched grouped search returns correct count_rs');
 }
 
-# make sure right-join-side ordering limit works
-{
-  my $rs = $schema->resultset ('BooksInLibrary')->search (
-    {
-      'owner.name' => [qw/wiggle woggle/],
-    },
-    {
-      join => 'owner',
-      order_by => { -desc => 'owner.name' },
-    }
-  );
-
-  is ($rs->all, 3, 'Correct amount of objects from right-sorted joined resultset');
-  my $limited_rs = $rs->search ({}, {rows => 3, offset => 1});
-  is ($limited_rs->count, 2, 'Correct count of limited right-sorted joined resultset');
-  is ($limited_rs->count_rs->next, 2, 'Correct count_rs of limited right-sorted joined resultset');
-  is ($limited_rs->all, 2, 'Correct amount of objects from limited right-sorted joined resultset');
-
-  is_deeply (
-    [map { $_->name } ($limited_rs->search_related ('owner')->all) ],
-    [qw/woggle wiggle/],    # there is 1 woggle library book and 2 wiggle books, the limit gets us one of each
-    'Rows were properly ordered'
-  );
-}
-
 done_testing;
 
 # clean up our mess
 END {
   if (my $dbh = eval { $schema->storage->_dbh }) {
     eval { $dbh->do("DROP TABLE $_") }
-      for qw/artist money_test Books Owners/;
+      for qw/artist money_test books owners/;
   }
 }
 # vim:sw=2 sts=2

Modified: DBIx-Class/0.08/branches/prefetch_pager/t/74mssql.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/t/74mssql.t	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/t/74mssql.t	2010-01-15 02:16:31 UTC (rev 8321)
@@ -31,14 +31,19 @@
 
   $schema = DBICTest::Schema->clone;
 
+  $schema->connection($dsn, $user, $pass);
+
   if ($storage_idx != 0) { # autodetect
-    $schema->storage_type("::$storage_type");
+    no warnings 'redefine';
+    local *DBIx::Class::Storage::DBI::_typeless_placeholders_supported =
+      sub { 0 };
+#    $schema->storage_type("::$storage_type");
+    $schema->storage->ensure_connected;
   }
+  else {
+    $schema->storage->ensure_connected;
+  }
 
-  $schema->connection($dsn, $user, $pass);
-
-  $schema->storage->ensure_connected;
-
   if ($storage_idx == 0 && ref($schema->storage) =~ /NoBindVars\z/) {
     my $tb = Test::More->builder;
     $tb->skip('no placeholders') for 1..$NUMBER_OF_TESTS_IN_BLOCK;

Modified: DBIx-Class/0.08/branches/prefetch_pager/t/85utf8.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/t/85utf8.t	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/t/85utf8.t	2010-01-15 02:16:31 UTC (rev 8321)
@@ -1,23 +1,28 @@
 use strict;
-use warnings;  
+use warnings;
 
 use Test::More;
+use Test::Warn;
 use lib qw(t/lib);
 use DBICTest;
+use utf8;
 
-my $schema = DBICTest->init_schema();
+warning_like (sub {
 
-if ($] <= 5.008000) {
+  package A::Comp;
+  use base 'DBIx::Class';
+  sub store_column { shift->next::method (@_) };
+  1;
 
-    eval 'use Encode; 1' or plan skip_all => 'Need Encode run this test';
+  package A::Test;
+  use base 'DBIx::Class::Core';
+  __PACKAGE__->load_components(qw(UTF8Columns +A::Comp));
+  1;
+}, qr/Incorrect loading order of DBIx::Class::UTF8Columns/ );
 
-} else {
 
-    eval 'use utf8; 1' or plan skip_all => 'Need utf8 run this test';
-}
+my $schema = DBICTest->init_schema();
 
-plan tests => 6;
-
 DBICTest::Schema::CD->load_components('UTF8Columns');
 DBICTest::Schema::CD->utf8_columns('title');
 Class::C3->reinitialize();
@@ -26,12 +31,12 @@
 my $utf8_char = 'uniuni';
 
 
-ok( _is_utf8( $cd->title ), 'got title with utf8 flag' );
-ok(! _is_utf8( $cd->year ), 'got year without utf8 flag' );
+ok( utf8::is_utf8( $cd->title ), 'got title with utf8 flag' );
+ok(! utf8::is_utf8( $cd->year ), 'got year without utf8 flag' );
 
-_force_utf8($utf8_char);
+utf8::decode($utf8_char);
 $cd->title($utf8_char);
-ok(! _is_utf8( $cd->{_column_data}{title} ), 'store utf8-less chars' );
+ok(! utf8::is_utf8( $cd->{_column_data}{title} ), 'store utf8-less chars' );
 
 
 my $v_utf8 = "\x{219}";
@@ -47,24 +52,7 @@
 TODO: {
   local $TODO = 'There is currently no way to propagate aliases to inflate_result()';
   $cd = $schema->resultset('CD')->find ({ title => $v_utf8 }, { select => 'title', as => 'name' });
-  ok (_is_utf8( $cd->get_column ('name') ), 'utf8 flag propagates via as');
+  ok (utf8::is_utf8( $cd->get_column ('name') ), 'utf8 flag propagates via as');
 }
 
-
-sub _force_utf8 {
-  if ($] <= 5.008000) {
-    Encode::_utf8_on ($_[0]);
-  }
-  else {
-    utf8::decode ($_[0]);
-  }
-}
-
-sub _is_utf8 {
-  if ($] <= 5.008000) {
-    return Encode::is_utf8 (shift);
-  }
-  else {
-    return utf8::is_utf8 (shift);
-  }
-}
+done_testing;

Modified: DBIx-Class/0.08/branches/prefetch_pager/t/86sqlt.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/t/86sqlt.t	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/t/86sqlt.t	2010-01-15 02:16:31 UTC (rev 8321)
@@ -14,6 +14,36 @@
 
 my $schema = DBICTest->init_schema (no_deploy => 1);
 
+
+# Check deployment statements ctx sensitivity
+{
+  my $not_first_table_creation_re = qr/CREATE TABLE fourkeys_to_twokeys/;
+
+
+  my $statements = $schema->deployment_statements;
+  like (
+    $statements,
+    $not_first_table_creation_re,
+    'All create statements returned in 1 string in scalar ctx'
+  );
+
+  my @statements = $schema->deployment_statements;
+  cmp_ok (scalar @statements, '>', 1, 'Multiple statement lines in array ctx');
+
+  my $i = 0;
+  while ($i <= $#statements) {
+    last if $statements[$i] =~ $not_first_table_creation_re;
+    $i++;
+  }
+
+  ok (
+    ($i > 0) && ($i <= $#statements),
+    "Creation statement was found somewhere within the array ($i)"
+  );
+}
+
+
+
 # replace the sqlt calback with a custom version ading an index
 $schema->source('Track')->sqlt_deploy_callback(sub {
   my ($self, $sqlt_table) = @_;

Modified: DBIx-Class/0.08/branches/prefetch_pager/t/88result_set_column.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/t/88result_set_column.t	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/t/88result_set_column.t	2010-01-15 02:16:31 UTC (rev 8321)
@@ -9,8 +9,15 @@
 
 my $schema = DBICTest->init_schema();
 
-my $rs = $schema->resultset("CD")->search({}, { order_by => 'cdid' });
+my $rs = $schema->resultset("CD");
 
+cmp_ok (
+  $rs->count,
+    '!=',
+  $rs->search ({}, {columns => ['year'], distinct => 1})->count,
+  'At least one year is the same in rs'
+);
+
 my $rs_title = $rs->get_column('title');
 my $rs_year = $rs->get_column('year');
 my $max_year = $rs->get_column(\'MAX (year)');
@@ -36,6 +43,14 @@
   is($rs_year->single, 1999, "single okay");
 }, qr/Query returned more than one row/, 'single warned');
 
+
+# test distinct propagation
+is_deeply (
+  [$rs->search ({}, { distinct => 1 })->get_column ('year')->all],
+  [$rs_year->func('distinct')],
+  'distinct => 1 is passed through properly',
+);
+
 # test +select/+as for single column
 my $psrs = $schema->resultset('CD')->search({},
     {

Modified: DBIx-Class/0.08/branches/prefetch_pager/t/99dbic_sqlt_parser.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/t/99dbic_sqlt_parser.t	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/t/99dbic_sqlt_parser.t	2010-01-15 02:16:31 UTC (rev 8321)
@@ -1,10 +1,12 @@
-#!/usr/bin/perl
 use strict;
 use warnings;
+
 use Test::More;
 use Test::Exception;
 use lib qw(t/lib);
 use DBICTest;
+use DBICTest::Schema;
+use Scalar::Util ();
 
 BEGIN {
   require DBIx::Class::Storage::DBI;
@@ -13,6 +15,16 @@
     if not DBIx::Class::Storage::DBI->_sqlt_version_ok;
 }
 
+# Test for SQLT-related leaks
+{
+  my $s = DBICTest::Schema->clone;
+  create_schema ({ schema => $s });
+  Scalar::Util::weaken ($s);
+
+  ok (!$s, 'Schema not leaked');
+}
+
+
 my $schema = DBICTest->init_schema();
 # Dummy was yanked out by the sqlt hook test
 # CustomSql tests the horrific/deprecated ->name(\$sql) hack
@@ -24,43 +36,43 @@
 ;
 
 { 
-	my $sqlt_schema = create_schema({ schema => $schema, args => { parser_args => { } } });
+  my $sqlt_schema = create_schema({ schema => $schema, args => { parser_args => { } } });
 
-	foreach my $source (@sources) {
-		my $table = get_table($sqlt_schema, $schema, $source);
+  foreach my $source (@sources) {
+    my $table = get_table($sqlt_schema, $schema, $source);
 
-		my $fk_count = scalar(grep { $_->type eq 'FOREIGN KEY' } $table->get_constraints);
-		my @indices = $table->get_indices;
-		my $index_count = scalar(@indices);
+    my $fk_count = scalar(grep { $_->type eq 'FOREIGN KEY' } $table->get_constraints);
+    my @indices = $table->get_indices;
+    my $index_count = scalar(@indices);
     $index_count++ if ($source eq 'TwoKeys'); # TwoKeys has the index turned off on the rel def
-		is($index_count, $fk_count, "correct number of indices for $source with no args");
-	}
+    is($index_count, $fk_count, "correct number of indices for $source with no args");
+  }
 }
 
 { 
-	my $sqlt_schema = create_schema({ schema => $schema, args => { parser_args => { add_fk_index => 1 } } });
+  my $sqlt_schema = create_schema({ schema => $schema, args => { parser_args => { add_fk_index => 1 } } });
 
-	foreach my $source (@sources) {
-		my $table = get_table($sqlt_schema, $schema, $source);
+  foreach my $source (@sources) {
+    my $table = get_table($sqlt_schema, $schema, $source);
 
-		my $fk_count = scalar(grep { $_->type eq 'FOREIGN KEY' } $table->get_constraints);
-		my @indices = $table->get_indices;
-		my $index_count = scalar(@indices);
+    my $fk_count = scalar(grep { $_->type eq 'FOREIGN KEY' } $table->get_constraints);
+    my @indices = $table->get_indices;
+    my $index_count = scalar(@indices);
     $index_count++ if ($source eq 'TwoKeys'); # TwoKeys has the index turned off on the rel def
-		is($index_count, $fk_count, "correct number of indices for $source with add_fk_index => 1");
-	}
+    is($index_count, $fk_count, "correct number of indices for $source with add_fk_index => 1");
+  }
 }
 
 { 
-	my $sqlt_schema = create_schema({ schema => $schema, args => { parser_args => { add_fk_index => 0 } } });
+  my $sqlt_schema = create_schema({ schema => $schema, args => { parser_args => { add_fk_index => 0 } } });
 
-	foreach my $source (@sources) {
-		my $table = get_table($sqlt_schema, $schema, $source);
+  foreach my $source (@sources) {
+    my $table = get_table($sqlt_schema, $schema, $source);
 
-		my @indices = $table->get_indices;
-		my $index_count = scalar(@indices);
-		is($index_count, 0, "correct number of indices for $source with add_fk_index => 0");
-	}
+    my @indices = $table->get_indices;
+    my $index_count = scalar(@indices);
+    is($index_count, 0, "correct number of indices for $source with add_fk_index => 0");
+  }
 }
 
 { 
@@ -84,25 +96,43 @@
         'parser detects views with a view_definition';
 }
 
+lives_ok (sub {
+  my $sqlt_schema = create_schema ({
+    schema => $schema,
+    args => {
+      parser_args => {
+        sources => ['CD']
+      },
+    },
+  });
+
+  is_deeply (
+    [$sqlt_schema->get_tables ],
+    ['cd'],
+    'sources limiting with relationships works',
+  );
+
+});
+
 done_testing;
 
 sub create_schema {
-	my $args = shift;
+  my $args = shift;
 
-	my $schema = $args->{schema};
-	my $additional_sqltargs = $args->{args} || {};
+  my $schema = $args->{schema};
+  my $additional_sqltargs = $args->{args} || {};
 
-	my $sqltargs = {
-		add_drop_table => 1, 
-		ignore_constraint_names => 1,
-		ignore_index_names => 1,
-		%{$additional_sqltargs}
-		};
+  my $sqltargs = {
+    add_drop_table => 1, 
+    ignore_constraint_names => 1,
+    ignore_index_names => 1,
+    %{$additional_sqltargs}
+  };
 
-	my $sqlt = SQL::Translator->new( $sqltargs );
+  my $sqlt = SQL::Translator->new( $sqltargs );
 
-	$sqlt->parser('SQL::Translator::Parser::DBIx::Class');
-	return $sqlt->translate({ data => $schema }) || die $sqlt->error;
+  $sqlt->parser('SQL::Translator::Parser::DBIx::Class');
+  return $sqlt->translate({ data => $schema }) || die $sqlt->error;
 }
 
 sub get_table {

Modified: DBIx-Class/0.08/branches/prefetch_pager/t/inflate/hri.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/t/inflate/hri.t	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/t/inflate/hri.t	2010-01-15 02:16:31 UTC (rev 8321)
@@ -45,7 +45,7 @@
             my @dbic_reltable = $dbic_obj->$col;
             my @hashref_reltable = @{$datahashref->{$col}};
   
-            is (scalar @hashref_reltable, scalar @dbic_reltable, 'number of related entries');
+            is (scalar @dbic_reltable, scalar @hashref_reltable, 'number of related entries');
 
             # for my $index (0..scalar @hashref_reltable) {
             for my $index (0..scalar @dbic_reltable) {

Modified: DBIx-Class/0.08/branches/prefetch_pager/t/lib/DBICTest/Schema/Artist.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/t/lib/DBICTest/Schema/Artist.pm	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/t/lib/DBICTest/Schema/Artist.pm	2010-01-15 02:16:31 UTC (rev 8321)
@@ -44,6 +44,9 @@
 __PACKAGE__->has_many(
     cds_unordered => 'DBICTest::Schema::CD'
 );
+__PACKAGE__->has_many(
+    cds_very_very_very_long_relationship_name => 'DBICTest::Schema::CD'
+);
 
 __PACKAGE__->has_many( twokeys => 'DBICTest::Schema::TwoKeys' );
 __PACKAGE__->has_many( onekeys => 'DBICTest::Schema::OneKey' );

Modified: DBIx-Class/0.08/branches/prefetch_pager/t/lib/DBICTest.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/t/lib/DBICTest.pm	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/t/lib/DBICTest.pm	2010-01-15 02:16:31 UTC (rev 8321)
@@ -127,7 +127,7 @@
     my $args = shift || {};
 
     if ($ENV{"DBICTEST_SQLT_DEPLOY"}) { 
-        $schema->deploy($args);    
+        $schema->deploy($args);
     } else {
         open IN, "t/lib/sqlite.sql";
         my $sql;

Modified: DBIx-Class/0.08/branches/prefetch_pager/t/prefetch/double_prefetch.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/t/prefetch/double_prefetch.t	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/t/prefetch/double_prefetch.t	2010-01-15 02:16:31 UTC (rev 8321)
@@ -27,7 +27,7 @@
       single_track_2.trackid, single_track_2.cd, single_track_2.position, single_track_2.title, single_track_2.last_updated_on, single_track_2.last_updated_at, single_track_2.small_dt,
       cd.cdid, cd.artist, cd.title, cd.year, cd.genreid, cd.single_track
     FROM artist me
-      LEFT JOIN cd cds ON cds.artist = me.artistid
+      JOIN cd cds ON cds.artist = me.artistid
       LEFT JOIN track single_track ON single_track.trackid = cds.single_track
       LEFT JOIN track single_track_2 ON single_track_2.trackid = cds.single_track
       LEFT JOIN cd cd ON cd.cdid = single_track_2.cd

Modified: DBIx-Class/0.08/branches/prefetch_pager/t/relationship/core.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/t/relationship/core.t	2010-01-15 02:14:39 UTC (rev 8320)
+++ DBIx-Class/0.08/branches/prefetch_pager/t/relationship/core.t	2010-01-15 02:16:31 UTC (rev 8321)
@@ -268,7 +268,7 @@
   '(
     SELECT artist_undirected_maps.id1, artist_undirected_maps.id2
       FROM artist me
-      LEFT JOIN artist_undirected_map artist_undirected_maps
+      JOIN artist_undirected_map artist_undirected_maps
         ON artist_undirected_maps.id1 = me.artistid OR artist_undirected_maps.id2 = me.artistid
     WHERE ( artistid = ? )
   )',

Added: DBIx-Class/0.08/branches/prefetch_pager/t/resultset/nulls_only.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/t/resultset/nulls_only.t	                        (rev 0)
+++ DBIx-Class/0.08/branches/prefetch_pager/t/resultset/nulls_only.t	2010-01-15 02:16:31 UTC (rev 8321)
@@ -0,0 +1,29 @@
+use strict;
+use warnings;
+
+use lib qw(t/lib);
+use Test::More;
+use Test::Exception;
+use DBICTest;
+
+my $schema = DBICTest->init_schema();
+
+
+my $cd_rs = $schema->resultset('CD')->search ({ genreid => undef }, { columns => [ 'genreid' ]} );
+my $count = $cd_rs->count;
+cmp_ok ( $count, '>', 1, 'several CDs with no genre');
+
+my @objects = $cd_rs->all;
+is (scalar @objects, $count, 'Correct amount of objects without limit');
+isa_ok ($_, 'DBICTest::CD') for @objects;
+
+is_deeply (
+  [ map { values %{{$_->get_columns}} } (@objects) ],
+  [ (undef) x $count ],
+  'All values are indeed undef'
+);
+
+
+isa_ok ($cd_rs->search ({}, { rows => 1 })->single, 'DBICTest::CD');
+
+done_testing;

Added: DBIx-Class/0.08/branches/prefetch_pager/t/schema/anon.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/t/schema/anon.t	                        (rev 0)
+++ DBIx-Class/0.08/branches/prefetch_pager/t/schema/anon.t	2010-01-15 02:16:31 UTC (rev 8321)
@@ -0,0 +1,13 @@
+use strict;
+use warnings;
+use Test::More;
+use Test::Exception;
+
+use lib qw(t/lib);
+use DBICTest;
+
+lives_ok (sub {
+  DBICTest->init_schema()->resultset('Artist')->find({artistid => 1 })->update({name => 'anon test'});
+}, 'Schema object not lost in chaining');
+
+done_testing;

Added: DBIx-Class/0.08/branches/prefetch_pager/t/search/related_strip_prefetch.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch_pager/t/search/related_strip_prefetch.t	                        (rev 0)
+++ DBIx-Class/0.08/branches/prefetch_pager/t/search/related_strip_prefetch.t	2010-01-15 02:16:31 UTC (rev 8321)
@@ -0,0 +1,43 @@
+use strict;
+use warnings;
+
+use Test::More;
+use Test::Exception;
+
+use lib qw(t/lib);
+use DBIC::SqlMakerTest;
+use DBICTest;
+
+my $schema = DBICTest->init_schema();
+
+my $rs = $schema->resultset('CD')->search (
+  { 'tracks.id' => { '!=', 666 }},
+  { join => 'artist', prefetch => 'tracks', rows => 2 }
+);
+
+my $rel_rs = $rs->search_related ('tags', { 'tags.tag' => { '!=', undef }}, { distinct => 1});
+
+is_same_sql_bind (
+  $rel_rs->as_query,
+  '(
+    SELECT tags.tagid, tags.cd, tags.tag
+      FROM (
+        SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track
+          FROM cd me
+          JOIN artist artist ON artist.artistid = me.artist
+          LEFT JOIN track tracks ON tracks.cd = me.cdid
+        WHERE ( tracks.id != ? )
+        LIMIT 2
+      ) me
+      JOIN artist artist ON artist.artistid = me.artist
+      LEFT JOIN track tracks ON tracks.cd = me.cdid
+      JOIN tags tags ON tags.cd = me.cdid
+    WHERE ( tags.tag IS NOT NULL )
+    GROUP BY tags.tagid, tags.cd, tags.tag
+  )',
+
+  [ [ 'tracks.id' => 666 ] ],
+  'Prefetch spec successfully stripped on search_related'
+);
+
+done_testing;
