[Bast-commits] r9307 - in DBIx-Class/0.08/branches/filter_column: .
lib/DBIx lib/DBIx/Class lib/DBIx/Class/CDBICompat
lib/DBIx/Class/InflateColumn lib/DBIx/Class/Manual
lib/DBIx/Class/SQLAHacks lib/DBIx/Class/Storage
lib/DBIx/Class/Storage/DBI lib/DBIx/Class/Storage/DBI/Sybase
t t/cdbi t/count t/inflate t/lib/DBICNSTest/Result
t/lib/DBICNSTest/ResultSet t/prefetch t/sqlahacks
t/sqlahacks/limit_dialects t/sqlahacks/sql_maker t/storage
ribasushi at dev.catalyst.perl.org
ribasushi at dev.catalyst.perl.org
Wed May 5 09:52:30 GMT 2010
Author: ribasushi
Date: 2010-05-05 10:52:30 +0100 (Wed, 05 May 2010)
New Revision: 9307
Added:
DBIx-Class/0.08/branches/filter_column/t/lib/DBICNSTest/Result/D.pm
DBIx-Class/0.08/branches/filter_column/t/lib/DBICNSTest/ResultSet/D.pm
DBIx-Class/0.08/branches/filter_column/t/sqlahacks/limit_dialects/rno.t
DBIx-Class/0.08/branches/filter_column/t/sqlahacks/limit_dialects/rownum.t
DBIx-Class/0.08/branches/filter_column/t/sqlahacks/oraclejoin.t
DBIx-Class/0.08/branches/filter_column/t/storage/dbi_env.t
DBIx-Class/0.08/branches/filter_column/t/storage/deploy.t
DBIx-Class/0.08/branches/filter_column/t/storage/global_destruction.t
Removed:
DBIx-Class/0.08/branches/filter_column/t/41orrible.t
Modified:
DBIx-Class/0.08/branches/filter_column/
DBIx-Class/0.08/branches/filter_column/Changes
DBIx-Class/0.08/branches/filter_column/Makefile.PL
DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class.pm
DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/CDBICompat/ColumnCase.pm
DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/InflateColumn/DateTime.pm
DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/Manual/Cookbook.pod
DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/ResultSet.pm
DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/Row.pm
DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/SQLAHacks.pm
DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/SQLAHacks/OracleJoins.pm
DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/SQLAHacks/SQLite.pm
DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/Schema.pm
DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/Storage/DBI.pm
DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/Storage/DBI/MSSQL.pm
DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/Storage/DBI/Replicated.pm
DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/Storage/DBI/Sybase/Microsoft_SQL_Server.pm
DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/Storage/DBIHacks.pm
DBIx-Class/0.08/branches/filter_column/t/39load_namespaces_1.t
DBIx-Class/0.08/branches/filter_column/t/72pg.t
DBIx-Class/0.08/branches/filter_column/t/746mssql.t
DBIx-Class/0.08/branches/filter_column/t/90join_torture.t
DBIx-Class/0.08/branches/filter_column/t/93single_accessor_object.t
DBIx-Class/0.08/branches/filter_column/t/cdbi/columns_as_hashes.t
DBIx-Class/0.08/branches/filter_column/t/count/count_rs.t
DBIx-Class/0.08/branches/filter_column/t/count/prefetch.t
DBIx-Class/0.08/branches/filter_column/t/inflate/datetime_mssql.t
DBIx-Class/0.08/branches/filter_column/t/inflate/datetime_sybase.t
DBIx-Class/0.08/branches/filter_column/t/prefetch/grouped.t
DBIx-Class/0.08/branches/filter_column/t/sqlahacks/limit_dialects/toplimit.t
DBIx-Class/0.08/branches/filter_column/t/sqlahacks/sql_maker/sql_maker_quote.t
Log:
r9155 at Thesaurus (orig r9142): rabbit | 2010-04-14 15:41:51 +0200
Add forgotten changes
r9156 at Thesaurus (orig r9143): caelum | 2010-04-14 17:04:00 +0200
support $ENV{DBI_DSN} and $ENV{DBI_DRIVER} (patch from Possum)
r9157 at Thesaurus (orig r9144): rabbit | 2010-04-14 17:50:58 +0200
Fix exception message
r9190 at Thesaurus (orig r9177): caelum | 2010-04-15 01:41:26 +0200
datetime millisecond precision for MSSQL
r9200 at Thesaurus (orig r9187): ribasushi | 2010-04-18 23:06:29 +0200
Fix leftover tabs
r9201 at Thesaurus (orig r9188): castaway | 2010-04-20 08:06:26 +0200
Warn if a class found in ResultSet/ is not a subclass of ::ResultSet
r9203 at Thesaurus (orig r9190): rbuels | 2010-04-20 21:12:22 +0200
create_ddl_dir mkpaths its dir if necessary. also, added storage/deploy.t as place to put deployment tests
r9204 at Thesaurus (orig r9191): rbuels | 2010-04-20 21:20:06 +0200
do not croak, rbuels! jeez.
r9205 at Thesaurus (orig r9192): castaway | 2010-04-21 08:03:08 +0200
Added missing test file (oops)
r9213 at Thesaurus (orig r9200): rabbit | 2010-04-24 02:23:05 +0200
10% speed up on quoted statement generation
r9215 at Thesaurus (orig r9202): rabbit | 2010-04-24 02:27:47 +0200
Revert bogus commit
r9216 at Thesaurus (orig r9203): ribasushi | 2010-04-24 02:31:06 +0200
_quote is now properly handled in SQLA
r9217 at Thesaurus (orig r9204): caelum | 2010-04-24 02:32:58 +0200
add "IMPROVING PERFORMANCE" section to Cookbook
r9231 at Thesaurus (orig r9218): ribasushi | 2010-04-26 13:13:13 +0200
Bump CAG and SQLA dependencies
r9232 at Thesaurus (orig r9219): ribasushi | 2010-04-26 15:27:38 +0200
Bizarre fork failure
r9233 at Thesaurus (orig r9220): castaway | 2010-04-26 21:45:32 +0200
Add tests using select/as to sqlahacks
r9234 at Thesaurus (orig r9221): castaway | 2010-04-26 21:49:10 +0200
Add test for fetching related obj/col as well
r9245 at Thesaurus (orig r9232): abraxxa | 2010-04-27 15:58:56 +0200
fixed missing ' in update_or_create with key attr example
r9247 at Thesaurus (orig r9234): ribasushi | 2010-04-27 16:53:06 +0200
Better concurrency in test (parent blocks)
r9248 at Thesaurus (orig r9235): ribasushi | 2010-04-27 16:53:34 +0200
Reformat tests/comments a bit
r9249 at Thesaurus (orig r9236): ribasushi | 2010-04-27 18:40:10 +0200
Better comment
r9250 at Thesaurus (orig r9237): ribasushi | 2010-04-27 18:40:31 +0200
Rename test
r9251 at Thesaurus (orig r9238): ribasushi | 2010-04-27 19:11:45 +0200
Fix global destruction problems
r9271 at Thesaurus (orig r9258): ribasushi | 2010-04-28 11:10:00 +0200
Refactor SQLA/select interaction (in reality just cleanup)
r9272 at Thesaurus (orig r9259): caelum | 2010-04-28 11:20:08 +0200
update ::DBI::Replicated
r9273 at Thesaurus (orig r9260): caelum | 2010-04-28 12:20:01 +0200
add _verify_pid and _verify_tid to methods that croak in ::Replicated
r9274 at Thesaurus (orig r9261): ribasushi | 2010-04-28 14:39:02 +0200
Fix failing test and some warnings
r9288 at Thesaurus (orig r9275): rabbit | 2010-04-29 10:32:10 +0200
Allow limit syntax change in-flight without digging into internals
r9292 at Thesaurus (orig r9279): castaway | 2010-04-30 12:26:52 +0200
Argh.. committing missing test file for load_namespaces tests
r9295 at Thesaurus (orig r9282): rabbit | 2010-05-01 11:06:21 +0200
The final version of the test
r9309 at Thesaurus (orig r9296): rabbit | 2010-05-04 09:44:51 +0200
Test for RT#56257
r9310 at Thesaurus (orig r9297): rabbit | 2010-05-04 10:00:11 +0200
Refactor count handling, make count-resultset attribute lists inclusive rather than exclusive (side effect - solves RT#56257)
r9318 at Thesaurus (orig r9305): rabbit | 2010-05-05 11:49:51 +0200
r9296 at Thesaurus (orig r9283): ribasushi | 2010-05-01 11:51:15 +0200
Branch to clean up various limit dialects
r9297 at Thesaurus (orig r9284): rabbit | 2010-05-01 11:55:04 +0200
Preliminary version
r9301 at Thesaurus (orig r9288): rabbit | 2010-05-03 18:31:24 +0200
Fix incorrect comparison
r9302 at Thesaurus (orig r9289): rabbit | 2010-05-03 18:32:36 +0200
Do not add TOP prefixes to queries already containing it
r9303 at Thesaurus (orig r9290): rabbit | 2010-05-03 18:33:15 +0200
Add an as selector to a prefetch subquery to aid the subselecting-limit analyzer
r9304 at Thesaurus (orig r9291): rabbit | 2010-05-03 18:34:49 +0200
Rewrite mssql test to verify both types of limit dialects with and without quoting, rewrite the RNO, Top and RowNum dialects to rely on a factored out column re-aliaser
r9305 at Thesaurus (orig r9292): rabbit | 2010-05-03 21:06:01 +0200
Fix Top tests, make extra col selector order consistent
r9307 at Thesaurus (orig r9294): ribasushi | 2010-05-04 00:50:35 +0200
Fix test warning
r9308 at Thesaurus (orig r9295): ribasushi | 2010-05-04 01:04:32 +0200
Some databases (db2) do not like leading __s - use a different weird identifier for extra selector names
r9313 at Thesaurus (orig r9300): rabbit | 2010-05-05 11:08:33 +0200
Rename test
r9314 at Thesaurus (orig r9301): rabbit | 2010-05-05 11:11:32 +0200
If there was no offset, there is no sense in reordering
r9315 at Thesaurus (orig r9302): rabbit | 2010-05-05 11:12:19 +0200
Split and fix oracle tests
r9317 at Thesaurus (orig r9304): rabbit | 2010-05-05 11:49:33 +0200
Changes
Property changes on: DBIx-Class/0.08/branches/filter_column
___________________________________________________________________
Modified: svk:merge
- 168d5346-440b-0410-b799-f706be625ff1:/DBIx-Class-current:2207
462d4d0c-b505-0410-bf8e-ce8f877b3390:/local/bast/DBIx-Class:3159
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/cookbook_fixes:7657
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/prefetch_bug-unqualified_column_in_search_related_cond:7959
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/resultsetcolumn_custom_columns:5160
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/sqla_1.50_compat:5414
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/void_populate_resultset_cond:7935
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/trunk:7982
9c88509d-e914-0410-b01c-b9530614cbfe:/local/DBIx-Class:32260
9c88509d-e914-0410-b01c-b9530614cbfe:/local/DBIx-Class-CDBICompat:54993
9c88509d-e914-0410-b01c-b9530614cbfe:/vendor/DBIx-Class:31122
ab17426e-7cd3-4704-a2a2-80b7c0a611bb:/local/dbic_column_attr:10946
ab17426e-7cd3-4704-a2a2-80b7c0a611bb:/local/dbic_trunk:11788
bd5ac9a7-f185-4d95-9186-dbb8b392a572:/local/os/bast/DBIx-Class/0.08/trunk:2798
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/_abandoned_but_possibly_useful/table_name_ref:7266
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/ado_mssql:7886
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/autocast:7418
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/belongs_to_null_col_fix:5244
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/cdbicompat_integration:4160
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/chaining_fixes:8626
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/column_attr:5074
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/complex_join_rels:4589
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/connect_info_hash:7435
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/connected_schema_leak:8264
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/cookbook_fixes:7479
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/count_distinct:6218
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/count_rs:6741
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/create_scalarref_rt51559:8027
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/dbicadmin_pod:8786
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/dbicadmin_refactor:8691
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/dephandling:8674
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/diamond_relationships:6310
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/discard_changes_replication_fix:7252
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/file_column:3920
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/fix-update-and-delete-as_query:6162
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/get_inflated_columns_rt46953:7964
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/grouped_has_many_join:7382
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/grouped_prefetch:6885
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/handle_all_storage_methods_in_replicated:8612
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/ic_dt_post_inflate:8517
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/informix:8434
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/is_resultset_paginated:7769
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/joined_count:6323
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mc_fixes:6645
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_limit_regression:8278
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_money_type:7096
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_rno_pagination:8054
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_storage_minor_refactor:7210
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_top_fixes:6971
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/multi_stuff:5565
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/multicreate_fixes:7275
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/multiple_version_upgrade:8429
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mysql_ansi:7175
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mystery_join:6589
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/new_replication_transaction_fixup:7058
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/no_duplicate_indexes_for_pk_cols:8373
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/normalize_connect_info:8274
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/null_column_regression:8314
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/on_connect_call:6854
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/on_disconnect_do:3694
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle-tweaks:6222
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle_quotes:8812
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle_sequence:4173
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle_shorten_aliases:8234
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/order_by_refactor:6475
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/parser_fk_index:4485
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/pg_unqualified_schema:7842
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/pod_fixes:8902
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch:5699
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch-group_by:7917
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_bug-unqualified_column_in_search_related_cond:7900
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_limit:6724
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_pager:8431
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_redux:7206
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/pri_key_refactor:8751
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/reduce_pings:7261
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/replication_dedux:4600
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/rsrc_in_storage:6577
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/rt_bug_41083:5437
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/savepoints:4223
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/search_related_prefetch:6818
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sqla_1.50_compat:5321
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sqlt_parser_view:8145
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/storage-interbase:8929
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/storage-ms-access:4142
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/storage-tweaks:6262
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/subclassed_rsset:5930
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/subquery:5617
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/syb_connected:6919
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase:7682
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_asa:8513
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_bulk_insert:7679
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_bulkinsert_support:7796
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_computed_columns:8496
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_mssql:6125
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_refactor:7940
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_support:7797
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/table_name_ref:7132
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/top_limit_altfix:6429
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/type_aware_update:6619
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/unresolvable_prefetch:6949
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/versioned_enhancements:4125
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/versioning:4578
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/view_rels:7908
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/views:5585
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/void_populate_resultset_cond:7944
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/tags/0.08108_prerelease_please_do_not_pull_into_it:7008
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/tags/pre_0.08109_please_do_not_merge:7336
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-C3:318
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-current:2222
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-joins:173
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-resultset:570
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/datetime:1716
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/find_compat:1855
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/find_unique_query_fixes:2142
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/inflate:1988
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/many_to_many:2025
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/re_refactor_bugfix:1944
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/reorganize_tests:1827
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset-new-refactor:1766
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset_2_electric_boogaloo:2175
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset_cleanup:2102
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/sqlt_tests_refactor:2043
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/trunk/DBIx-Class:3606
fe160bb6-dc1c-0410-9f2b-d64a711b54a5:/local/DBIC-trunk-0.08:10510
+ 168d5346-440b-0410-b799-f706be625ff1:/DBIx-Class-current:2207
462d4d0c-b505-0410-bf8e-ce8f877b3390:/local/bast/DBIx-Class:3159
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/cookbook_fixes:7657
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/prefetch_bug-unqualified_column_in_search_related_cond:7959
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/resultsetcolumn_custom_columns:5160
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/sqla_1.50_compat:5414
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/void_populate_resultset_cond:7935
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/trunk:7982
9c88509d-e914-0410-b01c-b9530614cbfe:/local/DBIx-Class:32260
9c88509d-e914-0410-b01c-b9530614cbfe:/local/DBIx-Class-CDBICompat:54993
9c88509d-e914-0410-b01c-b9530614cbfe:/vendor/DBIx-Class:31122
ab17426e-7cd3-4704-a2a2-80b7c0a611bb:/local/dbic_column_attr:10946
ab17426e-7cd3-4704-a2a2-80b7c0a611bb:/local/dbic_trunk:11788
bd5ac9a7-f185-4d95-9186-dbb8b392a572:/local/os/bast/DBIx-Class/0.08/trunk:2798
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/_abandoned_but_possibly_useful/table_name_ref:7266
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/ado_mssql:7886
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/autocast:7418
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/belongs_to_null_col_fix:5244
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/cdbicompat_integration:4160
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/chaining_fixes:8626
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/column_attr:5074
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/complex_join_rels:4589
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/connect_info_hash:7435
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/connected_schema_leak:8264
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/cookbook_fixes:7479
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/count_distinct:6218
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/count_rs:6741
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/create_scalarref_rt51559:8027
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/dbicadmin_pod:8786
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/dbicadmin_refactor:8691
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/dephandling:8674
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/diamond_relationships:6310
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/discard_changes_replication_fix:7252
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/file_column:3920
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/fix-update-and-delete-as_query:6162
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/get_inflated_columns_rt46953:7964
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/grouped_has_many_join:7382
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/grouped_prefetch:6885
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/handle_all_storage_methods_in_replicated:8612
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/ic_dt_post_inflate:8517
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/informix:8434
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/is_resultset_paginated:7769
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/joined_count:6323
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mc_fixes:6645
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_limit_regression:8278
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_money_type:7096
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_rno_pagination:8054
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_storage_minor_refactor:7210
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_top_fixes:6971
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/multi_stuff:5565
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/multicreate_fixes:7275
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/multiple_version_upgrade:8429
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mysql_ansi:7175
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mystery_join:6589
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/new_replication_transaction_fixup:7058
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/no_duplicate_indexes_for_pk_cols:8373
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/normalize_connect_info:8274
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/null_column_regression:8314
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/on_connect_call:6854
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/on_disconnect_do:3694
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle-tweaks:6222
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle_quotes:8812
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle_sequence:4173
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle_shorten_aliases:8234
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/order_by_refactor:6475
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/parser_fk_index:4485
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/pg_unqualified_schema:7842
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/pod_fixes:8902
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch:5699
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch-group_by:7917
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_bug-unqualified_column_in_search_related_cond:7900
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_limit:6724
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_pager:8431
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_redux:7206
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/pri_key_refactor:8751
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/reduce_pings:7261
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/replication_dedux:4600
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/rsrc_in_storage:6577
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/rt_bug_41083:5437
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/savepoints:4223
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/search_related_prefetch:6818
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sqla_1.50_compat:5321
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sqlt_parser_view:8145
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/storage-interbase:8929
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/storage-ms-access:4142
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/storage-tweaks:6262
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/subclassed_rsset:5930
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/subqueried_limit_fixes:9304
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/subquery:5617
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/syb_connected:6919
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase:7682
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_asa:8513
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_bulk_insert:7679
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_bulkinsert_support:7796
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_computed_columns:8496
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_mssql:6125
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_refactor:7940
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_support:7797
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/table_name_ref:7132
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/top_limit_altfix:6429
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/type_aware_update:6619
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/unresolvable_prefetch:6949
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/versioned_enhancements:4125
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/versioning:4578
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/view_rels:7908
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/views:5585
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/void_populate_resultset_cond:7944
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/tags/0.08108_prerelease_please_do_not_pull_into_it:7008
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/tags/pre_0.08109_please_do_not_merge:7336
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/trunk:9305
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-C3:318
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-current:2222
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-joins:173
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-resultset:570
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/datetime:1716
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/find_compat:1855
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/find_unique_query_fixes:2142
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/inflate:1988
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/many_to_many:2025
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/re_refactor_bugfix:1944
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/reorganize_tests:1827
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset-new-refactor:1766
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset_2_electric_boogaloo:2175
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset_cleanup:2102
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/sqlt_tests_refactor:2043
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/trunk/DBIx-Class:3606
fe160bb6-dc1c-0410-9f2b-d64a711b54a5:/local/DBIC-trunk-0.08:10510
Modified: DBIx-Class/0.08/branches/filter_column/Changes
===================================================================
--- DBIx-Class/0.08/branches/filter_column/Changes 2010-05-05 09:50:25 UTC (rev 9306)
+++ DBIx-Class/0.08/branches/filter_column/Changes 2010-05-05 09:52:30 UTC (rev 9307)
@@ -1,8 +1,25 @@
Revision history for DBIx::Class
+ - Add a warning to load_namespaces if a class in ResultSet/
+ is not a subclass of DBIx::Class::ResultSet
+ - ::Storage::DBI now correctly preserves a parent $dbh from
+ terminating children, even during interpreter-global
+ out-of-order destruction
- Add DBIx::Class::FilterColumn for non-ref filtering
+ - InflateColumn::DateTime support for MSSQL via DBD::Sybase
+ - Millisecond precision support for MSSQL datetimes for
+ InflateColumn::DateTime
+ - Support connecting using $ENV{DBI_DSN} and $ENV{DBI_DRIVER}
+ - current_source_alias method on ResultSet objects to
+ determine the alias to use in programmatically assembled
+ search()es (originally added in 0.08100 but unmentioned)
+ - Rewrite/unification of all subselecting limit emulations
+ (RNO, Top, RowNum) to be much more robust wrt complex joined
+ resultsets
+ - MSSQL limits now don't require nearly as many applications of
+ the unsafe_subselect_ok attribute, due to optimized queries
+ - Depend on optimized SQL::Abstract (faster SQL generation)
-
0.08121 2010-04-11 18:43:00 (UTC)
- Support for Firebird RDBMS with DBD::InterBase and ODBC
- Add core support for INSERT RETURNING (for storages that
Modified: DBIx-Class/0.08/branches/filter_column/Makefile.PL
===================================================================
--- DBIx-Class/0.08/branches/filter_column/Makefile.PL 2010-05-05 09:50:25 UTC (rev 9306)
+++ DBIx-Class/0.08/branches/filter_column/Makefile.PL 2010-05-05 09:52:30 UTC (rev 9307)
@@ -18,7 +18,6 @@
### All of them should go to DBIx::Class::Optional::Dependencies
###
-
name 'DBIx-Class';
perl_version '5.008001';
all_from 'lib/DBIx/Class.pm';
@@ -37,7 +36,7 @@
my $runtime_requires = {
'Carp::Clan' => '6.0',
- 'Class::Accessor::Grouped' => '0.09002',
+ 'Class::Accessor::Grouped' => '0.09003',
'Class::C3::Componentised' => '1.0005',
'Class::Inspector' => '1.24',
'Data::Page' => '2.00',
@@ -45,7 +44,7 @@
'MRO::Compat' => '0.09',
'Module::Find' => '0.06',
'Path::Class' => '0.18',
- 'SQL::Abstract' => '1.64',
+ 'SQL::Abstract' => '1.66',
'SQL::Abstract::Limit' => '0.13',
'Sub::Name' => '0.04',
'Data::Dumper::Concise' => '1.000',
Modified: DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/CDBICompat/ColumnCase.pm
===================================================================
--- DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/CDBICompat/ColumnCase.pm 2010-05-05 09:50:25 UTC (rev 9306)
+++ DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/CDBICompat/ColumnCase.pm 2010-05-05 09:52:30 UTC (rev 9307)
@@ -25,9 +25,15 @@
sub has_many {
my ($class, $rel, $f_class, $f_key, @rest) = @_;
- return $class->next::method($rel, $f_class, ( ref($f_key) ?
- $f_key :
- lc($f_key) ), @rest);
+ return $class->next::method(
+ $rel,
+ $f_class,
+ (ref($f_key) ?
+ $f_key :
+ lc($f_key||'')
+ ),
+ @rest
+ );
}
sub get_inflated_column {
Modified: DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/InflateColumn/DateTime.pm
===================================================================
--- DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/InflateColumn/DateTime.pm 2010-05-05 09:50:25 UTC (rev 9306)
+++ DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/InflateColumn/DateTime.pm 2010-05-05 09:52:30 UTC (rev 9307)
@@ -132,7 +132,7 @@
$info->{_ic_dt_method} ||= "timestamp_without_timezone";
} elsif ($type eq "smalldatetime") {
$type = "datetime";
- $info->{_ic_dt_method} ||= "datetime";
+ $info->{_ic_dt_method} ||= "smalldatetime";
}
}
Modified: DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/Manual/Cookbook.pod
===================================================================
--- DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/Manual/Cookbook.pod 2010-05-05 09:50:25 UTC (rev 9306)
+++ DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/Manual/Cookbook.pod 2010-05-05 09:52:30 UTC (rev 9307)
@@ -292,7 +292,7 @@
my $count = $rs->count;
# Equivalent SQL:
- # SELECT COUNT( * ) FROM (SELECT me.name FROM artist me GROUP BY me.name) count_subq:
+ # SELECT COUNT( * ) FROM (SELECT me.name FROM artist me GROUP BY me.name) me:
=head2 Grouping results
@@ -2084,6 +2084,47 @@
statement and dig down to see if certain parameters cause aberrant behavior.
You might want to check out L<DBIx::Class::QueryLog> as well.
+=head1 IMPROVING PERFORMANCE
+
+=over
+
+=item *
+
+Install L<Class::XSAccessor> to speed up L<Class::Accessor::Grouped>.
+
+=item *
+
+On Perl 5.8 install L<Class::C3::XS>.
+
+=item *
+
+L<prefetch|DBIx::Class::ResultSet/prefetch> relationships, where possible. See
+L</Using joins and prefetch>.
+
+=item *
+
+Use L<populate|DBIx::Class::ResultSet/populate> in void context to insert data
+when you don't need the resulting L<DBIx::Class::Row> objects, if possible, but
+see the caveats.
+
+When inserting many rows, for best results, populate a large number of rows at a
+time, but not so large that the table is locked for an unacceptably long time.
+
+If using L<create|DBIx::Class::ResultSet/create> instead, use a transaction and
+commit every C<X> rows; where C<X> gives you the best performance without
+locking the table for too long.
+
+=item *
+
+When selecting many rows, if you don't need full-blown L<DBIx::Class::Row>
+objects, consider using L<DBIx::Class::ResultClass::HashRefInflator>.
+
+=item *
+
+See also L</STARTUP SPEED> and L</MEMORY USAGE> in this document.
+
+=back
+
=head1 STARTUP SPEED
L<DBIx::Class|DBIx::Class> programs can have a significant startup delay
Modified: DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/ResultSet.pm
===================================================================
--- DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/ResultSet.pm 2010-05-05 09:50:25 UTC (rev 9306)
+++ DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/ResultSet.pm 2010-05-05 09:52:30 UTC (rev 9307)
@@ -1235,11 +1235,12 @@
my $rsrc = $self->result_source;
$attrs ||= $self->_resolved_attrs;
- my $tmp_attrs = { %$attrs };
+ # only take pieces we need for a simple count
+ my $tmp_attrs = { map
+ { $_ => $attrs->{$_} }
+ qw/ alias from where bind join /
+ };
- # take off any limits, record_filter is cdbi, and no point of ordering a count
- delete $tmp_attrs->{$_} for (qw/select as rows offset order_by record_filter/);
-
# overwrite the selector (supplied by the storage)
$tmp_attrs->{select} = $rsrc->storage->_count_select ($rsrc, $tmp_attrs);
$tmp_attrs->{as} = 'count';
@@ -1256,37 +1257,48 @@
my ($self, $attrs) = @_;
my $rsrc = $self->result_source;
- $attrs ||= $self->_resolved_attrs_copy;
+ $attrs ||= $self->_resolved_attrs;
- my $sub_attrs = { %$attrs };
+ my $sub_attrs = { map
+ { $_ => $attrs->{$_} }
+ qw/ alias from where bind join group_by having rows offset /
+ };
- # extra selectors do not go in the subquery and there is no point of ordering it
- delete $sub_attrs->{$_} for qw/collapse select _prefetch_select as order_by/;
-
# if we multi-prefetch we group_by primary keys only as this is what we would
# get out of the rs via ->next/->all. We *DO WANT* to clobber old group_by regardless
if ( keys %{$attrs->{collapse}} ) {
$sub_attrs->{group_by} = [ map { "$attrs->{alias}.$_" } ($rsrc->_pri_cols) ]
}
- $sub_attrs->{select} = $rsrc->storage->_subq_count_select ($rsrc, $attrs);
+ # Calculate subquery selector
+ if (my $g = $sub_attrs->{group_by}) {
+ # necessary as the group_by may refer to aliased functions
+ my $sel_index;
+ for my $sel (@{$attrs->{select}}) {
+ $sel_index->{$sel->{-as}} = $sel
+ if (ref $sel eq 'HASH' and $sel->{-as});
+ }
+
+ for my $g_part (@$g) {
+ push @{$sub_attrs->{select}}, $sel_index->{$g_part} || $g_part;
+ }
+ }
+ else {
+ my @pcols = map { "$attrs->{alias}.$_" } ($rsrc->primary_columns);
+ $sub_attrs->{select} = @pcols ? \@pcols : [ 1 ];
+ }
+
+
# this is so that the query can be simplified e.g.
# * ordering can be thrown away in things like Top limit
$sub_attrs->{-for_count_only} = 1;
- my $sub_rs = $rsrc->resultset_class->new ($rsrc, $sub_attrs);
-
- $attrs->{from} = [{
- -alias => 'count_subq',
- -source_handle => $rsrc->handle,
- count_subq => $sub_rs->as_query,
- }];
-
- # the subquery replaces this
- delete $attrs->{$_} for qw/where bind collapse group_by having having_bind rows offset/;
-
- return $self->_count_rs ($attrs);
+ return $rsrc->resultset_class
+ ->new ($rsrc, $sub_attrs)
+ ->as_subselect_rs
+ ->search ({}, { columns => { count => $rsrc->storage->_count_select ($rsrc, $attrs) } })
+ -> get_column ('count');
}
sub _bool {
@@ -2298,7 +2310,7 @@
producer => $producer,
name => 'harry',
}, {
- key => 'primary,
+ key => 'primary',
});
@@ -2668,15 +2680,18 @@
=cut
sub as_subselect_rs {
- my $self = shift;
+ my $self = shift;
- return $self->result_source->resultset->search( undef, {
- alias => $self->current_source_alias,
- from => [{
- $self->current_source_alias => $self->as_query,
- -alias => $self->current_source_alias,
- -source_handle => $self->result_source->handle,
- }]
+ my $attrs = $self->_resolved_attrs;
+
+ return $self->result_source->resultset->search( undef, {
+ from => [{
+ $attrs->{alias} => $self->as_query,
+ -alias => $attrs->{alias},
+ -source_handle => $self->result_source->handle,
+ }],
+ map { $_ => $attrs->{$_} } qw/select as alias/
+
});
}
@@ -2702,7 +2717,7 @@
# ->_resolve_join as otherwise they get lost - captainL
my $join = $self->_merge_attr( $attrs->{join}, $attrs->{prefetch} );
- delete @{$attrs}{qw/join prefetch collapse distinct select as columns +select +as +columns/};
+ delete @{$attrs}{qw/join prefetch collapse group_by distinct select as columns +select +as +columns/};
my $seen = { %{ (delete $attrs->{seen_join}) || {} } };
@@ -2728,7 +2743,7 @@
-alias => $attrs->{alias},
$attrs->{alias} => $rs_copy->as_query,
}];
- delete @{$attrs}{@force_subq_attrs, 'where'};
+ delete @{$attrs}{@force_subq_attrs, qw/where bind/};
$seen->{-relation_chain_depth} = 0;
}
elsif ($attrs->{from}) { #shallow copy suffices
Modified: DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/Row.pm
===================================================================
--- DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/Row.pm 2010-05-05 09:50:25 UTC (rev 9306)
+++ DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/Row.pm 2010-05-05 09:52:30 UTC (rev 9307)
@@ -106,10 +106,10 @@
sub __new_related_find_or_new_helper {
my ($self, $relname, $data) = @_;
+ my $rsrc = $self->result_source;
+
# create a mock-object so all new/set_column component overrides will run:
- my $rel_rs = $self->result_source
- ->related_source($relname)
- ->resultset;
+ my $rel_rs = $rsrc->related_source($relname)->resultset;
my $new_rel_obj = $rel_rs->new_result($data);
my $proc_data = { $new_rel_obj->get_columns };
@@ -117,7 +117,7 @@
MULTICREATE_DEBUG and warn "MC $self constructing $relname via new_result";
return $new_rel_obj;
}
- elsif ($self->result_source->_pk_depends_on($relname, $proc_data )) {
+ elsif ($rsrc->_pk_depends_on($relname, $proc_data )) {
if (! keys %$proc_data) {
# there is nothing to search for - blind create
MULTICREATE_DEBUG and warn "MC $self constructing default-insert $relname";
@@ -132,7 +132,7 @@
return $new_rel_obj;
}
else {
- my $us = $self->source_name;
+ my $us = $rsrc->source_name;
$self->throw_exception ("'$us' neither depends nor is depended on by '$relname', something is wrong...");
}
}
Modified: DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/SQLAHacks/OracleJoins.pm
===================================================================
--- DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/SQLAHacks/OracleJoins.pm 2010-05-05 09:50:25 UTC (rev 9306)
+++ DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/SQLAHacks/OracleJoins.pm 2010-05-05 09:52:30 UTC (rev 9307)
@@ -5,13 +5,13 @@
use Carp::Clan qw/^DBIx::Class|^SQL::Abstract/;
sub select {
- my ($self, $table, $fields, $where, $order, @rest) = @_;
+ my ($self, $table, $fields, $where, $rs_attrs, @rest) = @_;
if (ref($table) eq 'ARRAY') {
$where = $self->_oracle_joins($where, @{ $table });
}
- return $self->SUPER::select($table, $fields, $where, $order, @rest);
+ return $self->SUPER::select($table, $fields, $where, $rs_attrs, @rest);
}
sub _recurse_from {
Modified: DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/SQLAHacks/SQLite.pm
===================================================================
--- DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/SQLAHacks/SQLite.pm 2010-05-05 09:50:25 UTC (rev 9306)
+++ DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/SQLAHacks/SQLite.pm 2010-05-05 09:52:30 UTC (rev 9307)
@@ -6,12 +6,16 @@
#
# SQLite does not understand SELECT ... FOR UPDATE
-# Adjust SQL here instead
+# Disable it here
#
-sub select {
- my $self = shift;
- local $self->{_dbic_rs_attrs}{for} = undef;
- return $self->SUPER::select (@_);
+sub _parse_rs_attrs {
+ my ($self, $attrs) = @_;
+
+ return $self->SUPER::_parse_rs_attrs ($attrs)
+ if ref $attrs ne 'HASH';
+
+ local $attrs->{for};
+ return $self->SUPER::_parse_rs_attrs ($attrs);
}
1;
Modified: DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/SQLAHacks.pm
===================================================================
--- DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/SQLAHacks.pm 2010-05-05 09:50:25 UTC (rev 9306)
+++ DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/SQLAHacks.pm 2010-05-05 09:52:30 UTC (rev 9307)
@@ -46,32 +46,168 @@
$self;
}
+# !!! THIS IS ALSO HORRIFIC !!! /me ashamed
+#
+# generate inner/outer select lists for various limit dialects
+# which result in one or more subqueries (e.g. RNO, Top, RowNum)
+# Any non-root-table columns need to have their table qualifier
+# turned into a column alias (otherwise names in subqueries clash
+# and/or lose their source table)
+#
+# returns inner/outer strings of SQL QUOTED selectors with aliases
+# (to be used in whatever select statement), and an alias index hashref
+# of QUOTED SEL => QUOTED ALIAS pairs (to maybe be used for string-subst
+# higher up)
+#
+# If the $scan_order option is supplied, it signals that the limit dialect
+# needs to order the outer side of the query, which in turn means that the
+# inner select needs to bring out columns used in implicit (non-selected)
+# orders, and the order condition itself needs to be realiased to the proper
+# names in the outer query.
+#
+# In this case ($scan_order is true) we also return a hashref (order doesn't
+# matter) of QUOTED EXTRA-SEL => QUOTED ALIAS pairs, which is a list of extra
+# selectors that do *not* exist in the original select list
-# ANSI standard Limit/Offset implementation. DB2 and MSSQL use this
+sub _subqueried_limit_attrs {
+ my ($self, $rs_attrs, $scan_order) = @_;
+
+ croak 'Limit dialect implementation usable only in the context of DBIC (missing $rs_attrs)'
+ unless ref ($rs_attrs) eq 'HASH';
+
+ my ($re_sep, $re_alias) = map { quotemeta $_ } (
+ $self->name_sep || '.',
+ $rs_attrs->{alias},
+ );
+
+ # correlate select and as, build selection index
+ my (@sel, $in_sel_index);
+ for my $i (0 .. $#{$rs_attrs->{select}}) {
+
+ my $s = $rs_attrs->{select}[$i];
+ my $sql_sel = $self->_recurse_fields ($s);
+ my $sql_alias = (ref $s) eq 'HASH' ? $s->{-as} : undef;
+
+
+ push @sel, {
+ sql => $sql_sel,
+ unquoted_sql => do { local $self->{quote_char}; $self->_recurse_fields ($s) },
+ as =>
+ $sql_alias
+ ||
+ $rs_attrs->{as}[$i]
+ ||
+ croak "Select argument $i ($s) without corresponding 'as'"
+ ,
+ };
+
+ $in_sel_index->{$sql_sel}++;
+ $in_sel_index->{$self->_quote ($sql_alias)}++ if $sql_alias;
+
+# this *may* turn out to be necessary, not sure yet
+# my ($sql_unqualified_sel) = $sql_sel =~ / $re_sep (.+) $/x
+# if ! ref $s;
+# $in_sel_index->{$sql_unqualified_sel}++;
+ }
+
+
+ # re-alias and remove any name separators from aliases,
+ # unless we are dealing with the current source alias
+ # (which will transcend the subqueries as it is necessary
+ # for possible further chaining)
+ my (@in_sel, @out_sel, %renamed);
+ for my $node (@sel) {
+ if (List::Util::first { $_ =~ / (?<! $re_alias ) $re_sep /x } ($node->{as}, $node->{unquoted_sql}) ) {
+ $node->{as} =~ s/ $re_sep /__/xg;
+ my $quoted_as = $self->_quote($node->{as});
+ push @in_sel, sprintf '%s AS %s', $node->{sql}, $quoted_as;
+ push @out_sel, $quoted_as;
+ $renamed{$node->{sql}} = $quoted_as;
+ }
+ else {
+ push @in_sel, $node->{sql};
+ push @out_sel, $self->_quote ($node->{as});
+ }
+ }
+
+ my %extra_order_sel;
+ if ($scan_order) {
+ for my $chunk ($self->_order_by_chunks ($rs_attrs->{order_by})) {
+ # order with bind
+ $chunk = $chunk->[0] if (ref $chunk) eq 'ARRAY';
+ $chunk =~ s/\s+ (?: ASC|DESC ) \s* $//ix;
+
+ next if $in_sel_index->{$chunk};
+
+ $extra_order_sel{$chunk} ||= $self->_quote (
+ 'ORDER__BY__' . scalar keys %extra_order_sel
+ );
+ }
+ }
+ return (
+ (map { join (', ', @$_ ) } (
+ \@in_sel,
+ \@out_sel)
+ ),
+ \%renamed,
+ keys %extra_order_sel ? \%extra_order_sel : (),
+ );
+}
+
+# ANSI standard Limit/Offset implementation. DB2 and MSSQL >= 2005 use this
sub _RowNumberOver {
- my ($self, $sql, $order, $rows, $offset ) = @_;
+ my ($self, $sql, $rs_attrs, $rows, $offset ) = @_;
- # get the select to make the final amount of columns equal the original one
- my ($select) = $sql =~ /^ \s* SELECT \s+ (.+?) \s+ FROM/ix
+ # mangle the input sql as we will be replacing the selector
+ $sql =~ s/^ \s* SELECT \s+ .+? \s+ (?= \b FROM \b )//ix
or croak "Unrecognizable SELECT: $sql";
- # get the order_by only (or make up an order if none exists)
- my $order_by = $self->_order_by(
- (delete $order->{order_by}) || $self->_rno_default_order
+ # get selectors, and scan the order_by (if any)
+ my ($in_sel, $out_sel, $alias_map, $extra_order_sel) = $self->_subqueried_limit_attrs (
+ $rs_attrs, 'scan_order_by',
);
- # whatever is left of the order_by
- my $group_having = $self->_order_by($order);
+ # make up an order if none exists
+ my $requested_order = (delete $rs_attrs->{order_by}) || $self->_rno_default_order;
+ my $rno_ord = $self->_order_by ($requested_order);
- my $qalias = $self->_quote ($self->{_dbic_rs_attrs}{alias});
+ # this is the order supplement magic
+ my $mid_sel = $out_sel;
+ if ($extra_order_sel) {
+ for my $extra_col (sort
+ { $extra_order_sel->{$a} cmp $extra_order_sel->{$b} }
+ keys %$extra_order_sel
+ ) {
+ $in_sel .= sprintf (', %s AS %s',
+ $extra_col,
+ $extra_order_sel->{$extra_col},
+ );
+ $mid_sel .= ', ' . $extra_order_sel->{$extra_col};
+ }
+ }
+
+ # and this is order re-alias magic
+ for ($extra_order_sel, $alias_map) {
+ for my $col (keys %$_) {
+ my $re_col = quotemeta ($col);
+ $rno_ord =~ s/$re_col/$_->{$col}/;
+ }
+ }
+
+ # whatever is left of the order_by (only where is processed at this point)
+ my $group_having = $self->_parse_rs_attrs($rs_attrs);
+
+ my $qalias = $self->_quote ($rs_attrs->{alias});
+ my $idx_name = $self->_quote ('rno__row__index');
+
$sql = sprintf (<<EOS, $offset + 1, $offset + $rows, );
-SELECT $select FROM (
- SELECT $qalias.*, ROW_NUMBER() OVER($order_by ) AS rno__row__index FROM (
- ${sql}${group_having}
+SELECT $out_sel FROM (
+ SELECT $mid_sel, ROW_NUMBER() OVER( $rno_ord ) AS $idx_name FROM (
+ SELECT $in_sel ${sql}${group_having}
) $qalias
-) $qalias WHERE rno__row__index BETWEEN %d AND %d
+) $qalias WHERE $idx_name BETWEEN %d AND %d
EOS
@@ -86,7 +222,7 @@
# Informix specific limit, almost like LIMIT/OFFSET
sub _SkipFirst {
- my ($self, $sql, $order, $rows, $offset) = @_;
+ my ($self, $sql, $rs_attrs, $rows, $offset) = @_;
$sql =~ s/^ \s* SELECT \s+ //ix
or croak "Unrecognizable SELECT: $sql";
@@ -98,13 +234,13 @@
,
sprintf ('FIRST %d ', $rows),
$sql,
- $self->_order_by ($order),
+ $self->_parse_rs_attrs ($rs_attrs),
);
}
# Firebird specific limit, reverse of _SkipFirst for Informix
sub _FirstSkip {
- my ($self, $sql, $order, $rows, $offset) = @_;
+ my ($self, $sql, $rs_attrs, $rows, $offset) = @_;
$sql =~ s/^ \s* SELECT \s+ //ix
or croak "Unrecognizable SELECT: $sql";
@@ -116,203 +252,141 @@
: ''
,
$sql,
- $self->_order_by ($order),
+ $self->_parse_rs_attrs ($rs_attrs),
);
}
-# Crappy Top based Limit/Offset support. Legacy from MSSQL.
-sub _Top {
- my ( $self, $sql, $order, $rows, $offset ) = @_;
+# WhOracle limits
+sub _RowNum {
+ my ( $self, $sql, $rs_attrs, $rows, $offset ) = @_;
- # mangle the input sql so it can be properly aliased in the outer queries
- $sql =~ s/^ \s* SELECT \s+ (.+?) \s+ (?=FROM)//ix
+ # mangle the input sql as we will be replacing the selector
+ $sql =~ s/^ \s* SELECT \s+ .+? \s+ (?= \b FROM \b )//ix
or croak "Unrecognizable SELECT: $sql";
- my $sql_select = $1;
- my @sql_select = split (/\s*,\s*/, $sql_select);
- # we can't support subqueries (in fact MSSQL can't) - croak
- if (@sql_select != @{$self->{_dbic_rs_attrs}{select}}) {
- croak (sprintf (
- 'SQL SELECT did not parse cleanly - retrieved %d comma separated elements, while '
- . 'the resultset select attribure contains %d elements: %s',
- scalar @sql_select,
- scalar @{$self->{_dbic_rs_attrs}{select}},
- $sql_select,
- ));
- }
+ my ($insel, $outsel) = $self->_subqueried_limit_attrs ($rs_attrs);
- my $name_sep = $self->name_sep || '.';
- my $esc_name_sep = "\Q$name_sep\E";
- my $col_re = qr/ ^ (?: (.+) $esc_name_sep )? ([^$esc_name_sep]+) $ /x;
+ my $qalias = $self->_quote ($rs_attrs->{alias});
+ my $idx_name = $self->_quote ('rownum__index');
+ my $order_group_having = $self->_parse_rs_attrs($rs_attrs);
- my $rs_alias = $self->{_dbic_rs_attrs}{alias};
- my $quoted_rs_alias = $self->_quote ($rs_alias);
+ $sql = sprintf (<<EOS, $offset + 1, $offset + $rows, );
- # construct the new select lists, rename(alias) some columns if necessary
- my (@outer_select, @inner_select, %seen_names, %col_aliases, %outer_col_aliases);
+SELECT $outsel FROM (
+ SELECT $outsel, ROWNUM $idx_name FROM (
+ SELECT $insel ${sql}${order_group_having}
+ ) $qalias
+) $qalias WHERE $idx_name BETWEEN %d AND %d
- for (@{$self->{_dbic_rs_attrs}{select}}) {
- next if ref $_;
- my ($table, $orig_colname) = ( $_ =~ $col_re );
- next unless $table;
- $seen_names{$orig_colname}++;
- }
+EOS
- for my $i (0 .. $#sql_select) {
+ $sql =~ s/\s*\n\s*/ /g; # easier to read in the debugger
+ return $sql;
+}
- my $colsel_arg = $self->{_dbic_rs_attrs}{select}[$i];
- my $colsel_sql = $sql_select[$i];
+# Crappy Top based Limit/Offset support. Legacy for MSSQL < 2005
+sub _Top {
+ my ( $self, $sql, $rs_attrs, $rows, $offset ) = @_;
- # this may or may not work (in case of a scalarref or something)
- my ($table, $orig_colname) = ( $colsel_arg =~ $col_re );
+ # mangle the input sql as we will be replacing the selector
+ $sql =~ s/^ \s* SELECT \s+ .+? \s+ (?= \b FROM \b )//ix
+ or croak "Unrecognizable SELECT: $sql";
- my $quoted_alias;
- # do not attempt to understand non-scalar selects - alias numerically
- if (ref $colsel_arg) {
- $quoted_alias = $self->_quote ('column_' . (@inner_select + 1) );
- }
- # column name seen more than once - alias it
- elsif ($orig_colname &&
- ($seen_names{$orig_colname} && $seen_names{$orig_colname} > 1) ) {
- $quoted_alias = $self->_quote ("${table}__${orig_colname}");
- }
+ # get selectors
+ my ($in_sel, $out_sel, $alias_map, $extra_order_sel)
+ = $self->_subqueried_limit_attrs ($rs_attrs, 'outer_order_by');
- # we did rename - make a record and adjust
- if ($quoted_alias) {
- # alias inner
- push @inner_select, "$colsel_sql AS $quoted_alias";
+ my $requested_order = delete $rs_attrs->{order_by};
- # push alias to outer
- push @outer_select, $quoted_alias;
+ my $order_by_requested = $self->_order_by ($requested_order);
- # Any aliasing accumulated here will be considered
- # both for inner and outer adjustments of ORDER BY
- $self->__record_alias (
- \%col_aliases,
- $quoted_alias,
- $colsel_arg,
- $table ? $orig_colname : undef,
- );
- }
+ # make up an order unless supplied
+ my $inner_order = ($order_by_requested
+ ? $requested_order
+ : [ map
+ { join ('', $rs_attrs->{alias}, $self->{name_sep}||'.', $_ ) }
+ ( $rs_attrs->{_rsroot_source_handle}->resolve->_pri_cols )
+ ]
+ );
- # otherwise just leave things intact inside, and use the abbreviated one outside
- # (as we do not have table names anymore)
- else {
- push @inner_select, $colsel_sql;
+ my ($order_by_inner, $order_by_reversed);
- my $outer_quoted = $self->_quote ($orig_colname); # it was not a duplicate so should just work
- push @outer_select, $outer_quoted;
- $self->__record_alias (
- \%outer_col_aliases,
- $outer_quoted,
- $colsel_arg,
- $table ? $orig_colname : undef,
- );
- }
- }
+ # localise as we already have all the bind values we need
+ {
+ local $self->{order_bind};
+ $order_by_inner = $self->_order_by ($inner_order);
- my $outer_select = join (', ', @outer_select );
- my $inner_select = join (', ', @inner_select );
+ my @out_chunks;
+ for my $ch ($self->_order_by_chunks ($inner_order)) {
+ $ch = $ch->[0] if ref $ch eq 'ARRAY';
- %outer_col_aliases = (%outer_col_aliases, %col_aliases);
+ $ch =~ s/\s+ ( ASC|DESC ) \s* $//ix;
+ my $dir = uc ($1||'ASC');
- # deal with order
- croak '$order supplied to SQLAHacks limit emulators must be a hash'
- if (ref $order ne 'HASH');
+ push @out_chunks, \join (' ', $ch, $dir eq 'ASC' ? 'DESC' : 'ASC' );
+ }
- $order = { %$order }; #copy
-
- my $req_order = $order->{order_by};
-
- # examine normalized version, collapses nesting
- my $limit_order;
- if (scalar $self->_order_by_chunks ($req_order)) {
- $limit_order = $req_order;
+ $order_by_reversed = $self->_order_by (\@out_chunks);
}
- else {
- $limit_order = [ map
- { join ('', $rs_alias, $name_sep, $_ ) }
- ( $self->{_dbic_rs_attrs}{_source_handle}->resolve->primary_columns )
- ];
- }
- my ( $order_by_inner, $order_by_outer ) = $self->_order_directions($limit_order);
- my $order_by_requested = $self->_order_by ($req_order);
+ # this is the order supplement magic
+ my $mid_sel = $out_sel;
+ if ($extra_order_sel) {
+ for my $extra_col (sort
+ { $extra_order_sel->{$a} cmp $extra_order_sel->{$b} }
+ keys %$extra_order_sel
+ ) {
+ $in_sel .= sprintf (', %s AS %s',
+ $extra_col,
+ $extra_order_sel->{$extra_col},
+ );
- # generate the rest
- delete $order->{order_by};
- my $grpby_having = $self->_order_by ($order);
-
- # short circuit for counts - the ordering complexity is needless
- if ($self->{_dbic_rs_attrs}{-for_count_only}) {
- return "SELECT TOP $rows $inner_select $sql $grpby_having $order_by_outer";
+ $mid_sel .= ', ' . $extra_order_sel->{$extra_col};
+ }
}
- # we can't really adjust the order_by columns, as introspection is lacking
- # resort to simple substitution
- for my $col (keys %outer_col_aliases) {
- for ($order_by_requested, $order_by_outer) {
- $_ =~ s/\s+$col\s+/ $outer_col_aliases{$col} /g;
+ # and this is order re-alias magic
+ for my $map ($extra_order_sel, $alias_map) {
+ for my $col (keys %$map) {
+ my $re_col = quotemeta ($col);
+ $_ =~ s/$re_col/$map->{$col}/
+ for ($order_by_reversed, $order_by_requested);
}
}
- for my $col (keys %col_aliases) {
- $order_by_inner =~ s/\s+$col\s+/ $col_aliases{$col} /g;
- }
+ # generate the rest of the sql
+ my $grpby_having = $self->_parse_rs_attrs ($rs_attrs);
- my $inner_lim = $rows + $offset;
+ my $quoted_rs_alias = $self->_quote ($rs_attrs->{alias});
- $sql = "SELECT TOP $inner_lim $inner_select $sql $grpby_having $order_by_inner";
+ $sql = sprintf ('SELECT TOP %d %s %s %s %s',
+ $rows + ($offset||0),
+ $in_sel,
+ $sql,
+ $grpby_having,
+ $order_by_inner,
+ );
- if ($offset) {
- $sql = <<"SQL";
+ $sql = sprintf ('SELECT TOP %d %s FROM ( %s ) %s %s',
+ $rows,
+ $mid_sel,
+ $sql,
+ $quoted_rs_alias,
+ $order_by_reversed,
+ ) if $offset;
- SELECT TOP $rows $outer_select FROM
- (
- $sql
- ) $quoted_rs_alias
- $order_by_outer
-SQL
+ $sql = sprintf ('SELECT TOP %d %s FROM ( %s ) %s %s',
+ $rows,
+ $out_sel,
+ $sql,
+ $quoted_rs_alias,
+ $order_by_requested,
+ ) if ( ($offset && $order_by_requested) || ($mid_sel ne $out_sel) );
- }
-
- if ($order_by_requested) {
- $sql = <<"SQL";
-
- SELECT $outer_select FROM
- ( $sql ) $quoted_rs_alias
- $order_by_requested
-SQL
-
- }
-
- $sql =~ s/\s*\n\s*/ /g; # parsing out multiline statements is harder than a single line
return $sql;
}
-# action at a distance to shorten Top code above
-sub __record_alias {
- my ($self, $register, $alias, $fqcol, $col) = @_;
- # record qualified name
- $register->{$fqcol} = $alias;
- $register->{$self->_quote($fqcol)} = $alias;
-
- return unless $col;
-
- # record unqualified name, undef (no adjustment) if a duplicate is found
- if (exists $register->{$col}) {
- $register->{$col} = undef;
- }
- else {
- $register->{$col} = $alias;
- }
-
- $register->{$self->_quote($col)} = $register->{$col};
-}
-
-
-
# While we're at it, this should make LIMIT queries more efficient,
# without digging into things too deeply
sub _find_syntax {
@@ -320,14 +394,10 @@
return $self->{_cached_syntax} ||= $self->SUPER::_find_syntax($syntax);
}
-my $for_syntax = {
- update => 'FOR UPDATE',
- shared => 'FOR SHARE',
-};
# Quotes table names, handles "limit" dialects (e.g. where rownum between x and
-# y), supports SELECT ... FOR UPDATE and SELECT ... FOR SHARE.
+# y)
sub select {
- my ($self, $table, $fields, $where, $order, @rest) = @_;
+ my ($self, $table, $fields, $where, $rs_attrs, @rest) = @_;
$self->{"${_}_bind"} = [] for (qw/having from order/);
@@ -340,13 +410,10 @@
@rest = (-1) unless defined $rest[0];
croak "LIMIT 0 Does Not Compute" if $rest[0] == 0;
# and anyway, SQL::Abstract::Limit will cause a barf if we don't first
+
my ($sql, @where_bind) = $self->SUPER::select(
- $table, $self->_recurse_fields($fields), $where, $order, @rest
+ $table, $self->_recurse_fields($fields), $where, $rs_attrs, @rest
);
- if (my $for = delete $self->{_dbic_rs_attrs}{for}) {
- $sql .= " $for_syntax->{$for}" if $for_syntax->{$for};
- }
-
return wantarray ? ($sql, @{$self->{from_bind}}, @where_bind, @{$self->{having_bind}}, @{$self->{order_bind}} ) : $sql;
}
@@ -390,35 +457,36 @@
sub _emulate_limit {
my $self = shift;
+ # my ( $syntax, $sql, $order, $rows, $offset ) = @_;
+
if ($_[3] == -1) {
- return $_[1].$self->_order_by($_[2]);
+ return $_[1] . $self->_parse_rs_attrs($_[2]);
} else {
return $self->SUPER::_emulate_limit(@_);
}
}
sub _recurse_fields {
- my ($self, $fields, $params) = @_;
+ my ($self, $fields) = @_;
my $ref = ref $fields;
return $self->_quote($fields) unless $ref;
return $$fields if $ref eq 'SCALAR';
if ($ref eq 'ARRAY') {
- return join(', ', map {
- $self->_recurse_fields($_)
- .(exists $self->{rownum_hack_count} && !($params && $params->{no_rownum_hack})
- ? ' AS col'.$self->{rownum_hack_count}++
- : '')
- } @$fields);
+ return join(', ', map { $self->_recurse_fields($_) } @$fields);
}
elsif ($ref eq 'HASH') {
- my %hash = %$fields;
+ my %hash = %$fields; # shallow copy
my $as = delete $hash{-as}; # if supplied
- my ($func, $args) = each %hash;
- delete $hash{$func};
+ my ($func, $args, @toomany) = %hash;
+ # there should be only one pair
+ if (@toomany) {
+ croak "Malformed select argument - too many keys in hash: " . join (',', keys %$fields );
+ }
+
if (lc ($func) eq 'distinct' && ref $args eq 'ARRAY' && @$args > 1) {
croak (
'The select => { distinct => ... } syntax is not supported for multiple columns.'
@@ -435,11 +503,6 @@
: ''
);
- # there should be nothing left
- if (keys %hash) {
- croak "Malformed select argument - too many keys in hash: " . join (',', keys %$fields );
- }
-
return $select;
}
# Is the second check absolutely necessary?
@@ -451,34 +514,55 @@
}
}
-sub _order_by {
+my $for_syntax = {
+ update => 'FOR UPDATE',
+ shared => 'FOR SHARE',
+};
+
+# this used to be a part of _order_by but is broken out for clarity.
+# What we have been doing forever is hijacking the $order arg of
+# SQLA::select to pass in arbitrary pieces of data (first the group_by,
+# then pretty much the entire resultset attr-hash, as more and more
+# things in the SQLA space need to have more info about the $rs they
+# create SQL for. The alternative would be to keep expanding the
+# signature of _select with more and more positional parameters, which
+# is just gross. All hail SQLA2!
+sub _parse_rs_attrs {
my ($self, $arg) = @_;
- if (ref $arg eq 'HASH' and keys %$arg and not grep { $_ =~ /^-(?:desc|asc)/i } keys %$arg ) {
+ my $sql = '';
- my $ret = '';
+ if (my $g = $self->_recurse_fields($arg->{group_by}, { no_rownum_hack => 1 }) ) {
+ $sql .= $self->_sqlcase(' group by ') . $g;
+ }
- if (my $g = $self->_recurse_fields($arg->{group_by}, { no_rownum_hack => 1 }) ) {
- $ret = $self->_sqlcase(' group by ') . $g;
- }
+ if (defined $arg->{having}) {
+ my ($frag, @bind) = $self->_recurse_where($arg->{having});
+ push(@{$self->{having_bind}}, @bind);
+ $sql .= $self->_sqlcase(' having ') . $frag;
+ }
- if (defined $arg->{having}) {
- my ($frag, @bind) = $self->_recurse_where($arg->{having});
- push(@{$self->{having_bind}}, @bind);
- $ret .= $self->_sqlcase(' having ').$frag;
- }
+ if (defined $arg->{order_by}) {
+ $sql .= $self->_order_by ($arg->{order_by});
+ }
- if (defined $arg->{order_by}) {
- my ($frag, @bind) = $self->SUPER::_order_by($arg->{order_by});
- push(@{$self->{order_bind}}, @bind);
- $ret .= $frag;
- }
+ if (my $for = $arg->{for}) {
+ $sql .= " $for_syntax->{$for}" if $for_syntax->{$for};
+ }
- return $ret;
+ return $sql;
+}
+
+sub _order_by {
+ my ($self, $arg) = @_;
+
+ # check that we are not called in legacy mode (order_by as 4th argument)
+ if (ref $arg eq 'HASH' and not grep { $_ =~ /^-(?:desc|asc)/i } keys %$arg ) {
+ return $self->_parse_rs_attrs ($arg);
}
else {
my ($sql, @bind) = $self->SUPER::_order_by ($arg);
- push(@{$self->{order_bind}}, @bind);
+ push @{$self->{order_bind}}, @bind;
return $sql;
}
}
@@ -596,26 +680,12 @@
}
}
-sub _quote {
- my ($self, $label) = @_;
- return '' unless defined $label;
- return $$label if ref($label) eq 'SCALAR';
- return "*" if $label eq '*';
- return $label unless $self->{quote_char};
- if(ref $self->{quote_char} eq "ARRAY"){
- return $self->{quote_char}->[0] . $label . $self->{quote_char}->[1]
- if !defined $self->{name_sep};
- my $sep = $self->{name_sep};
- return join($self->{name_sep},
- map { $self->{quote_char}->[0] . $_ . $self->{quote_char}->[1] }
- split(/\Q$sep\E/,$label));
- }
- return $self->SUPER::_quote($label);
-}
-
sub limit_dialect {
my $self = shift;
- $self->{limit_dialect} = shift if @_;
+ if (@_) {
+ $self->{limit_dialect} = shift;
+ undef $self->{_cached_syntax};
+ }
return $self->{limit_dialect};
}
Modified: DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/Schema.pm
===================================================================
--- DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/Schema.pm 2010-05-05 09:50:25 UTC (rev 9306)
+++ DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/Schema.pm 2010-05-05 09:52:30 UTC (rev 9307)
@@ -271,6 +271,10 @@
}
elsif($rs_class ||= $default_resultset_class) {
$class->ensure_class_loaded($rs_class);
+ if(!$rs_class->isa("DBIx::Class::ResultSet")) {
+ carp "load_namespaces found ResultSet class $rs_class that does not subclass DBIx::Class::ResultSet";
+ }
+
$class->_ns_get_rsrc_instance ($result_class)->resultset_class($rs_class);
}
Modified: DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/Storage/DBI/MSSQL.pm
===================================================================
--- DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/Storage/DBI/MSSQL.pm 2010-05-05 09:50:25 UTC (rev 9306)
+++ DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/Storage/DBI/MSSQL.pm 2010-05-05 09:52:30 UTC (rev 9307)
@@ -158,7 +158,11 @@
# see if this is an ordered subquery
my $attrs = $_[3];
- if ( scalar $self->_parse_order_by ($attrs->{order_by}) ) {
+ if (
+ $sql !~ /^ \s* SELECT \s+ TOP \s+ \d+ \s+ /xi
+ &&
+ scalar $self->_parse_order_by ($attrs->{order_by})
+ ) {
$self->throw_exception(
'An ordered subselect encountered - this is not safe! Please see "Ordered Subselects" in DBIx::Class::Storage::DBI::MSSQL
') unless $attrs->{unsafe_subselect_ok};
@@ -190,13 +194,9 @@
$self->_get_dbh->do("ROLLBACK TRANSACTION $name");
}
-sub build_datetime_parser {
- my $self = shift;
- my $type = "DateTime::Format::Strptime";
- eval "use ${type}";
- $self->throw_exception("Couldn't load ${type}: $@") if $@;
- return $type->new( pattern => '%Y-%m-%d %H:%M:%S' ); # %F %T
-}
+sub datetime_parser_type {
+ 'DBIx::Class::Storage::DBI::MSSQL::DateTime::Format'
+}
sub sqlt_type { 'SQLServer' }
@@ -235,6 +235,54 @@
return $@ ? 0 : 1;
}
+package # hide from PAUSE
+ DBIx::Class::Storage::DBI::MSSQL::DateTime::Format;
+
+my $datetime_format = '%Y-%m-%d %H:%M:%S.%3N'; # %F %T
+my $smalldatetime_format = '%Y-%m-%d %H:%M:%S';
+
+my ($datetime_parser, $smalldatetime_parser);
+
+sub parse_datetime {
+ shift;
+ require DateTime::Format::Strptime;
+ $datetime_parser ||= DateTime::Format::Strptime->new(
+ pattern => $datetime_format,
+ on_error => 'croak',
+ );
+ return $datetime_parser->parse_datetime(shift);
+}
+
+sub format_datetime {
+ shift;
+ require DateTime::Format::Strptime;
+ $datetime_parser ||= DateTime::Format::Strptime->new(
+ pattern => $datetime_format,
+ on_error => 'croak',
+ );
+ return $datetime_parser->format_datetime(shift);
+}
+
+sub parse_smalldatetime {
+ shift;
+ require DateTime::Format::Strptime;
+ $smalldatetime_parser ||= DateTime::Format::Strptime->new(
+ pattern => $smalldatetime_format,
+ on_error => 'croak',
+ );
+ return $smalldatetime_parser->parse_datetime(shift);
+}
+
+sub format_smalldatetime {
+ shift;
+ require DateTime::Format::Strptime;
+ $smalldatetime_parser ||= DateTime::Format::Strptime->new(
+ pattern => $smalldatetime_format,
+ on_error => 'croak',
+ );
+ return $smalldatetime_parser->format_datetime(shift);
+}
+
1;
=head1 NAME
Modified: DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/Storage/DBI/Replicated.pm
===================================================================
--- DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/Storage/DBI/Replicated.pm 2010-05-05 09:50:25 UTC (rev 9306)
+++ DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/Storage/DBI/Replicated.pm 2010-05-05 09:52:30 UTC (rev 9307)
@@ -308,7 +308,6 @@
is_datatype_numeric
_supports_insert_returning
_count_select
- _subq_count_select
_subq_update_delete
svp_rollback
svp_begin
@@ -343,7 +342,6 @@
_dbh_commit
_execute_array
_placeholders_supported
- _verify_pid
savepoints
_sqlt_minimum_version
_sql_maker_opts
@@ -371,7 +369,19 @@
/],
);
+my @unimplemented = qw(
+ _arm_global_destructor
+ _preserve_foreign_dbh
+ _verify_pid
+ _verify_tid
+);
+for my $method (@unimplemented) {
+ __PACKAGE__->meta->add_method($method, sub {
+ croak "$method must not be called on ".(blessed shift).' objects';
+ });
+}
+
has _master_connect_info_opts =>
(is => 'rw', isa => HashRef, default => sub { {} });
Modified: DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/Storage/DBI/Sybase/Microsoft_SQL_Server.pm
===================================================================
--- DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/Storage/DBI/Sybase/Microsoft_SQL_Server.pm 2010-05-05 09:50:25 UTC (rev 9306)
+++ DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/Storage/DBI/Sybase/Microsoft_SQL_Server.pm 2010-05-05 09:52:30 UTC (rev 9307)
@@ -8,6 +8,7 @@
DBIx::Class::Storage::DBI::MSSQL
/;
use mro 'c3';
+use Carp::Clan qw/^DBIx::Class/;
sub _rebless {
my $self = shift;
@@ -70,6 +71,73 @@
}
}
+=head2 connect_call_datetime_setup
+
+Used as:
+
+ on_connect_call => 'datetime_setup'
+
+In L<connect_info|DBIx::Class::Storage::DBI/connect_info> to set:
+
+ $dbh->syb_date_fmt('ISO_strict'); # output fmt: 2004-08-21T14:36:48.080Z
+
+On connection for use with L<DBIx::Class::InflateColumn::DateTime>
+
+This works for both C<DATETIME> and C<SMALLDATETIME> columns, although
+C<SMALLDATETIME> columns only have minute precision.
+
+=cut
+
+{
+ my $old_dbd_warned = 0;
+
+ sub connect_call_datetime_setup {
+ my $self = shift;
+ my $dbh = $self->_get_dbh;
+
+ if ($dbh->can('syb_date_fmt')) {
+ # amazingly, this works with FreeTDS
+ $dbh->syb_date_fmt('ISO_strict');
+ } elsif (not $old_dbd_warned) {
+ carp "Your DBD::Sybase is too old to support ".
+ "DBIx::Class::InflateColumn::DateTime, please upgrade!";
+ $old_dbd_warned = 1;
+ }
+ }
+}
+
+sub datetime_parser_type {
+ 'DBIx::Class::Storage::DBI::Sybase::Microsoft_SQL_Server::DateTime::Format'
+}
+
+package # hide from PAUSE
+ DBIx::Class::Storage::DBI::Sybase::Microsoft_SQL_Server::DateTime::Format;
+
+my $datetime_parse_format = '%Y-%m-%dT%H:%M:%S.%3NZ';
+my $datetime_format_format = '%Y-%m-%d %H:%M:%S.%3N'; # %F %T
+
+my ($datetime_parser, $datetime_formatter);
+
+sub parse_datetime {
+ shift;
+ require DateTime::Format::Strptime;
+ $datetime_parser ||= DateTime::Format::Strptime->new(
+ pattern => $datetime_parse_format,
+ on_error => 'croak',
+ );
+ return $datetime_parser->parse_datetime(shift);
+}
+
+sub format_datetime {
+ shift;
+ require DateTime::Format::Strptime;
+ $datetime_formatter ||= DateTime::Format::Strptime->new(
+ pattern => $datetime_format_format,
+ on_error => 'croak',
+ );
+ return $datetime_formatter->format_datetime(shift);
+}
+
1;
=head1 NAME
Modified: DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/Storage/DBI.pm
===================================================================
--- DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/Storage/DBI.pm 2010-05-05 09:50:25 UTC (rev 9306)
+++ DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/Storage/DBI.pm 2010-05-05 09:52:30 UTC (rev 9307)
@@ -16,6 +16,8 @@
use Data::Dumper::Concise();
use Sub::Name ();
+use File::Path ();
+
__PACKAGE__->mk_group_accessors('simple' =>
qw/_connect_info _dbi_connect_info _dbh _sql_maker _sql_maker_opts _conn_pid
_conn_tid transaction_depth _dbh_autocommit _driver_determined savepoints
@@ -40,7 +42,6 @@
/);
__PACKAGE__->sql_maker_class('DBIx::Class::SQLAHacks');
-
# Each of these methods need _determine_driver called before itself
# in order to function reliably. This is a purely DRY optimization
my @rdbms_specific_methods = qw/
@@ -115,9 +116,102 @@
$new->{_in_dbh_do} = 0;
$new->{_dbh_gen} = 0;
+ # read below to see what this does
+ $new->_arm_global_destructor;
+
$new;
}
+# This is a hack to work around perl shooting stuff in random
+# order on exit(). If we do not walk the remaining storage
+# objects in an END block, there is a *small but real* chance
+# of a fork()ed child to kill the parent's shared DBI handle,
+# *before perl reaches the DESTROY in this package*
+# Yes, it is ugly and effective.
+{
+ my %seek_and_destroy;
+
+ sub _arm_global_destructor {
+ my $self = shift;
+ my $key = Scalar::Util::refaddr ($self);
+ $seek_and_destroy{$key} = $self;
+ Scalar::Util::weaken ($seek_and_destroy{$key});
+ }
+
+ END {
+ local $?; # just in case the DBI destructor changes it somehow
+
+ # destroy just the object if not native to this process/thread
+ $_->_preserve_foreign_dbh for (grep
+ { defined $_ }
+ values %seek_and_destroy
+ );
+ }
+}
+
+sub DESTROY {
+ my $self = shift;
+
+ # destroy just the object if not native to this process/thread
+ $self->_preserve_foreign_dbh;
+
+ # some databases need this to stop spewing warnings
+ if (my $dbh = $self->_dbh) {
+ local $@;
+ eval {
+ %{ $dbh->{CachedKids} } = ();
+ $dbh->disconnect;
+ };
+ }
+
+ $self->_dbh(undef);
+}
+
+sub _preserve_foreign_dbh {
+ my $self = shift;
+
+ return unless $self->_dbh;
+
+ $self->_verify_tid;
+
+ return unless $self->_dbh;
+
+ $self->_verify_pid;
+
+}
+
+# handle pid changes correctly - do not destroy parent's connection
+sub _verify_pid {
+ my $self = shift;
+
+ return if ( defined $self->_conn_pid and $self->_conn_pid == $$ );
+
+ $self->_dbh->{InactiveDestroy} = 1;
+ $self->_dbh(undef);
+ $self->{_dbh_gen}++;
+
+ return;
+}
+
+# very similar to above, but seems to FAIL if I set InactiveDestroy
+sub _verify_tid {
+ my $self = shift;
+
+ if ( ! defined $self->_conn_tid ) {
+ return; # no threads
+ }
+ elsif ( $self->_conn_tid == threads->tid ) {
+ return; # same thread
+ }
+
+ #$self->_dbh->{InactiveDestroy} = 1; # why does t/51threads.t fail...?
+ $self->_dbh(undef);
+ $self->{_dbh_gen}++;
+
+ return;
+}
+
+
=head2 connect_info
This method is normally called by L<DBIx::Class::Schema/connection>, which
@@ -803,19 +897,11 @@
sub _seems_connected {
my $self = shift;
+ $self->_preserve_foreign_dbh;
+
my $dbh = $self->_dbh
or return 0;
- if(defined $self->_conn_tid && $self->_conn_tid != threads->tid) {
- $self->_dbh(undef);
- $self->{_dbh_gen}++;
- return 0;
- }
- else {
- $self->_verify_pid;
- return 0 if !$self->_dbh;
- }
-
return $dbh->FETCH('Active');
}
@@ -827,20 +913,6 @@
return $dbh->ping;
}
-# handle pid changes correctly
-# NOTE: assumes $self->_dbh is a valid $dbh
-sub _verify_pid {
- my ($self) = @_;
-
- return if defined $self->_conn_pid && $self->_conn_pid == $$;
-
- $self->_dbh->{InactiveDestroy} = 1;
- $self->_dbh(undef);
- $self->{_dbh_gen}++;
-
- return;
-}
-
sub ensure_connected {
my ($self) = @_;
@@ -873,7 +945,7 @@
# this is the internal "get dbh or connect (don't check)" method
sub _get_dbh {
my $self = shift;
- $self->_verify_pid if $self->_dbh;
+ $self->_preserve_foreign_dbh;
$self->_populate_dbh unless $self->_dbh;
return $self->_dbh;
}
@@ -998,8 +1070,9 @@
# try to use dsn to not require being connected, the driver may still
# force a connection in _rebless to determine version
# (dsn may not be supplied at all if all we do is make a mock-schema)
- my $dsn = $self->_dbi_connect_info->[0] || '';
+ my $dsn = $self->_dbi_connect_info->[0] || $ENV{DBI_DSN} || '';
($driver) = $dsn =~ /dbi:([^:]+):/i;
+ $driver ||= $ENV{DBI_DRIVER};
}
}
@@ -1769,31 +1842,18 @@
sub _select {
my $self = shift;
-
- # localization is neccessary as
- # 1) there is no infrastructure to pass this around before SQLA2
- # 2) _select_args sets it and _prep_for_execute consumes it
- my $sql_maker = $self->sql_maker;
- local $sql_maker->{_dbic_rs_attrs};
-
- return $self->_execute($self->_select_args(@_));
+ $self->_execute($self->_select_args(@_));
}
sub _select_args_to_query {
my $self = shift;
- # localization is neccessary as
- # 1) there is no infrastructure to pass this around before SQLA2
- # 2) _select_args sets it and _prep_for_execute consumes it
- my $sql_maker = $self->sql_maker;
- local $sql_maker->{_dbic_rs_attrs};
-
- # my ($op, $bind, $ident, $bind_attrs, $select, $cond, $order, $rows, $offset)
+ # my ($op, $bind, $ident, $bind_attrs, $select, $cond, $rs_attrs, $rows, $offset)
# = $self->_select_args($ident, $select, $cond, $attrs);
my ($op, $bind, $ident, $bind_attrs, @args) =
$self->_select_args(@_);
- # my ($sql, $prepared_bind) = $self->_prep_for_execute($op, $bind, $ident, [ $select, $cond, $order, $rows, $offset ]);
+ # my ($sql, $prepared_bind) = $self->_prep_for_execute($op, $bind, $ident, [ $select, $cond, $rs_attrs, $rows, $offset ]);
my ($sql, $prepared_bind) = $self->_prep_for_execute($op, $bind, $ident, \@args);
$prepared_bind ||= [];
@@ -1806,16 +1866,16 @@
sub _select_args {
my ($self, $ident, $select, $where, $attrs) = @_;
+ my $sql_maker = $self->sql_maker;
my ($alias2source, $rs_alias) = $self->_resolve_ident_sources ($ident);
- my $sql_maker = $self->sql_maker;
- $sql_maker->{_dbic_rs_attrs} = {
+ $attrs = {
%$attrs,
select => $select,
from => $ident,
where => $where,
$rs_alias && $alias2source->{$rs_alias}
- ? ( _source_handle => $alias2source->{$rs_alias}->handle )
+ ? ( _rsroot_source_handle => $alias2source->{$rs_alias}->handle )
: ()
,
};
@@ -1871,18 +1931,7 @@
#limited has_many
( $attrs->{rows} && keys %{$attrs->{collapse}} )
||
- # limited prefetch with RNO subqueries
- (
- $attrs->{rows}
- &&
- $sql_maker->limit_dialect eq 'RowNumberOver'
- &&
- $attrs->{_prefetch_select}
- &&
- @{$attrs->{_prefetch_select}}
- )
- ||
- # grouped prefetch
+ # grouped prefetch (to satisfy group_by == select)
( $attrs->{group_by}
&&
@{$attrs->{group_by}}
@@ -1895,39 +1944,6 @@
($ident, $select, $where, $attrs)
= $self->_adjust_select_args_for_complex_prefetch ($ident, $select, $where, $attrs);
}
-
- elsif (
- ($attrs->{rows} || $attrs->{offset})
- &&
- $sql_maker->limit_dialect eq 'RowNumberOver'
- &&
- (ref $ident eq 'ARRAY' && @$ident > 1) # indicates a join
- &&
- scalar $self->_parse_order_by ($attrs->{order_by})
- ) {
- # the RNO limit dialect above mangles the SQL such that the join gets lost
- # wrap a subquery here
-
- push @limit, delete @{$attrs}{qw/rows offset/};
-
- my $subq = $self->_select_args_to_query (
- $ident,
- $select,
- $where,
- $attrs,
- );
-
- $ident = {
- -alias => $attrs->{alias},
- -source_handle => $ident->[0]{-source_handle},
- $attrs->{alias} => $subq,
- };
-
- # all part of the subquery now
- delete @{$attrs}{qw/order_by group_by having/};
- $where = undef;
- }
-
elsif (! $attrs->{software_limit} ) {
push @limit, $attrs->{rows}, $attrs->{offset};
}
@@ -1945,12 +1961,7 @@
# invoked, and that's just bad...
###
- my $order = { map
- { $attrs->{$_} ? ( $_ => $attrs->{$_} ) : () }
- (qw/order_by group_by having/ )
- };
-
- return ('select', $attrs->{bind}, $ident, $bind_attrs, $select, $where, $order, @limit);
+ return ('select', $attrs->{bind}, $ident, $bind_attrs, $select, $where, $attrs, @limit);
}
# Returns a counting SELECT for a simple count
@@ -1962,47 +1973,7 @@
return { count => '*' };
}
-# Returns a SELECT which will end up in the subselect
-# There may or may not be a group_by, as the subquery
-# might have been called to accomodate a limit
-#
-# Most databases would be happy with whatever ends up
-# here, but some choke in various ways.
-#
-sub _subq_count_select {
- my ($self, $source, $rs_attrs) = @_;
- if (my $groupby = $rs_attrs->{group_by}) {
-
- my $avail_columns = $self->_resolve_column_info ($rs_attrs->{from});
-
- my $sel_index;
- for my $sel (@{$rs_attrs->{select}}) {
- if (ref $sel eq 'HASH' and $sel->{-as}) {
- $sel_index->{$sel->{-as}} = $sel;
- }
- }
-
- my @selection;
- for my $g_part (@$groupby) {
- if (ref $g_part or $avail_columns->{$g_part}) {
- push @selection, $g_part;
- }
- elsif ($sel_index->{$g_part}) {
- push @selection, $sel_index->{$g_part};
- }
- else {
- $self->throw_exception ("group_by criteria '$g_part' not contained within current resultset source(s)");
- }
- }
-
- return \@selection;
- }
-
- my @pcols = map { join '.', $rs_attrs->{alias}, $_ } ($source->primary_columns);
- return @pcols ? \@pcols : [ 1 ];
-}
-
sub source_bind_attributes {
my ($self, $source) = @_;
@@ -2333,6 +2304,9 @@
unless ($dir) {
carp "No directory given, using ./\n";
$dir = './';
+ } else {
+ -d $dir or File::Path::mkpath($dir)
+ or $self->throw_exception("create_ddl_dir: $! creating dir '$dir'");
}
$self->throw_exception ("Directory '$dir' does not exist\n") unless(-d $dir);
@@ -2648,23 +2622,6 @@
return $alias;
}
-sub DESTROY {
- my $self = shift;
-
- $self->_verify_pid if $self->_dbh;
-
- # some databases need this to stop spewing warnings
- if (my $dbh = $self->_dbh) {
- local $@;
- eval {
- %{ $dbh->{CachedKids} } = ();
- $dbh->disconnect;
- };
- }
-
- $self->_dbh(undef);
-}
-
1;
=head1 USAGE NOTES
Modified: DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/Storage/DBIHacks.pm
===================================================================
--- DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/Storage/DBIHacks.pm 2010-05-05 09:50:25 UTC (rev 9306)
+++ DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class/Storage/DBIHacks.pm 2010-05-05 09:52:30 UTC (rev 9307)
@@ -94,6 +94,8 @@
}
push @$inner_select, $sel;
+
+ push @{$inner_attrs->{as}}, $attrs->{as}[$i];
}
# construct the inner $from for the subquery
Modified: DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class.pm
===================================================================
--- DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class.pm 2010-05-05 09:50:25 UTC (rev 9306)
+++ DBIx-Class/0.08/branches/filter_column/lib/DBIx/Class.pm 2010-05-05 09:52:30 UTC (rev 9307)
@@ -384,6 +384,8 @@
zamolxes: Bogdan Lucaciu <bogdan at wiz.ro>
+Possum: Daniel LeWarne <possum at cpan.org>
+
=head1 COPYRIGHT
Copyright (c) 2005 - 2010 the DBIx::Class L</AUTHOR> and L</CONTRIBUTORS>
Modified: DBIx-Class/0.08/branches/filter_column/t/39load_namespaces_1.t
===================================================================
--- DBIx-Class/0.08/branches/filter_column/t/39load_namespaces_1.t 2010-05-05 09:50:25 UTC (rev 9306)
+++ DBIx-Class/0.08/branches/filter_column/t/39load_namespaces_1.t 2010-05-05 09:52:30 UTC (rev 9307)
@@ -7,8 +7,6 @@
use lib qw(t/lib);
use DBICTest; # do not remove even though it is not used
-plan tests => 8;
-
my $warnings;
eval {
local $SIG{__WARN__} = sub { $warnings .= shift };
@@ -16,9 +14,11 @@
use base qw/DBIx::Class::Schema/;
__PACKAGE__->load_namespaces;
};
-ok(!$@) or diag $@;
-like($warnings, qr/load_namespaces found ResultSet class C with no corresponding Result class/);
+ok(!$@, 'load_namespaces doesnt die') or diag $@;
+like($warnings, qr/load_namespaces found ResultSet class C with no corresponding Result class/, 'Found warning about extra ResultSet classes');
+like($warnings, qr/load_namespaces found ResultSet class DBICNSTest::ResultSet::D that does not subclass DBIx::Class::ResultSet/, 'Found warning about ResultSets with incorrect subclass');
+
my $source_a = DBICNSTest->source('A');
isa_ok($source_a, 'DBIx::Class::ResultSource::Table');
my $rset_a = DBICNSTest->resultset('A');
@@ -31,5 +31,7 @@
for my $moniker (qw/A B/) {
my $class = "DBICNSTest::Result::$moniker";
- ok(!defined($class->result_source_instance->source_name));
+ ok(!defined($class->result_source_instance->source_name), "Source name of $moniker not defined");
}
+
+done_testing;
Deleted: DBIx-Class/0.08/branches/filter_column/t/41orrible.t
===================================================================
--- DBIx-Class/0.08/branches/filter_column/t/41orrible.t 2010-05-05 09:50:25 UTC (rev 9306)
+++ DBIx-Class/0.08/branches/filter_column/t/41orrible.t 2010-05-05 09:52:30 UTC (rev 9307)
@@ -1,89 +0,0 @@
-use strict;
-use warnings;
-
-use Test::More;
-use DBIx::Class::SQLAHacks::OracleJoins;
-
-use lib qw(t/lib);
-use DBICTest; # do not remove even though it is not used
-use DBIC::SqlMakerTest;
-
-plan tests => 4;
-
-my $sa = new DBIx::Class::SQLAHacks::OracleJoins;
-
-$sa->limit_dialect('RowNum');
-
-is($sa->select('rubbish',
- [ 'foo.id', 'bar.id', \'TO_CHAR(foo.womble, "blah")' ],
- undef, undef, 1, 3),
- 'SELECT * FROM
-(
- SELECT A.*, ROWNUM r FROM
- (
- SELECT foo.id AS col1, bar.id AS col2, TO_CHAR(foo.womble, "blah") AS col3 FROM rubbish
- ) A
- WHERE ROWNUM < 5
-) B
-WHERE r >= 4
-', 'Munged stuff to make Oracle not explode');
-
-# test WhereJoins
-# search with undefined or empty $cond
-
-# my ($self, $table, $fields, $where, $order, @rest) = @_;
-my ($sql, @bind) = $sa->select(
- [
- { me => "cd" },
- [
- { "-join_type" => "LEFT", artist => "artist" },
- { "artist.artistid" => "me.artist" },
- ],
- ],
- [ 'cd.cdid', 'cd.artist', 'cd.title', 'cd.year', 'artist.artistid', 'artist.name' ],
- undef,
- undef
-);
-is_same_sql_bind(
- $sql, \@bind,
- 'SELECT cd.cdid, cd.artist, cd.title, cd.year, artist.artistid, artist.name FROM cd me, artist artist WHERE ( artist.artistid(+) = me.artist )', [],
- 'WhereJoins search with empty where clause'
-);
-
-($sql, @bind) = $sa->select(
- [
- { me => "cd" },
- [
- { "-join_type" => "", artist => "artist" },
- { "artist.artistid" => "me.artist" },
- ],
- ],
- [ 'cd.cdid', 'cd.artist', 'cd.title', 'cd.year', 'artist.artistid', 'artist.name' ],
- { 'artist.artistid' => 3 },
- undef
-);
-is_same_sql_bind(
- $sql, \@bind,
- 'SELECT cd.cdid, cd.artist, cd.title, cd.year, artist.artistid, artist.name FROM cd me, artist artist WHERE ( ( ( artist.artistid = me.artist ) AND ( artist.artistid = ? ) ) )', [3],
- 'WhereJoins search with where clause'
-);
-
-($sql, @bind) = $sa->select(
- [
- { me => "cd" },
- [
- { "-join_type" => "LEFT", artist => "artist" },
- { "artist.artistid" => "me.artist" },
- ],
- ],
- [ 'cd.cdid', 'cd.artist', 'cd.title', 'cd.year', 'artist.artistid', 'artist.name' ],
- [{ 'artist.artistid' => 3 }, { 'me.cdid' => 5 }],
- undef
-);
-is_same_sql_bind(
- $sql, \@bind,
- 'SELECT cd.cdid, cd.artist, cd.title, cd.year, artist.artistid, artist.name FROM cd me, artist artist WHERE ( ( ( artist.artistid(+) = me.artist ) AND ( ( ( artist.artistid = ? ) OR ( me.cdid = ? ) ) ) ) )', [3, 5],
- 'WhereJoins search with or in where clause'
-);
-
-
Modified: DBIx-Class/0.08/branches/filter_column/t/72pg.t
===================================================================
--- DBIx-Class/0.08/branches/filter_column/t/72pg.t 2010-05-05 09:50:25 UTC (rev 9306)
+++ DBIx-Class/0.08/branches/filter_column/t/72pg.t 2010-05-05 09:52:30 UTC (rev 9307)
@@ -238,12 +238,15 @@
$schema2->source("Artist")->name("dbic_t_schema.artist");
$schema->txn_do( sub {
- my $artist = $schema->resultset('Artist')->search(
+ my $rs = $schema->resultset('Artist')->search(
{
artistid => 1
},
$t->{update_lock} ? { for => 'update' } : {}
- )->first;
+ );
+ ok ($rs->count, 'Count works');
+
+ my $artist = $rs->next;
is($artist->artistid, 1, "select returns artistid = 1");
$timed_out = 0;
Modified: DBIx-Class/0.08/branches/filter_column/t/746mssql.t
===================================================================
--- DBIx-Class/0.08/branches/filter_column/t/746mssql.t 2010-05-05 09:50:25 UTC (rev 9306)
+++ DBIx-Class/0.08/branches/filter_column/t/746mssql.t 2010-05-05 09:52:30 UTC (rev 9307)
@@ -59,8 +59,6 @@
{ on_connect_call => 'use_dynamic_cursors' },
{},
);
-my $new;
-
# test Auto-PK with different options
for my $opts (@opts) {
SKIP: {
@@ -77,384 +75,408 @@
$schema->resultset('Artist')->search({ name => 'foo' })->delete;
- $new = $schema->resultset('Artist')->create({ name => 'foo' });
+ my $new = $schema->resultset('Artist')->create({ name => 'foo' });
ok($new->artistid > 0, "Auto-PK worked");
}
}
-$seen_id{$new->artistid}++;
-# test LIMIT support
-for (1..6) {
- $new = $schema->resultset('Artist')->create({ name => 'Artist ' . $_ });
- is ( $seen_id{$new->artistid}, undef, "id for Artist $_ is unique" );
- $seen_id{$new->artistid}++;
-}
-my $it = $schema->resultset('Artist')->search( {}, {
- rows => 3,
- order_by => 'artistid',
-});
+# Test populate
-is( $it->count, 3, "LIMIT count ok" );
-is( $it->next->name, "foo", "iterator->next ok" );
-$it->next;
-is( $it->next->name, "Artist 2", "iterator->next ok" );
-is( $it->next, undef, "next past end of resultset ok" );
-
-# test GUID columns
-
-$schema->storage->dbh_do (sub {
+{
+ $schema->storage->dbh_do (sub {
my ($storage, $dbh) = @_;
- eval { $dbh->do("DROP TABLE artist") };
+ eval { $dbh->do("DROP TABLE owners") };
+ eval { $dbh->do("DROP TABLE books") };
$dbh->do(<<'SQL');
-CREATE TABLE artist (
- artistid UNIQUEIDENTIFIER NOT NULL,
+CREATE TABLE books (
+ id INT IDENTITY (1, 1) NOT NULL,
+ source VARCHAR(100),
+ owner INT,
+ title VARCHAR(10),
+ price INT NULL
+)
+
+CREATE TABLE owners (
+ id INT IDENTITY (1, 1) NOT NULL,
name VARCHAR(100),
- rank INT NOT NULL DEFAULT '13',
- charfield CHAR(10) NULL,
- a_guid UNIQUEIDENTIFIER,
- primary key(artistid)
)
SQL
-});
-# start disconnected to make sure insert works on an un-reblessed storage
-$schema = DBICTest::Schema->connect($dsn, $user, $pass);
+ });
-my $row;
-lives_ok {
- $row = $schema->resultset('ArtistGUID')->create({ name => 'mtfnpy' })
-} 'created a row with a GUID';
+ lives_ok ( sub {
+ # start a new connection, make sure rebless works
+ my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
+ $schema->populate ('Owners', [
+ [qw/id name /],
+ [qw/1 wiggle/],
+ [qw/2 woggle/],
+ [qw/3 boggle/],
+ [qw/4 fRIOUX/],
+ [qw/5 fRUE/],
+ [qw/6 fREW/],
+ [qw/7 fROOH/],
+ [qw/8 fISMBoC/],
+ [qw/9 station/],
+ [qw/10 mirror/],
+ [qw/11 dimly/],
+ [qw/12 face_to_face/],
+ [qw/13 icarus/],
+ [qw/14 dream/],
+ [qw/15 dyrstyggyr/],
+ ]);
+ }, 'populate with PKs supplied ok' );
-ok(
- eval { $row->artistid },
- 'row has GUID PK col populated',
-);
-diag $@ if $@;
-ok(
- eval { $row->a_guid },
- 'row has a GUID col with auto_nextval populated',
-);
-diag $@ if $@;
+ lives_ok (sub {
+ # start a new connection, make sure rebless works
+ # test an insert with a supplied identity, followed by one without
+ my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
+ for (2, 1) {
+ my $id = $_ * 20 ;
+ $schema->resultset ('Owners')->create ({ id => $id, name => "troglodoogle $id" });
+ $schema->resultset ('Owners')->create ({ name => "troglodoogle " . ($id + 1) });
+ }
+ }, 'create with/without PKs ok' );
-my $row_from_db = $schema->resultset('ArtistGUID')
- ->search({ name => 'mtfnpy' })->first;
+ is ($schema->resultset ('Owners')->count, 19, 'owner rows really in db' );
-is $row_from_db->artistid, $row->artistid,
- 'PK GUID round trip';
+ lives_ok ( sub {
+ # start a new connection, make sure rebless works
+ my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
+ $schema->populate ('BooksInLibrary', [
+ [qw/source owner title /],
+ [qw/Library 1 secrets0/],
+ [qw/Library 1 secrets1/],
+ [qw/Eatery 1 secrets2/],
+ [qw/Library 2 secrets3/],
+ [qw/Library 3 secrets4/],
+ [qw/Eatery 3 secrets5/],
+ [qw/Library 4 secrets6/],
+ [qw/Library 5 secrets7/],
+ [qw/Eatery 5 secrets8/],
+ [qw/Library 6 secrets9/],
+ [qw/Library 7 secrets10/],
+ [qw/Eatery 7 secrets11/],
+ [qw/Library 8 secrets12/],
+ ]);
+ }, 'populate without PKs supplied ok' );
+}
-is $row_from_db->a_guid, $row->a_guid,
- 'NON-PK GUID round trip';
+# test simple, complex LIMIT and limited prefetch support, with both dialects and quote combinations (if possible)
+for my $dialect (
+ 'Top',
+ ($schema->storage->_server_info->{normalized_dbms_version} || 0 ) >= 9
+ ? ('RowNumberOver')
+ : ()
+ ,
+) {
+ for my $quoted (0, 1) {
-# test MONEY type
-$schema->storage->dbh_do (sub {
- my ($storage, $dbh) = @_;
- eval { $dbh->do("DROP TABLE money_test") };
- $dbh->do(<<'SQL');
-CREATE TABLE money_test (
- id INT IDENTITY PRIMARY KEY,
- amount MONEY NULL
-)
-SQL
-});
+ $schema = DBICTest::Schema->connect($dsn, $user, $pass, {
+ limit_dialect => $dialect,
+ $quoted
+ ? ( quote_char => [ qw/ [ ] / ], name_sep => '.' )
+ : ()
+ ,
+ });
-my $rs = $schema->resultset('Money');
+ my $test_type = "Dialect:$dialect Quoted:$quoted";
-lives_ok {
- $row = $rs->create({ amount => 100 });
-} 'inserted a money value';
+ # basic limit support
+ TODO: {
+ my $art_rs = $schema->resultset ('Artist');
+ $art_rs->delete;
+ $art_rs->create({ name => 'Artist ' . $_ }) for (1..6);
-cmp_ok $rs->find($row->id)->amount, '==', 100, 'money value round-trip';
+ my $it = $schema->resultset('Artist')->search( {}, {
+ rows => 4,
+ offset => 3,
+ order_by => 'artistid',
+ });
-lives_ok {
- $row->update({ amount => 200 });
-} 'updated a money value';
+ is( $it->count, 3, "$test_type: LIMIT count ok" );
-cmp_ok $rs->find($row->id)->amount, '==', 200,
- 'updated money value round-trip';
+ local $TODO = "Top-limit does not work when your limit ends up past the resultset"
+ if $dialect eq 'Top';
-lives_ok {
- $row->update({ amount => undef });
-} 'updated a money value to NULL';
+ is( $it->next->name, 'Artist 4', "$test_type: iterator->next ok" );
+ $it->next;
+ is( $it->next->name, 'Artist 6', "$test_type: iterator->next ok" );
+ is( $it->next, undef, "$test_type: next past end of resultset ok" );
+ }
-is $rs->find($row->id)->amount, undef,'updated money value to NULL round-trip';
+ # plain ordered subqueries throw
+ throws_ok (sub {
+ $schema->resultset('Owners')->search ({}, { order_by => 'name' })->as_query
+ }, qr/ordered subselect encountered/, "$test_type: Ordered Subselect detection throws ok");
-$schema->storage->dbh_do (sub {
- my ($storage, $dbh) = @_;
- eval { $dbh->do("DROP TABLE owners") };
- eval { $dbh->do("DROP TABLE books") };
- $dbh->do(<<'SQL');
-CREATE TABLE books (
- id INT IDENTITY (1, 1) NOT NULL,
- source VARCHAR(100),
- owner INT,
- title VARCHAR(10),
- price INT NULL
-)
+ # make sure ordered subselects *somewhat* work
+ {
+ my $owners = $schema->resultset ('Owners')->search ({}, { order_by => 'name', offset => 2, rows => 3, unsafe_subselect_ok => 1 });
+ my $sealed_owners = $owners->as_subselect_rs;
-CREATE TABLE owners (
- id INT IDENTITY (1, 1) NOT NULL,
- name VARCHAR(100),
-)
-SQL
+ is_deeply (
+ [ map { $_->name } ($sealed_owners->all) ],
+ [ map { $_->name } ($owners->all) ],
+ "$test_type: Sort preserved from within a subquery",
+ );
+ }
-});
+ {
+ my $book_owner_ids = $schema->resultset ('BooksInLibrary')->search ({}, {
+ rows => 6,
+ offset => 2,
+ join => 'owner',
+ distinct => 1,
+ order_by => 'owner.name',
+ unsafe_subselect_ok => 1
+ })->get_column ('owner');
-lives_ok ( sub {
- # start a new connection, make sure rebless works
- my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
- $schema->populate ('Owners', [
- [qw/id name /],
- [qw/1 wiggle/],
- [qw/2 woggle/],
- [qw/3 boggle/],
- [qw/4 fRIOUX/],
- [qw/5 fRUE/],
- [qw/6 fREW/],
- [qw/7 fROOH/],
- [qw/8 fISMBoC/],
- [qw/9 station/],
- [qw/10 mirror/],
- [qw/11 dimly/],
- [qw/12 face_to_face/],
- [qw/13 icarus/],
- [qw/14 dream/],
- [qw/15 dyrstyggyr/],
- ]);
-}, 'populate with PKs supplied ok' );
+ my @ids = $book_owner_ids->all;
+ is (@ids, 6, 'Limit works');
-lives_ok (sub {
- # start a new connection, make sure rebless works
- # test an insert with a supplied identity, followed by one without
- my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
- for (2, 1) {
- my $id = $_ * 20 ;
- $schema->resultset ('Owners')->create ({ id => $id, name => "troglodoogle $id" });
- $schema->resultset ('Owners')->create ({ name => "troglodoogle " . ($id + 1) });
- }
-}, 'create with/without PKs ok' );
+ my $book_owners = $schema->resultset ('Owners')->search ({
+ id => { -in => $book_owner_ids->as_query }
+ });
-is ($schema->resultset ('Owners')->count, 19, 'owner rows really in db' );
+ TODO: {
+ local $TODO = "Correlated limited IN subqueries will probably never preserve order";
-lives_ok ( sub {
- # start a new connection, make sure rebless works
- my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
- $schema->populate ('BooksInLibrary', [
- [qw/source owner title /],
- [qw/Library 1 secrets0/],
- [qw/Library 1 secrets1/],
- [qw/Eatery 1 secrets2/],
- [qw/Library 2 secrets3/],
- [qw/Library 3 secrets4/],
- [qw/Eatery 3 secrets5/],
- [qw/Library 4 secrets6/],
- [qw/Library 5 secrets7/],
- [qw/Eatery 5 secrets8/],
- [qw/Library 6 secrets9/],
- [qw/Library 7 secrets10/],
- [qw/Eatery 7 secrets11/],
- [qw/Library 8 secrets12/],
- ]);
-}, 'populate without PKs supplied ok' );
+ is_deeply (
+ [ map { $_->id } ($book_owners->all) ],
+ [ $book_owner_ids->all ],
+ "$test_type: Sort is preserved across IN subqueries",
+ );
+ }
+ }
-# plain ordered subqueries throw
-throws_ok (sub {
- $schema->resultset('Owners')->search ({}, { order_by => 'name' })->as_query
-}, qr/ordered subselect encountered/, 'Ordered Subselect detection throws ok');
+ # still even with lost order of IN, we should be getting correct
+ # sets
+ {
+ my $owners = $schema->resultset ('Owners')->search ({}, { order_by => 'name', offset => 2, rows => 3, unsafe_subselect_ok => 1 });
+ my $corelated_owners = $owners->result_source->resultset->search (
+ {
+ id => { -in => $owners->get_column('id')->as_query },
+ },
+ {
+ order_by => 'name' #reorder because of what is shown above
+ },
+ );
-# make sure ordered subselects *somewhat* work
-{
- my $owners = $schema->resultset ('Owners')->search ({}, { order_by => 'name', offset => 2, rows => 3, unsafe_subselect_ok => 1 });
+ is (
+ join ("\x00", map { $_->name } ($corelated_owners->all) ),
+ join ("\x00", map { $_->name } ($owners->all) ),
+ "$test_type: With an outer order_by, everything still matches",
+ );
+ }
- my $al = $owners->current_source_alias;
- my $sealed_owners = $owners->result_source->resultset->search (
- {},
+ # make sure right-join-side single-prefetch ordering limit works
{
- alias => $al,
- from => [{
- -alias => $al,
- -source_handle => $owners->result_source->handle,
- $al => $owners->as_query,
- }],
- },
- );
+ my $rs = $schema->resultset ('BooksInLibrary')->search (
+ {
+ 'owner.name' => { '!=', 'woggle' },
+ },
+ {
+ prefetch => 'owner',
+ order_by => 'owner.name',
+ }
+ );
+ # this is the order in which they should come from the above query
+ my @owner_names = qw/boggle fISMBoC fREW fRIOUX fROOH fRUE wiggle wiggle/;
- is_deeply (
- [ map { $_->name } ($sealed_owners->all) ],
- [ map { $_->name } ($owners->all) ],
- 'Sort preserved from within a subquery',
- );
-}
+ is ($rs->all, 8, "$test_type: Correct amount of objects from right-sorted joined resultset");
+ is_deeply (
+ [map { $_->owner->name } ($rs->all) ],
+ \@owner_names,
+ "$test_type: Prefetched rows were properly ordered"
+ );
-TODO: {
- local $TODO = "This porbably will never work, but it isn't critical either afaik";
+ my $limited_rs = $rs->search ({}, {rows => 6, offset => 2, unsafe_subselect_ok => 1});
+ is ($limited_rs->count, 6, "$test_type: Correct count of limited right-sorted joined resultset");
+ is ($limited_rs->count_rs->next, 6, "$test_type: Correct count_rs of limited right-sorted joined resultset");
- my $book_owner_ids = $schema->resultset ('BooksInLibrary')
- ->search ({}, { join => 'owner', distinct => 1, order_by => 'owner.name', unsafe_subselect_ok => 1 })
- ->get_column ('owner');
+ my $queries;
+ $schema->storage->debugcb(sub { $queries++; });
+ $schema->storage->debug(1);
- my $book_owners = $schema->resultset ('Owners')->search ({
- id => { -in => $book_owner_ids->as_query }
- });
+ is_deeply (
+ [map { $_->owner->name } ($limited_rs->all) ],
+ [@owner_names[2 .. 7]],
+ "$test_type: Prefetch-limited rows were properly ordered"
+ );
+ is ($queries, 1, "$test_type: Only one query with prefetch");
- is_deeply (
- [ map { $_->id } ($book_owners->all) ],
- [ $book_owner_ids->all ],
- 'Sort is preserved across IN subqueries',
- );
-}
+ $schema->storage->debugcb(undef);
+ $schema->storage->debug(0);
-# This is known not to work - thus the negative test
-{
- my $owners = $schema->resultset ('Owners')->search ({}, { order_by => 'name', offset => 2, rows => 3, unsafe_subselect_ok => 1 });
- my $corelated_owners = $owners->result_source->resultset->search (
- {
- id => { -in => $owners->get_column('id')->as_query },
- },
- {
- order_by => 'name' #reorder because of what is shown above
- },
- );
+ is_deeply (
+ [map { $_->name } ($limited_rs->search_related ('owner')->all) ],
+ [@owner_names[2 .. 7]],
+ "$test_type: Rows are still properly ordered after search_related",
+ );
+ }
- cmp_ok (
- join ("\x00", map { $_->name } ($corelated_owners->all) ),
- 'ne',
- join ("\x00", map { $_->name } ($owners->all) ),
- 'Sadly sort not preserved from within a corelated subquery',
- );
+ # try a ->has_many direction with duplicates
+ my $owners = $schema->resultset ('Owners')->search (
+ {
+ 'books.id' => { '!=', undef },
+ 'me.name' => { '!=', 'somebogusstring' },
+ },
+ {
+ prefetch => 'books',
+ order_by => { -asc => \['name + ?', [ test => 'xxx' ]] }, # test bindvar propagation
+ rows => 3, # 8 results total
+ unsafe_subselect_ok => 1,
+ },
+ );
- cmp_ok (
- join ("\x00", sort map { $_->name } ($corelated_owners->all) ),
- 'ne',
- join ("\x00", sort map { $_->name } ($owners->all) ),
- 'Which in fact gives a completely wrong dataset',
- );
-}
+ my ($sql, @bind) = @${$owners->page(3)->as_query};
+ is_deeply (
+ \@bind,
+ [ ([ 'me.name' => 'somebogusstring' ], [ test => 'xxx' ]) x 2 ], # double because of the prefetch subq
+ );
+ is ($owners->page(1)->all, 3, "$test_type: has_many prefetch returns correct number of rows");
+ is ($owners->page(1)->count, 3, "$test_type: has-many prefetch returns correct count");
-# make sure right-join-side single-prefetch ordering limit works
-{
- my $rs = $schema->resultset ('BooksInLibrary')->search (
- {
- 'owner.name' => { '!=', 'woggle' },
- },
- {
- prefetch => 'owner',
- order_by => 'owner.name',
+ is ($owners->page(3)->count, 2, "$test_type: has-many prefetch returns correct count");
+ TODO: {
+ local $TODO = "Top-limit does not work when your limit ends up past the resultset"
+ if $dialect eq 'Top';
+ is ($owners->page(3)->all, 2, "$test_type: has_many prefetch returns correct number of rows");
+ is ($owners->page(3)->count_rs->next, 2, "$test_type: has-many prefetch returns correct count_rs");
}
- );
- # this is the order in which they should come from the above query
- my @owner_names = qw/boggle fISMBoC fREW fRIOUX fROOH fRUE wiggle wiggle/;
- is ($rs->all, 8, 'Correct amount of objects from right-sorted joined resultset');
- is_deeply (
- [map { $_->owner->name } ($rs->all) ],
- \@owner_names,
- 'Rows were properly ordered'
- );
- my $limited_rs = $rs->search ({}, {rows => 7, offset => 2, unsafe_subselect_ok => 1});
- is ($limited_rs->count, 6, 'Correct count of limited right-sorted joined resultset');
- is ($limited_rs->count_rs->next, 6, 'Correct count_rs of limited right-sorted joined resultset');
+ # try a ->belongs_to direction (no select collapse, group_by should work)
+ my $books = $schema->resultset ('BooksInLibrary')->search (
+ {
+ 'owner.name' => [qw/wiggle woggle/],
+ },
+ {
+ distinct => 1,
+ having => \['1 = ?', [ test => 1 ] ], #test having propagation
+ prefetch => 'owner',
+ rows => 2, # 3 results total
+ order_by => { -desc => 'me.owner' },
+ unsafe_subselect_ok => 1,
+ },
+ );
- my $queries;
- $schema->storage->debugcb(sub { $queries++; });
- $schema->storage->debug(1);
+ ($sql, @bind) = @${$books->page(3)->as_query};
+ is_deeply (
+ \@bind,
+ [
+ # inner
+ [ 'owner.name' => 'wiggle' ], [ 'owner.name' => 'woggle' ], [ source => 'Library' ], [ test => '1' ],
+ # outer
+ [ 'owner.name' => 'wiggle' ], [ 'owner.name' => 'woggle' ], [ source => 'Library' ],
+ ],
+ );
- is_deeply (
- [map { $_->owner->name } ($limited_rs->all) ],
- [@owner_names[2 .. 7]],
- 'Limited rows were properly ordered'
- );
- is ($queries, 1, 'Only one query with prefetch');
+ is ($books->page(1)->all, 2, "$test_type: Prefetched grouped search returns correct number of rows");
+ is ($books->page(1)->count, 2, "$test_type: Prefetched grouped search returns correct count");
- $schema->storage->debugcb(undef);
- $schema->storage->debug(0);
+ is ($books->page(2)->count, 1, "$test_type: Prefetched grouped search returns correct count");
+ TODO: {
+ local $TODO = "Top-limit does not work when your limit ends up past the resultset"
+ if $dialect eq 'Top';
+ is ($books->page(2)->all, 1, "$test_type: Prefetched grouped search returns correct number of rows");
+ is ($books->page(2)->count_rs->next, 1, "$test_type: Prefetched grouped search returns correct count_rs");
+ }
+ }
+}
- is_deeply (
- [map { $_->name } ($limited_rs->search_related ('owner')->all) ],
- [@owner_names[2 .. 7]],
- 'Rows are still properly ordered after search_related'
+# test GUID columns
+{
+ $schema->storage->dbh_do (sub {
+ my ($storage, $dbh) = @_;
+ eval { $dbh->do("DROP TABLE artist") };
+ $dbh->do(<<'SQL');
+CREATE TABLE artist (
+ artistid UNIQUEIDENTIFIER NOT NULL,
+ name VARCHAR(100),
+ rank INT NOT NULL DEFAULT '13',
+ charfield CHAR(10) NULL,
+ a_guid UNIQUEIDENTIFIER,
+ primary key(artistid)
+)
+SQL
+ });
+
+ # start disconnected to make sure insert works on an un-reblessed storage
+ $schema = DBICTest::Schema->connect($dsn, $user, $pass);
+
+ my $row;
+ lives_ok {
+ $row = $schema->resultset('ArtistGUID')->create({ name => 'mtfnpy' })
+ } 'created a row with a GUID';
+
+ ok(
+ eval { $row->artistid },
+ 'row has GUID PK col populated',
);
-}
+ diag $@ if $@;
+ ok(
+ eval { $row->a_guid },
+ 'row has a GUID col with auto_nextval populated',
+ );
+ diag $@ if $@;
-#
-# try a prefetch on tables with identically named columns
-#
+ my $row_from_db = $schema->resultset('ArtistGUID')
+ ->search({ name => 'mtfnpy' })->first;
-# set quote char - make sure things work while quoted
-$schema->storage->_sql_maker->{quote_char} = [qw/[ ]/];
-$schema->storage->_sql_maker->{name_sep} = '.';
+ is $row_from_db->artistid, $row->artistid,
+ 'PK GUID round trip';
+ is $row_from_db->a_guid, $row->a_guid,
+ 'NON-PK GUID round trip';
+}
+
+# test MONEY type
{
- # try a ->has_many direction
- my $owners = $schema->resultset ('Owners')->search (
- {
- 'books.id' => { '!=', undef },
- 'me.name' => { '!=', 'somebogusstring' },
- },
- {
- prefetch => 'books',
- order_by => { -asc => \['name + ?', [ test => 'xxx' ]] }, # test bindvar propagation
- rows => 3, # 8 results total
- unsafe_subselect_ok => 1,
- },
- );
+ $schema->storage->dbh_do (sub {
+ my ($storage, $dbh) = @_;
+ eval { $dbh->do("DROP TABLE money_test") };
+ $dbh->do(<<'SQL');
+CREATE TABLE money_test (
+ id INT IDENTITY PRIMARY KEY,
+ amount MONEY NULL
+)
+SQL
+ });
- my ($sql, @bind) = @${$owners->page(3)->as_query};
- is_deeply (
- \@bind,
- [ ([ 'me.name' => 'somebogusstring' ], [ test => 'xxx' ]) x 2 ], # double because of the prefetch subq
- );
+ my $rs = $schema->resultset('Money');
+ my $row;
- is ($owners->page(1)->all, 3, 'has_many prefetch returns correct number of rows');
- is ($owners->page(1)->count, 3, 'has-many prefetch returns correct count');
+ lives_ok {
+ $row = $rs->create({ amount => 100 });
+ } 'inserted a money value';
- is ($owners->page(3)->all, 2, 'has_many prefetch returns correct number of rows');
- is ($owners->page(3)->count, 2, 'has-many prefetch returns correct count');
- is ($owners->page(3)->count_rs->next, 2, 'has-many prefetch returns correct count_rs');
+ cmp_ok $rs->find($row->id)->amount, '==', 100, 'money value round-trip';
+ lives_ok {
+ $row->update({ amount => 200 });
+ } 'updated a money value';
- # try a ->belongs_to direction (no select collapse, group_by should work)
- my $books = $schema->resultset ('BooksInLibrary')->search (
- {
- 'owner.name' => [qw/wiggle woggle/],
- },
- {
- distinct => 1,
- having => \['1 = ?', [ test => 1 ] ], #test having propagation
- prefetch => 'owner',
- rows => 2, # 3 results total
- order_by => { -desc => 'me.owner' },
- unsafe_subselect_ok => 1,
- },
- );
+ cmp_ok $rs->find($row->id)->amount, '==', 200,
+ 'updated money value round-trip';
- ($sql, @bind) = @${$books->page(3)->as_query};
- is_deeply (
- \@bind,
- [
- # inner
- [ 'owner.name' => 'wiggle' ], [ 'owner.name' => 'woggle' ], [ source => 'Library' ], [ test => '1' ],
- # outer
- [ 'owner.name' => 'wiggle' ], [ 'owner.name' => 'woggle' ], [ source => 'Library' ],
- ],
- );
+ lives_ok {
+ $row->update({ amount => undef });
+ } 'updated a money value to NULL';
- is ($books->page(1)->all, 2, 'Prefetched grouped search returns correct number of rows');
- is ($books->page(1)->count, 2, 'Prefetched grouped search returns correct count');
-
- is ($books->page(2)->all, 1, 'Prefetched grouped search returns correct number of rows');
- is ($books->page(2)->count, 1, 'Prefetched grouped search returns correct count');
- is ($books->page(2)->count_rs->next, 1, 'Prefetched grouped search returns correct count_rs');
+ is $rs->find($row->id)->amount, undef,'updated money value to NULL round-trip';
}
+
done_testing;
# clean up our mess
Modified: DBIx-Class/0.08/branches/filter_column/t/90join_torture.t
===================================================================
--- DBIx-Class/0.08/branches/filter_column/t/90join_torture.t 2010-05-05 09:50:25 UTC (rev 9306)
+++ DBIx-Class/0.08/branches/filter_column/t/90join_torture.t 2010-05-05 09:52:30 UTC (rev 9307)
@@ -146,7 +146,7 @@
JOIN cd cd ON cd.cdid = me.cd_id
JOIN artist artist_2 ON artist_2.artistid = cd.artist
GROUP BY me.cd_id
- ) count_subq
+ ) me
)',
[],
);
Modified: DBIx-Class/0.08/branches/filter_column/t/93single_accessor_object.t
===================================================================
--- DBIx-Class/0.08/branches/filter_column/t/93single_accessor_object.t 2010-05-05 09:50:25 UTC (rev 9306)
+++ DBIx-Class/0.08/branches/filter_column/t/93single_accessor_object.t 2010-05-05 09:52:30 UTC (rev 9307)
@@ -45,20 +45,20 @@
$schema = DBICTest->init_schema();
{
- my $artist = $schema->resultset('Artist')->create({ artistid => 666, name => 'bad religion' });
- my $cd = $schema->resultset('CD')->create({ cdid => 187, artist => 1, title => 'how could hell be any worse?', year => 1982, genreid => undef });
+ my $artist = $schema->resultset('Artist')->create({ artistid => 666, name => 'bad religion' });
+ my $cd = $schema->resultset('CD')->create({ cdid => 187, artist => 1, title => 'how could hell be any worse?', year => 1982, genreid => undef });
- ok(!defined($cd->get_column('genreid')), 'genreid is NULL'); #no accessor was defined for this column
- ok(!defined($cd->genre), 'genre accessor returns undef');
+ ok(!defined($cd->get_column('genreid')), 'genreid is NULL'); #no accessor was defined for this column
+ ok(!defined($cd->genre), 'genre accessor returns undef');
}
$schema = DBICTest->init_schema();
{
- my $artist = $schema->resultset('Artist')->create({ artistid => 666, name => 'bad religion' });
- my $genre = $schema->resultset('Genre')->create({ genreid => 88, name => 'disco' });
- my $cd = $schema->resultset('CD')->create({ cdid => 187, artist => 1, title => 'how could hell be any worse?', year => 1982 });
+ my $artist = $schema->resultset('Artist')->create({ artistid => 666, name => 'bad religion' });
+ my $genre = $schema->resultset('Genre')->create({ genreid => 88, name => 'disco' });
+ my $cd = $schema->resultset('CD')->create({ cdid => 187, artist => 1, title => 'how could hell be any worse?', year => 1982 });
- dies_ok { $cd->genre } 'genre accessor throws without column';
+ dies_ok { $cd->genre } 'genre accessor throws without column';
}
Modified: DBIx-Class/0.08/branches/filter_column/t/cdbi/columns_as_hashes.t
===================================================================
--- DBIx-Class/0.08/branches/filter_column/t/cdbi/columns_as_hashes.t 2010-05-05 09:50:25 UTC (rev 9306)
+++ DBIx-Class/0.08/branches/filter_column/t/cdbi/columns_as_hashes.t 2010-05-05 09:52:30 UTC (rev 9307)
@@ -26,7 +26,7 @@
my $rating = $waves->{rating};
$waves->Rating("PG");
is $rating, "R", 'evaluation of column value is not deferred';
- } qr{^Column 'rating' of 'Film/$waves' was fetched as a hash at \Q$0};
+ } qr{^Column 'rating' of 'Film/$waves' was fetched as a hash at\b};
warnings_like {
is $waves->{title}, $waves->Title, "columns can be accessed as hashes";
Modified: DBIx-Class/0.08/branches/filter_column/t/count/count_rs.t
===================================================================
--- DBIx-Class/0.08/branches/filter_column/t/count/count_rs.t 2010-05-05 09:50:25 UTC (rev 9306)
+++ DBIx-Class/0.08/branches/filter_column/t/count/count_rs.t 2010-05-05 09:52:30 UTC (rev 9307)
@@ -54,7 +54,7 @@
JOIN cd disc ON disc.cdid = tracks.cd
WHERE ( ( position = ? OR position = ? ) )
LIMIT 3 OFFSET 8
- ) count_subq
+ ) tracks
)',
[ [ position => 1 ], [ position => 2 ] ],
'count_rs db-side limit applied',
@@ -88,7 +88,7 @@
JOIN artist artist ON artist.artistid = cds.artist
WHERE tracks.position = ? OR tracks.position = ?
GROUP BY cds.cdid
- ) count_subq
+ ) cds
',
[ qw/'1' '2'/ ],
'count softlimit applied',
@@ -109,7 +109,7 @@
WHERE tracks.position = ? OR tracks.position = ?
GROUP BY cds.cdid
LIMIT 3 OFFSET 4
- ) count_subq
+ ) cds
)',
[ [ 'tracks.position' => 1 ], [ 'tracks.position' => 2 ] ],
'count_rs db-side limit applied',
Modified: DBIx-Class/0.08/branches/filter_column/t/count/prefetch.t
===================================================================
--- DBIx-Class/0.08/branches/filter_column/t/count/prefetch.t 2010-05-05 09:50:25 UTC (rev 9306)
+++ DBIx-Class/0.08/branches/filter_column/t/count/prefetch.t 2010-05-05 09:52:30 UTC (rev 9307)
@@ -31,7 +31,7 @@
JOIN artist artist ON artist.artistid = cds.artist
WHERE tracks.position = ? OR tracks.position = ?
GROUP BY cds.cdid
- ) count_subq
+ ) cds
)',
[ map { [ 'tracks.position' => $_ ] } (1, 2) ],
);
@@ -63,7 +63,7 @@
WHERE ( genre.name = ? )
GROUP BY genre.genreid
)
- count_subq
+ genre
)',
[ [ 'genre.name' => 'emo' ] ],
);
Modified: DBIx-Class/0.08/branches/filter_column/t/inflate/datetime_mssql.t
===================================================================
--- DBIx-Class/0.08/branches/filter_column/t/inflate/datetime_mssql.t 2010-05-05 09:50:25 UTC (rev 9306)
+++ DBIx-Class/0.08/branches/filter_column/t/inflate/datetime_mssql.t 2010-05-05 09:52:30 UTC (rev 9307)
@@ -3,59 +3,79 @@
use Test::More;
use Test::Exception;
+use Scope::Guard ();
use lib qw(t/lib);
use DBICTest;
-my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_MSSQL_ODBC_${_}" } qw/DSN USER PASS/};
+# use this if you keep a copy of DBD::Sybase linked to FreeTDS somewhere else
+BEGIN {
+ if (my $lib_dirs = $ENV{DBICTEST_MSSQL_PERL5LIB}) {
+ unshift @INC, $_ for split /:/, $lib_dirs;
+ }
+}
-if (not ($dsn && $user)) {
+my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_MSSQL_ODBC_${_}" } qw/DSN USER PASS/};
+my ($dsn2, $user2, $pass2) = @ENV{map { "DBICTEST_MSSQL_${_}" } qw/DSN USER PASS/};
+
+if (not ($dsn || $dsn2)) {
plan skip_all =>
- 'Set $ENV{DBICTEST_MSSQL_ODBC_DSN}, _USER and _PASS to run this test' .
+ 'Set $ENV{DBICTEST_MSSQL_ODBC_DSN} and/or $ENV{DBICTEST_MSSQL_DSN} _USER '
+ .'and _PASS to run this test' .
"\nWarning: This test drops and creates a table called 'track'";
} else {
eval "use DateTime; use DateTime::Format::Strptime;";
if ($@) {
plan skip_all => 'needs DateTime and DateTime::Format::Strptime for testing';
}
- else {
- plan tests => 4 * 2; # (tests * dt_types)
- }
}
-my $schema = DBICTest::Schema->clone;
+my @connect_info = (
+ [ $dsn, $user, $pass ],
+ [ $dsn2, $user2, $pass2 ],
+);
-$schema->connection($dsn, $user, $pass);
-$schema->storage->ensure_connected;
+my $schema;
+for my $connect_info (@connect_info) {
+ my ($dsn, $user, $pass) = @$connect_info;
+
+ next unless $dsn;
+
+ $schema = DBICTest::Schema->connect($dsn, $user, $pass, {
+ on_connect_call => 'datetime_setup'
+ });
+
+ my $guard = Scope::Guard->new(\&cleanup);
+
# coltype, column, datehash
-my @dt_types = (
- ['DATETIME',
- 'last_updated_at',
- {
- year => 2004,
- month => 8,
- day => 21,
- hour => 14,
- minute => 36,
- second => 48,
- nanosecond => 500000000,
- }],
- ['SMALLDATETIME', # minute precision
- 'small_dt',
- {
- year => 2004,
- month => 8,
- day => 21,
- hour => 14,
- minute => 36,
- }],
-);
+ my @dt_types = (
+ ['DATETIME',
+ 'last_updated_at',
+ {
+ year => 2004,
+ month => 8,
+ day => 21,
+ hour => 14,
+ minute => 36,
+ second => 48,
+ nanosecond => 500000000,
+ }],
+ ['SMALLDATETIME', # minute precision
+ 'small_dt',
+ {
+ year => 2004,
+ month => 8,
+ day => 21,
+ hour => 14,
+ minute => 36,
+ }],
+ );
-for my $dt_type (@dt_types) {
- my ($type, $col, $sample_dt) = @$dt_type;
+ for my $dt_type (@dt_types) {
+ my ($type, $col, $sample_dt) = @$dt_type;
- eval { $schema->storage->dbh->do("DROP TABLE track") };
- $schema->storage->dbh->do(<<"SQL");
+ eval { $schema->storage->dbh->do("DROP TABLE track") };
+ $schema->storage->dbh->do(<<"SQL");
CREATE TABLE track (
trackid INT IDENTITY PRIMARY KEY,
cd INT,
@@ -63,23 +83,30 @@
$col $type,
)
SQL
- ok(my $dt = DateTime->new($sample_dt));
+ ok(my $dt = DateTime->new($sample_dt));
- my $row;
- ok( $row = $schema->resultset('Track')->create({
- $col => $dt,
- cd => 1,
- }));
- ok( $row = $schema->resultset('Track')
- ->search({ trackid => $row->trackid }, { select => [$col] })
- ->first
- );
- is( $row->$col, $dt, 'DateTime roundtrip' );
+ my $row;
+ ok( $row = $schema->resultset('Track')->create({
+ $col => $dt,
+ cd => 1,
+ }));
+ ok( $row = $schema->resultset('Track')
+ ->search({ trackid => $row->trackid }, { select => [$col] })
+ ->first
+ );
+ is( $row->$col, $dt, "$type roundtrip" );
+
+ cmp_ok( $row->$col->nanosecond, '==', $sample_dt->{nanosecond},
+ 'DateTime fractional portion roundtrip' )
+ if exists $sample_dt->{nanosecond};
+ }
}
+done_testing;
+
# clean up our mess
-END {
- if (my $dbh = eval { $schema->storage->_dbh }) {
+sub cleanup {
+ if (my $dbh = eval { $schema->storage->dbh }) {
$dbh->do('DROP TABLE track');
}
}
Modified: DBIx-Class/0.08/branches/filter_column/t/inflate/datetime_sybase.t
===================================================================
--- DBIx-Class/0.08/branches/filter_column/t/inflate/datetime_sybase.t 2010-05-05 09:50:25 UTC (rev 9306)
+++ DBIx-Class/0.08/branches/filter_column/t/inflate/datetime_sybase.t 2010-05-05 09:52:30 UTC (rev 9307)
@@ -70,7 +70,11 @@
->search({ trackid => $row->trackid }, { select => [$col] })
->first
);
- is( $row->$col, $dt, 'DateTime roundtrip' );
+ is( $row->$col, $dt, "$type roundtrip" );
+
+ is( $row->$col->nanosecond, $dt->nanosecond,
+ 'fractional DateTime portion roundtrip' )
+ if $dt->nanosecond > 0;
}
# test a computed datetime column
Added: DBIx-Class/0.08/branches/filter_column/t/lib/DBICNSTest/Result/D.pm
===================================================================
--- DBIx-Class/0.08/branches/filter_column/t/lib/DBICNSTest/Result/D.pm (rev 0)
+++ DBIx-Class/0.08/branches/filter_column/t/lib/DBICNSTest/Result/D.pm 2010-05-05 09:52:30 UTC (rev 9307)
@@ -0,0 +1,5 @@
+package DBICNSTest::Result::D;
+use base qw/DBIx::Class::Core/;
+__PACKAGE__->table('d');
+__PACKAGE__->add_columns('d');
+1;
Added: DBIx-Class/0.08/branches/filter_column/t/lib/DBICNSTest/ResultSet/D.pm
===================================================================
--- DBIx-Class/0.08/branches/filter_column/t/lib/DBICNSTest/ResultSet/D.pm (rev 0)
+++ DBIx-Class/0.08/branches/filter_column/t/lib/DBICNSTest/ResultSet/D.pm 2010-05-05 09:52:30 UTC (rev 9307)
@@ -0,0 +1,2 @@
+package DBICNSTest::ResultSet::D;
+1;
Modified: DBIx-Class/0.08/branches/filter_column/t/prefetch/grouped.t
===================================================================
--- DBIx-Class/0.08/branches/filter_column/t/prefetch/grouped.t 2010-05-05 09:50:25 UTC (rev 9306)
+++ DBIx-Class/0.08/branches/filter_column/t/prefetch/grouped.t 2010-05-05 09:52:30 UTC (rev 9307)
@@ -76,7 +76,7 @@
WHERE ( me.cd IN ( ?, ?, ?, ?, ? ) )
GROUP BY me.cd
)
- count_subq
+ me
)',
[ map { [ 'me.cd' => $_] } ($cd_rs->get_column ('cdid')->all) ],
'count() query generated expected SQL',
@@ -151,7 +151,7 @@
WHERE ( me.cdid IS NOT NULL )
GROUP BY me.cdid
LIMIT 2
- ) count_subq
+ ) me
)',
[],
'count() query generated expected SQL',
@@ -262,7 +262,7 @@
WHERE ( me.cd IN ( ?, ?, ?, ?, ? ) )
GROUP BY SUBSTR(me.cd, 1, 1)
)
- count_subq
+ me
)',
[ map { [ 'me.cd' => $_] } ($cd_rs->get_column ('cdid')->all) ],
'count() query generated expected SQL',
Added: DBIx-Class/0.08/branches/filter_column/t/sqlahacks/limit_dialects/rno.t
===================================================================
--- DBIx-Class/0.08/branches/filter_column/t/sqlahacks/limit_dialects/rno.t (rev 0)
+++ DBIx-Class/0.08/branches/filter_column/t/sqlahacks/limit_dialects/rno.t 2010-05-05 09:52:30 UTC (rev 9307)
@@ -0,0 +1,74 @@
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+use DBIC::SqlMakerTest;
+
+my $schema = DBICTest->init_schema;
+
+$schema->storage->_sql_maker->limit_dialect ('RowNumberOver');
+
+my $rs_selectas_col = $schema->resultset ('BooksInLibrary')->search ({}, {
+ '+select' => ['owner.name'],
+ '+as' => ['owner.name'],
+ join => 'owner',
+ rows => 1,
+});
+
+is_same_sql_bind(
+ $rs_selectas_col->as_query,
+ '(
+ SELECT id, source, owner, title, price,
+ owner__name
+ FROM (
+ SELECT id, source, owner, title, price,
+ owner__name,
+ ROW_NUMBER() OVER( ) AS rno__row__index
+ FROM (
+ SELECT me.id, me.source, me.owner, me.title, me.price,
+ owner.name AS owner__name
+ FROM books me
+ JOIN owners owner ON owner.id = me.owner
+ WHERE ( source = ? )
+ ) me
+ ) me
+ WHERE rno__row__index BETWEEN 1 AND 1
+ )',
+ [ [ 'source', 'Library' ] ],
+);
+
+$schema->storage->_sql_maker->quote_char ([qw/ [ ] /]);
+$schema->storage->_sql_maker->name_sep ('.');
+
+my $rs_selectas_rel = $schema->resultset ('BooksInLibrary')->search ({}, {
+ '+select' => ['owner.name'],
+ '+as' => ['owner_name'],
+ join => 'owner',
+ rows => 1,
+});
+
+is_same_sql_bind(
+ $rs_selectas_rel->as_query,
+ '(
+ SELECT [id], [source], [owner], [title], [price],
+ [owner_name]
+ FROM (
+ SELECT [id], [source], [owner], [title], [price],
+ [owner_name],
+ ROW_NUMBER() OVER( ) AS [rno__row__index]
+ FROM (
+ SELECT [me].[id], [me].[source], [me].[owner], [me].[title], [me].[price],
+ [owner].[name] AS [owner_name]
+ FROM [books] [me]
+ JOIN [owners] [owner] ON [owner].[id] = [me].[owner]
+ WHERE ( [source] = ? )
+ ) [me]
+ ) [me]
+ WHERE [rno__row__index] BETWEEN 1 AND 1
+ )',
+ [ [ 'source', 'Library' ] ],
+);
+
+done_testing;
Copied: DBIx-Class/0.08/branches/filter_column/t/sqlahacks/limit_dialects/rownum.t (from rev 9134, DBIx-Class/0.08/branches/filter_column/t/41orrible.t)
===================================================================
--- DBIx-Class/0.08/branches/filter_column/t/sqlahacks/limit_dialects/rownum.t (rev 0)
+++ DBIx-Class/0.08/branches/filter_column/t/sqlahacks/limit_dialects/rownum.t 2010-05-05 09:52:30 UTC (rev 9307)
@@ -0,0 +1,35 @@
+use strict;
+use warnings;
+
+use Test::More;
+
+use lib qw(t/lib);
+use DBICTest;
+use DBIC::SqlMakerTest;
+
+my $s = DBICTest->init_schema (no_deploy => 1, );
+$s->storage->sql_maker->limit_dialect ('RowNum');
+
+my $rs = $s->resultset ('CD');
+
+is_same_sql_bind (
+ $rs->search ({}, { rows => 1, offset => 3,columns => [
+ { id => 'foo.id' },
+ { 'bar.id' => 'bar.id' },
+ { bleh => \ 'TO_CHAR (foo.womble, "blah")' },
+ ]})->as_query,
+ '(SELECT id, bar__id, bleh
+ FROM (
+ SELECT id, bar__id, bleh, ROWNUM rownum__index
+ FROM (
+ SELECT foo.id AS id, bar.id AS bar__id, TO_CHAR(foo.womble, "blah") AS bleh
+ FROM cd me
+ ) me
+ ) me
+ WHERE rownum__index BETWEEN 4 AND 4
+ )',
+ [],
+ 'Rownum subsel aliasing works correctly'
+);
+
+done_testing;
Modified: DBIx-Class/0.08/branches/filter_column/t/sqlahacks/limit_dialects/toplimit.t
===================================================================
--- DBIx-Class/0.08/branches/filter_column/t/sqlahacks/limit_dialects/toplimit.t 2010-05-05 09:50:25 UTC (rev 9306)
+++ DBIx-Class/0.08/branches/filter_column/t/sqlahacks/limit_dialects/toplimit.t 2010-05-05 09:52:30 UTC (rev 9307)
@@ -14,139 +14,184 @@
delete $schema->storage->_sql_maker->{_cached_syntax};
$schema->storage->_sql_maker->limit_dialect ('Top');
-my $rs = $schema->resultset ('BooksInLibrary')->search ({}, { prefetch => 'owner', rows => 1, offset => 3 });
+my $books_45_and_owners = $schema->resultset ('BooksInLibrary')->search ({}, { prefetch => 'owner', rows => 2, offset => 3 });
-sub default_test_order {
- my $order_by = shift;
- is_same_sql_bind(
- $rs->search ({}, {order_by => $order_by})->as_query,
- "(SELECT
- TOP 1 me__id, source, owner, title, price, owner__id, name FROM
- (SELECT
- TOP 4 me.id AS me__id, me.source, me.owner, me.title, me.price, owner.id AS owner__id, owner.name
- FROM books me
- JOIN owners owner ON
- owner.id = me.owner
- WHERE ( source = ? )
- ORDER BY me__id ASC
- ) me ORDER BY me__id DESC
- )",
+for my $null_order (
+ undef,
+ '',
+ {},
+ [],
+ [{}],
+) {
+ my $rs = $books_45_and_owners->search ({}, {order_by => $null_order });
+ is_same_sql_bind(
+ $rs->as_query,
+ '(SELECT TOP 2
+ id, source, owner, title, price, owner__id, owner__name
+ FROM (
+ SELECT TOP 5
+ me.id, me.source, me.owner, me.title, me.price, owner.id AS owner__id, owner.name AS owner__name
+ FROM books me
+ JOIN owners owner ON owner.id = me.owner
+ WHERE ( source = ? )
+ ORDER BY me.id
+ ) me
+ ORDER BY me.id DESC
+ )',
[ [ source => 'Library' ] ],
);
}
-sub test_order {
- my $args = shift;
- my $req_order = $args->{order_req}
- ? "ORDER BY $args->{order_req}"
- : ''
- ;
-
- is_same_sql_bind(
- $rs->search ({}, {order_by => $args->{order_by}})->as_query,
- "(SELECT
- me__id, source, owner, title, price, owner__id, name FROM
- (SELECT
- TOP 1 me__id, source, owner, title, price, owner__id, name FROM
- (SELECT
- TOP 4 me.id AS me__id, me.source, me.owner, me.title, me.price, owner.id AS owner__id, owner.name FROM
- books me
- JOIN owners owner ON owner.id = me.owner
- WHERE ( source = ? )
- ORDER BY $args->{order_inner}
- ) me ORDER BY $args->{order_outer}
- ) me $req_order
- )",
- [ [ source => 'Library' ] ],
- );
-}
-
-my @tests = (
+for my $ord_set (
{
order_by => \'foo DESC',
- order_req => 'foo DESC',
order_inner => 'foo DESC',
- order_outer => 'foo ASC'
+ order_outer => 'ORDER__BY__1 ASC',
+ order_req => 'ORDER__BY__1 DESC',
+ exselect_outer => 'ORDER__BY__1',
+ exselect_inner => 'foo AS ORDER__BY__1',
},
{
order_by => { -asc => 'foo' },
- order_req => 'foo ASC',
order_inner => 'foo ASC',
- order_outer => 'foo DESC',
+ order_outer => 'ORDER__BY__1 DESC',
+ order_req => 'ORDER__BY__1 ASC',
+ exselect_outer => 'ORDER__BY__1',
+ exselect_inner => 'foo AS ORDER__BY__1',
},
{
- order_by => 'foo',
- order_req => 'foo',
- order_inner => 'foo ASC',
- order_outer => 'foo DESC',
+ order_by => { -desc => 'foo' },
+ order_inner => 'foo DESC',
+ order_outer => 'ORDER__BY__1 ASC',
+ order_req => 'ORDER__BY__1 DESC',
+ exselect_outer => 'ORDER__BY__1',
+ exselect_inner => 'foo AS ORDER__BY__1',
},
{
- order_by => [ qw{ foo bar} ],
- order_req => 'foo, bar',
- order_inner => 'foo ASC, bar ASC',
- order_outer => 'foo DESC, bar DESC',
+ order_by => 'foo',
+ order_inner => 'foo',
+ order_outer => 'ORDER__BY__1 DESC',
+ order_req => 'ORDER__BY__1',
+ exselect_outer => 'ORDER__BY__1',
+ exselect_inner => 'foo AS ORDER__BY__1',
},
{
- order_by => { -desc => 'foo' },
- order_req => 'foo DESC',
- order_inner => 'foo DESC',
- order_outer => 'foo ASC',
+ order_by => [ qw{ foo me.owner} ],
+ order_inner => 'foo, me.owner',
+ order_outer => 'ORDER__BY__1 DESC, me.owner DESC',
+ order_req => 'ORDER__BY__1, me.owner',
+ exselect_outer => 'ORDER__BY__1',
+ exselect_inner => 'foo AS ORDER__BY__1',
},
{
order_by => ['foo', { -desc => 'bar' } ],
- order_req => 'foo, bar DESC',
- order_inner => 'foo ASC, bar DESC',
- order_outer => 'foo DESC, bar ASC',
+ order_inner => 'foo, bar DESC',
+ order_outer => 'ORDER__BY__1 DESC, ORDER__BY__2 ASC',
+ order_req => 'ORDER__BY__1, ORDER__BY__2 DESC',
+ exselect_outer => 'ORDER__BY__1, ORDER__BY__2',
+ exselect_inner => 'foo AS ORDER__BY__1, bar AS ORDER__BY__2',
},
{
order_by => { -asc => [qw{ foo bar }] },
- order_req => 'foo ASC, bar ASC',
order_inner => 'foo ASC, bar ASC',
- order_outer => 'foo DESC, bar DESC',
+ order_outer => 'ORDER__BY__1 DESC, ORDER__BY__2 DESC',
+ order_req => 'ORDER__BY__1 ASC, ORDER__BY__2 ASC',
+ exselect_outer => 'ORDER__BY__1, ORDER__BY__2',
+ exselect_inner => 'foo AS ORDER__BY__1, bar AS ORDER__BY__2',
},
{
order_by => [
- { -asc => 'foo' },
+ 'foo',
{ -desc => [qw{bar}] },
- { -asc => [qw{hello sensors}]},
+ { -asc => [qw{me.owner sensors}]},
],
- order_req => 'foo ASC, bar DESC, hello ASC, sensors ASC',
- order_inner => 'foo ASC, bar DESC, hello ASC, sensors ASC',
- order_outer => 'foo DESC, bar ASC, hello DESC, sensors DESC',
+ order_inner => 'foo, bar DESC, me.owner ASC, sensors ASC',
+ order_outer => 'ORDER__BY__1 DESC, ORDER__BY__2 ASC, me.owner DESC, ORDER__BY__3 DESC',
+ order_req => 'ORDER__BY__1, ORDER__BY__2 DESC, me.owner ASC, ORDER__BY__3 ASC',
+ exselect_outer => 'ORDER__BY__1, ORDER__BY__2, ORDER__BY__3',
+ exselect_inner => 'foo AS ORDER__BY__1, bar AS ORDER__BY__2, sensors AS ORDER__BY__3',
},
-);
+) {
+ my $o_sel = $ord_set->{exselect_outer}
+ ? ', ' . $ord_set->{exselect_outer}
+ : ''
+ ;
+ my $i_sel = $ord_set->{exselect_inner}
+ ? ', ' . $ord_set->{exselect_inner}
+ : ''
+ ;
-my @default_tests = ( undef, '', {}, [] );
+ is_same_sql_bind(
+ $books_45_and_owners->search ({}, {order_by => $ord_set->{order_by}})->as_query,
+ "(SELECT TOP 2
+ id, source, owner, title, price, owner__id, owner__name
+ FROM (
+ SELECT TOP 2
+ id, source, owner, title, price, owner__id, owner__name$o_sel
+ FROM (
+ SELECT TOP 5
+ me.id, me.source, me.owner, me.title, me.price, owner.id AS owner__id, owner.name AS owner__name$i_sel
+ FROM books me
+ JOIN owners owner ON owner.id = me.owner
+ WHERE ( source = ? )
+ ORDER BY $ord_set->{order_inner}
+ ) me
+ ORDER BY $ord_set->{order_outer}
+ ) me
+ ORDER BY $ord_set->{order_req}
+ )",
+ [ [ source => 'Library' ] ],
+ );
+}
-plan (tests => scalar @tests + scalar @default_tests + 1);
-
-test_order ($_) for @tests;
-default_test_order ($_) for @default_tests;
-
-
+# with groupby
is_same_sql_bind (
- $rs->search ({}, { group_by => 'title', order_by => 'title' })->as_query,
-'(SELECT
-me.id, me.source, me.owner, me.title, me.price, owner.id, owner.name FROM
- ( SELECT
- id, source, owner, title, price FROM
- ( SELECT
- TOP 1 id, source, owner, title, price FROM
- ( SELECT
- TOP 4 me.id, me.source, me.owner, me.title, me.price FROM
- books me JOIN
- owners owner ON owner.id = me.owner
- WHERE ( source = ? )
- GROUP BY title
- ORDER BY title ASC
- ) me
- ORDER BY title DESC
+ $books_45_and_owners->search ({}, { group_by => 'title', order_by => 'title' })->as_query,
+ '(SELECT me.id, me.source, me.owner, me.title, me.price, owner.id, owner.name
+ FROM (
+ SELECT TOP 2 id, source, owner, title, price
+ FROM (
+ SELECT TOP 2
+ id, source, owner, title, price, ORDER__BY__1
+ FROM (
+ SELECT TOP 5
+ me.id, me.source, me.owner, me.title, me.price, title AS ORDER__BY__1
+ FROM books me
+ JOIN owners owner ON owner.id = me.owner
+ WHERE ( source = ? )
+ GROUP BY title
+ ORDER BY title
+ ) me
+ ORDER BY ORDER__BY__1 DESC
+ ) me
+ ORDER BY ORDER__BY__1
) me
- ORDER BY title
- ) me JOIN
- owners owner ON owner.id = me.owner WHERE
- ( source = ? )
- ORDER BY title)' ,
+ JOIN owners owner ON owner.id = me.owner
+ WHERE ( source = ? )
+ ORDER BY title
+ )',
[ [ source => 'Library' ], [ source => 'Library' ] ],
);
+
+# test deprecated column mixing over join boundaries
+my $rs_selectas_top = $schema->resultset ('BooksInLibrary')->search ({}, {
+ '+select' => ['owner.name'],
+ '+as' => ['owner_name'],
+ join => 'owner',
+ rows => 1
+});
+
+is_same_sql_bind( $rs_selectas_top->search({})->as_query,
+ '(SELECT
+ TOP 1 me.id, me.source, me.owner, me.title, me.price,
+ owner.name AS owner_name
+ FROM books me
+ JOIN owners owner ON owner.id = me.owner
+ WHERE ( source = ? )
+ ORDER BY me.id
+ )',
+ [ [ 'source', 'Library' ] ],
+ );
+
+done_testing;
Copied: DBIx-Class/0.08/branches/filter_column/t/sqlahacks/oraclejoin.t (from rev 9134, DBIx-Class/0.08/branches/filter_column/t/41orrible.t)
===================================================================
--- DBIx-Class/0.08/branches/filter_column/t/sqlahacks/oraclejoin.t (rev 0)
+++ DBIx-Class/0.08/branches/filter_column/t/sqlahacks/oraclejoin.t 2010-05-05 09:52:30 UTC (rev 9307)
@@ -0,0 +1,71 @@
+use strict;
+use warnings;
+
+use Test::More;
+
+use lib qw(t/lib);
+use DBIx::Class::SQLAHacks::OracleJoins;
+use DBICTest;
+use DBIC::SqlMakerTest;
+
+my $sa = new DBIx::Class::SQLAHacks::OracleJoins;
+
+# search with undefined or empty $cond
+
+# my ($self, $table, $fields, $where, $order, @rest) = @_;
+my ($sql, @bind) = $sa->select(
+ [
+ { me => "cd" },
+ [
+ { "-join_type" => "LEFT", artist => "artist" },
+ { "artist.artistid" => "me.artist" },
+ ],
+ ],
+ [ 'cd.cdid', 'cd.artist', 'cd.title', 'cd.year', 'artist.artistid', 'artist.name' ],
+ undef,
+ undef
+);
+is_same_sql_bind(
+ $sql, \@bind,
+ 'SELECT cd.cdid, cd.artist, cd.title, cd.year, artist.artistid, artist.name FROM cd me, artist artist WHERE ( artist.artistid(+) = me.artist )', [],
+ 'WhereJoins search with empty where clause'
+);
+
+($sql, @bind) = $sa->select(
+ [
+ { me => "cd" },
+ [
+ { "-join_type" => "", artist => "artist" },
+ { "artist.artistid" => "me.artist" },
+ ],
+ ],
+ [ 'cd.cdid', 'cd.artist', 'cd.title', 'cd.year', 'artist.artistid', 'artist.name' ],
+ { 'artist.artistid' => 3 },
+ undef
+);
+is_same_sql_bind(
+ $sql, \@bind,
+ 'SELECT cd.cdid, cd.artist, cd.title, cd.year, artist.artistid, artist.name FROM cd me, artist artist WHERE ( ( ( artist.artistid = me.artist ) AND ( artist.artistid = ? ) ) )', [3],
+ 'WhereJoins search with where clause'
+);
+
+($sql, @bind) = $sa->select(
+ [
+ { me => "cd" },
+ [
+ { "-join_type" => "LEFT", artist => "artist" },
+ { "artist.artistid" => "me.artist" },
+ ],
+ ],
+ [ 'cd.cdid', 'cd.artist', 'cd.title', 'cd.year', 'artist.artistid', 'artist.name' ],
+ [{ 'artist.artistid' => 3 }, { 'me.cdid' => 5 }],
+ undef
+);
+is_same_sql_bind(
+ $sql, \@bind,
+ 'SELECT cd.cdid, cd.artist, cd.title, cd.year, artist.artistid, artist.name FROM cd me, artist artist WHERE ( ( ( artist.artistid(+) = me.artist ) AND ( ( ( artist.artistid = ? ) OR ( me.cdid = ? ) ) ) ) )', [3, 5],
+ 'WhereJoins search with or in where clause'
+);
+
+done_testing;
+
Modified: DBIx-Class/0.08/branches/filter_column/t/sqlahacks/sql_maker/sql_maker_quote.t
===================================================================
--- DBIx-Class/0.08/branches/filter_column/t/sqlahacks/sql_maker/sql_maker_quote.t 2010-05-05 09:50:25 UTC (rev 9306)
+++ DBIx-Class/0.08/branches/filter_column/t/sqlahacks/sql_maker/sql_maker_quote.t 2010-05-05 09:52:30 UTC (rev 9307)
@@ -48,7 +48,7 @@
'artist.name' => 'Caterwauler McCrae',
'me.year' => 2001
},
- [],
+ {},
undef,
undef
);
@@ -80,7 +80,7 @@
'me.year'
],
undef,
- 'year DESC',
+ { order_by => 'year DESC' },
undef,
undef
);
@@ -105,10 +105,10 @@
'me.year'
],
undef,
- [
+ { order_by => [
'year DESC',
'title ASC'
- ],
+ ]},
undef,
undef
);
@@ -133,7 +133,7 @@
'me.year'
],
undef,
- { -desc => 'year' },
+ { order_by => { -desc => 'year' } },
undef,
undef
);
@@ -158,10 +158,10 @@
'me.year'
],
undef,
- [
+ { order_by => [
{ -desc => 'year' },
- { -asc => 'title' }
- ],
+ { -asc => 'title' },
+ ]},
undef,
undef
);
@@ -188,7 +188,7 @@
'me.year'
],
undef,
- \'year DESC',
+ { order_by => \'year DESC' },
undef,
undef
);
@@ -213,10 +213,10 @@
'me.year'
],
undef,
- [
+ { order_by => [
\'year DESC',
\'title ASC'
- ],
+ ]},
undef,
undef
);
@@ -283,9 +283,9 @@
'me.*'
],
undef,
- [],
undef,
- undef
+ undef,
+ undef,
);
is_same_sql_bind(
@@ -328,9 +328,9 @@
'artist.name' => 'Caterwauler McCrae',
'me.year' => 2001
},
- [],
undef,
- undef
+ undef,
+ undef,
);
is_same_sql_bind(
Added: DBIx-Class/0.08/branches/filter_column/t/storage/dbi_env.t
===================================================================
--- DBIx-Class/0.08/branches/filter_column/t/storage/dbi_env.t (rev 0)
+++ DBIx-Class/0.08/branches/filter_column/t/storage/dbi_env.t 2010-05-05 09:52:30 UTC (rev 9307)
@@ -0,0 +1,90 @@
+#!/usr/bin/perl
+use strict;
+use warnings;
+use lib qw(t/lib);
+use DBICTest;
+use Test::More;
+use Test::Exception;
+
+BEGIN { delete @ENV{qw(DBI_DSN DBI_DRIVER)} }
+
+my $schema;
+
+DBICTest->init_schema(sqlite_use_file => 1);
+
+my $dbname = DBICTest->_sqlite_dbname(sqlite_use_file => 1);
+
+sub count_sheep {
+ my $schema = shift;
+ scalar $schema->resultset('Artist')->search( { name => "Exploding Sheep" } )
+ ->all;
+}
+
+$schema = DBICTest::Schema->connect("dbi::$dbname");
+throws_ok { count_sheep($schema) } qr{I can't work out what driver to use},
+ 'Driver in DSN empty';
+isa_ok $schema->storage, 'DBIx::Class::Storage::DBI';
+
+$schema = DBICTest::Schema->connect("dbi:Test_NonExistant_DBD:$dbname");
+throws_ok { count_sheep($schema) }
+ qr{Can't locate DBD/Test_NonExistant_DBD\.pm in \@INC},
+ "Driver class doesn't exist";
+isa_ok $schema->storage, 'DBIx::Class::Storage::DBI';
+
+$ENV{DBI_DSN} = "dbi::$dbname";
+$schema = DBICTest::Schema->connect;
+throws_ok { count_sheep($schema) } qr{I can't work out what driver to use},
+ "Driver class not defined in DBI_DSN either.";
+isa_ok $schema->storage, 'DBIx::Class::Storage::DBI';
+
+$ENV{DBI_DSN} = "dbi:Test_NonExistant_DBD2:$dbname";
+$schema = DBICTest::Schema->connect;
+throws_ok { count_sheep($schema) }
+ qr{Can't locate DBD/Test_NonExistant_DBD2\.pm in \@INC},
+ "Driver class defined in DBI_DSN doesn't exist";
+isa_ok $schema->storage, 'DBIx::Class::Storage::DBI';
+
+$ENV{DBI_DSN} = "dbi::$dbname";
+$ENV{DBI_DRIVER} = 'Test_NonExistant_DBD3';
+$schema = DBICTest::Schema->connect;
+throws_ok { count_sheep($schema) }
+ qr{Can't locate DBD/Test_NonExistant_DBD3\.pm in \@INC},
+ "Driver class defined in DBI_DRIVER doesn't exist";
+isa_ok $schema->storage, 'DBIx::Class::Storage::DBI';
+
+$ENV{DBI_DSN} = "dbi:Test_NonExistant_DBD4:$dbname";
+$schema = DBICTest::Schema->connect;
+throws_ok { count_sheep($schema) }
+qr{Can't locate DBD/Test_NonExistant_DBD4\.pm in \@INC},
+ "Driver class defined in DBI_DSN doesn't exist";
+isa_ok $schema->storage, 'DBIx::Class::Storage::DBI';
+
+delete @ENV{qw(DBI_DSN DBI_DRIVER)};
+
+$schema = DBICTest::Schema->connect("dbi:SQLite:$dbname");
+lives_ok { count_sheep($schema) } 'SQLite passed to connect_info';
+isa_ok $schema->storage, 'DBIx::Class::Storage::DBI::SQLite';
+
+$ENV{DBI_DRIVER} = 'SQLite';
+$schema = DBICTest::Schema->connect("dbi::$dbname");
+lives_ok { count_sheep($schema) } 'SQLite in DBI_DRIVER';
+isa_ok $schema->storage, 'DBIx::Class::Storage::DBI::SQLite';
+
+delete $ENV{DBI_DRIVER};
+$ENV{DBI_DSN} = "dbi:SQLite:$dbname";
+$schema = DBICTest::Schema->connect;
+lives_ok { count_sheep($schema) } 'SQLite in DBI_DSN';
+isa_ok $schema->storage, 'DBIx::Class::Storage::DBI::SQLite';
+
+$ENV{DBI_DRIVER} = 'SQLite';
+$schema = DBICTest::Schema->connect;
+lives_ok { count_sheep($schema) } 'SQLite in DBI_DSN (and DBI_DRIVER)';
+isa_ok $schema->storage, 'DBIx::Class::Storage::DBI::SQLite';
+
+$ENV{DBI_DSN} = "dbi::$dbname";
+$ENV{DBI_DRIVER} = 'SQLite';
+$schema = DBICTest::Schema->connect;
+lives_ok { count_sheep($schema) } 'SQLite in DBI_DRIVER (not DBI_DSN)';
+isa_ok $schema->storage, 'DBIx::Class::Storage::DBI::SQLite';
+
+done_testing;
Added: DBIx-Class/0.08/branches/filter_column/t/storage/deploy.t
===================================================================
--- DBIx-Class/0.08/branches/filter_column/t/storage/deploy.t (rev 0)
+++ DBIx-Class/0.08/branches/filter_column/t/storage/deploy.t 2010-05-05 09:52:30 UTC (rev 9307)
@@ -0,0 +1,32 @@
+use strict;
+use warnings;
+
+use Test::More;
+
+use lib qw(t/lib);
+use DBICTest;
+
+use File::Spec;
+use File::Path qw/ mkpath rmtree /;
+
+
+my $schema = DBICTest->init_schema();
+
+my $var = File::Spec->catfile(qw| t var create_ddl_dir |);
+-d $var
+ or mkpath($var)
+ or die "can't create $var";
+
+my $test_dir_1 = File::Spec->catdir( $var, 'test1', 'foo', 'bar' );
+rmtree( $test_dir_1 ) if -d $test_dir_1;
+$schema->create_ddl_dir( undef, undef, $test_dir_1 );
+
+ok( -d $test_dir_1, 'create_ddl_dir did a mkpath on its target dir' );
+ok( scalar( glob $test_dir_1.'/*.sql' ), 'there are sql files in there' );
+
+TODO: {
+ local $TODO = 'we should probably add some tests here for actual deployability of the DDL?';
+ ok( 0 );
+}
+
+done_testing;
Added: DBIx-Class/0.08/branches/filter_column/t/storage/global_destruction.t
===================================================================
--- DBIx-Class/0.08/branches/filter_column/t/storage/global_destruction.t (rev 0)
+++ DBIx-Class/0.08/branches/filter_column/t/storage/global_destruction.t 2010-05-05 09:52:30 UTC (rev 9307)
@@ -0,0 +1,57 @@
+use strict;
+use warnings;
+
+use Test::More;
+use Test::Exception;
+
+use lib qw(t/lib);
+use DBICTest;
+
+for my $type (qw/PG MYSQL/) {
+
+ SKIP: {
+ skip "Skipping $type tests without DBICTEST_${type}_DSN", 1
+ unless $ENV{"DBICTEST_${type}_DSN"};
+
+ my $schema = DBICTest::Schema->connect (@ENV{map { "DBICTEST_${type}_${_}" } qw/DSN USER PASS/});
+
+ # emulate a singleton-factory, just cache the object *somewhere in a different package*
+ # to induce out-of-order destruction
+ $DBICTest::FakeSchemaFactory::schema = $schema;
+
+ # so we can see the retry exceptions (if any)
+ $ENV{DBIC_DBIRETRY_DEBUG} = 1;
+
+ ok (!$schema->storage->connected, "$type: start disconnected");
+
+ lives_ok (sub {
+ $schema->txn_do (sub {
+
+ ok ($schema->storage->connected, "$type: transaction starts connected");
+
+ my $pid = fork();
+ SKIP: {
+ skip "Fork failed: $!", 1 if (! defined $pid);
+
+ if ($pid) {
+ note "Parent $$ sleeping...";
+ wait();
+ note "Parent $$ woken up after child $pid exit";
+ }
+ else {
+ note "Child $$ terminating";
+ exit 0;
+ }
+
+ ok ($schema->storage->connected, "$type: parent still connected (in txn_do)");
+ }
+ });
+ });
+
+ ok ($schema->storage->connected, "$type: parent still connected (outside of txn_do)");
+
+ undef $DBICTest::FakeSchemaFactory::schema;
+ }
+}
+
+done_testing;
More information about the Bast-commits
mailing list