[Bast-commits] r8568 - in DBIx-Class/0.08/branches/prefetch: .
lib/DBIx lib/DBIx/Class lib/DBIx/Class/InflateColumn
lib/DBIx/Class/Relationship lib/DBIx/Class/Schema
lib/DBIx/Class/Storage lib/DBIx/Class/Storage/DBI
lib/DBIx/Class/Storage/DBI/ODBC lib/DBIx/Class/Storage/DBI/Oracle
lib/DBIx/Class/Storage/DBI/Sybase maint t t/bind t/cdbi
t/delete t/inflate t/lib t/lib/DBICTest/Schema t/prefetch
t/search t/sqlahacks/limit_dialects t/storage
ribasushi at dev.catalyst.perl.org
Sat Feb 6 01:55:26 GMT 2010
Author: ribasushi
Date: 2010-02-06 01:55:26 +0000 (Sat, 06 Feb 2010)
New Revision: 8568
Added:
DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI/Informix.pm
DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI/ODBC/SQL_Anywhere.pm
DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI/SQLAnywhere.pm
DBIx-Class/0.08/branches/prefetch/t/748informix.t
DBIx-Class/0.08/branches/prefetch/t/749sybase_asa.t
DBIx-Class/0.08/branches/prefetch/t/delete/complex.t
DBIx-Class/0.08/branches/prefetch/t/inflate/datetime_sybase_asa.t
DBIx-Class/0.08/branches/prefetch/t/lib/DBICTest/Schema/ComputedColumn.pm
Modified:
DBIx-Class/0.08/branches/prefetch/
DBIx-Class/0.08/branches/prefetch/Changes
DBIx-Class/0.08/branches/prefetch/Makefile.PL
DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class.pm
DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Componentised.pm
DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Core.pm
DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/InflateColumn/DateTime.pm
DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Relationship/Base.pm
DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/ResultSet.pm
DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/ResultSetColumn.pm
DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/SQLAHacks.pm
DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Schema/Versioned.pm
DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI.pm
DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI/MSSQL.pm
DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI/Oracle/Generic.pm
DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI/Replicated.pm
DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI/Sybase/ASE.pm
DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBIHacks.pm
DBIx-Class/0.08/branches/prefetch/maint/gen-schema.pl
DBIx-Class/0.08/branches/prefetch/t/73oracle.t
DBIx-Class/0.08/branches/prefetch/t/745db2.t
DBIx-Class/0.08/branches/prefetch/t/746mssql.t
DBIx-Class/0.08/branches/prefetch/t/746sybase.t
DBIx-Class/0.08/branches/prefetch/t/85utf8.t
DBIx-Class/0.08/branches/prefetch/t/88result_set_column.t
DBIx-Class/0.08/branches/prefetch/t/93autocast.t
DBIx-Class/0.08/branches/prefetch/t/98savepoints.t
DBIx-Class/0.08/branches/prefetch/t/bind/attribute.t
DBIx-Class/0.08/branches/prefetch/t/cdbi/22-deflate_order.t
DBIx-Class/0.08/branches/prefetch/t/from_subquery.t
DBIx-Class/0.08/branches/prefetch/t/inflate/datetime_sybase.t
DBIx-Class/0.08/branches/prefetch/t/lib/sqlite.sql
DBIx-Class/0.08/branches/prefetch/t/prefetch/grouped.t
DBIx-Class/0.08/branches/prefetch/t/search/subquery.t
DBIx-Class/0.08/branches/prefetch/t/sqlahacks/limit_dialects/toplimit.t
DBIx-Class/0.08/branches/prefetch/t/storage/debug.t
Log:
r8466 at Thesaurus (orig r8453): ribasushi | 2010-01-27 12:33:33 +0100
DSNs cannot be empty
r8471 at Thesaurus (orig r8458): frew | 2010-01-27 21:38:42 +0100
fix silly multipk bug
r8472 at Thesaurus (orig r8459): ribasushi | 2010-01-28 11:13:16 +0100
Consolidate insert_bulk guards (and make them show up correctly in the trace)
r8473 at Thesaurus (orig r8460): ribasushi | 2010-01-28 11:28:30 +0100
Fix bogus test DDL
r8480 at Thesaurus (orig r8467): ribasushi | 2010-01-28 22:11:59 +0100
r8381 at Thesaurus (orig r8368): moses | 2010-01-18 16:41:38 +0100
Test commit
r8425 at Thesaurus (orig r8412): ribasushi | 2010-01-22 11:25:01 +0100
Informix test + cleanups
r8428 at Thesaurus (orig r8415): ribasushi | 2010-01-22 11:59:25 +0100
Initial informix support
r8482 at Thesaurus (orig r8469): ribasushi | 2010-01-28 22:19:23 +0100
Informix changes
r8483 at Thesaurus (orig r8470): ribasushi | 2010-01-29 12:01:41 +0100
Require non-warning-spewing MooseX::Types
r8484 at Thesaurus (orig r8471): ribasushi | 2010-01-29 12:15:15 +0100
Enhance warning test a bit (seems to fail on 5.8)
r8485 at Thesaurus (orig r8472): ribasushi | 2010-01-29 13:00:54 +0100
Fugly 5.8 workaround
r8494 at Thesaurus (orig r8481): frew | 2010-01-31 06:47:42 +0100
cleanup (3 arg open, 1 grep instead of 3)
r8496 at Thesaurus (orig r8483): ribasushi | 2010-01-31 10:04:43 +0100
better skip message
r8510 at Thesaurus (orig r8497): caelum | 2010-02-01 12:07:13 +0100
throw exception on attempt to insert a blob with DBD::Oracle == 1.23
r8511 at Thesaurus (orig r8498): caelum | 2010-02-01 12:12:48 +0100
add RT link for Oracle blob bug in DBD::Oracle == 1.23
r8527 at Thesaurus (orig r8514): caelum | 2010-02-02 23:20:17 +0100
r22968 at hlagh (orig r8502): caelum | 2010-02-02 05:30:47 -0500
branch to support Sybase SQL Anywhere
r22971 at hlagh (orig r8505): caelum | 2010-02-02 07:21:13 -0500
ASA last_insert_id and limit support, still needs BLOB support
r22972 at hlagh (orig r8506): caelum | 2010-02-02 08:33:57 -0500
deref table name if needed, check all columns for identity column not just PK
r22973 at hlagh (orig r8507): caelum | 2010-02-02 08:48:11 -0500
test blobs, they work, didn't have to do anything
r22974 at hlagh (orig r8508): caelum | 2010-02-02 09:15:44 -0500
fix stupid identity bug, test empty insert (works), test DTs (not working yet)
r22976 at hlagh (orig r8510): caelum | 2010-02-02 14:31:00 -0500
rename ::Sybase::ASA to ::SQLAnywhere, per mst
r22978 at hlagh (orig r8512): caelum | 2010-02-02 17:02:29 -0500
DT inflation now works
r22979 at hlagh (orig r8513): caelum | 2010-02-02 17:18:06 -0500
minor POD update
r8528 at Thesaurus (orig r8515): caelum | 2010-02-02 23:23:26 +0100
r22895 at hlagh (orig r8473): caelum | 2010-01-30 03:57:26 -0500
branch to fix computed columns in Sybase ASE
r22911 at hlagh (orig r8489): caelum | 2010-01-31 07:18:33 -0500
empty insert into a Sybase table with computed columns and either data_type => undef or default_value => SCALARREF works now
r22912 at hlagh (orig r8490): caelum | 2010-01-31 07:39:32 -0500
add POD about computed columns and timestamps for Sybase
r22918 at hlagh (orig r8496): caelum | 2010-02-01 05:09:07 -0500
update POD about Schema::Loader for Sybase
r8531 at Thesaurus (orig r8518): ribasushi | 2010-02-02 23:57:27 +0100
r8512 at Thesaurus (orig r8499): boghead | 2010-02-01 23:38:13 +0100
- Creating a branch for adding _post_inflate_datetime and _pre_deflate_datetime to
InflateColumn::DateTime
r8513 at Thesaurus (orig r8500): boghead | 2010-02-01 23:42:14 +0100
- Add _post_inflate_datetime and _pre_deflate_datetime to InflateColumn::DateTime to allow
for modifying DateTime objects after inflation or before deflation.
r8524 at Thesaurus (orig r8511): boghead | 2010-02-02 22:59:28 +0100
- Simplify by moving the deprecated column_info {extra}{timezone} data to
{timezone} (and the same with locale)
r8533 at Thesaurus (orig r8520): caelum | 2010-02-03 05:19:59 +0100
support for Sybase SQL Anywhere through ODBC
r8536 at Thesaurus (orig r8523): ribasushi | 2010-02-03 08:27:54 +0100
Changes
r8537 at Thesaurus (orig r8524): ribasushi | 2010-02-03 08:31:20 +0100
Quote fail
r8538 at Thesaurus (orig r8525): caelum | 2010-02-03 13:21:37 +0100
test DT inflation for Sybase SQL Anywhere over ODBC too
r8539 at Thesaurus (orig r8526): caelum | 2010-02-03 17:36:39 +0100
minor code cleanup for SQL Anywhere last_insert_id
r8540 at Thesaurus (orig r8527): ribasushi | 2010-02-04 11:28:33 +0100
Fix bug reported by tommyt
r8548 at Thesaurus (orig r8535): ribasushi | 2010-02-04 14:34:45 +0100
Prepare for new SQLA release
r8560 at Thesaurus (orig r8547): ribasushi | 2010-02-05 08:59:04 +0100
Refactor some evil code
r8565 at Thesaurus (orig r8552): ribasushi | 2010-02-05 17:00:12 +0100
Looks like RSC is finally (halfway) fixed
r8566 at Thesaurus (orig r8553): ribasushi | 2010-02-05 17:07:13 +0100
RSC subquery cannot include the prefetch
r8567 at Thesaurus (orig r8554): ribasushi | 2010-02-05 17:10:29 +0100
Fix typo and borked test
r8569 at Thesaurus (orig r8556): ribasushi | 2010-02-05 17:33:12 +0100
Release 0.08116
r8571 at Thesaurus (orig r8558): ribasushi | 2010-02-05 18:01:33 +0100
No idea how I missed all these fails...
r8572 at Thesaurus (orig r8559): ribasushi | 2010-02-05 18:13:34 +0100
Release 0.08117
r8574 at Thesaurus (orig r8561): ribasushi | 2010-02-05 18:51:12 +0100
Try to distinguish trunk from official versions
r8580 at Thesaurus (orig r8567): gshank | 2010-02-05 22:29:24 +0100
add doc on 'where' attribute
Property changes on: DBIx-Class/0.08/branches/prefetch
___________________________________________________________________
Name: svk:merge
- 168d5346-440b-0410-b799-f706be625ff1:/DBIx-Class-current:2207
462d4d0c-b505-0410-bf8e-ce8f877b3390:/local/bast/DBIx-Class:3159
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/cookbook_fixes:7657
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/prefetch_bug-unqualified_column_in_search_related_cond:7959
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/resultsetcolumn_custom_columns:5160
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/sqla_1.50_compat:5414
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/void_populate_resultset_cond:7935
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/trunk:7982
9c88509d-e914-0410-b01c-b9530614cbfe:/local/DBIx-Class:32260
9c88509d-e914-0410-b01c-b9530614cbfe:/local/DBIx-Class-CDBICompat:54993
9c88509d-e914-0410-b01c-b9530614cbfe:/vendor/DBIx-Class:31122
ab17426e-7cd3-4704-a2a2-80b7c0a611bb:/local/dbic_column_attr:10946
ab17426e-7cd3-4704-a2a2-80b7c0a611bb:/local/dbic_trunk:11788
bd5ac9a7-f185-4d95-9186-dbb8b392a572:/local/os/bast/DBIx-Class/0.08/trunk:2798
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/_abandoned_but_possibly_useful/table_name_ref:7266
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/ado_mssql:7886
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/autocast:7418
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/belongs_to_null_col_fix:5244
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/cdbicompat_integration:4160
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/column_attr:5074
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/complex_join_rels:4589
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/connect_info_hash:7435
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/connected_schema_leak:8264
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/cookbook_fixes:7479
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/count_distinct:6218
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/count_rs:6741
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/create_scalarref_rt51559:8027
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/diamond_relationships:6310
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/discard_changes_replication_fix:7252
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/file_column:3920
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/fix-update-and-delete-as_query:6162
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/get_inflated_columns_rt46953:7964
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/grouped_has_many_join:7382
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/grouped_prefetch:6885
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/is_resultset_paginated:7769
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/joined_count:6323
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mc_fixes:6645
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_limit_regression:8278
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_money_type:7096
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_rno_pagination:8054
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_storage_minor_refactor:7210
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_top_fixes:6971
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/multi_stuff:5565
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/multicreate_fixes:7275
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/multiple_version_upgrade:8429
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mysql_ansi:7175
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mystery_join:6589
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/new_replication_transaction_fixup:7058
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/no_duplicate_indexes_for_pk_cols:8373
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/normalize_connect_info:8274
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/null_column_regression:8314
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/on_connect_call:6854
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/on_disconnect_do:3694
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle-tweaks:6222
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle_sequence:4173
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle_shorten_aliases:8234
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/order_by_refactor:6475
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/parser_fk_index:4485
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/pg_unqualified_schema:7842
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch-group_by:7917
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_bug-unqualified_column_in_search_related_cond:7900
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_limit:6724
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_pager:8431
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_redux:7206
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/reduce_pings:7261
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/replication_dedux:4600
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/rsrc_in_storage:6577
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/rt_bug_41083:5437
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/savepoints:4223
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/search_related_prefetch:6818
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sqla_1.50_compat:5321
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sqlt_parser_view:8145
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/storage-ms-access:4142
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/storage-tweaks:6262
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/subclassed_rsset:5930
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/subquery:5617
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/syb_connected:6919
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase:7682
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_bulk_insert:7679
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_bulkinsert_support:7796
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_mssql:6125
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_refactor:7940
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_support:7797
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/table_name_ref:7132
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/top_limit_altfix:6429
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/type_aware_update:6619
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/unresolvable_prefetch:6949
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/versioned_enhancements:4125
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/versioning:4578
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/view_rels:7908
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/views:5585
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/void_populate_resultset_cond:7944
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/tags/0.08108_prerelease_please_do_not_pull_into_it:7008
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/tags/pre_0.08109_please_do_not_merge:7336
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/trunk:8450
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-C3:318
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-current:2222
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-joins:173
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-resultset:570
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/datetime:1716
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/find_compat:1855
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/find_unique_query_fixes:2142
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/inflate:1988
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/many_to_many:2025
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/re_refactor_bugfix:1944
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/reorganize_tests:1827
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset-new-refactor:1766
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset_2_electric_boogaloo:2175
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset_cleanup:2102
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/sqlt_tests_refactor:2043
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/trunk/DBIx-Class:3606
fe160bb6-dc1c-0410-9f2b-d64a711b54a5:/local/DBIC-trunk-0.08:10510
+ 168d5346-440b-0410-b799-f706be625ff1:/DBIx-Class-current:2207
462d4d0c-b505-0410-bf8e-ce8f877b3390:/local/bast/DBIx-Class:3159
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/cookbook_fixes:7657
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/prefetch_bug-unqualified_column_in_search_related_cond:7959
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/resultsetcolumn_custom_columns:5160
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/sqla_1.50_compat:5414
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/branches/void_populate_resultset_cond:7935
4d5fae46-8e6a-4e08-abee-817e9fb894a2:/local/bast/DBIx-Class/0.08/trunk:7982
9c88509d-e914-0410-b01c-b9530614cbfe:/local/DBIx-Class:32260
9c88509d-e914-0410-b01c-b9530614cbfe:/local/DBIx-Class-CDBICompat:54993
9c88509d-e914-0410-b01c-b9530614cbfe:/vendor/DBIx-Class:31122
ab17426e-7cd3-4704-a2a2-80b7c0a611bb:/local/dbic_column_attr:10946
ab17426e-7cd3-4704-a2a2-80b7c0a611bb:/local/dbic_trunk:11788
bd5ac9a7-f185-4d95-9186-dbb8b392a572:/local/os/bast/DBIx-Class/0.08/trunk:2798
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/_abandoned_but_possibly_useful/table_name_ref:7266
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/ado_mssql:7886
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/autocast:7418
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/belongs_to_null_col_fix:5244
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/cdbicompat_integration:4160
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/column_attr:5074
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/complex_join_rels:4589
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/connect_info_hash:7435
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/connected_schema_leak:8264
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/cookbook_fixes:7479
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/count_distinct:6218
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/count_rs:6741
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/create_scalarref_rt51559:8027
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/diamond_relationships:6310
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/discard_changes_replication_fix:7252
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/file_column:3920
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/fix-update-and-delete-as_query:6162
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/get_inflated_columns_rt46953:7964
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/grouped_has_many_join:7382
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/grouped_prefetch:6885
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/ic_dt_post_inflate:8517
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/informix:8434
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/is_resultset_paginated:7769
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/joined_count:6323
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mc_fixes:6645
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_limit_regression:8278
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_money_type:7096
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_rno_pagination:8054
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_storage_minor_refactor:7210
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mssql_top_fixes:6971
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/multi_stuff:5565
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/multicreate_fixes:7275
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/multiple_version_upgrade:8429
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mysql_ansi:7175
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/mystery_join:6589
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/new_replication_transaction_fixup:7058
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/no_duplicate_indexes_for_pk_cols:8373
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/normalize_connect_info:8274
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/null_column_regression:8314
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/on_connect_call:6854
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/on_disconnect_do:3694
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle-tweaks:6222
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle_sequence:4173
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/oracle_shorten_aliases:8234
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/order_by_refactor:6475
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/parser_fk_index:4485
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/pg_unqualified_schema:7842
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch-group_by:7917
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_bug-unqualified_column_in_search_related_cond:7900
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_limit:6724
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_pager:8431
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/prefetch_redux:7206
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/reduce_pings:7261
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/replication_dedux:4600
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/rsrc_in_storage:6577
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/rt_bug_41083:5437
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/savepoints:4223
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/search_related_prefetch:6818
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sqla_1.50_compat:5321
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sqlt_parser_view:8145
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/storage-ms-access:4142
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/storage-tweaks:6262
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/subclassed_rsset:5930
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/subquery:5617
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/syb_connected:6919
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase:7682
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_asa:8513
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_bulk_insert:7679
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_bulkinsert_support:7796
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_computed_columns:8496
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_mssql:6125
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_refactor:7940
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/sybase_support:7797
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/table_name_ref:7132
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/top_limit_altfix:6429
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/type_aware_update:6619
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/unresolvable_prefetch:6949
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/versioned_enhancements:4125
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/versioning:4578
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/view_rels:7908
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/views:5585
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/branches/void_populate_resultset_cond:7944
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/tags/0.08108_prerelease_please_do_not_pull_into_it:7008
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/tags/pre_0.08109_please_do_not_merge:7336
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/DBIx-Class/0.08/trunk:8567
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-C3:318
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-current:2222
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-joins:173
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class-resultset:570
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/datetime:1716
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/find_compat:1855
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/find_unique_query_fixes:2142
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/inflate:1988
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/many_to_many:2025
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/re_refactor_bugfix:1944
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/reorganize_tests:1827
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset-new-refactor:1766
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset_2_electric_boogaloo:2175
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/resultset_cleanup:2102
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/branches/DBIx-Class/sqlt_tests_refactor:2043
bd8105ee-0ff8-0310-8827-fb3f25b6796d:/trunk/DBIx-Class:3606
fe160bb6-dc1c-0410-9f2b-d64a711b54a5:/local/DBIC-trunk-0.08:10510
Modified: DBIx-Class/0.08/branches/prefetch/Changes
===================================================================
--- DBIx-Class/0.08/branches/prefetch/Changes 2010-02-05 21:29:24 UTC (rev 8567)
+++ DBIx-Class/0.08/branches/prefetch/Changes 2010-02-06 01:55:26 UTC (rev 8568)
@@ -1,10 +1,13 @@
Revision history for DBIx::Class
+0.08117 2010-02-05 17:10:00 (UTC)
- Perl 5.8.1 is now the minimum supported version
- Massive optimization of the join resolution code - now joins
will be removed from the resulting SQL if DBIC can prove they
are not referenced by anything
- Subqueries no longer marked experimental
+ - Support for Informix RDBMS (limit/offset and auto-inc columns)
+ - Support for Sybase SQLAnywhere, both native and via ODBC
- might_have/has_one now warn if the calling class's column
has is_nullable set to true.
- Fixed regression in deploy() with a {sources} table limit applied
@@ -13,8 +16,12 @@
parsed by SQL::Translator::Parser::DBIx::Class
- Stop the SQLT parser from auto-adding indexes identical to the
Primary Key
+ - InflateColumn::DateTime refactoring to allow fine grained method
+ overloads
- Fix ResultSetColumn improperly selecting more than the requested
column when +columns/+select is present
+ - Fix failure on update/delete of resultsets with complex WHERE
+ SQLA structures
- Fix regression in context sensitiveness of deployment_statements
- Fix regression resulting in overcomplicated query on
search_related from prefetching resultsets
Modified: DBIx-Class/0.08/branches/prefetch/Makefile.PL
===================================================================
--- DBIx-Class/0.08/branches/prefetch/Makefile.PL 2010-02-05 21:29:24 UTC (rev 8567)
+++ DBIx-Class/0.08/branches/prefetch/Makefile.PL 2010-02-06 01:55:26 UTC (rev 8568)
@@ -39,14 +39,14 @@
requires 'Module::Find' => '0.06';
requires 'Path::Class' => '0.16';
requires 'Scope::Guard' => '0.03';
-requires 'SQL::Abstract' => '1.60';
+requires 'SQL::Abstract' => '1.61';
requires 'SQL::Abstract::Limit' => '0.13';
requires 'Sub::Name' => '0.04';
requires 'Data::Dumper::Concise' => '1.000';
my %replication_requires = (
'Moose', => '0.90',
- 'MooseX::Types', => '0.16',
+ 'MooseX::Types', => '0.21',
'namespace::clean' => '0.11',
'Hash::Merge', => '0.11',
);
@@ -123,6 +123,11 @@
'DateTime::Format::Sybase' => 0,
) : ()
,
+ grep $_, @ENV{qw/DBICTEST_SYBASE_ASA_DSN DBICTEST_SYBASE_ASA_ODBC_DSN/}
+ ? (
+ 'DateTime::Format::Strptime' => 0,
+ ) : ()
+ ,
);
#************************************************************************#
# Make ABSOLUTELY SURE that nothing on the list above is a real require, #
Modified: DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Componentised.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Componentised.pm 2010-02-05 21:29:24 UTC (rev 8567)
+++ DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Componentised.pm 2010-02-06 01:55:26 UTC (rev 8568)
@@ -17,18 +17,24 @@
no strict 'refs';
for my $comp (reverse @_) {
- if (
- $comp->isa ('DBIx::Class::UTF8Columns')
- and
- my @broken = grep { $_ ne 'DBIx::Class::Row' and defined ${"${_}::"}{store_column} } (@present_components)
- ) {
+
+ if ($comp->isa ('DBIx::Class::UTF8Columns') ) {
+ require B;
+ my @broken;
+
+ for (@present_components) {
+ my $cref = $_->can ('store_column')
+ or next;
+ push @broken, $_ if B::svref_2object($cref)->STASH->NAME ne 'DBIx::Class::Row';
+ }
+
carp "Incorrect loading order of $comp by ${target} will affect other components overriding store_column ("
. join (', ', @broken)
- .'). Refer to the documentation of DBIx::Class::UTF8Columns for more info';
+ .'). Refer to the documentation of DBIx::Class::UTF8Columns for more info'
+ if @broken;
}
- else {
- unshift @present_components, $comp;
- }
+
+ unshift @present_components, $comp;
}
$class->next::method($target, @_);
Modified: DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Core.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Core.pm 2010-02-05 21:29:24 UTC (rev 8567)
+++ DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Core.pm 2010-02-06 01:55:26 UTC (rev 8568)
@@ -2,7 +2,6 @@
use strict;
use warnings;
-no warnings 'qw';
use base qw/DBIx::Class/;
@@ -12,7 +11,8 @@
PK::Auto
PK
Row
- ResultSourceProxy::Table/);
+ ResultSourceProxy::Table
+/);
1;
Modified: DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/InflateColumn/DateTime.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/InflateColumn/DateTime.pm 2010-02-05 21:29:24 UTC (rev 8567)
+++ DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/InflateColumn/DateTime.pm 2010-02-06 01:55:26 UTC (rev 8568)
@@ -136,23 +136,18 @@
}
}
- my $timezone;
if ( defined $info->{extra}{timezone} ) {
carp "Putting timezone into extra => { timezone => '...' } has been deprecated, ".
"please put it directly into the '$column' column definition.";
- $timezone = $info->{extra}{timezone};
+ $info->{timezone} = $info->{extra}{timezone} unless defined $info->{timezone};
}
- my $locale;
if ( defined $info->{extra}{locale} ) {
carp "Putting locale into extra => { locale => '...' } has been deprecated, ".
"please put it directly into the '$column' column definition.";
- $locale = $info->{extra}{locale};
+ $info->{locale} = $info->{extra}{locale} unless defined $info->{locale};
}
- $locale = $info->{locale} if defined $info->{locale};
- $timezone = $info->{timezone} if defined $info->{timezone};
-
my $undef_if_invalid = $info->{datetime_undef_if_invalid};
if ($type eq 'datetime' || $type eq 'date' || $type eq 'timestamp') {
@@ -178,21 +173,12 @@
$self->throw_exception ("Error while inflating ${value} for ${column} on ${self}: $err");
}
- $dt->set_time_zone($timezone) if $timezone;
- $dt->set_locale($locale) if $locale;
- return $dt;
+ return $obj->_post_inflate_datetime( $dt, \%info );
},
deflate => sub {
my ($value, $obj) = @_;
- if ($timezone) {
- carp "You're using a floating timezone, please see the documentation of"
- . " DBIx::Class::InflateColumn::DateTime for an explanation"
- if ref( $value->time_zone ) eq 'DateTime::TimeZone::Floating'
- and not $info{floating_tz_ok}
- and not $ENV{DBIC_FLOATING_TZ_OK};
- $value->set_time_zone($timezone);
- $value->set_locale($locale) if $locale;
- }
+
+ $value = $obj->_pre_deflate_datetime( $value, \%info );
$obj->_deflate_from_datetime( $value, \%info );
},
}
@@ -224,6 +210,33 @@
shift->result_source->storage->datetime_parser (@_);
}
+sub _post_inflate_datetime {
+ my( $self, $dt, $info ) = @_;
+
+ $dt->set_time_zone($info->{timezone}) if defined $info->{timezone};
+ $dt->set_locale($info->{locale}) if defined $info->{locale};
+
+ return $dt;
+}
+
+sub _pre_deflate_datetime {
+ my( $self, $dt, $info ) = @_;
+
+ if (defined $info->{timezone}) {
+ carp "You're using a floating timezone, please see the documentation of"
+ . " DBIx::Class::InflateColumn::DateTime for an explanation"
+ if ref( $dt->time_zone ) eq 'DateTime::TimeZone::Floating'
+ and not $info->{floating_tz_ok}
+ and not $ENV{DBIC_FLOATING_TZ_OK};
+
+ $dt->set_time_zone($info->{timezone});
+ }
+
+ $dt->set_locale($info->{locale}) if defined $info->{locale};
+
+ return $dt;
+}
+
1;
__END__
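The two new hooks above can be overridden in a result class; the sketch below assumes a hypothetical MyApp::Schema::Result::Event class and a UTC-normalisation policy that is not part of this commit:

  package MyApp::Schema::Result::Event;

  use strict;
  use warnings;
  use base 'DBIx::Class::Core';

  __PACKAGE__->load_components(qw/InflateColumn::DateTime/);
  __PACKAGE__->table('event');
  __PACKAGE__->add_columns(
    id         => { data_type => 'integer', is_auto_increment => 1 },
    created_at => { data_type => 'datetime' },
  );
  __PACKAGE__->set_primary_key('id');

  # runs after the stock inflation (which already applies any timezone/locale
  # from the column info) - here we additionally normalise to UTC
  sub _post_inflate_datetime {
    my ($self, $dt, $info) = @_;
    $dt = $self->next::method($dt, $info);
    $dt->set_time_zone('UTC');
    return $dt;
  }

  1;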
Modified: DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Relationship/Base.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Relationship/Base.pm 2010-02-05 21:29:24 UTC (rev 8567)
+++ DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Relationship/Base.pm 2010-02-06 01:55:26 UTC (rev 8568)
@@ -30,6 +30,8 @@
__PACKAGE__->add_relationship('relname', 'Foreign::Class', $cond, $attrs);
+=head3 condition
+
The condition needs to be an L<SQL::Abstract>-style representation of the
join between the tables. When resolving the condition for use in a C<JOIN>,
keys using the pseudo-table C<foreign> are resolved to mean "the Table on the
@@ -67,10 +69,19 @@
To add an C<OR>ed condition, use an arrayref of hashrefs. See the
L<SQL::Abstract> documentation for more details.
-In addition to the
-L<standard ResultSet attributes|DBIx::Class::ResultSet/ATTRIBUTES>,
-the following attributes are also valid:
+=head3 attributes
+The L<standard ResultSet attributes|DBIx::Class::ResultSet/ATTRIBUTES> may
+be used as relationship attributes. In particular, the 'where' attribute is
+useful for filtering relationships:
+
+ __PACKAGE__->has_many( 'valid_users', 'MyApp::Schema::User',
+ { 'foreign.user_id' => 'self.user_id' },
+ { where => { valid => 1 } }
+ );
+
+The following attributes are also valid:
+
=over 4
=item join_type
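As a usage sketch for the 'where' attribute example above (the Account resultset, the $schema connection and the role column are hypothetical), the filtered relationship behaves like any other accessor:

  my $account = $schema->resultset('Account')->find(1);

  # only rows with valid => 1 are returned or counted
  my @users = $account->valid_users;
  my $count = $account->valid_users->count;

  # the filter is preserved when chaining further searches
  my $admins = $account->valid_users->search({ role => 'admin' });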
Modified: DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/ResultSet.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/ResultSet.pm 2010-02-05 21:29:24 UTC (rev 8567)
+++ DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/ResultSet.pm 2010-02-06 01:55:26 UTC (rev 8568)
@@ -2817,14 +2817,10 @@
my %already_grouped = map { $_ => 1 } (@{$attrs->{group_by}});
my $storage = $self->result_source->schema->storage;
- my $sql_maker = $storage->sql_maker;
- local $sql_maker->{quote_char}; #disable quoting
my $rs_column_list = $storage->_resolve_column_info ($attrs->{from});
- my @chunks = $sql_maker->_order_by_chunks ($attrs->{order_by});
- for my $chunk (map { ref $_ ? @$_ : $_ } (@chunks) ) {
- $chunk =~ s/\s+ (?: ASC|DESC ) \s* $//ix;
+ for my $chunk ($storage->_parse_order_by($attrs->{order_by})) {
if ($rs_column_list->{$chunk} && not $already_grouped{$chunk}++) {
push @{$attrs->{group_by}}, $chunk;
}
Modified: DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/ResultSetColumn.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/ResultSetColumn.pm 2010-02-05 21:29:24 UTC (rev 8567)
+++ DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/ResultSetColumn.pm 2010-02-06 01:55:26 UTC (rev 8568)
@@ -45,9 +45,45 @@
$rs->throw_exception('column must be supplied') unless $column;
my $orig_attrs = $rs->_resolved_attrs;
- my $new_parent_rs = $rs->search_rs;
+
+ # If $column can be found in the 'as' list of the parent resultset, use the
+ # corresponding element of its 'select' list (to keep any custom column
+ # definition set up with 'select' or '+select' attrs), otherwise use $column
+ # (to create a new column definition on-the-fly).
+ my $as_list = $orig_attrs->{as} || [];
+ my $select_list = $orig_attrs->{select} || [];
+ my $as_index = List::Util::first { ($as_list->[$_] || "") eq $column } 0..$#$as_list;
+ my $select = defined $as_index ? $select_list->[$as_index] : $column;
+
+ my $new_parent_rs;
+ # analyze the order_by, and see if it is done over a function/nonexistentcolumn
+ # if this is the case we will need to wrap a subquery since the result of RSC
+ # *must* be a single column select
+ my %collist = map { $_ => 1 } ($rs->result_source->columns, $column);
+ if (
+ scalar grep
+ { ! $collist{$_} }
+ ( $rs->result_source->schema->storage->_parse_order_by ($orig_attrs->{order_by} ) )
+ ) {
+ my $alias = $rs->current_source_alias;
+ # nuke the prefetch before collapsing to sql
+ my $subq_rs = $rs->search;
+ $subq_rs->{attrs}{join} = $subq_rs->_merge_attr( $subq_rs->{attrs}{join}, delete $subq_rs->{attrs}{prefetch} );
+
+ $new_parent_rs = $rs->result_source->resultset->search ( {}, {
+ alias => $alias,
+ from => [{
+ $alias => $subq_rs->as_query,
+ -alias => $alias,
+ -source_handle => $rs->result_source->handle,
+ }]
+ });
+ }
+
+ $new_parent_rs ||= $rs->search_rs;
my $new_attrs = $new_parent_rs->{attrs} ||= {};
+ # FIXME - this should go away when the chaining branch is merged
# since what we do is actually chain to the original resultset, we need to throw
# away all selectors (otherwise they'll chain)
delete $new_attrs->{$_} for (qw/columns +columns select +select as +as cols include_columns/);
@@ -56,17 +92,8 @@
# rs via the _resolved_attrs trick - we need to retain the separation between
# +select/+as and select/as. At the same time we want to preserve any joins that the
# prefetch would otherwise generate.
- $new_attrs->{join} = $rs->_merge_attr( delete $new_attrs->{join}, delete $new_attrs->{prefetch} );
+ $new_attrs->{join} = $rs->_merge_attr( $new_attrs->{join}, delete $new_attrs->{prefetch} );
- # If $column can be found in the 'as' list of the parent resultset, use the
- # corresponding element of its 'select' list (to keep any custom column
- # definition set up with 'select' or '+select' attrs), otherwise use $column
- # (to create a new column definition on-the-fly).
- my $as_list = $orig_attrs->{as} || [];
- my $select_list = $orig_attrs->{select} || [];
- my $as_index = List::Util::first { ($as_list->[$_] || "") eq $column } 0..$#$as_list;
- my $select = defined $as_index ? $select_list->[$as_index] : $column;
-
# {collapse} would mean a has_many join was injected, which in turn means
# we need to group *IF WE CAN* (only if the column in question is unique)
if (!$new_attrs->{group_by} && keys %{$orig_attrs->{collapse}}) {
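To illustrate the case the new ResultSetColumn code handles (resultset, column and relationship names are hypothetical), an order_by over a function is not a selectable column, so the single-column select is now wrapped in a subquery and any prefetch is folded into a plain join inside it:

  my $rs = $schema->resultset('Track')->search(
    {},
    {
      order_by => \'LENGTH(title) DESC',  # a function, not a column of the source
      prefetch => 'cd',                   # folded into a join inside the subquery
    },
  );

  # still a valid single-column select thanks to the subquery wrap
  my $max_id = $rs->get_column('trackid')->max;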
Modified: DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/SQLAHacks.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/SQLAHacks.pm 2010-02-05 21:29:24 UTC (rev 8567)
+++ DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/SQLAHacks.pm 2010-02-06 01:55:26 UTC (rev 8568)
@@ -84,6 +84,24 @@
return undef;
}
+# Informix specific limit, almost like LIMIT/OFFSET
+sub _SkipFirst {
+ my ($self, $sql, $order, $rows, $offset) = @_;
+
+ $sql =~ s/^ \s* SELECT \s+ //ix
+ or croak "Unrecognizable SELECT: $sql";
+
+ return sprintf ('SELECT %s%s%s%s',
+ $offset
+ ? sprintf ('SKIP %d ', $offset)
+ : ''
+ ,
+ sprintf ('FIRST %d ', $rows),
+ $sql,
+ $self->_order_by ($order),
+ );
+}
+
# Crappy Top based Limit/Offset support. Legacy from MSSQL.
sub _Top {
my ( $self, $sql, $order, $rows, $offset ) = @_;
@@ -389,7 +407,7 @@
$self->_sqlcase($func),
$self->_recurse_fields($args),
$as
- ? sprintf (' %s %s', $self->_sqlcase('as'), $as)
+ ? sprintf (' %s %s', $self->_sqlcase('as'), $self->_quote ($as) )
: ''
);
Modified: DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Schema/Versioned.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Schema/Versioned.pm 2010-02-05 21:29:24 UTC (rev 8567)
+++ DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Schema/Versioned.pm 2010-02-06 01:55:26 UTC (rev 8568)
@@ -150,13 +150,13 @@
and we can safely deploy the DDL to it. However things are not always so simple.
if you want to initialise a pre-existing database where the DDL is not the same
-as the DDL for your current schema version then you will need a diff which
+as the DDL for your current schema version then you will need a diff which
converts the database's DDL to the current DDL. The best way to do this is
to get a dump of the database schema (without data) and save that in your
SQL directory as version 0.000 (the filename must be as with
-L<DBIx::Class::Schema/ddl_filename>) then create a diff using your create DDL
+L<DBIx::Class::Schema/ddl_filename>) then create a diff using your create DDL
script given above from version 0.000 to the current version. Then hand check
-and if necessary edit the resulting diff to ensure that it will apply. Once you have
+and if necessary edit the resulting diff to ensure that it will apply. Once you have
done all that you can do this:
if (!$schema->get_db_version()) {
@@ -168,7 +168,7 @@
$schema->upgrade();
In the case of an unversioned database the above code will create the
-dbix_class_schema_versions table and write version 0.000 to it, then
+dbix_class_schema_versions table and write version 0.000 to it, then
upgrade will then apply the diff we talked about creating in the previous paragraph
and then you're good to go.
@@ -399,7 +399,7 @@
}
# strangely the first time this is called can
- # differ to subsequent times. so we call it
+ # differ to subsequent times. so we call it
# here to be sure.
# XXX - just fix it
$self->storage->sqlt_type;
@@ -435,7 +435,7 @@
allows you to run your upgrade any way you please, you can call C<run_upgrade>
any number of times to run the actual SQL commands, and in between you can
sandwich your data upgrading. For example, first run all the B<CREATE>
-commands, then migrate your data from old to new tables/formats, then
+commands, then migrate your data from old to new tables/formats, then
issue the DROP commands when you are finished. Will run the whole file as it is by default.
=cut
@@ -469,7 +469,7 @@
$self->_filedata([ grep { $_ !~ /$stm/i } @{$self->_filedata} ]);
for (@statements)
- {
+ {
$self->storage->debugobj->query_start($_) if $self->storage->debug;
$self->apply_statement($_);
$self->storage->debugobj->query_end($_) if $self->storage->debug;
@@ -641,7 +641,7 @@
$tr->parser->($tr, $$data);
}
- my $diff = SQL::Translator::Diff::schema_diff($db_tr->schema, $db,
+ my $diff = SQL::Translator::Diff::schema_diff($db_tr->schema, $db,
$dbic_tr->schema, $db,
{ ignore_constraint_names => 1, ignore_index_names => 1, caseopt => 1 });
@@ -704,14 +704,17 @@
my $self = shift;
my $file = shift || return;
- my $fh;
- open $fh, "<$file" or carp("Can't open upgrade file, $file ($!)");
- my @data = split(/\n/, join('', <$fh>));
- @data = grep(!/^--/, @data);
- @data = split(/;/, join('', @data));
- close($fh);
- @data = grep { $_ && $_ !~ /^-- / } @data;
- @data = grep { $_ !~ /^(BEGIN|BEGIN TRANSACTION|COMMIT)/m } @data;
+ open my $fh, '<', $file or carp("Can't open upgrade file, $file ($!)");
+ my @data = split /\n/, join '', <$fh>;
+ close $fh;
+
+ @data = grep {
+ $_ &&
+ !/^--/ &&
+ !/^(BEGIN|BEGIN TRANSACTION|COMMIT)/m
+ } split /;/,
+ join '', @data;
+
return \@data;
}
Added: DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI/Informix.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI/Informix.pm (rev 0)
+++ DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI/Informix.pm 2010-02-06 01:55:26 UTC (rev 8568)
@@ -0,0 +1,57 @@
+package DBIx::Class::Storage::DBI::Informix;
+use strict;
+use warnings;
+
+use base qw/DBIx::Class::Storage::DBI/;
+
+use mro 'c3';
+
+__PACKAGE__->mk_group_accessors('simple' => '__last_insert_id');
+
+sub _execute {
+ my $self = shift;
+ my ($op) = @_;
+ my ($rv, $sth, @rest) = $self->next::method(@_);
+ if ($op eq 'insert') {
+ $self->__last_insert_id($sth->{ix_sqlerrd}[1]);
+ }
+ return (wantarray ? ($rv, $sth, @rest) : $rv);
+}
+
+sub last_insert_id {
+ shift->__last_insert_id;
+}
+
+sub _sql_maker_opts {
+ my ( $self, $opts ) = @_;
+
+ if ( $opts ) {
+ $self->{_sql_maker_opts} = { %$opts };
+ }
+
+ return { limit_dialect => 'SkipFirst', %{$self->{_sql_maker_opts}||{}} };
+}
+
+1;
+
+__END__
+
+=head1 NAME
+
+DBIx::Class::Storage::DBI::Informix - Base Storage Class for INFORMIX Support
+
+=head1 SYNOPSIS
+
+=head1 DESCRIPTION
+
+This class implements storage-specific support for Informix
+
+=head1 AUTHORS
+
+See L<DBIx::Class/CONTRIBUTORS>
+
+=head1 LICENSE
+
+You may distribute this code under the same terms as Perl itself.
+
+=cut
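A connection and paging sketch for the new Informix storage (DSN, schema class and resultset names are placeholders); rows/offset are rendered through the SkipFirst limit dialect selected above:

  my $schema = MyApp::Schema->connect('dbi:Informix:mydb', $user, $pass);

  # maps to Informix's SELECT SKIP 20 FIRST 10 ... syntax
  my @page = $schema->resultset('Artist')->search(
    {},
    { order_by => 'name', rows => 10, offset => 20 },
  )->all;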
Modified: DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI/MSSQL.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI/MSSQL.pm 2010-02-05 21:29:24 UTC (rev 8567)
+++ DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI/MSSQL.pm 2010-02-06 01:55:26 UTC (rev 8568)
@@ -190,7 +190,7 @@
# see if this is an ordered subquery
my $attrs = $_[3];
- if ( scalar $self->sql_maker->_order_by_chunks ($attrs->{order_by}) ) {
+ if ( scalar $self->_parse_order_by ($attrs->{order_by}) ) {
$self->throw_exception(
'An ordered subselect encountered - this is not safe! Please see "Ordered Subselects" in DBIx::Class::Storage::DBI::MSSQL
') unless $attrs->{unsafe_subselect_ok};
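For reference (schema and relationship names are hypothetical), the exception above fires for constructs such as a limited, ordered resultset that has to be wrapped in a subselect, and is silenced by explicitly opting in:

  my $rs = $schema->resultset('Artist')->search(
    {},
    {
      prefetch => 'cds',         # forces a wrapping subselect under the RNO limit
      order_by => 'name',
      rows     => 10,
      unsafe_subselect_ok => 1,  # see "Ordered Subselects" in the MSSQL storage docs
    },
  );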
Added: DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI/ODBC/SQL_Anywhere.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI/ODBC/SQL_Anywhere.pm (rev 0)
+++ DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI/ODBC/SQL_Anywhere.pm 2010-02-06 01:55:26 UTC (rev 8568)
@@ -0,0 +1,28 @@
+package DBIx::Class::Storage::DBI::ODBC::SQL_Anywhere;
+
+use strict;
+use warnings;
+use base qw/DBIx::Class::Storage::DBI::SQLAnywhere/;
+use mro 'c3';
+
+1;
+
+=head1 NAME
+
+DBIx::Class::Storage::DBI::ODBC::SQL_Anywhere - Driver for using Sybase SQL
+Anywhere through ODBC
+
+=head1 SYNOPSIS
+
+All functionality is provided by L<DBIx::Class::Storage::DBI::SQLAnywhere>, see
+that module for details.
+
+=head1 AUTHOR
+
+See L<DBIx::Class/AUTHOR> and L<DBIx::Class/CONTRIBUTORS>.
+
+=head1 LICENSE
+
+You may distribute this code under the same terms as Perl itself.
+
+=cut
Modified: DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI/Oracle/Generic.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI/Oracle/Generic.pm 2010-02-05 21:29:24 UTC (rev 8567)
+++ DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI/Oracle/Generic.pm 2010-02-06 01:55:26 UTC (rev 8568)
@@ -252,6 +252,13 @@
my %column_bind_attrs = $self->bind_attribute_by_data_type($data_type);
if ($data_type =~ /^[BC]LOB$/i) {
+ if ($DBD::Oracle::VERSION eq '1.23') {
+ $self->throw_exception(
+"BLOB/CLOB support in DBD::Oracle == 1.23 is broken, use an earlier or later ".
+"version.\n\nSee: https://rt.cpan.org/Public/Bug/Display.html?id=46016\n"
+ );
+ }
+
$column_bind_attrs{'ora_type'} = uc($data_type) eq 'CLOB'
? DBD::Oracle::ORA_CLOB()
: DBD::Oracle::ORA_BLOB()
Modified: DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI/Replicated.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI/Replicated.pm 2010-02-05 21:29:24 UTC (rev 8567)
+++ DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI/Replicated.pm 2010-02-06 01:55:26 UTC (rev 8568)
@@ -8,7 +8,7 @@
my %replication_required = (
'Moose' => '0.90',
- 'MooseX::Types' => '0.16',
+ 'MooseX::Types' => '0.21',
'namespace::clean' => '0.11',
'Hash::Merge' => '0.11'
);
@@ -121,7 +121,7 @@
Replicated Storage has additional requirements not currently part of L<DBIx::Class>
Moose => '0.90',
- MooseX::Types => '0.16',
+ MooseX::Types => '0.21',
namespace::clean => '0.11',
Hash::Merge => '0.11'
Added: DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI/SQLAnywhere.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI/SQLAnywhere.pm (rev 0)
+++ DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI/SQLAnywhere.pm 2010-02-06 01:55:26 UTC (rev 8568)
@@ -0,0 +1,125 @@
+package DBIx::Class::Storage::DBI::SQLAnywhere;
+
+use strict;
+use warnings;
+use base qw/DBIx::Class::Storage::DBI/;
+use mro 'c3';
+use List::Util ();
+
+__PACKAGE__->mk_group_accessors(simple => qw/
+ _identity
+/);
+
+=head1 NAME
+
+DBIx::Class::Storage::DBI::SQLAnywhere - Driver for Sybase SQL Anywhere
+
+=head1 DESCRIPTION
+
+This class implements autoincrements for Sybase SQL Anywhere, selects the
+RowNumberOver limit implementation and provides
+L<DBIx::Class::InflateColumn::DateTime> support.
+
+You need the C<DBD::SQLAnywhere> driver that comes with the SQL Anywhere
+distribution, B<NOT> the one on CPAN. It is usually under a path such as:
+
+ /opt/sqlanywhere11/sdk/perl
+
+Recommended L<DBIx::Class::Storage::DBI/connect_info> settings:
+
+ on_connect_call => 'datetime_setup'
+
+=head1 METHODS
+
+=cut
+
+sub last_insert_id { shift->_identity }
+
+sub insert {
+ my $self = shift;
+ my ($source, $to_insert) = @_;
+
+ my $identity_col = List::Util::first {
+ $source->column_info($_)->{is_auto_increment}
+ } $source->columns;
+
+ if ($identity_col && (not exists $to_insert->{$identity_col})) {
+ my $dbh = $self->_get_dbh;
+ my $table_name = $source->from;
+ $table_name = $$table_name if ref $table_name;
+
+ my ($identity) = $dbh->selectrow_array("SELECT GET_IDENTITY('$table_name')");
+
+ $to_insert->{$identity_col} = $identity;
+
+ $self->_identity($identity);
+ }
+
+ return $self->next::method(@_);
+}
+
+# this sub stolen from DB2
+
+sub _sql_maker_opts {
+ my ( $self, $opts ) = @_;
+
+ if ( $opts ) {
+ $self->{_sql_maker_opts} = { %$opts };
+ }
+
+ return { limit_dialect => 'RowNumberOver', %{$self->{_sql_maker_opts}||{}} };
+}
+
+# this sub stolen from MSSQL
+
+sub build_datetime_parser {
+ my $self = shift;
+ my $type = "DateTime::Format::Strptime";
+ eval "use ${type}";
+ $self->throw_exception("Couldn't load ${type}: $@") if $@;
+ return $type->new( pattern => '%Y-%m-%d %H:%M:%S.%6N' );
+}
+
+=head2 connect_call_datetime_setup
+
+Used as:
+
+ on_connect_call => 'datetime_setup'
+
+In L<DBIx::Class::Storage::DBI/connect_info> to set the date and timestamp
+formats (as temporary options for the session) for use with
+L<DBIx::Class::InflateColumn::DateTime>.
+
+The C<TIMESTAMP> data type supports up to 6 digits after the decimal point for
+second precision. The full precision is used.
+
+The C<DATE> data type supposedly stores hours and minutes too, according to the
+documentation, but I could not get that to work. It seems to only store the
+date.
+
+You will need the L<DateTime::Format::Strptime> module for inflation to work.
+
+=cut
+
+sub connect_call_datetime_setup {
+ my $self = shift;
+
+ $self->_do_query(
+ "set temporary option timestamp_format = 'yyyy-mm-dd hh:mm:ss.ssssss'"
+ );
+ $self->_do_query(
+ "set temporary option date_format = 'yyyy-mm-dd hh:mm:ss.ssssss'"
+ );
+}
+
+1;
+
+=head1 AUTHOR
+
+See L<DBIx::Class/AUTHOR> and L<DBIx::Class/CONTRIBUTORS>.
+
+=head1 LICENSE
+
+You may distribute this code under the same terms as Perl itself.
+
+=cut
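A minimal connection sketch for the new SQL Anywhere storage (the DSN and schema class are illustrative only); the on_connect_call setting is the one recommended in the POD above:

  my $schema = MyApp::Schema->connect(
    'dbi:SQLAnywhere:ENG=demo;DBN=demo', $user, $pass,
    { on_connect_call => 'datetime_setup' },  # timestamp/date formats for DT inflation
  );

  # auto-increment values are fetched via GET_IDENTITY() and filled in before the INSERT
  my $row = $schema->resultset('Artist')->create({ name => 'New Artist' });
  print $row->id, "\n";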
Modified: DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI/Sybase/ASE.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI/Sybase/ASE.pm 2010-02-05 21:29:24 UTC (rev 8567)
+++ DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI/Sybase/ASE.pm 2010-02-06 01:55:26 UTC (rev 8568)
@@ -353,10 +353,19 @@
# check for empty insert
# INSERT INTO foo DEFAULT VALUES -- does not work with Sybase
- # try to insert explicit 'DEFAULT's instead (except for identity)
+ # try to insert explicit 'DEFAULT's instead (except for identity, timestamp
+ # and computed columns)
if (not %$to_insert) {
for my $col ($source->columns) {
next if $col eq $identity_col;
+
+ my $info = $source->column_info($col);
+
+ next if ref $info->{default_value} eq 'SCALAR'
+ || (exists $info->{data_type} && (not defined $info->{data_type}));
+
+ next if $info->{data_type} && $info->{data_type} =~ /^timestamp\z/i;
+
$to_insert->{$col} = \'DEFAULT';
}
}
@@ -935,13 +944,9 @@
=head1 Schema::Loader Support
-There is an experimental branch of L<DBIx::Class::Schema::Loader> that will
-allow you to dump a schema from most (if not all) versions of Sybase.
+As of version C<0.05000>, L<DBIx::Class::Schema::Loader> should work well with
+most (if not all) versions of Sybase ASE.
-It is available via subversion from:
-
- http://dev.catalyst.perl.org/repos/bast/branches/DBIx-Class-Schema-Loader/current/
-
=head1 FreeTDS
This driver supports L<DBD::Sybase> compiled against FreeTDS
@@ -1093,6 +1098,42 @@
When inserting IMAGE columns using this method, you'll need to use
L</connect_call_blob_setup> as well.
+=head1 COMPUTED COLUMNS
+
+If you have columns such as:
+
+ created_dtm AS getdate()
+
+represent them in your Result classes as:
+
+ created_dtm => {
+ data_type => undef,
+ default_value => \'getdate()',
+ is_nullable => 0,
+ }
+
+The C<data_type> must exist and must be C<undef>. Then empty inserts will work
+on tables with such columns.
+
+=head1 TIMESTAMP COLUMNS
+
+C<timestamp> columns in Sybase ASE are not really timestamps, see:
+L<http://dba.fyicenter.com/Interview-Questions/SYBASE/The_timestamp_datatype_in_Sybase_.html>.
+
+They should be defined in your Result classes as:
+
+ ts => {
+ data_type => 'timestamp',
+ is_nullable => 0,
+ inflate_datetime => 0,
+ }
+
+The C<< inflate_datetime => 0 >> is necessary if you use
+L<DBIx::Class::InflateColumn::DateTime> (as most people do) and still want to
+be able to read these values.
+
+The values will come back as hexadecimal.
+
=head1 TODO
=over
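Putting the two column definitions documented above together, a result class for such a table could look like this sketch (class, table and id column are hypothetical; the computed and timestamp column hashes are exactly as shown above):

  package MyApp::Schema::Result::Ledger;

  use strict;
  use warnings;
  use base 'DBIx::Class::Core';

  __PACKAGE__->load_components(qw/InflateColumn::DateTime/);
  __PACKAGE__->table('ledger');
  __PACKAGE__->add_columns(
    id => { data_type => 'integer', is_auto_increment => 1 },

    # computed column: data_type must exist and be undef so empty inserts work
    created_dtm => {
      data_type     => undef,
      default_value => \'getdate()',
      is_nullable   => 0,
    },

    # ASE timestamp: not a real datetime, keep it out of DateTime inflation
    ts => {
      data_type        => 'timestamp',
      is_nullable      => 0,
      inflate_datetime => 0,
    },
  );
  __PACKAGE__->set_primary_key('id');

  1;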
Modified: DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI.pm 2010-02-05 21:29:24 UTC (rev 8567)
+++ DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBI.pm 2010-02-06 01:55:26 UTC (rev 8568)
@@ -531,7 +531,7 @@
@args = @args[0,1,2];
}
- $info{arguments} = \@args;
+ $info{arguments} = \@args;
my @storage_opts = grep exists $attrs{$_},
@storage_options, 'cursor_class';
@@ -1468,9 +1468,13 @@
);
}
+ # neither _execute_array, nor _execute_inserts_with_no_binds are
+ # atomic (even if _execute_array is a single call). Thus a safety
+ # scope guard
+ my $guard = $self->txn_scope_guard unless $self->{transaction_depth} != 0;
+
$self->_query_start( $sql, ['__BULK__'] );
my $sth = $self->sth($sql);
-
my $rv = do {
if ($empty_bind) {
# bind_param_array doesn't work if there are no binds
@@ -1484,14 +1488,15 @@
$self->_query_end( $sql, ['__BULK__'] );
+
+ $guard->commit if $guard;
+
return (wantarray ? ($rv, $sth, @bind) : $rv);
}
sub _execute_array {
my ($self, $source, $sth, $bind, $cols, $data, @extra) = @_;
- my $guard = $self->txn_scope_guard unless $self->{transaction_depth} != 0;
-
## This must be an arrayref, else nothing works!
my $tuple_status = [];
@@ -1540,9 +1545,6 @@
}),
);
}
-
- $guard->commit if $guard;
-
return $rv;
}
@@ -1555,8 +1557,6 @@
sub _dbh_execute_inserts_with_no_binds {
my ($self, $sth, $count) = @_;
- my $guard = $self->txn_scope_guard unless $self->{transaction_depth} != 0;
-
eval {
my $dbh = $self->_get_dbh;
local $dbh->{RaiseError} = 1;
@@ -1572,13 +1572,11 @@
$self->throw_exception($exception) if $exception;
- $guard->commit if $guard;
-
return $count;
}
sub update {
- my ($self, $source, @args) = @_;
+ my ($self, $source, @args) = @_;
my $bind_attrs = $self->source_bind_attributes($source);
@@ -1677,11 +1675,12 @@
my $row_cnt = '0E0';
my $subrs_cur = $rs->cursor;
- while (my @pks = $subrs_cur->next) {
+ my @all_pk = $subrs_cur->all;
+ for my $pks ( @all_pk) {
my $cond;
for my $i (0.. $#pcols) {
- $cond->{$pcols[$i]} = $pks[$i];
+ $cond->{$pcols[$i]} = $pks->[$i];
}
$self->$op (
@@ -1834,7 +1833,7 @@
&&
(ref $ident eq 'ARRAY' && @$ident > 1) # indicates a join
&&
- scalar $sql_maker->_order_by_chunks ($attrs->{order_by})
+ scalar $self->_parse_order_by ($attrs->{order_by})
) {
# the RNO limit dialect above mangles the SQL such that the join gets lost
# wrap a subquery here
@@ -2587,7 +2586,10 @@
# some databases need this to stop spewing warnings
if (my $dbh = $self->_dbh) {
local $@;
- eval { $dbh->disconnect };
+ eval {
+ %{ $dbh->{CachedKids} } = ();
+ $dbh->disconnect;
+ };
}
$self->_dbh(undef);
Modified: DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBIHacks.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBIHacks.pm 2010-02-05 21:29:24 UTC (rev 8567)
+++ DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class/Storage/DBIHacks.pm 2010-02-06 01:55:26 UTC (rev 8568)
@@ -228,10 +228,7 @@
my $group_by_sql = $sql_maker->_order_by({
map { $_ => $attrs->{$_} } qw/group_by having/
});
- my @order_by_chunks = (map
- { ref $_ ? $_->[0] : $_ }
- $sql_maker->_order_by_chunks ($attrs->{order_by})
- );
+ my @order_by_chunks = ($self->_parse_order_by ($attrs->{order_by}) );
# match every alias to the sql chunks above
for my $alias (keys %$alias_list) {
@@ -459,13 +456,17 @@
for (my $i = 0; $i < @cond; $i++) {
my $entry = $cond[$i];
my $hash;
- if (ref $entry eq 'HASH') {
+ my $ref = ref $entry;
+ if ($ref eq 'HASH' or $ref eq 'ARRAY') {
$hash = $self->_strip_cond_qualifiers($entry);
}
- else {
+ elsif (! $ref) {
$entry =~ /([^.]+)$/;
$hash->{$1} = $cond[++$i];
}
+ else {
+ $self->throw_exception ("_strip_cond_qualifiers() is unable to handle a condition reftype $ref");
+ }
push @{$cond->{-and}}, $hash;
}
}
@@ -483,5 +484,21 @@
return $cond;
}
+sub _parse_order_by {
+ my ($self, $order_by) = @_;
+ return scalar $self->sql_maker->_order_by_chunks ($order_by)
+ unless wantarray;
+
+ my $sql_maker = $self->sql_maker;
+ local $sql_maker->{quote_char}; #disable quoting
+ my @chunks;
+ for my $chunk (map { ref $_ ? @$_ : $_ } ($sql_maker->_order_by_chunks ($order_by) ) ) {
+ $chunk =~ s/\s+ (?: ASC|DESC ) \s* $//ix;
+ push @chunks, $chunk;
+ }
+
+ return @chunks;
+}
+
1;
Modified: DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class.pm 2010-02-05 21:29:24 UTC (rev 8567)
+++ DBIx-Class/0.08/branches/prefetch/lib/DBIx/Class.pm 2010-02-06 01:55:26 UTC (rev 8568)
@@ -25,7 +25,7 @@
# Always remember to do all digits for the version even if they're 0
# i.e. first release of 0.XX *must* be 0.XX000. This avoids fBSD ports
# brain damage and presumably various other packaging systems too
-$VERSION = '0.08115';
+$VERSION = '0.08117_01';
$VERSION = eval $VERSION; # numify for warning-free dev releases
@@ -227,6 +227,8 @@
bluefeet: Aran Deltac <bluefeet at cpan.org>
+boghead: Bryan Beeley <cpan at beeley.org>
+
bricas: Brian Cassidy <bricas at cpan.org>
brunov: Bruno Vecchi <vecchi.b at gmail.com>
Modified: DBIx-Class/0.08/branches/prefetch/maint/gen-schema.pl
===================================================================
--- DBIx-Class/0.08/branches/prefetch/maint/gen-schema.pl 2010-02-05 21:29:24 UTC (rev 8567)
+++ DBIx-Class/0.08/branches/prefetch/maint/gen-schema.pl 2010-02-06 01:55:26 UTC (rev 8568)
@@ -8,4 +8,10 @@
use SQL::Translator;
my $schema = DBICTest::Schema->connect;
-print scalar ($schema->storage->deployment_statements($schema, 'SQLite'));
+print scalar ($schema->storage->deployment_statements(
+ $schema,
+ 'SQLite',
+ undef,
+ undef,
+ { producer_args => { no_transaction => 1 } }
+));
Modified: DBIx-Class/0.08/branches/prefetch/t/73oracle.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/73oracle.t 2010-02-05 21:29:24 UTC (rev 8567)
+++ DBIx-Class/0.08/branches/prefetch/t/73oracle.t 2010-02-06 01:55:26 UTC (rev 8568)
@@ -229,9 +229,6 @@
is($st->pkid1, 55, "Oracle Auto-PK without trigger: First primary key set manually");
SKIP: {
- skip 'buggy BLOB support in DBD::Oracle 1.23', 8
- if $DBD::Oracle::VERSION == 1.23;
-
my %binstr = ( 'small' => join('', map { chr($_) } ( 1 .. 127 )) );
$binstr{'large'} = $binstr{'small'} x 1024;
@@ -242,6 +239,14 @@
my $rs = $schema->resultset('BindType');
my $id = 0;
+ if ($DBD::Oracle::VERSION eq '1.23') {
+ throws_ok { $rs->create({ id => 1, blob => $binstr{large} }) }
+ qr/broken/,
+ 'throws on blob insert with DBD::Oracle == 1.23';
+
+ skip 'buggy BLOB support in DBD::Oracle 1.23', 7;
+ }
+
foreach my $type (qw( blob clob )) {
foreach my $size (qw( small large )) {
$id++;
Modified: DBIx-Class/0.08/branches/prefetch/t/745db2.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/745db2.t 2010-02-05 21:29:24 UTC (rev 8567)
+++ DBIx-Class/0.08/branches/prefetch/t/745db2.t 2010-02-06 01:55:26 UTC (rev 8568)
@@ -1,5 +1,5 @@
use strict;
-use warnings;
+use warnings;
use Test::More;
use Test::Exception;
@@ -24,17 +24,17 @@
my $ars = $schema->resultset('Artist');
is ( $ars->count, 0, 'No rows at first' );
-# test primary key handling
+# test primary key handling
my $new = $ars->create({ name => 'foo' });
ok($new->artistid, "Auto-PK worked");
-# test explicit key spec
+# test explicit key spec
$new = $ars->create ({ name => 'bar', artistid => 66 });
is($new->artistid, 66, 'Explicit PK worked');
$new->discard_changes;
is($new->artistid, 66, 'Explicit PK assigned');
-# test populate
+# test populate
lives_ok (sub {
my @pop;
for (1..2) {
@@ -43,7 +43,7 @@
$ars->populate (\@pop);
});
-# test populate with explicit key
+# test populate with explicit key
lives_ok (sub {
my @pop;
for (1..2) {
@@ -51,11 +51,11 @@
}
$ars->populate (\@pop);
});
-
-# count what we did so far
+
+# count what we did so far
is ($ars->count, 6, 'Simple count works');
-# test LIMIT support
+# test LIMIT support
my $lim = $ars->search( {},
{
rows => 3,
@@ -63,10 +63,10 @@
order_by => 'artistid'
}
);
-is( $lim->count, 2, 'LIMIT+OFFSET count ok' );
+is( $lim->count, 2, 'ROWS+OFFSET count ok' );
is( $lim->all, 2, 'Number of ->all objects matches count' );
-# test iterator
+# test iterator
$lim->reset;
is( $lim->next->artistid, 101, "iterator->next ok" );
is( $lim->next->artistid, 102, "iterator->next ok" );
@@ -87,12 +87,12 @@
'charfield' => {
'data_type' => 'CHAR',
'is_nullable' => 1,
- 'size' => 10
+ 'size' => 10
},
'rank' => {
'data_type' => 'INTEGER',
'is_nullable' => 1,
- 'size' => 10
+ 'size' => 10
},
};
Modified: DBIx-Class/0.08/branches/prefetch/t/746mssql.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/746mssql.t 2010-02-05 21:29:24 UTC (rev 8567)
+++ DBIx-Class/0.08/branches/prefetch/t/746mssql.t 2010-02-06 01:55:26 UTC (rev 8568)
@@ -425,7 +425,7 @@
having => \['1 = ?', [ test => 1 ] ], #test having propagation
prefetch => 'owner',
rows => 2, # 3 results total
- order_by => { -desc => 'owner' },
+ order_by => { -desc => 'me.owner' },
unsafe_subselect_ok => 1,
},
);
Modified: DBIx-Class/0.08/branches/prefetch/t/746sybase.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/746sybase.t 2010-02-05 21:29:24 UTC (rev 8567)
+++ DBIx-Class/0.08/branches/prefetch/t/746sybase.t 2010-02-06 01:55:26 UTC (rev 8568)
@@ -9,7 +9,7 @@
my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_SYBASE_${_}" } qw/DSN USER PASS/};
-my $TESTS = 63 + 2;
+my $TESTS = 66 + 2;
if (not ($dsn && $user)) {
plan skip_all =>
@@ -575,6 +575,35 @@
'updated money value to NULL round-trip'
);
diag $@ if $@;
+
+# Test computed columns and timestamps
+ $schema->storage->dbh_do (sub {
+ my ($storage, $dbh) = @_;
+ eval { $dbh->do("DROP TABLE computed_column_test") };
+ $dbh->do(<<'SQL');
+CREATE TABLE computed_column_test (
+ id INT IDENTITY PRIMARY KEY,
+ a_computed_column AS getdate(),
+ a_timestamp timestamp,
+ charfield VARCHAR(20) DEFAULT 'foo'
+)
+SQL
+ });
+
+ require DBICTest::Schema::ComputedColumn;
+ $schema->register_class(
+ ComputedColumn => 'DBICTest::Schema::ComputedColumn'
+ );
+
+ ok (($rs = $schema->resultset('ComputedColumn')),
+ 'got rs for ComputedColumn');
+
+ lives_ok { $row = $rs->create({}) }
+ 'empty insert for a table with computed columns survived';
+
+ lives_ok {
+ $row->update({ charfield => 'bar' })
+ } 'update of a table with computed columns survived';
}
is $ping_count, 0, 'no pings';
@@ -583,6 +612,6 @@
END {
if (my $dbh = eval { $schema->storage->_dbh }) {
eval { $dbh->do("DROP TABLE $_") }
- for qw/artist bindtype_test money_test/;
+ for qw/artist bindtype_test money_test computed_column_test/;
}
}
Copied: DBIx-Class/0.08/branches/prefetch/t/748informix.t (from rev 8445, DBIx-Class/0.08/branches/prefetch/t/745db2.t)
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/748informix.t (rev 0)
+++ DBIx-Class/0.08/branches/prefetch/t/748informix.t 2010-02-06 01:55:26 UTC (rev 8568)
@@ -0,0 +1,82 @@
+use strict;
+use warnings;
+
+use Test::More;
+use Test::Exception;
+use lib qw(t/lib);
+use DBICTest;
+
+my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_INFORMIX_${_}" } qw/DSN USER PASS/};
+
+#warn "$dsn $user $pass";
+
+plan skip_all => 'Set $ENV{DBICTEST_INFORMIX_DSN}, _USER and _PASS to run this test'
+ unless ($dsn && $user);
+
+my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
+
+my $dbh = $schema->storage->dbh;
+
+eval { $dbh->do("DROP TABLE artist") };
+
+$dbh->do("CREATE TABLE artist (artistid SERIAL, name VARCHAR(255), charfield CHAR(10), rank INTEGER DEFAULT 13);");
+
+my $ars = $schema->resultset('Artist');
+is ( $ars->count, 0, 'No rows at first' );
+
+# test primary key handling
+my $new = $ars->create({ name => 'foo' });
+ok($new->artistid, "Auto-PK worked");
+
+# test explicit key spec
+$new = $ars->create ({ name => 'bar', artistid => 66 });
+is($new->artistid, 66, 'Explicit PK worked');
+$new->discard_changes;
+is($new->artistid, 66, 'Explicit PK assigned');
+
+# test populate
+lives_ok (sub {
+ my @pop;
+ for (1..2) {
+ push @pop, { name => "Artist_$_" };
+ }
+ $ars->populate (\@pop);
+});
+
+# test populate with explicit key
+lives_ok (sub {
+ my @pop;
+ for (1..2) {
+ push @pop, { name => "Artist_expkey_$_", artistid => 100 + $_ };
+ }
+ $ars->populate (\@pop);
+});
+
+# count what we did so far
+is ($ars->count, 6, 'Simple count works');
+
+# test LIMIT support
+my $lim = $ars->search( {},
+ {
+ rows => 3,
+ offset => 4,
+ order_by => 'artistid'
+ }
+);
+is( $lim->count, 2, 'ROWS+OFFSET count ok' );
+is( $lim->all, 2, 'Number of ->all objects matches count' );
+
+# test iterator
+$lim->reset;
+is( $lim->next->artistid, 101, "iterator->next ok" );
+is( $lim->next->artistid, 102, "iterator->next ok" );
+is( $lim->next, undef, "next past end of resultset ok" );
+
+
+done_testing;
+
+# clean up our mess
+END {
+ my $dbh = eval { $schema->storage->_dbh };
+ $dbh->do("DROP TABLE artist") if $dbh;
+}
Added: DBIx-Class/0.08/branches/prefetch/t/749sybase_asa.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/749sybase_asa.t (rev 0)
+++ DBIx-Class/0.08/branches/prefetch/t/749sybase_asa.t 2010-02-06 01:55:26 UTC (rev 8568)
@@ -0,0 +1,150 @@
+use strict;
+use warnings;
+
+use Test::More;
+use Test::Exception;
+use lib qw(t/lib);
+use DBICTest;
+
+# tests stolen from 748informix.t
+
+my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_SYBASE_ASA_${_}" } qw/DSN USER PASS/};
+my ($dsn2, $user2, $pass2) = @ENV{map { "DBICTEST_SYBASE_ASA_ODBC_${_}" } qw/DSN USER PASS/};
+
+plan skip_all => <<'EOF' unless $dsn || $dsn2;
+Set $ENV{DBICTEST_SYBASE_ASA_DSN} and/or $ENV{DBICTEST_SYBASE_ASA_ODBC_DSN},
+_USER and _PASS to run these tests
+EOF
+
+my @info = (
+ [ $dsn, $user, $pass ],
+ [ $dsn2, $user2, $pass2 ],
+);
+
+my @handles_to_clean;
+
+foreach my $info (@info) {
+ my ($dsn, $user, $pass) = @$info;
+
+ next unless $dsn;
+
+ my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
+
+ my $dbh = $schema->storage->dbh;
+
+ push @handles_to_clean, $dbh;
+
+ eval { $dbh->do("DROP TABLE artist") };
+
+ $dbh->do(<<EOF);
+ CREATE TABLE artist (
+ artistid INT IDENTITY PRIMARY KEY,
+ name VARCHAR(255) NULL,
+ charfield CHAR(10) NULL,
+ rank INT DEFAULT 13
+ )
+EOF
+
+ my $ars = $schema->resultset('Artist');
+ is ( $ars->count, 0, 'No rows at first' );
+
+# test primary key handling
+ my $new = $ars->create({ name => 'foo' });
+ ok($new->artistid, "Auto-PK worked");
+
+# test explicit key spec
+ $new = $ars->create ({ name => 'bar', artistid => 66 });
+ is($new->artistid, 66, 'Explicit PK worked');
+ $new->discard_changes;
+ is($new->artistid, 66, 'Explicit PK assigned');
+
+# test populate
+ lives_ok (sub {
+ my @pop;
+ for (1..2) {
+ push @pop, { name => "Artist_$_" };
+ }
+ $ars->populate (\@pop);
+ });
+
+# test populate with explicit key
+ lives_ok (sub {
+ my @pop;
+ for (1..2) {
+ push @pop, { name => "Artist_expkey_$_", artistid => 100 + $_ };
+ }
+ $ars->populate (\@pop);
+ });
+
+# count what we did so far
+ is ($ars->count, 6, 'Simple count works');
+
+# test LIMIT support
+ my $lim = $ars->search( {},
+ {
+ rows => 3,
+ offset => 4,
+ order_by => 'artistid'
+ }
+ );
+ is( $lim->count, 2, 'ROWS+OFFSET count ok' );
+ is( $lim->all, 2, 'Number of ->all objects matches count' );
+
+# test iterator
+ $lim->reset;
+ is( $lim->next->artistid, 101, "iterator->next ok" );
+ is( $lim->next->artistid, 102, "iterator->next ok" );
+ is( $lim->next, undef, "next past end of resultset ok" );
+
+# test empty insert
+ {
+ local $ars->result_source->column_info('artistid')->{is_auto_increment} = 0;
+
+ lives_ok { $ars->create({}) }
+ 'empty insert works';
+ }
+
+# test blobs (stolen from 73oracle.t)
+ eval { $dbh->do('DROP TABLE bindtype_test') };
+ $dbh->do(qq[
+ CREATE TABLE bindtype_test
+ (
+ id INT NOT NULL PRIMARY KEY,
+ bytea INT NULL,
+ blob LONG BINARY NULL,
+ clob LONG VARCHAR NULL
+ )
+ ],{ RaiseError => 1, PrintError => 1 });
+
+ my %binstr = ( 'small' => join('', map { chr($_) } ( 1 .. 127 )) );
+ $binstr{'large'} = $binstr{'small'} x 1024;
+
+ my $maxloblen = length $binstr{'large'};
+ local $dbh->{'LongReadLen'} = $maxloblen;
+
+ my $rs = $schema->resultset('BindType');
+ my $id = 0;
+
+ foreach my $type (qw( blob clob )) {
+ foreach my $size (qw( small large )) {
+ $id++;
+
+# turn off horrendous binary DBIC_TRACE output
+ local $schema->storage->{debug} = 0;
+
+ lives_ok { $rs->create( { 'id' => $id, $type => $binstr{$size} } ) }
+ "inserted $size $type without dying";
+
+ ok($rs->find($id)->$type eq $binstr{$size}, "verified inserted $size $type" );
+ }
+ }
+}
+
+done_testing;
+
+# clean up our mess
+END {
+ foreach my $dbh (@handles_to_clean) {
+ eval { $dbh->do("DROP TABLE $_") } for qw/artist bindtype_test/;
+ }
+}
Modified: DBIx-Class/0.08/branches/prefetch/t/85utf8.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/85utf8.t 2010-02-05 21:29:24 UTC (rev 8567)
+++ DBIx-Class/0.08/branches/prefetch/t/85utf8.t 2010-02-06 01:55:26 UTC (rev 8568)
@@ -7,22 +7,23 @@
use DBICTest;
use utf8;
-warning_like (sub {
+warning_like (
+ sub {
+ package A::Comp;
+ use base 'DBIx::Class';
+ sub store_column { shift->next::method (@_) };
+ 1;
- package A::Comp;
- use base 'DBIx::Class';
- sub store_column { shift->next::method (@_) };
- 1;
+ package A::Test;
+ use base 'DBIx::Class::Core';
+ __PACKAGE__->load_components(qw(UTF8Columns +A::Comp));
+ 1;
+ },
+ qr/Incorrect loading order of DBIx::Class::UTF8Columns.+affect other components overriding store_column \(A::Comp\)/,
+ 'incorrect order warning issued',
+);
- package A::Test;
- use base 'DBIx::Class::Core';
- __PACKAGE__->load_components(qw(UTF8Columns +A::Comp));
- 1;
-}, qr/Incorrect loading order of DBIx::Class::UTF8Columns/ );
-
-
my $schema = DBICTest->init_schema();
-
DBICTest::Schema::CD->load_components('UTF8Columns');
DBICTest::Schema::CD->utf8_columns('title');
Class::C3->reinitialize();
Modified: DBIx-Class/0.08/branches/prefetch/t/88result_set_column.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/88result_set_column.t 2010-02-05 21:29:24 UTC (rev 8567)
+++ DBIx-Class/0.08/branches/prefetch/t/88result_set_column.t 2010-02-06 01:55:26 UTC (rev 8568)
@@ -55,11 +55,11 @@
# test +select/+as for single column
my $psrs = $schema->resultset('CD')->search({},
{
- '+select' => \'COUNT(*)',
- '+as' => 'count'
+ '+select' => \'MAX(year)',
+ '+as' => 'last_year'
}
);
-lives_ok(sub { $psrs->get_column('count')->next }, '+select/+as additional column "count" present (scalar)');
+lives_ok(sub { $psrs->get_column('last_year')->next }, '+select/+as additional column "last_year" present (scalar)');
dies_ok(sub { $psrs->get_column('noSuchColumn')->next }, '+select/+as nonexistent column throws exception');
# test +select/+as for overriding a column
@@ -75,11 +75,11 @@
# test +select/+as for multiple columns
$psrs = $schema->resultset('CD')->search({},
{
- '+select' => [ \'COUNT(*)', 'title' ],
- '+as' => [ 'count', 'addedtitle' ]
+ '+select' => [ \'LENGTH(title) AS title_length', 'title' ],
+ '+as' => [ 'tlength', 'addedtitle' ]
}
);
-lives_ok(sub { $psrs->get_column('count')->next }, '+select/+as multiple additional columns, "count" column present');
+lives_ok(sub { $psrs->get_column('tlength')->next }, '+select/+as multiple additional columns, "tlength" column present');
lives_ok(sub { $psrs->get_column('addedtitle')->next }, '+select/+as multiple additional columns, "addedtitle" column present');
# test that +select/+as specs do not leak
@@ -98,13 +98,28 @@
);
is_same_sql_bind (
- $psrs->get_column('count')->as_query,
- '(SELECT COUNT(*) FROM cd me)',
+ $psrs->get_column('tlength')->as_query,
+ '(SELECT LENGTH(title) AS title_length FROM cd me)',
[],
'Correct SQL for get_column/+as func'
);
+# test that order_by over a function forces a subquery
+lives_ok ( sub {
+ is_deeply (
+ [ $psrs->search ({}, { order_by => { -desc => 'title_length' } })->get_column ('title')->all ],
+ [
+ "Generic Manufactured Singles",
+ "Come Be Depressed With Us",
+ "Caterwaulin' Blues",
+ "Spoonful of bees",
+ "Forkful of bees",
+ ],
+ 'Subquery count induced by aliased ordering function',
+ );
+});
+# test for prefetch not leaking
{
my $rs = $schema->resultset("CD")->search({}, { prefetch => 'artist' });
my $rsc = $rs->get_column('year');
Modified: DBIx-Class/0.08/branches/prefetch/t/93autocast.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/93autocast.t 2010-02-05 21:29:24 UTC (rev 8567)
+++ DBIx-Class/0.08/branches/prefetch/t/93autocast.t 2010-02-06 01:55:26 UTC (rev 8568)
@@ -71,7 +71,7 @@
WHERE
cdid > CAST(? AS INT)
AND tracks.last_updated_at IS NOT NULL
- AND tracks.last_updated_on < CAST (? AS yyy)
+ AND tracks.last_updated_on < CAST (? AS DateTime)
AND tracks.position = ?
AND tracks.single_track = CAST(? AS INT)
)',
Modified: DBIx-Class/0.08/branches/prefetch/t/98savepoints.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/98savepoints.t 2010-02-05 21:29:24 UTC (rev 8567)
+++ DBIx-Class/0.08/branches/prefetch/t/98savepoints.t 2010-02-06 01:55:26 UTC (rev 8568)
@@ -8,11 +8,11 @@
my ($create_sql, $dsn, $user, $pass);
-if (exists $ENV{DBICTEST_PG_DSN}) {
+if ($ENV{DBICTEST_PG_DSN}) {
($dsn, $user, $pass) = @ENV{map { "DBICTEST_PG_${_}" } qw/DSN USER PASS/};
$create_sql = "CREATE TABLE artist (artistid serial PRIMARY KEY, name VARCHAR(100), rank INTEGER NOT NULL DEFAULT '13', charfield CHAR(10))";
-} elsif (exists $ENV{DBICTEST_MYSQL_DSN}) {
+} elsif ($ENV{DBICTEST_MYSQL_DSN}) {
($dsn, $user, $pass) = @ENV{map { "DBICTEST_MYSQL_${_}" } qw/DSN USER PASS/};
$create_sql = "CREATE TABLE artist (artistid INTEGER NOT NULL AUTO_INCREMENT PRIMARY KEY, name VARCHAR(100), rank INTEGER NOT NULL DEFAULT '13', charfield CHAR(10)) ENGINE=InnoDB";
Modified: DBIx-Class/0.08/branches/prefetch/t/bind/attribute.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/bind/attribute.t 2010-02-05 21:29:24 UTC (rev 8567)
+++ DBIx-Class/0.08/branches/prefetch/t/bind/attribute.t 2010-02-06 01:55:26 UTC (rev 8568)
@@ -38,7 +38,7 @@
->search({ artistid => 1});
is ( $rs->count, 1, 'where/bind first' );
-
+
$rs = $schema->resultset('Artist')->search({ artistid => 1})
->search({}, $where_bind);
@@ -76,7 +76,7 @@
$rs = $schema->resultset('Complex')->search({}, { bind => [ 1999 ] })->search({}, { where => \"title LIKE ?", bind => [ 'Spoon%' ] });
is_same_sql_bind(
$rs->as_query,
- "(SELECT me.artistid, me.name, me.rank, me.charfield FROM (SELECT a.*, cd.cdid AS cdid, cd.title AS title, cd.year AS year FROM artist a JOIN cd ON cd.artist = a.artistid WHERE cd.year = ?) WHERE title LIKE ?)",
+ "(SELECT me.artistid, me.name, me.rank, me.charfield FROM (SELECT a.*, cd.cdid AS cdid, cd.title AS title, cd.year AS year FROM artist a JOIN cd ON cd.artist = a.artistid WHERE cd.year = ?) me WHERE title LIKE ?)",
[
[ '!!dummy' => '1999' ],
[ '!!dummy' => 'Spoon%' ]
@@ -105,7 +105,7 @@
$rs = $schema->resultset('CustomSql')->search({}, { bind => [ 1999 ] })->search({}, { where => \"title LIKE ?", bind => [ 'Spoon%' ] });
is_same_sql_bind(
$rs->as_query,
- "(SELECT me.artistid, me.name, me.rank, me.charfield FROM (SELECT a.*, cd.cdid AS cdid, cd.title AS title, cd.year AS year FROM artist a JOIN cd ON cd.artist = a.artistid WHERE cd.year = ?) WHERE title LIKE ?)",
+ "(SELECT me.artistid, me.name, me.rank, me.charfield FROM (SELECT a.*, cd.cdid AS cdid, cd.title AS title, cd.year AS year FROM artist a JOIN cd ON cd.artist = a.artistid WHERE cd.year = ?) me WHERE title LIKE ?)",
[
[ '!!dummy' => '1999' ],
[ '!!dummy' => 'Spoon%' ]
Modified: DBIx-Class/0.08/branches/prefetch/t/cdbi/22-deflate_order.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/cdbi/22-deflate_order.t 2010-02-05 21:29:24 UTC (rev 8567)
+++ DBIx-Class/0.08/branches/prefetch/t/cdbi/22-deflate_order.t 2010-02-06 01:55:26 UTC (rev 8568)
@@ -9,15 +9,17 @@
next;
}
+plan skip_all => 'Set $ENV{DBICTEST_MYSQL_DSN}, _USER and _PASS to run this test'
+ unless ($ENV{DBICTEST_MYSQL_DSN} && $ENV{DBICTEST_MYSQL_USER});
+
eval { require Time::Piece::MySQL };
plan skip_all => "Need Time::Piece::MySQL for this test" if $@;
+plan tests => 3;
+
use lib 't/cdbi/testlib';
-eval { require 't/cdbi/testlib/Log.pm' };
-plan skip_all => "Need MySQL for this test" if $@;
+use_ok ('Log');
-plan tests => 2;
-
package main;
my $log = Log->insert( { message => 'initial message' } );
Added: DBIx-Class/0.08/branches/prefetch/t/delete/complex.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/delete/complex.t (rev 0)
+++ DBIx-Class/0.08/branches/prefetch/t/delete/complex.t 2010-02-06 01:55:26 UTC (rev 8568)
@@ -0,0 +1,35 @@
+use strict;
+use warnings;
+
+use Test::More;
+use lib qw(t/lib);
+use DBICTest;
+
+my $schema = DBICTest->init_schema();
+my $artist_rs = $schema->resultset ('Artist');
+
+my $init_count = $artist_rs->count;
+ok ($init_count, 'Some artists in database');
+
+$artist_rs->populate ([
+ {
+ name => 'foo',
+ },
+ {
+ name => 'bar',
+ }
+]);
+
+is ($artist_rs->count, $init_count + 2, '2 Artists created');
+
+$artist_rs->search ({
+ -and => [
+ { 'me.artistid' => { '!=', undef } },
+ [ { 'me.name' => 'foo' }, { 'me.name' => 'bar' } ],
+ ],
+})->delete;
+
+is ($artist_rs->count, $init_count, 'Correct amount of artists deleted');
+
+done_testing;
+
Modified: DBIx-Class/0.08/branches/prefetch/t/from_subquery.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/from_subquery.t 2010-02-05 21:29:24 UTC (rev 8567)
+++ DBIx-Class/0.08/branches/prefetch/t/from_subquery.t 2010-02-06 01:55:26 UTC (rev 8568)
@@ -20,7 +20,7 @@
is_same_sql_bind(
$cdrs2->as_query,
- "(SELECT me.cdid,me.artist,me.title,me.year,me.genreid,me.single_track FROM cd me WHERE artist_id IN ( SELECT id FROM artist me LIMIT 1 ))",
+ "(SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track FROM cd me WHERE artist_id IN ( SELECT id FROM artist me LIMIT 1 ))",
[],
);
}
@@ -73,7 +73,9 @@
is_same_sql_bind(
$rs->as_query,
- "(SELECT cd2.cdid, cd2.artist, cd2.title, cd2.year, cd2.genreid, cd2.single_track FROM (SELECT me.cdid,me.artist,me.title,me.year,me.genreid,me.single_track FROM cd me WHERE ( id > ? ) ) cd2)",
+ "(SELECT cd2.cdid, cd2.artist, cd2.title, cd2.year, cd2.genreid, cd2.single_track FROM (
+ SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track FROM cd me WHERE ( id > ? )
+ ) cd2)",
[
[ 'id', 20 ]
],
@@ -119,11 +121,11 @@
is_same_sql_bind(
$rs->as_query,
- "(SELECT cd2.cdid, cd2.artist, cd2.title, cd2.year, cd2.genreid, cd2.single_track
- FROM
- (SELECT cd3.cdid,cd3.artist,cd3.title,cd3.year,cd3.genreid,cd3.single_track
- FROM
- (SELECT me.cdid,me.artist,me.title,me.year,me.genreid,me.single_track
+ "(SELECT cd2.cdid, cd2.artist, cd2.title, cd2.year, cd2.genreid, cd2.single_track
+ FROM
+ (SELECT cd3.cdid, cd3.artist, cd3.title, cd3.year, cd3.genreid, cd3.single_track
+ FROM
+ (SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track
FROM cd me WHERE ( id < ? ) ) cd3
WHERE ( id > ? ) ) cd2)",
[
@@ -163,7 +165,9 @@
is_same_sql_bind(
$rs->as_query,
- "(SELECT cd2.cdid, cd2.artist, cd2.title, cd2.year, cd2.genreid, cd2.single_track FROM (SELECT me.cdid,me.artist,me.title,me.year,me.genreid,me.single_track FROM cd me WHERE ( title = ? ) ) cd2)",
+ "(SELECT cd2.cdid, cd2.artist, cd2.title, cd2.year, cd2.genreid, cd2.single_track FROM (
+ SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track FROM cd me WHERE ( title = ? )
+ ) cd2)",
[ [ 'title', 'Thriller' ] ],
);
}
Modified: DBIx-Class/0.08/branches/prefetch/t/inflate/datetime_sybase.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/inflate/datetime_sybase.t 2010-02-05 21:29:24 UTC (rev 8567)
+++ DBIx-Class/0.08/branches/prefetch/t/inflate/datetime_sybase.t 2010-02-06 01:55:26 UTC (rev 8568)
@@ -17,9 +17,6 @@
if ($@) {
plan skip_all => 'needs DateTime and DateTime::Format::Sybase for testing';
}
- else {
- plan tests => (4 * 2 * 2) + 2; # (tests * dt_types * storage_types) + storage_tests
- }
}
my @storage_types = (
@@ -57,9 +54,9 @@
$schema->storage->dbh->do(<<"SQL");
CREATE TABLE track (
trackid INT IDENTITY PRIMARY KEY,
- cd INT,
- position INT,
- $col $type,
+ cd INT NULL,
+ position INT NULL,
+ $col $type NULL
)
SQL
ok(my $dt = DateTime::Format::Sybase->parse_datetime($sample_dt));
@@ -75,8 +72,33 @@
);
is( $row->$col, $dt, 'DateTime roundtrip' );
}
+
+ # test a computed datetime column
+ eval { $schema->storage->dbh->do("DROP TABLE track") };
+ $schema->storage->dbh->do(<<"SQL");
+CREATE TABLE track (
+ trackid INT IDENTITY PRIMARY KEY,
+ cd INT NULL,
+ position INT NULL,
+ title VARCHAR(100) NULL,
+ last_updated_on DATETIME NULL,
+ last_updated_at AS getdate(),
+ small_dt SMALLDATETIME NULL
+)
+SQL
+
+ my $now = DateTime->now;
+ sleep 1;
+ my $new_row = $schema->resultset('Track')->create({});
+ $new_row->discard_changes;
+
+ lives_and {
+ cmp_ok (($new_row->last_updated_at - $now)->seconds, '>=', 1)
+ } 'getdate() computed column works';
}
+done_testing;
+
# clean up our mess
END {
if (my $dbh = eval { $schema->storage->_dbh }) {
Added: DBIx-Class/0.08/branches/prefetch/t/inflate/datetime_sybase_asa.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/inflate/datetime_sybase_asa.t (rev 0)
+++ DBIx-Class/0.08/branches/prefetch/t/inflate/datetime_sybase_asa.t 2010-02-06 01:55:26 UTC (rev 8568)
@@ -0,0 +1,86 @@
+use strict;
+use warnings;
+
+use Test::More;
+use Test::Exception;
+use lib qw(t/lib);
+use DBICTest;
+
+my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_SYBASE_ASA_${_}" } qw/DSN USER PASS/};
+my ($dsn2, $user2, $pass2) = @ENV{map { "DBICTEST_SYBASE_ASA_ODBC_${_}" } qw/DSN USER PASS/};
+
+if (not ($dsn || $dsn2)) {
+ plan skip_all => <<'EOF';
+Set $ENV{DBICTEST_SYBASE_ASA_DSN} and/or $ENV{DBICTEST_SYBASE_ASA_ODBC_DSN}
+_USER and _PASS to run this test.
+Warning: This test drops and creates a table called 'track'.
+EOF
+} else {
+ eval "use DateTime; use DateTime::Format::Strptime;";
+ if ($@) {
+ plan skip_all => 'needs DateTime and DateTime::Format::Strptime for testing';
+ }
+}
+
+my @info = (
+ [ $dsn, $user, $pass ],
+ [ $dsn2, $user2, $pass2 ],
+);
+
+my @handles_to_clean;
+
+foreach my $info (@info) {
+ my ($dsn, $user, $pass) = @$info;
+
+ next unless $dsn;
+
+ my $schema = DBICTest::Schema->clone;
+
+ $schema->connection($dsn, $user, $pass, {
+ on_connect_call => [ 'datetime_setup' ],
+ });
+
+ push @handles_to_clean, $schema->storage->dbh;
+
+# coltype, col, date
+ my @dt_types = (
+ ['TIMESTAMP', 'last_updated_at', '2004-08-21 14:36:48.080444'],
+# date only (but minute precision according to ASA docs)
+ ['DATE', 'small_dt', '2004-08-21 00:00:00.000000'],
+ );
+
+ for my $dt_type (@dt_types) {
+ my ($type, $col, $sample_dt) = @$dt_type;
+
+ eval { $schema->storage->dbh->do("DROP TABLE track") };
+ $schema->storage->dbh->do(<<"SQL");
+ CREATE TABLE track (
+ trackid INT IDENTITY PRIMARY KEY,
+ cd INT,
+ position INT,
+ $col $type,
+ )
+SQL
+ ok(my $dt = $schema->storage->datetime_parser->parse_datetime($sample_dt));
+
+ my $row;
+ ok( $row = $schema->resultset('Track')->create({
+ $col => $dt,
+ cd => 1,
+ }));
+ ok( $row = $schema->resultset('Track')
+ ->search({ trackid => $row->trackid }, { select => [$col] })
+ ->first
+ );
+ is( $row->$col, $dt, 'DateTime roundtrip' );
+ }
+}
+
+done_testing;
+
+# clean up our mess
+END {
+ foreach my $dbh (@handles_to_clean) {
+ eval { $dbh->do("DROP TABLE $_") } for qw/track/;
+ }
+}
Added: DBIx-Class/0.08/branches/prefetch/t/lib/DBICTest/Schema/ComputedColumn.pm
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/lib/DBICTest/Schema/ComputedColumn.pm (rev 0)
+++ DBIx-Class/0.08/branches/prefetch/t/lib/DBICTest/Schema/ComputedColumn.pm 2010-02-06 01:55:26 UTC (rev 8568)
@@ -0,0 +1,34 @@
+package # hide from PAUSE
+ DBICTest::Schema::ComputedColumn;
+
+# for sybase and mssql computed column tests
+
+use base qw/DBICTest::BaseResult/;
+
+__PACKAGE__->table('computed_column_test');
+
+__PACKAGE__->add_columns(
+ 'id' => {
+ data_type => 'integer',
+ is_auto_increment => 1,
+ },
+ 'a_computed_column' => {
+ data_type => undef,
+ is_nullable => 0,
+ default_value => \'getdate()',
+ },
+ 'a_timestamp' => {
+ data_type => 'timestamp',
+ is_nullable => 0,
+ },
+ 'charfield' => {
+ data_type => 'varchar',
+ size => 20,
+ default_value => 'foo',
+ is_nullable => 0,
+ }
+);
+
+__PACKAGE__->set_primary_key('id');
+
+1;
Modified: DBIx-Class/0.08/branches/prefetch/t/lib/sqlite.sql
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/lib/sqlite.sql 2010-02-05 21:29:24 UTC (rev 8567)
+++ DBIx-Class/0.08/branches/prefetch/t/lib/sqlite.sql 2010-02-06 01:55:26 UTC (rev 8568)
@@ -1,11 +1,9 @@
--
-- Created by SQL::Translator::Producer::SQLite
--- Created on Tue Jan 19 12:46:12 2010
+-- Created on Sat Jan 30 19:18:55 2010
--
+;
-
-BEGIN TRANSACTION;
-
--
-- Table: artist
--
@@ -447,6 +445,4 @@
-- View: year2000cds
--
CREATE VIEW year2000cds AS
- SELECT cdid, artist, title, year, genreid, single_track FROM cd WHERE year = "2000";
-
-COMMIT;
+ SELECT cdid, artist, title, year, genreid, single_track FROM cd WHERE year = "2000"
\ No newline at end of file
Modified: DBIx-Class/0.08/branches/prefetch/t/prefetch/grouped.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/prefetch/grouped.t 2010-02-05 21:29:24 UTC (rev 8567)
+++ DBIx-Class/0.08/branches/prefetch/t/prefetch/grouped.t 2010-02-06 01:55:26 UTC (rev 8568)
@@ -87,12 +87,12 @@
'(
SELECT me.cd, me.track_count, cd.cdid, cd.artist, cd.title, cd.year, cd.genreid, cd.single_track
FROM (
- SELECT me.cd, COUNT (me.trackid) AS track_count,
+ SELECT me.cd, COUNT (me.trackid) AS track_count
FROM track me
JOIN cd cd ON cd.cdid = me.cd
WHERE ( me.cd IN ( ?, ?, ?, ?, ? ) )
GROUP BY me.cd
- ) as me
+ ) me
JOIN cd cd ON cd.cdid = me.cd
WHERE ( me.cd IN ( ?, ?, ?, ?, ? ) )
)',
@@ -164,7 +164,7 @@
tracks.trackid, tracks.cd, tracks.position, tracks.title, tracks.last_updated_on, tracks.last_updated_at, tracks.small_dt,
liner_notes.liner_id, liner_notes.notes
FROM (
- SELECT me.cdid, COUNT( tracks.trackid ) AS track_count, MAX( tracks.trackid ) AS maxtr,
+ SELECT me.cdid, COUNT( tracks.trackid ) AS track_count, MAX( tracks.trackid ) AS maxtr
FROM cd me
LEFT JOIN track tracks ON tracks.cd = me.cdid
WHERE ( me.cdid IS NOT NULL )
Modified: DBIx-Class/0.08/branches/prefetch/t/search/subquery.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/search/subquery.t 2010-02-05 21:29:24 UTC (rev 8567)
+++ DBIx-Class/0.08/branches/prefetch/t/search/subquery.t 2010-02-06 01:55:26 UTC (rev 8568)
@@ -19,7 +19,7 @@
search => \[ "title = ? AND year LIKE ?", 'buahaha', '20%' ],
attrs => { rows => 5 },
sqlbind => \[
- "( SELECT me.cdid,me.artist,me.title,me.year,me.genreid,me.single_track FROM cd me WHERE (title = ? AND year LIKE ?) LIMIT 5)",
+ "( SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track FROM cd me WHERE (title = ? AND year LIKE ?) LIMIT 5)",
'buahaha',
'20%',
],
@@ -31,7 +31,7 @@
artist_id => { 'in' => $art_rs->search({}, { rows => 1 })->get_column( 'id' )->as_query },
},
sqlbind => \[
- "( SELECT me.cdid,me.artist,me.title,me.year,me.genreid,me.single_track FROM cd me WHERE artist_id IN ( SELECT id FROM artist me LIMIT 1 ) )",
+ "( SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track FROM cd me WHERE artist_id IN ( SELECT id FROM artist me LIMIT 1 ) )",
],
},
@@ -68,7 +68,10 @@
],
},
sqlbind => \[
- "( SELECT cd2.cdid, cd2.artist, cd2.title, cd2.year, cd2.genreid, cd2.single_track FROM (SELECT me.cdid,me.artist,me.title,me.year,me.genreid,me.single_track FROM cd me WHERE id > ?) cd2 )",
+ "( SELECT cd2.cdid, cd2.artist, cd2.title, cd2.year, cd2.genreid, cd2.single_track FROM (
+ SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track FROM cd me WHERE id > ?
+ ) cd2
+ )",
[ 'id', 20 ]
],
},
@@ -107,9 +110,9 @@
sqlbind => \[
"( SELECT cd2.cdid, cd2.artist, cd2.title, cd2.year, cd2.genreid, cd2.single_track
FROM
- (SELECT cd3.cdid,cd3.artist,cd3.title,cd3.year,cd3.genreid,cd3.single_track
+ (SELECT cd3.cdid, cd3.artist, cd3.title, cd3.year, cd3.genreid, cd3.single_track
FROM
- (SELECT me.cdid,me.artist,me.title,me.year,me.genreid,me.single_track
+ (SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track
FROM cd me WHERE id < ?) cd3
WHERE id > ?) cd2
)",
@@ -142,7 +145,10 @@
],
},
sqlbind => \[
- "(SELECT cd2.cdid, cd2.artist, cd2.title, cd2.year, cd2.genreid, cd2.single_track FROM (SELECT me.cdid,me.artist,me.title,me.year,me.genreid,me.single_track FROM cd me WHERE title = ?) cd2)",
+ "(SELECT cd2.cdid, cd2.artist, cd2.title, cd2.year, cd2.genreid, cd2.single_track FROM (
+ SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track FROM cd me WHERE title = ?
+ ) cd2
+ )",
[ 'title',
'Thriller'
]
Modified: DBIx-Class/0.08/branches/prefetch/t/sqlahacks/limit_dialects/toplimit.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/sqlahacks/limit_dialects/toplimit.t 2010-02-05 21:29:24 UTC (rev 8567)
+++ DBIx-Class/0.08/branches/prefetch/t/sqlahacks/limit_dialects/toplimit.t 2010-02-06 01:55:26 UTC (rev 8568)
@@ -84,7 +84,7 @@
{
order_by => [ qw{ foo bar} ],
order_req => 'foo, bar',
- order_inner => 'foo ASC,bar ASC',
+ order_inner => 'foo ASC, bar ASC',
order_outer => 'foo DESC, bar DESC',
},
{
Modified: DBIx-Class/0.08/branches/prefetch/t/storage/debug.t
===================================================================
--- DBIx-Class/0.08/branches/prefetch/t/storage/debug.t 2010-02-05 21:29:24 UTC (rev 8567)
+++ DBIx-Class/0.08/branches/prefetch/t/storage/debug.t 2010-02-06 01:55:26 UTC (rev 8568)
@@ -51,7 +51,7 @@
my @cds = $schema->resultset('CD')->search( { artist => 1, cdid => { -between => [ 1, 3 ] }, } );
is_same_sql_bind(
$sql, \@bind,
- "SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track FROM cd me WHERE ( artist = ? AND (cdid BETWEEN ? AND ?) ): '1', '1', '3'",
+ "SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track FROM cd me WHERE ( artist = ? AND (cdid BETWEEN ? AND ?) )",
[qw/'1' '1' '3'/],
'got correct SQL with all bind parameters (debugcb)'
);