File Coverage

blib/lib/DBIx/Class/Fixtures.pm
Criterion     Covered   Total       %
statement          19      21    90.4
branch            n/a
condition         n/a
subroutine          7       7   100.0
pod               n/a
total              26      28    92.8


line stmt bran cond sub pod time code
1             package DBIx::Class::Fixtures;
2              
3 1     1   45687 use strict;
4 1     1   4 use warnings;
5              
6 1     1   238 use DBIx::Class 0.08100;
7 1     1   7 use DBIx::Class::Exception;
8 1     1   4 use Class::Accessor::Grouped;
9 1     1   230 use Config::Any::JSON;
10 1     1   76 use Data::Dump::Streamer;
11             use Data::Visitor::Callback;
12             use Hash::Merge qw( merge );
13             use Data::Dumper;
14             use Class::C3::Componentised;
15             use MIME::Base64;
16             use IO::All;
17             use File::Temp qw/tempdir/;
18              
19             use base qw(Class::Accessor::Grouped);
20              
21             our $namespace_counter = 0;
22              
23             __PACKAGE__->mk_group_accessors( 'simple' => qw/config_dir
24             _inherited_attributes debug schema_class dumped_objects config_attrs/);
25              
26             our $VERSION = '1.001039';
27              
28             $VERSION = eval $VERSION;
29              
30             =head1 NAME
31              
32             DBIx::Class::Fixtures - Dump data and repopulate a database using rules
33              
34             =head1 SYNOPSIS
35              
36             use DBIx::Class::Fixtures;
37              
38             ...
39              
40             my $fixtures = DBIx::Class::Fixtures->new({
41             config_dir => '/home/me/app/fixture_configs'
42             });
43              
44             $fixtures->dump({
45             config => 'set_config.json',
46             schema => $source_dbic_schema,
47             directory => '/home/me/app/fixtures'
48             });
49              
50             $fixtures->populate({
51             directory => '/home/me/app/fixtures',
52             ddl => '/home/me/app/sql/ddl.sql',
53             connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'],
54             post_ddl => '/home/me/app/sql/post_ddl.sql',
55             });
56              
57             =head1 DESCRIPTION
58              
59             Dump fixtures from a source database to the filesystem, then import them into
60             another database (with the same schema) at any time. Use them as a constant dataset
61             for running tests against, or for populating development databases when it is
62             impractical to use production clones. Describe the fixture set using relations and
63             conditions based on your DBIx::Class schema.
64              
65             =head1 DEFINE YOUR FIXTURE SET
66              
67             Fixture sets are currently defined in .json files which must reside in your
68             config_dir (e.g. /home/me/app/fixture_configs/a_fixture_set.json). They
69             describe which data to pull and dump from the source database.
70              
71             For example:
72              
73             {
74             "sets": [
75             {
76             "class": "Artist",
77             "ids": ["1", "3"]
78             },
79             {
80             "class": "Producer",
81             "ids": ["5"],
82             "fetch": [
83             {
84             "rel": "artists",
85             "quantity": "2"
86             }
87             ]
88             }
89             ]
90             }
91              
92             This will fetch artists with primary keys 1 and 3, the producer with primary
93             key 5 and two of producer 5's artists where 'artists' is a has_many DBIx::Class
94             rel from Producer to Artist.
95              
96             The top level attributes are as follows:
97              
98             =head2 sets
99              
100             Sets must be an array of hashes, as in the example given above. Each set
101             defines a set of objects to be included in the fixtures. For details on valid
102             set attributes see L</SET ATTRIBUTES> below.
103              
104             =head2 rules
105              
106             Rules place general conditions on classes. For example, if you wanted all of an
107             artist's cds dumped whenever that artist was dumped, you could use a
108             rule to specify this:
109              
110             {
111             "sets": [
112             {
113             "class": "Artist",
114             "ids": ["1", "3"]
115             },
116             {
117             "class": "Producer",
118             "ids": ["5"],
119             "fetch": [
120             {
121             "rel": "artists",
122             "quantity": "2"
123             }
124             ]
125             }
126             ],
127             "rules": {
128             "Artist": {
129             "fetch": [ {
130             "rel": "cds",
131             "quantity": "all"
132             } ]
133             }
134             }
135             }
136              
137             In this case all the cds of artists 1 and 3, and of all of producer 5's artists,
138             will be dumped as well. Note that 'cds' is a has_many DBIx::Class relation from Artist
139             to CD. This is equivalent to:
140              
141             {
142             "sets": [
143             {
144             "class": "Artist",
145             "ids": ["1", "3"],
146             "fetch": [ {
147             "rel": "cds",
148             "quantity": "all"
149             } ]
150             },
151             {
152             "class": "Producer",
153             "ids": ["5"],
154             "fetch": [ {
155             "rel": "artists",
156             "quantity": "2",
157             "fetch": [ {
158             "rel": "cds",
159             "quantity": "all"
160             } ]
161             } ]
162             }
163             ]
164             }
165              
166             rules must be a hash keyed by class name.
167              
168             L</RULE ATTRIBUTES>
169              
170             =head2 includes
171              
172             To prevent repetition between configs you can include other configs. For
173             example:
174              
175             {
176             "sets": [ {
177             "class": "Producer",
178             "ids": ["5"]
179             } ],
180             "includes": [
181             { "file": "base.json" }
182             ]
183             }
184              
185             Includes must be an arrayref of hashrefs where the hashrefs have key 'file'
186             which is the name of another config file in the same directory. The original
187             config is merged with its includes using L<Hash::Merge>.
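
As a rough illustration of the merge (the decoded file contents below are hypothetical), the including config and each include are combined with Hash::Merge's C<merge>:

    use Hash::Merge qw( merge );

    # Hypothetical decoded contents of the two config files.
    my $config  = {
        sets     => [ { class => 'Producer', ids => ['5'] } ],
        includes => [ { file => 'base.json' } ],
    };
    my $include = { might_have => { fetch => 1 } };

    # The merged result carries both the Producer set and the
    # might_have default contributed by base.json.
    my $merged = merge( $config, $include );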
188              
189             =head2 datetime_relative
190              
191             Only available for MySQL and PostgreSQL at the moment; must be a value that
192             DateTime::Format::* can parse. For example:
193              
194             {
195             "sets": [ {
196             "class": "RecentItems",
197             "ids": ["9"]
198             } ],
199             "datetime_relative": "2007-10-30 00:00:00"
200             }
201              
202             This will work when dumping from a MySQL database and will cause any datetime
203             fields (where data_type => 'datetime' in the column definition of the schema class) to
204             be dumped as a DateTime::Duration object relative to the date specified in the
205             datetime_relative value. For example if the RecentItem object had a date field
206             set to 2007-10-25, then when the fixture is imported the field will be set to 5
207             days in the past relative to the current time.
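
A minimal sketch of the idea, assuming a MySQL-style parser (DateTime::Format::MySQL here is only an illustration; the module actually uses the storage's own datetime parser):

    use DateTime;
    use DateTime::Format::MySQL;

    my $anchor = DateTime::Format::MySQL->parse_datetime('2007-10-30 00:00:00');
    my $stored = DateTime::Format::MySQL->parse_datetime('2007-10-25 00:00:00');

    # At dump time the column is written out as a duration relative to the anchor.
    my $duration = $stored->subtract_datetime($anchor);    # roughly -5 days

    # At populate time the duration is re-applied to today, so the restored value
    # stays 5 days in the past relative to whenever the import runs.
    my $restored = DateTime->today->add_duration($duration);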
208              
209             =head2 might_have
210              
211             Specifies whether to automatically dump might_have relationships. Should be a
212             hash with one attribute - fetch. Set fetch to 1 or 0.
213              
214             {
215             "might_have": { "fetch": 1 },
216             "sets": [
217             {
218             "class": "Artist",
219             "ids": ["1", "3"]
220             },
221             {
222             "class": "Producer",
223             "ids": ["5"]
224             }
225             ]
226             }
227              
228             Note: belongs_to rels are automatically dumped whether you like it or not; this
229             is to avoid FKs to nowhere when importing. General rules on has_many rels are
230             not accepted at this top level, but you can turn them on for individual sets -
231             see L</SET ATTRIBUTES>.
232              
233             =head1 SET ATTRIBUTES
234              
235             =head2 class
236              
237             Required attribute. Specifies the DBIx::Class object class you wish to dump.
238              
239             =head2 ids
240              
241             Array of primary key ids to fetch, basically causing an $rs->find($_) for each.
242             If an id is not in the source db then it just won't get dumped - no warnings,
243             no death.
244              
245             =head2 quantity
246              
247             Must be either an integer or the string 'all'. Specifying an integer will
248             effectively set the 'rows' attribute on the resultset clause; specifying 'all'
249             will cause the rows attribute to be left off and all matching rows to be
250             dumped. There's no randomising here; it's just the first x rows.
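
In DBIx::Class terms this is roughly:

    # quantity => 10
    $rs = $rs->search({}, { rows => 10 });

    # quantity => 'all' simply leaves the rows attribute off.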
251              
252             =head2 cond
253              
254             A hash specifying the conditions dumped objects must match. Essentially this is
255             a JSON representation of a DBIx::Class search clause. For example:
256              
257             {
258             "sets": [{
259             "class": "Artist",
260             "quantity": "all",
261             "cond": { "name": "Dave" }
262             }]
263             }
264              
265             This will dump all artists whose name is 'Dave'. Essentially
266             $artist_rs->search({ name => 'Dave' })->all.
267              
268             Sometimes in a search clause it's useful to use scalar refs to do things like:
269              
270             $artist_rs->search({ no1_singles => \'> no1_albums' })
271              
272             This could be specified in the cond hash like so:
273              
274             {
275             "sets": [ {
276             "class": "Artist",
277             "quantity": "all",
278             "cond": { "no1_singles": "\> no1_albums" }
279             } ]
280             }
281              
282             So if the value starts with a backslash, the backslash is stripped and the rest of
283             the value is made into a scalar ref before being passed to search.
284              
285             =head2 join
286              
287             An array of relationships to be used in the cond clause.
288              
289             {
290             "sets": [ {
291             "class": "Artist",
292             "quantity": "all",
293             "cond": { "cds.position": { ">": 4 } },
294             "join": ["cds"]
295             } ]
296             }
297              
298             Fetch all artists who have cds with position greater than 4.
299              
300             =head2 fetch
301              
302             Must be an array of hashes. Specifies which rels to also dump. For example:
303              
304             {
305             "sets": [ {
306             "class": "Artist",
307             "ids": ["1", "3"],
308             "fetch": [ {
309             "rel": "cds",
310             "quantity": "3",
311             "cond": { "position": "2" }
312             } ]
313             } ]
314             }
315              
316             Will cause the cds of artists 1 and 3 to be dumped where the cd position is 2.
317              
318             Valid attributes are: 'rel', 'quantity', 'cond', 'has_many', 'might_have' and
319             'join'. rel is the name of the DBIx::Class rel to follow; the rest are the same
320             as in the set attributes. quantity is necessary for has_many relationships, but
321             not for belongs_to or might_have relationships.
322              
323             =head2 has_many
324              
325             Specifies whether to fetch has_many rels for this set. Must be a hash
326             containing keys fetch and quantity.
327              
328             Set fetch to 1 if you want to fetch them, and quantity to either 'all' or an
329             integer.
330              
331             Be careful here, dumping has_many rels can lead to a lot of data being dumped.
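
For example, as part of a set (shown here in the Perl HashRef form that C<dump> also accepts):

    my $set = {
        class    => 'Artist',
        ids      => [ 1, 3 ],
        has_many => { fetch => 1, quantity => 10 },
    };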
332              
333             =head2 might_have
334              
335             As with has_many but for might_have relationships. Quantity doesn't do anything
336             in this case.
337              
338             This value will be inherited by all fetches in this set. This is not true for
339             the has_many attribute.
340              
341             =head2 external
342              
343             In some cases your database information might be keys to values in some sort of
344             external storage. The classic example is you are using L<DBIx::Class::InflateColumn::FS>
345             to store blob information on the filesystem. In this case you may want the ability
346             to back up your external storage in the same way as your database data. The C<external>
347             attribute lets you specify a handler for this type of issue. For example:
348              
349             {
350             "sets": [{
351             "class": "Photo",
352             "quantity": "all",
353             "external": {
354             "file": {
355             "class": "File",
356             "args": {"path":"__ATTR(photo_dir)__"}
357             }
358             }
359             }]
360             }
361              
362             This would use L<DBIx::Class::Fixtures::External::File> to read from a directory
363             where the path to a file is specified by the C<file> field of the C<Photo> source.
364             We use the uninflated value of the field, so you need to completely handle backup
365             and restore. For the common case we provide L<DBIx::Class::Fixtures::External::File>,
366             and you can create your own custom handlers by placing a '+' in the namespace:
367              
368             "class": "+MyApp::Schema::SomeExternalStorage",
369              
370             Although if possible I'd love to get patches to add some of the other common
371             types (I imagine storage in MogileFS, Redis, etc or even Amazon might be popular.)
372              
373             See L<DBIx::Class::Fixtures::External::File> for the external handler interface.
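
Based on how the dump and populate code calls these handlers, a custom handler is essentially a class providing C<backup> and C<restore> class methods. A minimal sketch (the path handling is illustrative only, not the behaviour of the bundled File handler):

    package MyApp::Schema::SomeExternalStorage;

    use strict;
    use warnings;
    use IO::All;

    # Called at dump time; must return the raw content for the given key.
    sub backup {
        my ($class, $key, $args) = @_;
        return io->catfile($args->{path}, $key)->slurp;
    }

    # Called at populate time with the key, the decoded content and the args.
    sub restore {
        my ($class, $key, $content, $args) = @_;
        io->catfile($args->{path}, $key)->print($content);
        return;
    }

    1;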
374              
375             =head1 RULE ATTRIBUTES
376              
377             =head2 cond
378              
379             Same as with L</cond>
380              
381             =head2 fetch
382              
383             Same as with L</fetch>
384              
385             =head2 join
386              
387             Same as with L</join>
388              
389             =head2 has_many
390              
391             Same as with L</has_many>
392              
393             =head2 might_have
394              
395             Same as with L</might_have>
396              
397             =head1 RULE SUBSTITUTIONS
398              
399             You can provide the following substitution patterns for your rule values. An
400             example of this might be:
401              
402             {
403             "sets": [{
404             "class": "Photo",
405             "quantity": "__ENV(NUMBER_PHOTOS_DUMPED)__"
406             }]
407             }
408              
409             =head2 ENV
410              
411             Provide a value from %ENV
412              
413             =head2 ATTR
414              
415             Provide a value from L</config_attrs>
416              
417             =head2 catfile
418              
419             Create the path to a file from a list
420              
421             =head2 catdir
422              
423             Create the path to a directory from a list
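
Putting the substitutions together (the C<photo_dir> attribute and the environment variable are only illustrations), a config passed as a Perl HashRef might look like:

    my $fixtures = DBIx::Class::Fixtures->new({
        config_dir   => '/home/me/app/fixture_configs',
        config_attrs => { photo_dir => '/home/me/app/photos' },
    });

    my $config = {
        sets => [ {
            class    => 'Photo',
            quantity => '__ENV(NUMBER_PHOTOS_DUMPED)__',
            external => {
                file => {
                    class => 'File',
                    # or build a path from parts: '__catfile(var,photos)__'
                    args  => { path => '__ATTR(photo_dir)__' },
                },
            },
        } ],
    };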
424              
425             =head1 METHODS
426              
427             =head2 new
428              
429             =over 4
430              
431             =item Arguments: \%$attrs
432              
433             =item Return Value: $fixture_object
434              
435             =back
436              
437             Returns a new DBIx::Class::Fixtures object. %attrs can have the following
438             parameters:
439              
440             =over
441              
442             =item config_dir:
443              
444             Required. Must contain a valid path to the directory in which your .json
445             configs reside.
446              
447             =item debug:
448              
449             Determines whether to be verbose.
450              
451             =item ignore_sql_errors:
452              
453             Ignore errors on import of DDL, etc.
454              
455             =item config_attrs
456              
457             A hash of information you can use to do replacements inside your configuration
458             sets. For example, if your set looks like:
459              
460             {
461             "sets": [ {
462             "class": "Artist",
463             "ids": ["1", "3"],
464             "fetch": [ {
465             "rel": "cds",
466             "quantity": "__ATTR(quantity)__",
467             } ]
468             } ]
469             }
470              
471             my $fixtures = DBIx::Class::Fixtures->new( {
472             config_dir => '/home/me/app/fixture_configs',
473             config_attrs => {
474             quantity => 100,
475             },
476             });
477              
478             You may wish to do this if you want to let whoever runs the dumps have a bit
479             more control.
480              
481             =back
482              
483             my $fixtures = DBIx::Class::Fixtures->new( {
484             config_dir => '/home/me/app/fixture_configs'
485             } );
486              
487             =cut
488              
489             sub new {
490             my $class = shift;
491              
492             my ($params) = @_;
493             unless (ref $params eq 'HASH') {
494             return DBIx::Class::Exception->throw('first arg to DBIx::Class::Fixtures->new() must be hash ref');
495             }
496              
497             unless ($params->{config_dir}) {
498             return DBIx::Class::Exception->throw('config_dir param not specified');
499             }
500              
501             my $config_dir = io->dir($params->{config_dir});
502             unless (-e $params->{config_dir}) {
503             return DBIx::Class::Exception->throw('config_dir directory doesn\'t exist');
504             }
505              
506             my $self = {
507             config_dir => $config_dir,
508             _inherited_attributes => [qw/datetime_relative might_have rules belongs_to/],
509             debug => $params->{debug} || 0,
510             ignore_sql_errors => $params->{ignore_sql_errors},
511             dumped_objects => {},
512             use_create => $params->{use_create} || 0,
513             use_find_or_create => $params->{use_find_or_create} || 0,
514             config_attrs => $params->{config_attrs} || {},
515             };
516              
517             bless $self, $class;
518              
519             return $self;
520             }
521              
522             =head2 available_config_sets
523              
524             Returns a list of all the config sets found in the L</config_dir>. These will
525             be a list of the JSON-based files containing dump rules.
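
For example, with the constructor call from the SYNOPSIS:

    my @sets = $fixtures->available_config_sets;
    # e.g. ('base.json', 'set_config.json') - whatever .json files live in config_dir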
526              
527             =cut
528              
529             my @config_sets;
530             sub available_config_sets {
531             @config_sets = scalar(@config_sets) ? @config_sets : map {
532             $_->filename;
533             } grep {
534             -f "$_" && $_=~/json$/;
535             } shift->config_dir->all;
536             }
537              
538             =head2 dump
539              
540             =over 4
541              
542             =item Arguments: \%$attrs
543              
544             =item Return Value: 1
545              
546             =back
547              
548             $fixtures->dump({
549             config => 'set_config.json', # config file to use. must be in the config
550             # directory specified in the constructor
551             schema => $source_dbic_schema,
552             directory => '/home/me/app/fixtures' # output directory
553             });
554              
555             or
556              
557             $fixtures->dump({
558             all => 1, # just dump everything that's in the schema
559             schema => $source_dbic_schema,
560             directory => '/home/me/app/fixtures', # output directory
561             #excludes => [ qw/Foo MyView/ ], # optionally exclude certain sources
562             });
563              
564             In this case objects will be dumped to subdirectories in the specified
565             directory. For example:
566              
567             /home/me/app/fixtures/artist/1.fix
568             /home/me/app/fixtures/artist/3.fix
569             /home/me/app/fixtures/producer/5.fix
570              
571             C<schema> and C<directory> are required attributes. Also, one of C<config> or C<all> must
572             be specified.
573              
574             The optional parameter C<excludes> takes an array ref of source names and can be
575             used to exclude those sources when dumping the whole schema. This is useful if
576             you have views in there, since those do not need fixtures and will currently result
577             in an error when they are created and then used with C<populate>.
578              
579             Lastly, the C<config> parameter can be a Perl HashRef instead of a file name.
580             If this form is used, your HashRef should conform to the structure rules defined
581             for the JSON representations.
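
A short sketch of that form, reusing the Artist set from above:

    $fixtures->dump({
        schema    => $source_dbic_schema,
        directory => '/home/me/app/fixtures',
        config    => {
            sets => [ { class => 'Artist', ids => [ 1, 3 ] } ],
        },
    });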
582              
583             =cut
584              
585             sub dump {
586             my $self = shift;
587              
588             my ($params) = @_;
589             unless (ref $params eq 'HASH') {
590             return DBIx::Class::Exception->throw('first arg to dump must be hash ref');
591             }
592              
593             foreach my $param (qw/schema directory/) {
594             unless ($params->{$param}) {
595             return DBIx::Class::Exception->throw($param . ' param not specified');
596             }
597             }
598              
599             if($params->{excludes} && !$params->{all}) {
600             return DBIx::Class::Exception->throw("'excludes' param only works when using the 'all' param");
601             }
602              
603             my $schema = $params->{schema};
604             my $config;
605             if ($params->{config}) {
606             $config = ref $params->{config} eq 'HASH' ?
607             $params->{config} :
608             do {
609             #read config
610             my $config_file = io->catfile($self->config_dir, $params->{config});
611             $self->load_config_file("$config_file");
612             };
613             } elsif ($params->{all}) {
614             my %excludes = map {$_=>1} @{$params->{excludes}||[]};
615             $config = {
616             might_have => { fetch => 0 },
617             has_many => { fetch => 0 },
618             belongs_to => { fetch => 0 },
619             sets => [
620             map {
621             { class => $_, quantity => 'all' };
622             } grep {
623             !$excludes{$_}
624             } $schema->sources],
625             };
626             } else {
627             DBIx::Class::Exception->throw('must pass config or set all');
628             }
629              
630             my $output_dir = io->dir($params->{directory});
631             unless (-e "$output_dir") {
632             $output_dir->mkpath ||
633             DBIx::Class::Exception->throw("output directory does not exist at $output_dir");
634             }
635              
636             $self->msg("generating fixtures");
637             my $tmp_output_dir = io->dir(tempdir);
638              
639             if (-e "$tmp_output_dir") {
640             $self->msg("- clearing existing $tmp_output_dir");
641             $tmp_output_dir->rmtree;
642             }
643             $self->msg("- creating $tmp_output_dir");
644             $tmp_output_dir->mkpath;
645              
646             # write version file (for the potential benefit of populate)
647             $tmp_output_dir->file('_dumper_version')->print($VERSION);
648              
649             # write our current config set
650             $tmp_output_dir->file('_config_set')->print( Dumper $config );
651              
652             $config->{rules} ||= {};
653             my @sources = @{delete $config->{sets}};
654              
655             while ( my ($k,$v) = each %{ $config->{rules} } ) {
656             if ( my $source = eval { $schema->source($k) } ) {
657             $config->{rules}{$source->source_name} = $v;
658             }
659             }
660              
661             foreach my $source (@sources) {
662             # apply rule to set if specified
663             my $rule = $config->{rules}->{$source->{class}};
664             $source = merge( $source, $rule ) if ($rule);
665              
666             # fetch objects
667             my $rs = $schema->resultset($source->{class});
668              
669             if ($source->{cond} and ref $source->{cond} eq 'HASH') {
670             # if value starts with \ assume it's meant to be passed as a scalar ref
671             # to dbic. ideally this would substitute deeply
672             $source->{cond} = {
673             map {
674             $_ => ($source->{cond}->{$_} =~ s/^\\//) ? \$source->{cond}->{$_}
675             : $source->{cond}->{$_}
676             } keys %{$source->{cond}}
677             };
678             }
679              
680             $rs = $rs->search($source->{cond}, { join => $source->{join} })
681             if $source->{cond};
682              
683             $self->msg("- dumping $source->{class}");
684              
685             my %source_options = ( set => { %{$config}, %{$source} } );
686             if ($source->{quantity}) {
687             $rs = $rs->search({}, { order_by => $source->{order_by} })
688             if $source->{order_by};
689              
690             if ($source->{quantity} =~ /^\d+$/) {
691             $rs = $rs->search({}, { rows => $source->{quantity} });
692             } elsif ($source->{quantity} ne 'all') {
693             DBIx::Class::Exception->throw("invalid value for quantity - $source->{quantity}");
694             }
695             }
696             elsif ($source->{ids} && @{$source->{ids}}) {
697             my @ids = @{$source->{ids}};
698             my (@pks) = $rs->result_source->primary_columns;
699             die "Can't dump multiple col-pks using 'id' option" if @pks > 1;
700             $rs = $rs->search_rs( { $pks[0] => { -in => \@ids } } );
701             }
702             else {
703             DBIx::Class::Exception->throw('must specify either quantity or ids');
704             }
705              
706             $source_options{set_dir} = $tmp_output_dir;
707             $self->dump_rs($rs, \%source_options );
708             }
709              
710             # clear existing output dir
711             foreach my $child ($output_dir->all) {
712             if ($child->is_dir) {
713             next if ("$child" eq "$tmp_output_dir");
714             if (grep { $_ =~ /\.fix/ } $child->all) {
715             $child->rmtree;
716             }
717             } elsif ($child =~ /_dumper_version$/) {
718             $child->unlink;
719             }
720             }
721              
722             $self->msg("- moving temp dir to $output_dir");
723             $tmp_output_dir->copy("$output_dir");
724              
725             if (-e "$output_dir") {
726             $self->msg("- clearing tmp dir $tmp_output_dir");
727             # delete existing fixture set
728             $tmp_output_dir->rmtree;
729             }
730              
731             $self->msg("done");
732              
733             return 1;
734             }
735              
736             sub load_config_file {
737             my ($self, $config_file) = @_;
738             DBIx::Class::Exception->throw("config does not exist at $config_file")
739             unless -e "$config_file";
740              
741             my $config = Config::Any::JSON->load($config_file);
742              
743             #process includes
744             if (my $incs = $config->{includes}) {
745             $self->msg($incs);
746             DBIx::Class::Exception->throw(
747             'includes params of config must be an array ref of hashrefs'
748             ) unless ref $incs eq 'ARRAY';
749              
750             foreach my $include_config (@$incs) {
751             DBIx::Class::Exception->throw(
752             'includes params of config must be an array ref of hashrefs'
753             ) unless (ref $include_config eq 'HASH') && $include_config->{file};
754              
755             my $include_file = $self->config_dir->file($include_config->{file});
756              
757             DBIx::Class::Exception->throw("config does not exist at $include_file")
758             unless -e "$include_file";
759              
760             my $include = Config::Any::JSON->load($include_file);
761             $self->msg($include);
762             $config = merge( $config, $include );
763             }
764             delete $config->{includes};
765             }
766              
767             # validate config
768             return DBIx::Class::Exception->throw('config has no sets')
769             unless $config && $config->{sets} &&
770             ref $config->{sets} eq 'ARRAY' && scalar @{$config->{sets}};
771              
772             $config->{might_have} = { fetch => 0 } unless exists $config->{might_have};
773             $config->{has_many} = { fetch => 0 } unless exists $config->{has_many};
774             $config->{belongs_to} = { fetch => 1 } unless exists $config->{belongs_to};
775              
776             return $config;
777             }
778              
779             sub dump_rs {
780             my ($self, $rs, $params) = @_;
781              
782             while (my $row = $rs->next) {
783             $self->dump_object($row, $params);
784             }
785             }
786              
787             sub dump_object {
788             my ($self, $object, $params) = @_;
789             my $set = $params->{set};
790              
791             my $v = Data::Visitor::Callback->new(
792             plain_value => sub {
793             my ($visitor, $data) = @_;
794             my $subs = {
795             ENV => sub {
796             my ( $self, $v ) = @_;
797             if (! defined($ENV{$v})) {
798             return "";
799             } else {
800             return $ENV{ $v };
801             }
802             },
803             ATTR => sub {
804             my ($self, $v) = @_;
805             if(my $attr = $self->config_attrs->{$v}) {
806             return $attr;
807             } else {
808             return "";
809             }
810             },
811             catfile => sub {
812             my ($self, @args) = @_;
813             "".io->catfile(@args);
814             },
815             catdir => sub {
816             my ($self, @args) = @_;
817             "".io->catdir(@args);
818             },
819             };
820              
821             my $subsre = join( '|', keys %$subs );
822             $_ =~ s{__($subsre)(?:\((.+?)\))?__}{ $subs->{ $1 }->( $self, $2 ? split( /,/, $2 ) : () ) }eg;
823              
824             return $_;
825             }
826             );
827              
828             $v->visit( $set );
829              
830             die 'no dir passed to dump_object' unless $params->{set_dir};
831             die 'no object passed to dump_object' unless $object;
832              
833             my @inherited_attrs = @{$self->_inherited_attributes};
834              
835             my @pk_vals = map {
836             $object->get_column($_)
837             } $object->primary_columns;
838              
839             my $key = join("\0", @pk_vals);
840              
841             my $src = $object->result_source;
842             my $exists = $self->dumped_objects->{$src->name}{$key}++;
843              
844              
845             # write dir and gen filename
846             my $source_dir = io->catdir($params->{set_dir}, $self->_name_for_source($src));
847             $source_dir->mkpath(0, 0777);
848              
849             # Convert characters not allowed on Windows
850             my $file = io->catfile("$source_dir",
851             join('-', map { s|[/\\:\*\|\?"<>]|_|g; $_; } @pk_vals) . '.fix'
852             );
853              
854             # write file
855             unless ($exists) {
856             $self->msg('-- dumping ' . "$file", 2);
857              
858             # get_columns will return virtual columns; we just want stored columns.
859             # columns_info keys seems to be the actual storage column names, so we'll
860             # use that.
861             my $col_info = $src->columns_info;
862             my @column_names = keys %$col_info;
863             my %columns = $object->get_columns;
864             my %ds; @ds{@column_names} = @columns{@column_names};
865              
866             if($set->{external}) {
867             foreach my $field (keys %{$set->{external}}) {
868             my $key = $ds{$field};
869             my ($plus, $class) = ( $set->{external}->{$field}->{class}=~/^(\+)*(.+)$/);
870             my $args = $set->{external}->{$field}->{args};
871              
872             $class = "DBIx::Class::Fixtures::External::$class" unless $plus;
873             eval "use $class";
874              
875             $ds{external}->{$field} =
876             encode_base64( $class
877             ->backup($key => $args),'');
878             }
879             }
880              
881             # mess with dates if specified
882             if ($set->{datetime_relative}) {
883             my $formatter= eval {$object->result_source->schema->storage->datetime_parser};
884             unless (!$formatter) {
885             my $dt;
886             if ($set->{datetime_relative} eq 'today') {
887             $dt = DateTime->today;
888             } else {
889             $dt = $formatter->parse_datetime($set->{datetime_relative}) unless ($@);
890             }
891              
892             while (my ($col, $value) = each %ds) {
893             my $col_info = $object->result_source->column_info($col);
894              
895             next unless $value
896             && $col_info->{_inflate_info}
897             && (
898             (uc($col_info->{data_type}) eq 'DATETIME')
899             or (uc($col_info->{data_type}) eq 'DATE')
900             or (uc($col_info->{data_type}) eq 'TIME')
901             or (uc($col_info->{data_type}) eq 'TIMESTAMP')
902             or (uc($col_info->{data_type}) eq 'INTERVAL')
903             );
904              
905             $ds{$col} = $object->get_inflated_column($col)->subtract_datetime($dt);
906             }
907             } else {
908             warn "datetime_relative not supported for this db driver at the moment";
909             }
910             }
911              
912             # do the actual dumping
913             my $serialized = Dump(\%ds)->Out();
914              
915             $file->print($serialized);
916             }
917              
918             # don't bother looking at rels unless we are actually planning to dump at least one type
919             my ($might_have, $belongs_to, $has_many) = map {
920             $set->{$_}{fetch} || $set->{rules}{$src->source_name}{$_}{fetch}
921             } qw/might_have belongs_to has_many/;
922              
923             return unless $might_have
924             || $belongs_to
925             || $has_many
926             || $set->{fetch};
927              
928             # dump rels of object
929             unless ($exists) {
930             foreach my $name (sort $src->relationships) {
931             my $info = $src->relationship_info($name);
932             my $r_source = $src->related_source($name);
933             # if belongs_to or might_have with might_have param set or has_many with
934             # has_many param set then
935             if (
936             ( $info->{attrs}{accessor} eq 'single' &&
937             (!$info->{attrs}{join_type} || $might_have)
938             )
939             || $info->{attrs}{accessor} eq 'filter'
940             ||
941             ($info->{attrs}{accessor} eq 'multi' && $has_many)
942             ) {
943             my $related_rs = $object->related_resultset($name);
944             my $rule = $set->{rules}->{$related_rs->result_source->source_name};
945             # these parts of the rule only apply to has_many rels
946             if ($rule && $info->{attrs}{accessor} eq 'multi') {
947             $related_rs = $related_rs->search(
948             $rule->{cond},
949             { join => $rule->{join} }
950             ) if ($rule->{cond});
951              
952             $related_rs = $related_rs->search(
953             {},
954             { rows => $rule->{quantity} }
955             ) if ($rule->{quantity} && $rule->{quantity} ne 'all');
956              
957             $related_rs = $related_rs->search(
958             {},
959             { order_by => $rule->{order_by} }
960             ) if ($rule->{order_by});
961              
962             }
963             if ($set->{has_many}{quantity} &&
964             $set->{has_many}{quantity} =~ /^\d+$/) {
965             $related_rs = $related_rs->search(
966             {},
967             { rows => $set->{has_many}->{quantity} }
968             );
969             }
970              
971             my %c_params = %{$params};
972             # inherit date param
973             my %mock_set = map {
974             $_ => $set->{$_}
975             } grep { $set->{$_} } @inherited_attrs;
976              
977             $c_params{set} = \%mock_set;
978             $c_params{set} = merge( $c_params{set}, $rule)
979             if $rule && $rule->{fetch};
980              
981             $self->dump_rs($related_rs, \%c_params);
982             }
983             }
984             }
985              
986             return unless $set && $set->{fetch};
987             foreach my $fetch (@{$set->{fetch}}) {
988             # inherit date param
989             $fetch->{$_} = $set->{$_} foreach
990             grep { !$fetch->{$_} && $set->{$_} } @inherited_attrs;
991             my $related_rs = $object->related_resultset($fetch->{rel});
992             my $rule = $set->{rules}->{$related_rs->result_source->source_name};
993              
994             if ($rule) {
995             my $info = $object->result_source->relationship_info($fetch->{rel});
996             if ($info->{attrs}{accessor} eq 'multi') {
997             $fetch = merge( $fetch, $rule );
998             } elsif ($rule->{fetch}) {
999             $fetch = merge( $fetch, { fetch => $rule->{fetch} } );
1000             }
1001             }
1002              
1003             die "relationship $fetch->{rel} does not exist for " . $src->source_name
1004             unless ($related_rs);
1005              
1006             if ($fetch->{cond} and ref $fetch->{cond} eq 'HASH') {
1007             # if value starts with \ assume it's meant to be passed as a scalar ref
1008             # to dbic. ideally this would substitute deeply
1009             $fetch->{cond} = { map {
1010             $_ => ($fetch->{cond}->{$_} =~ s/^\\//) ? \$fetch->{cond}->{$_}
1011             : $fetch->{cond}->{$_}
1012             } keys %{$fetch->{cond}} };
1013             }
1014              
1015             $related_rs = $related_rs->search(
1016             $fetch->{cond},
1017             { join => $fetch->{join} }
1018             ) if $fetch->{cond};
1019              
1020             $related_rs = $related_rs->search(
1021             {},
1022             { rows => $fetch->{quantity} }
1023             ) if $fetch->{quantity} && $fetch->{quantity} ne 'all';
1024             $related_rs = $related_rs->search(
1025             {},
1026             { order_by => $fetch->{order_by} }
1027             ) if $fetch->{order_by};
1028              
1029             $self->dump_rs($related_rs, { %{$params}, set => $fetch });
1030             }
1031             }
1032              
1033             sub _generate_schema {
1034             my $self = shift;
1035             my $params = shift || {};
1036             require DBI;
1037             $self->msg("\ncreating schema");
1038              
1039             my $schema_class = $self->schema_class || "DBIx::Class::Fixtures::Schema";
1040             eval "require $schema_class";
1041             die $@ if $@;
1042              
1043             my $pre_schema;
1044             my $connection_details = $params->{connection_details};
1045              
1046             $namespace_counter++;
1047              
1048             my $namespace = "DBIx::Class::Fixtures::GeneratedSchema_$namespace_counter";
1049             Class::C3::Componentised->inject_base( $namespace => $schema_class );
1050              
1051             $pre_schema = $namespace->connect(@{$connection_details});
1052             unless( $pre_schema ) {
1053             return DBIx::Class::Exception->throw('connection details not valid');
1054             }
1055             my @tables = map { $self->_name_for_source($pre_schema->source($_)) } $pre_schema->sources;
1056             $self->msg("Tables to drop: [". join(', ', sort @tables) . "]");
1057             my $dbh = $pre_schema->storage->dbh;
1058              
1059             # clear existing db
1060             $self->msg("- clearing DB of existing tables");
1061             $pre_schema->storage->txn_do(sub {
1062             $pre_schema->storage->with_deferred_fk_checks(sub {
1063             foreach my $table (@tables) {
1064             eval {
1065             $dbh->do("drop table $table" . ($params->{cascade} ? ' cascade' : '') )
1066             };
1067             }
1068             });
1069             });
1070              
1071             # import new ddl file to db
1072             my $ddl_file = $params->{ddl};
1073             $self->msg("- deploying schema using $ddl_file");
1074             my $data = _read_sql($ddl_file);
1075             foreach (@$data) {
1076             eval { $dbh->do($_) or warn "SQL was:\n $_"};
1077             if ($@ && !$self->{ignore_sql_errors}) { die "SQL was:\n $_\n$@"; }
1078             }
1079             $self->msg("- finished importing DDL into DB");
1080              
1081             # load schema object from our new DB
1082             $namespace_counter++;
1083             my $namespace2 = "DBIx::Class::Fixtures::GeneratedSchema_$namespace_counter";
1084             Class::C3::Componentised->inject_base( $namespace2 => $schema_class );
1085             my $schema = $namespace2->connect(@{$connection_details});
1086             return $schema;
1087             }
1088              
1089             sub _read_sql {
1090             my $ddl_file = shift;
1091             my $fh;
1092             open $fh, "<$ddl_file" or die ("Can't open DDL file, $ddl_file ($!)");
1093             my @data = split(/\n/, join('', <$fh>));
1094             @data = grep(!/^--/, @data);
1095             @data = split(/;/, join('', @data));
1096             close($fh);
1097             @data = grep { $_ && $_ !~ /^-- / } @data;
1098             return \@data;
1099             }
1100              
1101             =head2 dump_config_sets
1102              
1103             Works just like L</dump> but instead of specifying a single JSON config set
1104             located in L</config_dir> we dump each set named in the C<configs> parameter.
1105              
1106             The parameters are the same as for L</dump> except instead of a C<directory>
1107             parameter we have a C<directory_template>, which is a coderef expected to return
1108             a scalar that is a root directory where we will do the actual dumping. This
1109             coderef gets three arguments: C<$self>, C<$params> and C<$set_name>. For
1110             example:
1111              
1112             $fixture->dump_config_sets({
1113             schema => $schema,
1114             configs => [qw/one.json other.json/],
1115             directory_template => sub {
1116             my ($fixture, $params, $set) = @_;
1117             return io->catdir('var', 'fixtures', $params->{schema}->version, $set);
1118             },
1119             });
1120              
1121             =cut
1122              
1123             sub dump_config_sets {
1124             my ($self, $params) = @_;
1125             my $available_config_sets = delete $params->{configs};
1126             my $directory_template = delete $params->{directory_template} ||
1127             DBIx::Class::Exception->throw("'directory_template' is a required parameter");
1128              
1129             for my $set (@$available_config_sets) {
1130             my $localparams = $params;
1131             $localparams->{directory} = $directory_template->($self, $localparams, $set);
1132             $localparams->{config} = $set;
1133             $self->dump($localparams);
1134             $self->dumped_objects({}); ## Clear dumped for next go, if there is one!
1135             }
1136             }
1137              
1138             =head2 dump_all_config_sets
1139              
1146              
1147             Works just like L</dump> but instead of specifying a single JSON config set
1148             located in L</config_dir> we dump each set in turn to the specified directory.
1149              
1150             The parameters are the same as for L</dump> except instead of a C<directory>
1151             parameter we have a C<directory_template>, which is a coderef expected to return
1152             a scalar that is a root directory where we will do the actual dumping. This
1153             coderef gets three arguments: C<$self>, C<$params> and C<$set_name>. For
1154             example:
1155              
1156             $fixture->dump_all_config_sets({
1157             schema => $schema,
1158             directory_template => sub {
1159             my ($fixture, $params, $set) = @_;
1160             return io->catdir('var', 'fixtures', $params->{schema}->version, $set);
1161             },
1162             });
1163              
1164             =cut
1165              
1166             sub dump_all_config_sets {
1167             my ($self, $params) = @_;
1168             $self->dump_config_sets({
1169             %$params,
1170             configs=>[$self->available_config_sets],
1171             });
1172             }
1173              
1174             =head2 populate
1175              
1176             =over 4
1177              
1178             =item Arguments: \%$attrs
1179              
1180             =item Return Value: 1
1181              
1182             =back
1183              
1184             $fixtures->populate( {
1185             # directory to look for fixtures in, as specified to dump
1186             directory => '/home/me/app/fixtures',
1187              
1188             # DDL to deploy
1189             ddl => '/home/me/app/sql/ddl.sql',
1190              
1191             # database to clear, deploy and then populate
1192             connection_details => ['dbi:mysql:dbname=app_dev', 'me', 'password'],
1193              
1194             # DDL to deploy after populating records, i.e. FK constraints
1195             post_ddl => '/home/me/app/sql/post_ddl.sql',
1196              
1197             # use CASCADE option when dropping tables
1198             cascade => 1,
1199              
1200             # optional, set to 1 to run ddl but not populate
1201             no_populate => 0,
1202              
1203             # optional, set to 1 to run each fixture through ->create rather than have
1204             # each $rs populated using $rs->populate. Useful if you have overridden new() logic
1205             # that affects the value of column(s).
1206             use_create => 0,
1207              
1208             # optional, same as use_create except with find_or_create.
1209             # Useful if you are populating a persistent data store.
1210             use_find_or_create => 0,
1211              
1212             # Don't try to clean the database, just populate over what's there. Requires the
1213             # schema option. Use this if you want to handle removing old data yourself.
1214             # no_deploy => 1
1215             # schema => $schema
1216             } );
1217              
1218             In this case the database app_dev will be cleared of all tables, then the
1219             specified DDL deployed to it, then finally all fixtures found in
1220             /home/me/app/fixtures will be added to it. C<populate> will generate its own
1221             DBIx::Class schema from the DDL rather than being passed one to use. This is
1222             better, as custom insert methods, which can get in the way, are avoided. In
1223             some cases you might not have a DDL, and so this method will eventually allow a
1224             $schema object to be passed instead.
1225              
1226             If needed, you can specify a post_ddl attribute which is a DDL to be applied
1227             after all the fixtures have been added to the database. A good use of this
1228             option would be to add foreign key constraints, since databases like PostgreSQL
1229             cannot disable foreign key checks.
1230              
1231             If your tables have foreign key constraints you may want to use the cascade
1232             attribute, which will make the drop table functionality cascade, i.e. 'DROP TABLE
1233             $table CASCADE'.
1234              
1235             C<directory> is a required attribute.
1236              
1237             If you wish for DBIx::Class::Fixtures to clear the database for you, pass in
1238             C<ddl> (path to a DDL SQL file) and C<connection_details> (array ref of DSN,
1239             user and pass).
1240              
1241             If you wish to deal with cleaning the schema yourself, then pass in a C<schema>
1242             attribute containing the connected schema you wish to operate on and set the
1243             C<no_deploy> attribute.
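
A minimal sketch of that second form, assuming C<$schema> is already connected to the target database:

    $fixtures->populate({
        directory => '/home/me/app/fixtures',
        no_deploy => 1,
        schema    => $schema,
    });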
1244              
1245             =cut
1246              
1247             sub populate {
1248             my $self = shift;
1249             my ($params) = @_;
1250             DBIx::Class::Exception->throw('first arg to populate must be hash ref')
1251             unless ref $params eq 'HASH';
1252              
1253             DBIx::Class::Exception->throw('directory param not specified')
1254             unless $params->{directory};
1255              
1256             my $fixture_dir = io->dir(delete $params->{directory});
1257             DBIx::Class::Exception->throw("fixture directory '$fixture_dir' does not exist")
1258             unless -d "$fixture_dir";
1259              
1260             my $ddl_file;
1261             my $dbh;
1262             my $schema;
1263             if ($params->{ddl} && $params->{connection_details}) {
1264             $ddl_file = io->file(delete $params->{ddl});
1265             unless (-e "$ddl_file") {
1266             return DBIx::Class::Exception->throw('DDL does not exist at ' . $ddl_file);
1267             }
1268             unless (ref $params->{connection_details} eq 'ARRAY') {
1269             return DBIx::Class::Exception->throw('connection details must be an arrayref');
1270             }
1271             $schema = $self->_generate_schema({
1272             ddl => "$ddl_file",
1273             connection_details => delete $params->{connection_details},
1274             %{$params}
1275             });
1276             } elsif ($params->{schema} && $params->{no_deploy}) {
1277             $schema = $params->{schema};
1278             } else {
1279             DBIx::Class::Exception->throw('you must set the ddl and connection_details params');
1280             }
1281              
1282              
1283             return 1 if $params->{no_populate};
1284              
1285             $self->msg("\nimporting fixtures");
1286             my $tmp_fixture_dir = io->dir(tempdir());
1287             my $config_set_path = io->file($fixture_dir, '_config_set');
1288             my $config_set = -e "$config_set_path" ? do { my $VAR1; eval($config_set_path->slurp); $VAR1 } : '';
1289              
1290             my $v = Data::Visitor::Callback->new(
1291             plain_value => sub {
1292             my ($visitor, $data) = @_;
1293             my $subs = {
1294             ENV => sub {
1295             my ( $self, $v ) = @_;
1296             if (! defined($ENV{$v})) {
1297             return "";
1298             } else {
1299             return $ENV{ $v };
1300             }
1301             },
1302             ATTR => sub {
1303             my ($self, $v) = @_;
1304             if(my $attr = $self->config_attrs->{$v}) {
1305             return $attr;
1306             } else {
1307             return "";
1308             }
1309             },
1310             catfile => sub {
1311             my ($self, @args) = @_;
1312             io->catfile(@args);
1313             },
1314             catdir => sub {
1315             my ($self, @args) = @_;
1316             io->catdir(@args);
1317             },
1318             };
1319              
1320             my $subsre = join( '|', keys %$subs );
1321             $_ =~ s{__($subsre)(?:\((.+?)\))?__}{ $subs->{ $1 }->( $self, $2 ? split( /,/, $2 ) : () ) }eg;
1322              
1323             return $_;
1324             }
1325             );
1326              
1327             $v->visit( $config_set );
1328              
1329              
1330             my %sets_by_src;
1331             if($config_set) {
1332             %sets_by_src = map { delete($_->{class}) => $_ }
1333             @{$config_set->{sets}}
1334             }
1335              
1336             if (-e "$tmp_fixture_dir") {
1337             $self->msg("- deleting existing temp directory $tmp_fixture_dir");
1338             $tmp_fixture_dir->rmtree;
1339             }
1340             $self->msg("- creating temp dir");
1341             $tmp_fixture_dir->mkpath();
1342             for ( map { $self->_name_for_source($schema->source($_)) } $schema->sources) {
1343             my $from_dir = io->catdir($fixture_dir, $_);
1344             next unless -e "$from_dir";
1345             $from_dir->copy( io->catdir($tmp_fixture_dir, $_)."" );
1346             }
1347              
1348             unless (-d "$tmp_fixture_dir") {
1349             DBIx::Class::Exception->throw("Unable to create temporary fixtures dir: $tmp_fixture_dir: $!");
1350             }
1351              
1352             my $fixup_visitor;
1353             my $formatter = $schema->storage->datetime_parser;
1354             unless ($@ || !$formatter) {
1355             my %callbacks;
1356             if ($params->{datetime_relative_to}) {
1357             $callbacks{'DateTime::Duration'} = sub {
1358             $params->{datetime_relative_to}->clone->add_duration($_);
1359             };
1360             } else {
1361             $callbacks{'DateTime::Duration'} = sub {
1362             $formatter->format_datetime(DateTime->today->add_duration($_))
1363             };
1364             }
1365             $callbacks{object} ||= "visit_ref";
1366             $fixup_visitor = Data::Visitor::Callback->new(%callbacks);
1367             }
1368              
1369             my @sorted_source_names = $self->_get_sorted_sources( $schema );
1370             $schema->storage->txn_do(sub {
1371             $schema->storage->with_deferred_fk_checks(sub {
1372             foreach my $source (@sorted_source_names) {
1373             $self->msg("- adding " . $source);
1374             my $rs = $schema->resultset($source);
1375             my $source_dir = io->catdir($tmp_fixture_dir, $self->_name_for_source($rs->result_source));
1376             next unless (-e "$source_dir");
1377             my @rows;
1378             while (my $file = $source_dir->next) {
1379             next unless ($file =~ /\.fix$/);
1380             next if $file->is_dir;
1381             my $contents = $file->slurp;
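                 ## each .fix file is a dumped row (written with
                 ## Data::Dump::Streamer) that assigns to $HASH1 when eval'd below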
1382             my $HASH1;
1383             eval($contents);
1384             $HASH1 = $fixup_visitor->visit($HASH1) if $fixup_visitor;
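                 ## externally stored content was base64-encoded into the row's
                 ## 'external' slot at dump time; each field is handed back to its
                 ## configured class so it can restore the data alongside the row.
                 ## Unless the class name is prefixed with '+', it is taken
                 ## relative to DBIx::Class::Fixtures::External::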
1385             if(my $external = delete $HASH1->{external}) {
1386             my @fields = keys %{$sets_by_src{$source}->{external}};
1387             foreach my $field(@fields) {
1388             my $key = $HASH1->{$field};
1389             my $content = decode_base64 ($external->{$field});
1390             my $args = $sets_by_src{$source}->{external}->{$field}->{args};
1391             my ($plus, $class) = ( $sets_by_src{$source}->{external}->{$field}->{class}=~/^(\+)*(.+)$/);
1392             $class = "DBIx::Class::Fixtures::External::$class" unless $plus;
1393             eval "use $class";
1394             $class->restore($key, $content, $args);
1395             }
1396             }
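                 ## with use_create / use_find_or_create each row goes through the
                 ## resultset one at a time (find_or_create leaves rows that
                 ## already exist untouched); otherwise rows are collected and
                 ## bulk-inserted via populate() once the directory has been read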
1397             if ( $params->{use_create} ) {
1398             $rs->create( $HASH1 );
1399             } elsif( $params->{use_find_or_create} ) {
1400             $rs->find_or_create( $HASH1 );
1401             } else {
1402             push(@rows, $HASH1);
1403             }
1404             }
1405             $rs->populate(\@rows) if scalar(@rows);
1406              
1407             ## Now we need to do some database-specific cleanup. This probably
1408             ## belongs in a more isolated place; for the moment it only resets the
1409             ## sequences behind PostgreSQL SERIAL columns. Oracle sequences are
1410             ## skipped entirely, since adjusting them would mean dropping and
1411             ## recreating the sequence.
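                 ## e.g. for an illustrative PostgreSQL table "artist" with a SERIAL
                 ## primary key backed by the sequence "artist_id_seq", the statement
                 ## built below comes out roughly as:
                 ##   SELECT setval('artist_id_seq', (SELECT max("artistid") FROM "artist"));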
1412              
1413             my $table = $rs->result_source->name;
1414             for my $column ($rs->result_source->columns) {
1415             my $info = $rs->result_source->column_info($column);
1416             if(my $sequence = $info->{sequence}) {
1417             $self->msg("- updating sequence $sequence");
1418             $rs->result_source->storage->dbh_do(sub {
1419             my ($storage, $dbh, @cols) = @_;
1420             if ( $dbh->{Driver}->{Name} eq "Oracle" ) {
1421             $self->msg("- Cannot change sequence values in Oracle");
1422             } else {
1423             $self->msg(
1424             my $sql = sprintf("SELECT setval(?, (SELECT max(%s) FROM %s));",$dbh->quote_identifier($column),$dbh->quote_identifier($table))
1425             );
1426             my $sth = $dbh->prepare($sql);
1427             $sth->bind_param(1,$sequence);
1428              
1429             my $rv = $sth->execute or die $sth->errstr;
1430             $self->msg("- $sql");
1431             }
1432             });
1433             }
1434             }
1435              
1436             }
1437             });
1438             });
1439             $self->do_post_ddl( {
1440             schema=>$schema,
1441             post_ddl=>$params->{post_ddl}
1442             } ) if $params->{post_ddl};
1443              
1444             $self->msg("- fixtures imported");
1445             $self->msg("- cleaning up");
1446             $tmp_fixture_dir->rmtree;
1447             return 1;
1448             }
1449              
1450             # the overall logic is modified from SQL::Translator::Parser::DBIx::Class->parse
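                 ## The goal is an insert order in which referenced (parent) tables
                 ## come before the tables that point at them.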
1451             sub _get_sorted_sources {
1452             my ( $self, $dbicschema ) = @_;
1453              
1454              
1455             my %table_monikers = map { $_ => 1 } $dbicschema->sources;
1456              
1457             my %tables;
1458             foreach my $moniker (sort keys %table_monikers) {
1459             my $source = $dbicschema->source($moniker);
1460              
1461             my $table_name = $source->name;
1462             my @primary = $source->primary_columns;
1463             my @rels = $source->relationships();
1464              
1465             my %created_FK_rels;
1466             foreach my $rel (sort @rels) {
1467             my $rel_info = $source->relationship_info($rel);
1468              
1469             # Ignore any rel cond that isn't a straight hash
1470             next unless ref $rel_info->{cond} eq 'HASH';
1471              
1472             my @keys = map {$rel_info->{cond}->{$_} =~ /^\w+\.(\w+)$/} keys(%{$rel_info->{cond}});
1473              
1474             # determine if this relationship is a self.fk => foreign.pk (i.e. belongs_to)
1475             my $fk_constraint;
1476             if ( exists $rel_info->{attrs}{is_foreign_key_constraint} ) {
1477             $fk_constraint = $rel_info->{attrs}{is_foreign_key_constraint};
1478             } elsif ( $rel_info->{attrs}{accessor}
1479             && $rel_info->{attrs}{accessor} eq 'multi' ) {
1480             $fk_constraint = 0;
1481             } else {
1482             $fk_constraint = not $source->_compare_relationship_keys(\@keys, \@primary);
1483             }
1484              
1485             # Don't add a relation if it's not constraining
1486             next unless $fk_constraint;
1487              
1488             my $rel_table = $source->related_source($rel)->source_name;
1489             # Make sure we don't create the same relation twice
1490             my $key_test = join("\x00", sort @keys);
1491             next if $created_FK_rels{$rel_table}->{$key_test};
1492              
1493             if (scalar(@keys)) {
1494             $created_FK_rels{$rel_table}->{$key_test} = 1;
1495              
1496             # calculate dependencies; deferrable constraints and self-references
1497             # are not taken into account
1498             if (! $rel_info->{attrs}{is_deferrable} and $rel_table ne $table_name) {
1499             $tables{$moniker}{$rel_table}++;
1500             }
1501             }
1502             }
1503             $tables{$moniker} = {} unless exists $tables{$moniker};
1504             }
1505              
1506             # resolve entire dep tree
1507             my $dependencies = {
1508             map { $_ => _resolve_deps ($_, \%tables) } (keys %tables)
1509             };
1510              
1511             # return the sorted result
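                 ## sources with fewer transitive dependencies sort first; the
                 ## source name is the tie-breaker, so the order is deterministic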
1512             return sort {
1513             keys %{$dependencies->{$a} || {} } <=> keys %{ $dependencies->{$b} || {} }
1514             ||
1515             $a cmp $b
1516             } (keys %tables);
1517             }
1518              
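                 ## Recursively walk the dependency map for one source, returning a
                 ## hash of every table it (transitively) depends on, weighted by how
                 ## many paths reach that table. The %seen copy passed down the
                 ## recursion is what stops circular relationships from looping
                 ## forever.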
1519             sub _resolve_deps {
1520             my ( $question, $answers, $seen ) = @_;
1521             my $ret = {};
1522             $seen ||= {};
1523              
1524             my %seen = map { $_ => $seen->{$_} + 1 } ( keys %$seen );
1525             $seen{$question} = 1;
1526              
1527             for my $dep (keys %{ $answers->{$question} }) {
1528             return {} if $seen->{$dep};
1529             my $subdeps = _resolve_deps( $dep, $answers, \%seen );
1530             $ret->{$_} += $subdeps->{$_} for ( keys %$subdeps );
1531             ++$ret->{$dep};
1532             }
1533             return $ret;
1534             }
1535              
1536             sub do_post_ddl {
1537             my ($self, $params) = @_;
1538              
1539             my $schema = $params->{schema};
1540             my $data = _read_sql($params->{post_ddl});
1541             foreach (@$data) {
1542             eval { $schema->storage->dbh->do($_) or warn "SQL was:\n $_"};
1543             if ($@ && !$self->{ignore_sql_errors}) { die "SQL was:\n $_\n$@"; }
1544             }
1545             $self->msg("- finished importing post-populate DDL into DB");
1546             }
1547              
1548             sub msg {
1549             my $self = shift;
1550             my $subject = shift || return;
1551             my $level = shift || 1;
1552             return unless $self->debug >= $level;
1553             if (ref $subject) {
1554             print Dumper($subject);
1555             } else {
1556             print $subject . "\n";
1557             }
1558             }
1559              
1560             # Helper method for ensuring that the name used for a given source
1561             # is always the same (used, for example, to name the fixture
1562             # directories)
1563              
1564             sub _name_for_source {
1565             my ($self, $source) = @_;
1566              
1567             return ref $source->name ? $source->source_name : $source->name;
1568             }
1569              
1570             =head1 AUTHOR
1571              
1572             Luke Saunders
1573              
1574             Initial development sponsored by and (c) Takkle, Inc. 2007
1575              
1576             =head1 CONTRIBUTORS
1577              
1578             Ash Berlin
1579              
1580             Matt S. Trout
1581              
1582             John Napiorkowski
1583              
1584             Drew Taylor
1585              
1586             Frank Switalski
1587              
1588             Chris Akins
1589              
1590             Tom Bloor
1591              
1592             Samuel Kaufman
1593              
1594             =head1 LICENSE
1595              
1596             This library is free software under the same license as Perl itself.
1597              
1598             =cut
1599              
1600             1;