File Coverage

blib/lib/Data/Pipeline.pm
Criterion Covered Total %
statement 7 9 77.7
branch n/a
condition n/a
subroutine 3 3 100.0
pod n/a
total 10 12 83.3


line stmt bran cond sub pod time code
1             package Data::Pipeline;
2              
3             our $VERSION = '0.02';
4              
5 4     4   198335 use Sub::Exporter;
  4         67329  
  4         28  
6 4     4   4466 use Sub::Name 'subname';
  4         2957  
  4         266  
7 4     4   11504 use Class::MOP ();
  0            
  0            
8             use Carp ();
9              
10             use Data::Pipeline::Iterator;
11              
12             use MooseX::Types::Moose qw(HashRef);
13              
14             sub import {
15             my($class, @methods) = @_;
16              
17             my $CALLER = caller();
18              
19             my %exports;
20              
21             for my $method (@methods) {
22              
23             my $impl_class = _find_class($method);
24              
25             if($impl_class =~ m{^Data::Pipeline::(Action|Adapter)X?::} || $impl_class -> isa('Data::Pipeline::Aggregator::Machine')) {
26             $exports{$method} = sub {
27             my $class = $CALLER;
28             return subname "Data::Pipeline::$method" => sub {
29             return $impl_class -> new( @_ );
30             };
31             };
32             }
33             elsif($impl_class =~ m{^Data::Pipeline::Aggregator}) {
34             $exports{$method} = sub {
35             my $class = $CALLER;
36             return subname "Data::Pipeline::$method" => sub {
37             my @actions = map {
38             ref($_) ? $_ :
39             ($c = _find_class($_)) ? $c -> new() :
40             Carp::croak "Unable to incorporate $_ into pipeline"
41             } @_;
42             return $impl_class -> new( actions => \@actions );
43             };
44             };
45             }
46              
47             }
48              
49             my $exporter = Sub::Exporter::build_exporter({
50             exports => \%exports,
51             groups => { default => [':all'] }
52             });
53              
54             goto &$exporter;
55             }
56              
57             sub _find_class($) {
58             my($type) = @_;
59              
60             #return $type if eval { Class::MOP::load_class($type) };
61              
62             #my $class="Data::Pipeline::$type";
63              
64             #return $class if eval { Class::MOP::load_class($class) };
65              
66             for my $p (qw(Aggregator Adapter Action AggregatorX AdapterX ActionX)) {
67              
68             $class="Data::Pipeline::${p}::${type}";
69            
70             return $class if eval { Class::MOP::load_class($class) };
71             }
72              
73             Carp::croak "Unable to find an implementation for $type";
74             }
75              
76             1;
77              
78             __END__
79              
80             =pod
81              
82             =for readme stop
83              
84             =head1 NAME
85              
86             Data::Pipeline - manage aggregated data filters
87              
88             =head1 SYNOPSIS
89              
90             use Data::Pipeline qw( Pipeline Truncate Count Array );
91              
92             my $p = Pipeline(
93             CSV,
94             Truncate( length => 5 ),
95             );
96              
97             my $iterator = $p -> from( file => $filename );
98              
99             until( $iterator -> finished ) {
100             my $v = $iterator -> next;
101             # get the first five items in a CSV file
102             }
103              
104             If combining the output of multiple pipelines:
105              
106             use Data::Pipeline qw( Pipeline Union );
107              
108             my $u = Union(
109             Pipeline( ... ),
110             Pipeline( ... ),
111             ...
112             );
113              
114             my $iterator = $u -> transform( $source1, $source2, ... );
115              
116             =for readme continue
117              
118             =begin readme
119              
120             Data::Pipeline 0.02
121              
122             toolkit for building data processing pipelines
123              
124             =head1 INSTALLATION
125              
126             Installation follows standard Perl CPAN module installation steps:
127              
128             cpan> install Data::Pipeline
129              
130             or, if not using the CPAN tool, then from within the unpacked distribution:
131              
132             % perl Makefile.PL
133             % make
134             % make test
135             % make install
136              
137              
138             =for readme stop
139              
140              
141             =head1 DESCRIPTION
142              
143             A Data::Pipeline pipeline is a linear sequence of actions taken on a stream of
144             data elements. Data is pulled from an iterator as needed, with each action
145             and pipeline presenting itself to the next stage as an iterator.
146              
147             Using an iterator interface allows actions and pipelines to be combined in
148             a wide range of configurations while preserving the lazy nature of
149             iterator evaluation when running the pipeline.
150              
151             =head1 CONSTRUCTORS
152              
153             Convenience methods are exported for all of the various classes based on the
154             name of the class. These are the name of the class without any of the
155             preceding package namespace.
156              
157             For example, Pipeline refers to Data::Pipeline::Pipeline while
158             JSON refers to Data::Pipeline::Adapter::JSON.
159              
160             =head2 Aggregators
161              
162             Aggregators allow multiple actions to be strung together.
163              
164             =head2 Adapters
165              
166             Adapters provide the interface between pipelines and data. Most adapters
167             can be used for both input and output, but a few, such as the SPARQL adapter,
168             are specialized for only input or output due to the nature of the data
169             source they are working with.
170              
171             Documentation is available for each adapter under the
172             Data::Pipeline::Adapter:: namespace.
173              
174             =head2 Actions
175              
176             Actions transform data in a stream.
177              
178             Documentation is available for each action under the
179             Data::Pipeline::Action:: namespace.
180              
181             =head1 SEE ALSO
182              
183             L<Data::Pipeline::Cookbook> for examples.
184              
185             L<Data::Pipeline::Aggregator::Machine>,
186             L<Data::Pipeline::Aggregator>,
187             L<Data::Pipeline::Action>,
188             L<Data::Pipeline::Adapter>.
189              
190             =for readme continue
191              
192             =head1 BUGS
193              
194             There are probably quite a few. Certain machine features don't work
195             as expected. There has been no profiling or optimization, so the pipelines
196             will run much slower than they should. The interface design should be good
197             though.
198              
199             Bugs may be reported on rt.cpan.org or by e-mailing bug-Data-Pipeline at
200             rt.cpan.org.
201              
202             =head1 AUTHOR
203              
204             James Smith, C<< <jsmith@cpan.org> >>
205            
206             =head1 LICENSE
207              
208             Copyright (c) 2008 Texas A&M University.
209              
210             This library is free software, you can redistribute it and/or modify
211             it under the same terms as Perl itself.
212              
213             =cut