| line |
stmt |
bran |
cond |
sub |
pod |
time |
code |
|
1
|
|
|
|
|
|
|
package DBIx::Result::Convert::JSONSchema; |
|
2
|
|
|
|
|
|
|
|
|
3
|
|
|
|
|
|
|
our $VERSION = '0.04'; |
|
4
|
|
|
|
|
|
|
|
|
5
|
|
|
|
|
|
|
|
|
6
|
|
|
|
|
|
|
=head1 NAME |
|
7
|
|
|
|
|
|
|
DBIx::Result::Convert::JSONSchema - Convert DBIx result schema to JSON schema |
|
8
|
|
|
|
|
|
|
|
|
9
|
|
|
|
|
|
|
=begin html |
|
10
|
|
|
|
|
|
|
|
|
11
|
|
|
|
|
|
|
|
|
12
|
|
|
|
|
|
|
|
|
13
|
|
|
|
|
|
|
|
|
14
|
|
|
|
|
|
|
=end html |
|
15
|
|
|
|
|
|
|
|
|
16
|
|
|
|
|
|
|
=head1 VERSION |
|
17
|
|
|
|
|
|
|
|
|
18
|
|
|
|
|
|
|
0.04 |
|
19
|
|
|
|
|
|
|
|
|
20
|
|
|
|
|
|
|
=head1 SYNOPSIS |
|
21
|
|
|
|
|
|
|
|
|
22
|
|
|
|
|
|
|
use DBIx::Result::Convert::JSONSchema; |
|
23
|
|
|
|
|
|
|
|
|
24
|
|
|
|
|
|
|
my $SchemaConvert = DBIx::Result::Convert::JSONSchema->new( schema => Schema ); |
|
25
|
|
|
|
|
|
|
my $json_schema = $SchemaConvert->get_json_schema( DBIx::Class::ResultSource ); |
|
26
|
|
|
|
|
|
|
|
|
27
|
|
|
|
|
|
|
=head1 DESCRIPTION |
|
28
|
|
|
|
|
|
|
|
|
29
|
|
|
|
|
|
|
This module attempts basic conversion of L<DBIx::Class::ResultSource> to equivalent |
|
30
|
|
|
|
|
|
|
of L<JSON Schema|https://json-schema.org/>. |
|
31
|
|
|
|
|
|
|
By default the conversion assumes that the L<DBIx::Class::ResultSource> originated |
|
32
|
|
|
|
|
|
|
from MySQL database. Thus all the types and defaults are set based on MySQL |
|
33
|
|
|
|
|
|
|
field definitions. |
|
34
|
|
|
|
|
|
|
It is, however, possible to overwrite field type map and length map to support |
|
35
|
|
|
|
|
|
|
L<DBIx::Class::ResultSource>s from other database solutions. |
|
36
|
|
|
|
|
|
|
|
|
37
|
|
|
|
|
|
|
Note, relations between tables are not taken in account! |
|
38
|
|
|
|
|
|
|
|
|
39
|
|
|
|
|
|
|
=cut |
|
40
|
|
|
|
|
|
|
|
|
41
|
|
|
|
|
|
|
|
|
42
|
4
|
|
|
4
|
|
5968
|
use Moo; |
|
|
4
|
|
|
|
|
46109
|
|
|
|
4
|
|
|
|
|
22
|
|
|
43
|
4
|
|
|
4
|
|
8280
|
use Types::Standard qw/ InstanceOf Enum HashRef /; |
|
|
4
|
|
|
|
|
310177
|
|
|
|
4
|
|
|
|
|
49
|
|
|
44
|
|
|
|
|
|
|
|
|
45
|
4
|
|
|
4
|
|
4422
|
use Carp; |
|
|
4
|
|
|
|
|
11
|
|
|
|
4
|
|
|
|
|
253
|
|
|
46
|
4
|
|
|
4
|
|
1878
|
use Module::Load qw/ load /; |
|
|
4
|
|
|
|
|
4626
|
|
|
|
4
|
|
|
|
|
32
|
|
|
47
|
|
|
|
|
|
|
|
|
48
|
|
|
|
|
|
|
|
|
49
|
|
|
|
|
|
|
# The L<DBIx::Class::Schema> instance that result sources are resolved against.
has schema => (
    is       => 'ro',
    isa      => InstanceOf['DBIx::Class::Schema'],
    required => 1,
);
|
54
|
|
|
|
|
|
|
|
|
55
|
|
|
|
|
|
|
# Database flavour the schema originated from - drives which Type/Default
# classes get loaded. Only MySQL is supported out of the box.
has schema_source => (
    is      => 'lazy',
    isa     => Enum[ qw/ MySQL / ],
    default => 'MySQL',
);
|
60
|
|
|
|
|
|
|
|
|
61
|
|
|
|
|
|
|
# Maps a JSON schema type to the pair of [ min constraint key, max constraint key ]
# used when emitting size constraints for that type.
has length_type_map => (
    is      => 'rw',
    isa     => HashRef,
    default => sub {
        return {
            string  => [ qw/ minLength maxLength / ],
            number  => [ qw/ minimum maximum / ],
            integer => [ qw/ minimum maximum / ],
        };
    },
);
|
72
|
|
|
|
|
|
|
|
|
73
|
|
|
|
|
|
|
# Database data type -> JSON schema type mapping, loaded from the
# per-source type class (e.g. ...::Type::MySQL).
has type_map => (
    is      => 'rw',
    isa     => HashRef,
    default => sub {
        my ( $self ) = @_;

        my $type_class = __PACKAGE__ . '::Type::' . $self->schema_source;
        load $type_class;

        return $type_class->get_type_map;
    },
);
|
85
|
|
|
|
|
|
|
|
|
86
|
|
|
|
|
|
|
# Database data type -> min/max length definitions, loaded from the
# per-source defaults class (e.g. ...::Default::MySQL).
has length_map => (
    is      => 'rw',
    isa     => HashRef,
    default => sub {
        my ( $self ) = @_;

        my $defaults_class = __PACKAGE__ . '::Default::' . $self->schema_source;
        load $defaults_class;

        return $defaults_class->get_length_map;
    },
);
|
98
|
|
|
|
|
|
|
|
|
99
|
|
|
|
|
|
|
# Database data type -> JSON schema RegExp pattern for date/time style fields.
has pattern_map => (
    is      => 'rw',
    isa     => HashRef,
    lazy    => 1,
    default => sub {
        return {
            time      => '^\d{2}:\d{2}:\d{2}$',
            year      => '^\d{4}$',
            datetime  => '^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}$',
            timestamp => '^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}$',
        };
    },
);
|
112
|
|
|
|
|
|
|
|
|
113
|
|
|
|
|
|
|
# Database data type -> JSON schema "format" keyword mapping.
has format_map => (
    is      => 'rw',
    isa     => HashRef,
    lazy    => 1,
    default => sub {
        return {
            date => 'date',
        };
    },
);
|
123
|
|
|
|
|
|
|
|
|
124
|
|
|
|
|
|
|
|
|
125
|
|
|
|
|
|
|
=head2 get_json_schema |
|
126
|
|
|
|
|
|
|
|
|
127
|
|
|
|
|
|
|
Returns somewhat equivalent JSON schema based on DBIx result source name. |
|
128
|
|
|
|
|
|
|
|
|
129
|
|
|
|
|
|
|
my $json_schema = $converted->get_json_schema( 'TableSource', { |
|
130
|
|
|
|
|
|
|
schema_declaration => 'http://json-schema.org/draft-04/schema#', |
|
131
|
|
|
|
|
|
|
decimals_to_pattern => 1, |
|
132
|
|
|
|
|
|
|
has_schema_property_description => 1, |
|
133
|
|
|
|
|
|
|
allow_additional_properties => 0, |
|
134
|
|
|
|
|
|
|
overwrite_schema_property_keys => { |
|
135
|
|
|
|
|
|
|
name => 'cat', |
|
136
|
|
|
|
|
|
|
address => 'dog', |
|
137
|
|
|
|
|
|
|
}, |
|
138
|
|
|
|
|
|
|
add_schema_properties => { |
|
139
|
|
|
|
|
|
|
address => { ... }, |
|
140
|
|
|
|
|
|
|
bank_account => '#/definitions/bank_account', |
|
141
|
|
|
|
|
|
|
}, |
|
142
|
|
|
|
|
|
|
overwrite_schema_properties => { |
|
143
|
|
|
|
|
|
|
name => { |
|
144
|
|
|
|
|
|
|
_action => 'merge', # one of - merge/overwrite |
|
145
|
|
|
|
|
|
|
minimum => 10, |
|
146
|
|
|
|
|
|
|
maximum => 20, |
|
147
|
|
|
|
|
|
|
type => 'number', |
|
148
|
|
|
|
|
|
|
}, |
|
149
|
|
|
|
|
|
|
}, |
|
150
|
|
|
|
|
|
|
include_required => [ qw/ street city / ], |
|
151
|
|
|
|
|
|
|
exclude_required => [ qw/ name address / ], |
|
152
|
|
|
|
|
|
|
exclude_properties => [ qw/ mouse house / ], |
|
153
|
|
|
|
|
|
|
|
|
154
|
|
|
|
|
|
|
dependencies => { |
|
155
|
|
|
|
|
|
|
first_name => [ qw/ middle_name last_name / ], |
|
156
|
|
|
|
|
|
|
}, |
|
157
|
|
|
|
|
|
|
}); |
|
158
|
|
|
|
|
|
|
|
|
159
|
|
|
|
|
|
|
Optional arguments to change how JSON schema is generated: |
|
160
|
|
|
|
|
|
|
|
|
161
|
|
|
|
|
|
|
=over 8 |
|
162
|
|
|
|
|
|
|
|
|
163
|
|
|
|
|
|
|
=item * schema_declaration |
|
164
|
|
|
|
|
|
|
|
|
165
|
|
|
|
|
|
|
Declare which version of the JSON Schema standard that the schema was written against. |
|
166
|
|
|
|
|
|
|
|
|
167
|
|
|
|
|
|
|
L<http://json-schema.org/latest/json-schema-core.html#rfc.section.7> |
|
168
|
|
|
|
|
|
|
|
|
169
|
|
|
|
|
|
|
B<Default>: "http://json-schema.org/schema#" |
|
170
|
|
|
|
|
|
|
|
|
171
|
|
|
|
|
|
|
=item * decimals_to_pattern |
|
172
|
|
|
|
|
|
|
|
|
173
|
|
|
|
|
|
|
1/0 - value to indicate if 'number' type field should be converted to 'string' type with |
|
174
|
|
|
|
|
|
|
RegExp pattern based on decimal place definition in database. |
|
175
|
|
|
|
|
|
|
|
|
176
|
|
|
|
|
|
|
B<Default>: 0 |
|
177
|
|
|
|
|
|
|
|
|
178
|
|
|
|
|
|
|
=item * has_schema_property_description |
|
179
|
|
|
|
|
|
|
|
|
180
|
|
|
|
|
|
|
Generate schema description for fields e.g. 'Optional numeric type value for field context e.g. 1'. |
|
181
|
|
|
|
|
|
|
|
|
182
|
|
|
|
|
|
|
B<Default>: 0 |
|
183
|
|
|
|
|
|
|
|
|
184
|
|
|
|
|
|
|
=item * allow_additional_properties |
|
185
|
|
|
|
|
|
|
|
|
186
|
|
|
|
|
|
|
Define if the schema accepts additional keys in given payload. |
|
187
|
|
|
|
|
|
|
|
|
188
|
|
|
|
|
|
|
B<Default>: 0 |
|
189
|
|
|
|
|
|
|
|
|
190
|
|
|
|
|
|
|
=item * overwrite_schema_property_keys |
|
191
|
|
|
|
|
|
|
|
|
192
|
|
|
|
|
|
|
HashRef representing mapping between old property name and new property name to overwrite existing schema keys, |
|
193
|
|
|
|
|
|
|
Properties from old key will be assigned to the new property. |
|
194
|
|
|
|
|
|
|
|
|
195
|
|
|
|
|
|
|
B<Note:> The key conversion is executed last, every other option e.g. C<overwrite_schema_properties> will work only on original |
|
196
|
|
|
|
|
|
|
database column names. |
|
197
|
|
|
|
|
|
|
|
|
198
|
|
|
|
|
|
|
=item * overwrite_schema_properties |
|
199
|
|
|
|
|
|
|
|
|
200
|
|
|
|
|
|
|
HashRef of property name and new attributes which can be either overwritten or merged based on given B<_action> key. |
|
201
|
|
|
|
|
|
|
|
|
202
|
|
|
|
|
|
|
=item * exclude_required |
|
203
|
|
|
|
|
|
|
|
|
204
|
|
|
|
|
|
|
ArrayRef of database column names which should always be EXCLUDED from REQUIRED schema properties. |
|
205
|
|
|
|
|
|
|
|
|
206
|
|
|
|
|
|
|
=item * include_required |
|
207
|
|
|
|
|
|
|
|
|
208
|
|
|
|
|
|
|
ArrayRef of database column names which should always be INCLUDED in REQUIRED schema properties |
|
209
|
|
|
|
|
|
|
|
|
210
|
|
|
|
|
|
|
=item * exclude_properties |
|
211
|
|
|
|
|
|
|
|
|
212
|
|
|
|
|
|
|
ArrayRef of database column names which should be excluded from JSON schema AT ALL |
|
213
|
|
|
|
|
|
|
|
|
214
|
|
|
|
|
|
|
=item * dependencies |
|
215
|
|
|
|
|
|
|
|
|
216
|
|
|
|
|
|
|
L<Property dependencies|https://json-schema.org/understanding-json-schema/reference/object.html#dependencies> |
|
217
|
|
|
|
|
|
|
|
|
218
|
|
|
|
|
|
|
=item * add_schema_properties |
|
219
|
|
|
|
|
|
|
|
|
220
|
|
|
|
|
|
|
HashRef of custom schema properties that must be included in final definition |
|
221
|
|
|
|
|
|
|
Note that custom properties will overwrite defaults |
|
222
|
|
|
|
|
|
|
|
|
223
|
|
|
|
|
|
|
=item * schema_overwrite |
|
224
|
|
|
|
|
|
|
|
|
225
|
|
|
|
|
|
|
HashRef of top level schema properties e.g. 'required', 'properties' etc. to overwrite |
|
226
|
|
|
|
|
|
|
|
|
227
|
|
|
|
|
|
|
=back |
|
228
|
|
|
|
|
|
|
|
|
229
|
|
|
|
|
|
|
=cut |
|
230
|
|
|
|
|
|
|
|
|
231
|
|
|
|
|
|
|
# Build and return a JSON schema (plain HashRef) for the given DBIx result
# source name. $args is an optional HashRef of generation options - see the
# POD above for the full list. Croaks when $source is missing or when a
# column's data type has no entry in type_map.
#
# FIX: previously the sub called `delete` directly on the caller-supplied
# overwrite_schema_properties hashref (and on its nested property hashrefs,
# removing `_action`), destructively mutating the caller's $args. The hashes
# are now copied before being consumed.
sub get_json_schema {
    my ( $self, $source, $args ) = @_;

    croak 'missing schema source' unless $source;

    $args //= {};

    # additional schema generation options
    my $decimals_to_pattern             = $args->{decimals_to_pattern};
    my $has_schema_property_description = $args->{has_schema_property_description};
    my $overwrite_schema_property_keys  = $args->{overwrite_schema_property_keys} // {};
    my $add_schema_properties           = $args->{add_schema_properties};

    # Shallow copy - entries are consumed below via delete, which must not
    # modify the caller's hash
    my $overwrite_schema_properties = { %{ $args->{overwrite_schema_properties} // {} } };

    my %exclude_required   = map { $_ => 1 } @{ $args->{exclude_required}   || [] };
    my %include_required   = map { $_ => 1 } @{ $args->{include_required}   || [] };
    my %exclude_properties = map { $_ => 1 } @{ $args->{exclude_properties} || [] };

    my $dependencies                = $args->{dependencies};
    my $schema_declaration          = $args->{schema_declaration} // 'http://json-schema.org/schema#';
    my $allow_additional_properties = $args->{allow_additional_properties} // 0;
    my $schema_overwrite            = $args->{schema_overwrite} // {};

    my %json_schema = (
        '$schema'            => $schema_declaration,
        type                 => 'object',
        required             => [],
        properties           => {},
        additionalProperties => $allow_additional_properties,

        ( $dependencies ? ( dependencies => $dependencies ) : () ),
    );

    my $source_info = $self->_get_column_info( $source );

    SCHEMA_COLUMN:
    foreach my $column ( keys %{ $source_info } ) {
        next SCHEMA_COLUMN if $exclude_properties{ $column };

        my $column_info = $source_info->{ $column };

        # DBIx schema data type -> JSON schema data type
        my $json_type = $self->type_map->{ $column_info->{data_type} }
            or croak sprintf(
                'unknown data type - %s (source: %s, column: %s)',
                $column_info->{data_type}, $source, $column
            );

        $json_schema{properties}->{ $column }->{type} = $json_type;

        # DBIx schema type -> JSON format
        my $format_type = $self->format_map->{ $column_info->{data_type} };
        if ( $format_type ) {
            $json_schema{properties}->{ $column }->{format} = $format_type;
        }

        # DBIx schema size constraint -> JSON schema size constraint
        if ( ! $format_type && $self->length_map->{ $column_info->{data_type} } ) {
            $self->_set_json_schema_property_range( \%json_schema, $column_info, $column );
        }

        # DBIx schema required -> JSON schema required
        my $is_required_field = $include_required{ $column };
        if ( $is_required_field || ( ! $column_info->{default_value} && ! $column_info->{is_nullable} && ! $exclude_required{ $column } ) ) {
            my $required_property = $overwrite_schema_property_keys->{ $column } // $column;
            push @{ $json_schema{required} }, $required_property;
        }

        # DBIx schema defaults -> JSON schema defaults (no refs e.g. current_timestamp)
        if ( $column_info->{default_value} && ! ref $column_info->{default_value} ) {
            $json_schema{properties}->{ $column }->{default} = $column_info->{default_value};
        }

        # DBIx schema list -> JSON enum list
        if ( $json_type eq 'enum' && $column_info->{extra} && $column_info->{extra}->{list} ) { # no autovivification
            $json_schema{properties}->{ $column }->{enum} = $column_info->{extra}->{list};
        }

        # Consider 'is nullable' to accept 'null' values in all cases where field is not explicitly required
        if ( ! $is_required_field && $column_info->{is_nullable} ) {
            if ( $json_type eq 'enum' ) {
                $json_schema{properties}->{ $column }->{enum} //= [];
                push @{ $json_schema{properties}->{ $column }->{enum} }, 'null';
            }
            else {
                $json_schema{properties}->{ $column }->{type} = [ $json_type, 'null' ];
            }
        }

        # DBIx decimal numbers -> JSON schema numeric string pattern
        if ( $json_type eq 'number' && $decimals_to_pattern ) {
            if ( $column_info->{size} && ref $column_info->{size} eq 'ARRAY' ) {
                $json_schema{properties}->{ $column }->{type}    = 'string';
                $json_schema{properties}->{ $column }->{pattern} = $self->_get_decimal_pattern( $column_info->{size} );
            }
        }

        # JSON schema field patterns
        if ( $self->pattern_map->{ $column_info->{data_type} } ) {
            $json_schema{properties}->{ $column }->{pattern} = $self->pattern_map->{ $column_info->{data_type} };
        }

        # JSON schema property description
        if ( ! $json_schema{properties}->{ $column }->{description} && $has_schema_property_description ) {
            my $property_description = $self->_get_json_schema_property_description(
                $overwrite_schema_property_keys->{ $column } // $column,
                $json_schema{properties}->{ $column }
            );
            $json_schema{properties}->{ $column }->{description} = $property_description;
        }

        # JSON schema custom additional properties (custom values overwrite defaults)
        if ( $add_schema_properties ) {
            foreach my $property_key ( keys %{ $add_schema_properties } ) {
                $json_schema{properties}->{ $property_key } = $add_schema_properties->{ $property_key };
            }
        }

        # Overwrites: merge JSON schema property key values with custom ones
        if ( my $overwrite_property = delete $overwrite_schema_properties->{ $column } ) {
            my %overwrite_copy = %{ $overwrite_property };          # keep caller's nested hash intact
            my $action         = delete $overwrite_copy{_action} // 'merge';

            $json_schema{properties}->{ $column } = {
                %{ $action eq 'merge' ? $json_schema{properties}->{ $column } : {} },
                %overwrite_copy,
            };
        }

        # Overwrite: replace JSON schema keys
        if ( my $new_key = $overwrite_schema_property_keys->{ $column } ) {
            $json_schema{properties}->{ $new_key } = delete $json_schema{properties}->{ $column };
        }
    }

    return {
        %json_schema,
        %{ $schema_overwrite },
    };
}
|
369
|
|
|
|
|
|
|
|
|
370
|
|
|
|
|
|
|
# Return DBIx result source column info for the given result class name |
|
371
|
|
|
|
|
|
|
# Return DBIx result source column info (HashRef of column => attributes)
# for the given result class name.
sub _get_column_info {
    my ( $self, $source ) = @_;

    my $result_source = $self->schema->source( $source );

    return $result_source->columns_info;
}
|
376
|
|
|
|
|
|
|
|
|
377
|
|
|
|
|
|
|
# Returns RegExp pattern for decimal numbers based on database field definition |
|
378
|
|
|
|
|
|
|
# Build a RegExp pattern for decimal numbers from a [ precision, scale ]
# database size definition, e.g. [ 5, 2 ] -> '^\d{1,3}\.\d{0,2}$'.
sub _get_decimal_pattern {
    my ( $self, $size ) = @_;

    my ( $precision, $scale ) = @{ $size };
    my $integer_digits = $precision - $scale;

    return sprintf '^\d{1,%s}\.\d{0,%s}$', $integer_digits, $scale;
}
|
384
|
|
|
|
|
|
|
|
|
385
|
|
|
|
|
|
|
# Generates somewhat logical field description based on type and length constraints |
|
386
|
|
|
|
|
|
|
# Generate a human readable description for a schema property based on its
# type, enum values, maximum and pattern. Returns '' when no sensible
# description can be produced (no type, or 'object' type).
#
# FIX: the original alternation `/^integer|number$/` parsed as
# `(^integer)|(number$)` due to precedence, so e.g. a type of 'bignumber'
# would wrongly match; the alternation is now grouped.
sub _get_json_schema_property_description {
    my ( $self, $column, $property ) = @_;

    if ( ! $property->{type} ) {
        if ( $property->{enum} ) {
            return sprintf 'Enum list type, one of - %s', join( ', ', @{ $property->{enum} } );
        }

        return '';
    }

    return '' if $property->{type} eq 'object'; # no idea how to handle

    # Collect the property's type(s) in a lookup hash - type may be a
    # single string or an ArrayRef (e.g. [ 'string', 'null' ])
    my %types;
    if ( ref $property->{type} eq 'ARRAY' ) {
        %types = map { $_ => 1 } @{ $property->{type} };
    }
    else {
        $types{ $property->{type} } = 1;
    }

    my $description = '';
    $description .= 'Optional' if $types{null};

    my $type_part;
    if ( grep { /^(?:integer|number)$/ } keys %types ) {
        $type_part = 'numeric';
    }
    else {
        ( $type_part ) = grep { $_ ne 'null' } keys %types; # lucky roll, last type that isn't 'null' should be legit
    }

    $description .= $description ? " $type_part" : ucfirst $type_part;
    $description .= sprintf ' type value for field %s', $column;

    # Append an example for bounded integers, or the pattern for strings
    if ( ( grep { $_ eq 'integer' } keys %types ) && $property->{maximum} ) {
        my $integer_example = $property->{default} // int rand $property->{maximum};
        $description .= ' e.g. ' . $integer_example;
    }
    elsif ( ( grep { $_ eq 'string' } keys %types ) && $property->{pattern} ) {
        $description .= sprintf ' with pattern %s ', $property->{pattern};
    }

    return $description;
}
|
431
|
|
|
|
|
|
|
|
|
432
|
|
|
|
|
|
|
# Convert from DBIx field length to JSON schema field length based on field type |
|
433
|
|
|
|
|
|
|
# Convert DBIx field length constraints to JSON schema constraints for one
# column, writing min/max keys (per length_type_map) into $json_schema in place.
sub _set_json_schema_property_range {
    my ( $self, $json_schema, $column_info, $column ) = @_;

    my $data_type = $column_info->{data_type};
    my ( $min_key, $max_key ) = @{ $self->length_type_map->{ $self->type_map->{ $data_type } } };

    my $min_value = $self->_get_json_schema_property_min_max_value( $column_info, 0 );
    my $max_value = $self->_get_json_schema_property_min_max_value( $column_info, 1 );

    # bump min value to 0 (don't see how this starts from negative)
    $min_value = 0 if $column_info->{is_auto_increment};

    my $property = $json_schema->{properties}->{ $column } //= {};
    $property->{ $min_key } = $min_value;
    $property->{ $max_key } = $max_value;

    # An explicit column size wins over the default maximum
    $property->{ $max_key } = $column_info->{size} if $column_info->{size};

    return;
}
|
454
|
|
|
|
|
|
|
|
|
455
|
|
|
|
|
|
|
# Returns min/max value from DBIx result field definition or lookup from defaults |
|
456
|
|
|
|
|
|
|
# Look up the min ($range == 0) or max ($range == 1) value for a column from
# the length_map defaults, honouring unsigned/signed integer variants.
sub _get_json_schema_property_min_max_value {
    my ( $self, $column_info, $range ) = @_;

    my $length_spec = $self->length_map->{ $column_info->{data_type} };

    if ( $column_info->{extra} && $column_info->{extra}->{unsigned} ) { # no autovivification
        return $length_spec->{unsigned}->[ $range ];
    }

    # Plain ArrayRef entries carry [ min, max ] directly; HashRef entries
    # split into signed/unsigned pairs
    return ref $length_spec eq 'ARRAY'
        ? $length_spec->[ $range ]
        : $length_spec->{signed}->[ $range ];
}
|
466
|
|
|
|
|
|
|
|
|
467
|
|
|
|
|
|
|
=head1 SEE ALSO |
|
468
|
|
|
|
|
|
|
|
|
469
|
|
|
|
|
|
|
L<DBIx::Class::ResultSource> - Result source object |
|
470
|
|
|
|
|
|
|
|
|
471
|
|
|
|
|
|
|
=head1 AUTHOR |
|
472
|
|
|
|
|
|
|
|
|
473
|
|
|
|
|
|
|
malishew - C<malishew@cpan.org> |
|
474
|
|
|
|
|
|
|
|
|
475
|
|
|
|
|
|
|
=head1 LICENSE |
|
476
|
|
|
|
|
|
|
|
|
477
|
|
|
|
|
|
|
This library is free software; you can redistribute it and/or modify it under |
|
478
|
|
|
|
|
|
|
the same terms as Perl itself. If you would like to contribute documentation |
|
479
|
|
|
|
|
|
|
or file a bug report then please raise an issue / pull request: |
|
480
|
|
|
|
|
|
|
|
|
481
|
|
|
|
|
|
|
https://github.com/Humanstate/p5-dbix-result-convert-jsonschema |
|
482
|
|
|
|
|
|
|
|
|
483
|
|
|
|
|
|
|
=cut |
|
484
|
|
|
|
|
|
|
|
|
485
|
|
|
|
|
|
|
__PACKAGE__->meta->make_immutable; |