package Pheno::Ranker;

use strict;
use warnings;
use autodie;
use feature qw(say);
use Data::Dumper;
use File::Basename qw(dirname);
use Cwd qw(abs_path);
use File::Spec::Functions qw(catdir catfile);
use Moo;
use Types::Standard qw(Str Int Num Enum ArrayRef HashRef Undef Bool);
use File::ShareDir::ProjectDistDir qw(dist_dir);
use List::Util qw(all);
use Pheno::Ranker::IO;
use Pheno::Ranker::Align;
use Pheno::Ranker::Stats;

use Exporter 'import';
our @EXPORT_OK = qw($VERSION write_json);

# Global variables:
$Data::Dumper::Sortkeys = 1;
our $VERSION   = '0.00_2';
our $share_dir = dist_dir('Pheno-Ranker');
use constant DEVEL_MODE => 0;

# Misc variables
my (
    $config_sort_by, $config_max_out, $config_max_number_var,
    $config_seed,    @config_allowed_terms
);
my $default_config_file = catfile( $share_dir, 'conf', 'config.yaml' );
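
# NOTE: Illustrative sketch of the keys read from the config file by the
# <config_file> trigger below. The key names come from this module; the
# example values are assumptions, not necessarily the shipped defaults:
#
#   sort_by: hamming                  # or jaccard
#   max_out: 50
#   max_number_var: 10000
#   seed: 123456789
#   primary_key: id
#   exclude_properties_regex: ''
#   allowed_terms: [diseases, phenotypicFeatures]    # hypothetical list
#   array_terms: [phenotypicFeatures]                # hypothetical list
#   array_regex: '^(\w+):(\d+)'
#   format: PXF                         # must match a key of id_correspondence
#   id_correspondence: { PXF: ... }     # hypothetical mapping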

############################################
# Start declaring attributes for the class #
############################################

# Complex defaults here
has 'config_file' => (
    default => $default_config_file,
    coerce  => sub {
        $_[0] // $default_config_file;
    },
    is      => 'ro',
    isa     => sub { die "$_[0] is not a valid file" unless -e $_[0] },
    trigger => sub {
        my ( $self, $config_file ) = @_;
        my $config = read_yaml($config_file);

        #####################
        # Set config params #
        #####################

        $config_sort_by        = $config->{sort_by}        // 'hamming';
        $config_max_out        = $config->{max_out}        // 50;
        $config_max_number_var = $config->{max_number_var} // 10_000;

        # Validate $config->{allowed_terms}
        unless ( exists $config->{allowed_terms}
            && ArrayRef->check( $config->{allowed_terms} )
            && @{ $config->{allowed_terms} } )
        {
            die
              "No <allowed terms> provided or not an array ref at\n$config_file\n";
        }
        @config_allowed_terms = @{ $config->{allowed_terms} };

        ###############################
        # Set config exclusive params #
        ###############################
        $config_seed =
          ( defined $config->{seed} && Int->check( $config->{seed} ) )
          ? $config->{seed}
          : 123456789;

        # Set on $self
        $self->{primary_key} = $config->{primary_key} // 'id';    # setter
        $self->{exclude_properties_regex} = $config->{exclude_properties_regex}
          // '';                                                   # setter
        $self->{array_terms} = $config->{array_terms} // ['foo'];    # setter - To validate
        $self->{array_regex} = $config->{array_regex} // '^(\w+):(\d+)';    # setter - To validate
        $self->{format}      = $config->{format};                          # setter

        # Validate $config->{id_correspondence} if we have "real" array_terms
        if ( $self->{array_terms}[0] ne 'foo' ) {
            unless ( exists $config->{id_correspondence}
                && HashRef->check( $config->{id_correspondence} ) )
            {
                die
                  "No <id_correspondence> provided or not a hash ref at\n$config_file\n";
            }
            $self->{id_correspondence} = $config->{id_correspondence};    # setter

            # Validate format if it exists and check that it has a match in $config->{id_correspondence}
            if ( exists $config->{format} && Str->check( $config->{format} ) ) {
                die
                  "<$config->{format}> does not match any key from <id_correspondence>\n"
                  unless
                  exists $config->{id_correspondence}{ $config->{format} };
            }
        }
    }
);

has sort_by => (
    default => $config_sort_by,
    is      => 'ro',
    coerce  => sub { $_[0] // $config_sort_by },
    lazy    => 1,
    isa     => Enum [qw(hamming jaccard)]
);

has max_out => (
    default => $config_max_out,    # Limit to speed up runtime
    is      => 'ro',
    coerce  => sub { $_[0] // $config_max_out },
    lazy    => 1,
    isa     => Int
);

has max_number_var => (
    default => $config_max_number_var,
    is      => 'ro',
    coerce  => sub { $_[0] // $config_max_number_var },
    lazy    => 1,
    isa     => Int
);

has hpo_file => (
    default => catfile( $share_dir, 'db', 'hp.json' ),
    coerce  => sub {
        $_[0] // catfile( $share_dir, 'db', 'hp.json' );
    },
    is  => 'ro',
    isa => sub { die "$_[0] is not a valid file" unless -e $_[0] },
);

has poi_out_dir => (
    default => catdir('./'),
    coerce  => sub {
        $_[0] // catdir('./');
    },
    is  => 'ro',
    isa => sub { die "$_[0] dir does not exist" unless -d $_[0] },
);

has [qw /include_terms exclude_terms/] => (
    is   => 'ro',
    lazy => 1,

    #isa => ArrayRef [Enum $config->{allowed_terms}], # It's created at compile time and we don't have $config->{allowed_terms}
    isa => sub {
        my $value = shift;
        die "<--include_terms> and <--exclude_terms> must be an array ref"
          unless ref $value eq 'ARRAY';
        die
          qq/Invalid term in <--include_terms> or <--exclude_terms>. Allowed values are:\n/,
          ( join ',', @config_allowed_terms ), "\n"
          unless all {
            my $term = $_;
            grep { $_ eq $term } @config_allowed_terms
          } @$value;
    },
    default => sub { [] },
);

has 'cli' => (
    is      => 'ro',
    isa     => Bool,
    default => 0,                     # Set the default value to 0
    coerce  => sub { $_[0] // 0 },    # Coerce to 0 if undefined
);

# Miscellaneous attributes here
has [
    qw/target_file weights_file out_file include_hpo_ascendants align align_basename export export_basename log verbose age/
] => ( is => 'ro' );

has [qw /append_prefixes reference_files patients_of_interest/] =>
  ( default => sub { [] }, is => 'ro' );

##########################################
# End declaring attributes for the class #
##########################################

sub BUILD {

    # BUILD is an instance method that is called after the object has been constructed but before it is returned to the caller.
    # BUILDARGS is a class method that is responsible for processing the arguments passed to the constructor (new) and returning a hash reference of attributes that will be used to initialize the object.
    my $self = shift;

    #$self->{primary_key} = $config->{primary_key} // 'id'; # setter;
    #$self->{exclude_properties_regex} = $config->{exclude_properties_regex}
    #  // ''; # setter

    # **************************
    # Start Miscellaneous checks
    # **************************

    # APPEND_PREFIXES
    # Check that we have the right number of array elements
    if ( @{ $self->{append_prefixes} } ) {

        # die if used with fewer than 2 cohort files in $self->{reference_files}
        die "<--append_prefixes> needs at least 2 cohort files!\n"
          unless @{ $self->{reference_files} } > 1;

        # die if #cohorts and #append-prefixes don't match
        die "Numbers of items in <--r> and <--append-prefixes> don't match!\n"
          unless @{ $self->{reference_files} } == @{ $self->{append_prefixes} };
    }

    # PATIENTS-OF-INTEREST
    if ( @{ $self->{patients_of_interest} } ) {

        # die if used without $self->{reference_files}
        die "<--patients-of-interest> needs to be used with <--r>\n"
          unless @{ $self->{reference_files} };
    }

    # ************************
    # End Miscellaneous checks
    # ************************
}

sub run {

    my $self = shift;

    #print Dumper $self and die;

    # Load variables
    my $reference_files        = $self->{reference_files};
    my $target_file            = $self->{target_file};
    my $weights_file           = $self->{weights_file};
    my $export                 = $self->{export};
    my $export_basename        = $self->{export_basename};
    my $include_hpo_ascendants = $self->{include_hpo_ascendants};
    my $hpo_file               = $self->{hpo_file};
    my $align                  = $self->{align};
    my $align_basename         = $self->{align_basename};
    my $out_file               = $self->{out_file};
    my $cohort_files           = $self->{cohort_files};
    my $append_prefixes        = $self->{append_prefixes};
    my $max_out                = $self->{max_out};
    my $sort_by                = $self->{sort_by};
    my $primary_key            = $self->{primary_key};
    my $poi                    = $self->{patients_of_interest};
    my $poi_out_dir            = $self->{poi_out_dir};
    my $cli                    = $self->{cli};

    # die if --align dir does not exist
    my $align_dir = defined $align ? dirname($align) : '.';
    die "Directory <$align_dir> does not exist (used with --align)\n"
      unless -d $align_dir;

    # die if --export dir does not exist
    my $export_dir = defined $export ? dirname($export) : '.';
    die "Directory <$export_dir> does not exist (used with --export)\n"
      unless -d $export_dir;

    # We assign weights if <--w>
    # NB: The user can exclude variables by using variable: 0
    my $weight = validate_json($weights_file);
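
    # Illustrative sketch of a weights file (the variable names below are
    # hypothetical; the only rule taken from the comment above is that a
    # weight of 0 excludes a variable):
    #
    #   { "variableA": 2, "variableB": 0 }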

    # Now we load $hpo_nodes, $hpo_edges if --include_hpo_ascendants
    # NB: we load them within $self to minimize the #args
    my $nodes = my $edges = undef;
    ( $nodes, $edges ) = parse_hpo_json( read_json($hpo_file) )
      if $include_hpo_ascendants;
    $self->{nodes} = $nodes;    # setter
    $self->{edges} = $edges;    # setter

    ###############################
    # START READING -r | -cohorts #
    ###############################

    # *** IMPORTANT ***
    # We have three modes of operation:
    # 1 - intra-cohort (--r) a.json
    # 2 - inter-cohort (--r) a.json b.json c.json
    # 3 - patient (assigned automatically if -t)
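    #
    # Illustrative constructor arguments for each mode (a sketch; file names
    # and prefixes are hypothetical, attribute names are the ones declared above):
    #   1 - intra-cohort: reference_files => ['a.json']
    #   2 - inter-cohort: reference_files => ['a.json', 'b.json', 'c.json'],
    #                     append_prefixes => ['A', 'B', 'C']
    #   3 - patient:      reference_files => ['a.json'], target_file => 'patient.json'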

    # *** IMPORTANT ***
    # $ref_data is an array ref where each element is the content of a file (e.g., [] or {})
    my $ref_data = [];
    for my $cohort_file ( @{$reference_files} ) {
        die "$cohort_file does not exist\n" unless -f $cohort_file;

        # Load JSON file as Perl data structure
        push @$ref_data,
          io_yaml_or_json(
            {
                filepath => $cohort_file,
                mode     => 'read'
            }
          );
    }

    # In <inter-cohort> we join --cohorts into one but we change the id
    # NB: Re-using $ref_data to save memory
    $ref_data = append_and_rename_primary_key(
        {
            ref_data        => $ref_data,
            append_prefixes => $append_prefixes,
            primary_key     => $primary_key
        }
    );

    #############################
    # END READING -r | -cohorts #
    #############################

    #--------------------------------
    # Write json for $poi if --poi  |
    #--------------------------------
    # *** IMPORTANT ***
    # It will exit when done (dry-run)
    write_poi(
        {
            ref_data    => $ref_data,
            poi         => $poi,
            poi_out_dir => $poi_out_dir,
            primary_key => $primary_key,
            verbose     => $self->{verbose}
        }
    ) and exit
      if @$poi;

    # We will process $ref_data to get stats on coverage
    my $coverage_stats = coverage_stats($ref_data);

    # We have to check if we have BFF|PXF or others unless defined at config
    add_attribute( $self, 'format', check_format($ref_data) )
      unless defined $self->{format};    # setter via sub

    # First we create:
    # - $glob_hash => hash with all the COHORT keys possible
    # - $ref_hash  => BIG hash with all individuals' keys "flattened"
    my ( $glob_hash, $ref_hash ) =
      create_glob_and_ref_hashes( $ref_data, $weight, $self );

    # Limit the number of variables if > $self->{max_number_var}
    # *** IMPORTANT ***
    # Change only performed in $glob_hash
    $glob_hash = randomize_variables( $glob_hash, $self )
      if keys %$glob_hash > $self->{max_number_var};

    # Second we perform one-hot encoding for each individual
    my $ref_binary_hash = create_binary_digit_string( $glob_hash, $ref_hash );
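
    # Illustrative example of the encoding (a sketch, not actual data):
    # if $glob_hash had the keys (k1, k2, k3) and an individual's entry in
    # $ref_hash contained only k1 and k3, its binary digit string would be "101".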

    # Hashes to be serialized to JSON if <--export>
    my $hash2serialize = {
        glob_hash       => $glob_hash,
        ref_hash        => $ref_hash,
        ref_binary_hash => $ref_binary_hash,
        coverage_stats  => $coverage_stats
    };

    # Perform cohort comparison
    cohort_comparison( $ref_binary_hash, $self ) unless $target_file;

    # Perform patient-to-cohort comparison and rank if (-t)
    if ($target_file) {

        ####################
        # START READING -t #
        ####################

        # local $tar_data is for patient
        my $tar_data = array2object(
            io_yaml_or_json( { filepath => $target_file, mode => 'read' } ) );

        ##################
        # END READING -t #
        ##################

        # The target file has to have $_->{$primary_key} otherwise die
        die
          "Sorry, <$target_file> does not contain <id> term and it's mandatory\n"
          unless exists $tar_data->{$primary_key};

        # We store {primary_key} as a variable as it might be deleted from $tar_data (--exclude-terms id)
        my $tar_data_id = $tar_data->{$primary_key};

        # Now we load the rest of the hashes
        my $tar_hash = {
            $tar_data_id => remap_hash(
                {
                    hash   => $tar_data,
                    weight => $weight,
                    self   => $self
                }
            )
        };

        # *** IMPORTANT ***
        # The target binary is created from matches to $glob_hash
        # Thus, it does not include variables ONLY present in TARGET
        my $tar_binary_hash =
          create_binary_digit_string( $glob_hash, $tar_hash );
        my (
            $results_rank,        $results_align, $alignment_ascii,
            $alignment_dataframe, $alignment_csv
          )
          = compare_and_rank(
            {
                glob_hash       => $glob_hash,
                ref_binary_hash => $ref_binary_hash,
                tar_binary_hash => $tar_binary_hash,
                weight          => $weight,
                self            => $self
            }
          );

        # Print ranked results to STDOUT only if CLI
        say join "\n", @$results_rank if $cli;

        # Write ranked results to txt
        write_array2txt( { filepath => $out_file, data => $results_rank } );

        # Write TXT for alignment if <--align>
        write_alignment(
            {
                align => $align ? $align : $align_basename,    # DON'T -- $align // $align_basename,
                ascii     => $alignment_ascii,
                dataframe => $alignment_dataframe,
                csv       => $alignment_csv
            }
        ) if defined $align;

        # Load keys into hash if <--e>
        if ( defined $export ) {
            $hash2serialize->{tar_hash}        = $tar_hash;
            $hash2serialize->{tar_binary_hash} = $tar_binary_hash;
            $hash2serialize->{alignment_hash}  = $results_align
              if defined $align;
        }
    }

    # Dump to JSON if <--export>
    # NB: Must work for -r and -t
    serialize_hashes(
        {
            data            => $hash2serialize,
            export_basename => $export ? $export : $export_basename
        }
    ) if defined $export;

    # Return
    return 1;
}

sub add_attribute {

    # Bypassing the encapsulation provided by Moo
    my ( $self, $name, $value ) = @_;
    $self->{$name} = $value;
    return 1;
}

1;

=head1 NAME

Pheno::Ranker - A module for semantic similarity analysis of PXF/BFF data structures and beyond (JSON|YAML)

=head1 SYNOPSIS

 use Pheno::Ranker;

 # Create object
 my $ranker = Pheno::Ranker->new(
     {
         reference_files => ['individuals.json'],
         out_file        => 'matrix.txt'
     }
 );

 # Run it (the output consists of text files)
 $ranker->run;

=head1 DESCRIPTION

We recommend using the included L<command-line interface|https://metacpan.org/dist/Pheno-Ranker/view/bin/pheno-ranker>.

For a more detailed description, please read the following documentation:

=over

=item General:

L<https://cnag-biomedical-informatics.github.io/pheno-ranker>

=item Command-Line Interface:

L<https://github.com/CNAG-Biomedical-Informatics/pheno-ranker#readme>

=back

=head1 CITATION

The author requests that any published work that uses C<Pheno-Ranker> cite the following reference:

Rueda, M. et al. "Advancing Semantic Similarity Analysis of Phenotypic Data Stored in GA4GH Standards and Beyond." (2023) I<Manuscript in preparation>.

=head1 AUTHOR

Written by Manuel Rueda, PhD. Info about CNAG can be found at L<https://www.cnag.eu>.

=head1 METHODS

See L<https://cnag-biomedical-informatics.github.io/pheno-ranker/use-as-a-module>.
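
As a quick illustration (a minimal sketch: the attributes below follow the constructor arguments declared in this module, while the file names are hypothetical), a patient-to-cohort ranking can be set up like this:

 my $patient_ranker = Pheno::Ranker->new(
     {
         reference_files => ['individuals.json'],    # cohort(s), equivalent to -r
         target_file     => 'patient.json',          # patient, equivalent to -t
         out_file        => 'rank.txt',
         max_out         => 50
     }
 );
 $patient_ranker->run;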

=head1 COPYRIGHT

This Perl file is copyrighted. See the LICENSE file included in this distribution.

=cut