@@ -1,42 +1,35 @@
 # runs after the job (and after the default post-filter)
-from galaxy.jobs.handler import JOB_ERROR
 from galaxy.tools.parameters import DataToolParameter
 
-# Older py compatibility
-try:
-    set()
-except:
-    from sets import Set as set
 
-
-def validate_input( trans, error_map, param_values, page_param_map ):
+def validate_input(trans, error_map, param_values, page_param_map):
     dbkeys = set()
     data_param_names = set()
     data_params = 0
     for name, param in page_param_map.items():
-        if isinstance( param, DataToolParameter ):
+        if isinstance(param, DataToolParameter):
             # for each dataset parameter
             if param_values.get(name, None) is not None:
-                dbkeys.add( param_values[name].dbkey )
+                dbkeys.add(param_values[name].dbkey)
                 data_params += 1
                 # check meta data
                 try:
                     param = param_values[name]
-                    if isinstance( param.datatype, trans.app.datatypes_registry.get_datatype_by_extension( 'gff' ).__class__ ):
+                    if isinstance(param.datatype, trans.app.datatypes_registry.get_datatype_by_extension('gff').__class__):
                         # TODO: currently cannot validate GFF inputs b/c they are not derived from interval.
                         pass
                     else:  # Validate interval datatype.
-                        int( param.metadata.startCol )
-                        int( param.metadata.endCol )
-                        int( param.metadata.chromCol )
+                        int(param.metadata.startCol)
+                        int(param.metadata.endCol)
+                        int(param.metadata.chromCol)
                         if param.metadata.strandCol is not None:
-                            int( param.metadata.strandCol )
-                except:
+                            int(param.metadata.strandCol)
+                except Exception:
                     error_msg = "The attributes of this dataset are not properly set. " + \
                         "Click the pencil icon in the history item to set the chrom, start, end and strand columns."
                     error_map[name] = error_msg
-            data_param_names.add( name )
-    if len( dbkeys ) > 1:
+            data_param_names.add(name)
+    if len(dbkeys) > 1:
        for name in data_param_names:
            error_map[name] = "All datasets must belong to same genomic build, " \
                "this dataset is linked to build '%s'" % param_values[name].dbkey
@@ -45,22 +38,7 @@ def validate_input( trans, error_map, param_values, page_param_map ):
             error_map[name] = "A dataset of the appropriate type is required"
 
 
-# Commented out by INS, 5/30/2007. What is the PURPOSE of this?
-def exec_after_process(app, inp_data, out_data, param_dict, tool=None, stdout=None, stderr=None):
-    """Verify the output data after each run"""
-    for data in out_data.values():
-        try:
-            if stderr and len( stderr ) > 0:
-                raise Exception( stderr )
-        except Exception:
-            data.blurb = JOB_ERROR
-            data.state = JOB_ERROR
-
-
 def exec_after_merge(app, inp_data, out_data, param_dict, tool=None, stdout=None, stderr=None):
-    exec_after_process(
-        app, inp_data, out_data, param_dict, tool=tool, stdout=stdout, stderr=stderr)
-
     # strip strand column if clusters were merged
     for data in out_data.values():
         if param_dict['returntype'] is True:
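The deleted exec_after_process hook treated any stderr output as a job failure and flagged every output dataset with JOB_ERROR, which is why the now-unused import is removed in the first hunk. Its try/raise/except detour was equivalent to a direct check; roughly, as a standalone sketch with a hypothetical StubData in place of a Galaxy dataset object:

JOB_ERROR = 'error'  # assumption: stand-in for the constant formerly imported from galaxy.jobs.handler


class StubData:
    """Hypothetical stand-in for a Galaxy output dataset."""
    blurb = 'done'
    state = 'ok'


def flag_outputs_on_stderr(out_data, stderr):
    # Any stderr content marks every output as errored, as the old hook did.
    for data in out_data.values():
        if stderr and len(stderr) > 0:
            data.blurb = JOB_ERROR
            data.state = JOB_ERROR


outputs = {'output1': StubData()}
flag_outputs_on_stderr(outputs, stderr="Segmentation fault")
assert outputs['output1'].state == JOB_ERROR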
@@ -72,9 +50,6 @@ def exec_after_merge(app, inp_data, out_data, param_dict, tool=None, stdout=None, stderr=None):
 
 
 def exec_after_cluster(app, inp_data, out_data, param_dict, tool=None, stdout=None, stderr=None):
-    exec_after_process(
-        app, inp_data, out_data, param_dict, tool=tool, stdout=stdout, stderr=stderr)
-
     # strip strand column if clusters were merged
     if param_dict["returntype"] == '1':
         for data in out_data.values():
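Note that exec_after_merge tests param_dict['returntype'] is True while exec_after_cluster tests == '1'; both gate the same strand-stripping step, whose body falls below this hunk. A plausible standalone sketch of the idea, assuming 6-column BED-like rows where field 6 is the strand:

def strip_strand(lines):
    # Drop field 6 (the strand) from each tab-separated interval row.
    stripped = []
    for line in lines:
        fields = line.rstrip('\n').split('\t')
        if len(fields) >= 6:
            del fields[5]
        stripped.append('\t'.join(fields))
    return stripped


rows = ["chr1\t100\t200\tcluster1\t0\t+"]
print(strip_strand(rows))  # ['chr1\t100\t200\tcluster1\t0']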