Skip to content

Commit dd24318

Browse files
Merge branch '24.0' of https://github.com/bernt-matthias/tools-devteam into 24.0
2 parents 2f343e1 + a3927bd commit dd24318

17 files changed

+69
-7117
lines changed

.tt_skip

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,6 @@ tool_collections/hgv/hgv_hilbertvis
44
tool_collections/hgv/snpfreq
55
tool_collections/taxonomy/find_diag_hits
66
tool_collections/taxonomy/gi2taxonomy
7-
tool_collections/taxonomy/kraken2tax
87
tool_collections/taxonomy/lca_wrapper
98
tool_collections/taxonomy/poisson2test
109
tool_collections/taxonomy/t2ps

tool_collections/gops/basecoverage/gops_basecoverage.py

Lines changed: 15 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -8,41 +8,40 @@
88
from __future__ import print_function
99

1010
import fileinput
11-
import sys
1211

1312
from bx.cookbook import doc_optparse
1413
from bx.intervals.io import NiceReaderWrapper
1514
from bx.intervals.operations.base_coverage import base_coverage
1615
from bx.tabular.io import ParseError
1716
from galaxy.tools.util.galaxyops import fail, parse_cols_arg, skipped
1817

19-
assert sys.version_info[:2] >= ( 2, 4 )
20-
2118

2219
def main():
    """Compute the total number of bases covered by the intervals in the input file.

    Column positions (chrom/start/end/strand) are taken from the command
    line via ``doc_optparse`` parsing of the module docstring.  The single
    resulting count is written to the output file; a summary of any lines
    the reader skipped is printed to stdout.
    """
    options, args = doc_optparse.parse(__doc__)
    try:
        chr_col_1, start_col_1, end_col_1, strand_col_1 = parse_cols_arg(options.cols1)
        in_fname, out_fname = args
    except Exception:
        # Prints usage derived from the docstring and exits.
        doc_optparse.exception()

    g1 = NiceReaderWrapper(
        fileinput.FileInput(in_fname),
        chrom_col=chr_col_1,
        start_col=start_col_1,
        end_col=end_col_1,
        strand_col=strand_col_1,
        fix_strand=True
    )

    try:
        bases = base_coverage(g1)
    except ParseError as exc:
        fail("Invalid file format: %s" % str(exc))
    # Context manager guarantees the output file is closed even if the
    # write raises (the original open/write/close could leak the handle).
    with open(out_fname, "w") as out_file:
        out_file.write("%s\n" % str(bases))
    if g1.skipped > 0:
        print(skipped(g1, filedesc=""))
4645

4746

4847
if __name__ == "__main__":

tool_collections/gops/basecoverage/operation_filter.py

Lines changed: 11 additions & 36 deletions
Original file line numberDiff line numberDiff line change
@@ -1,42 +1,35 @@
11
# runs after the job (and after the default post-filter)
2-
from galaxy.jobs.handler import JOB_ERROR
32
from galaxy.tools.parameters import DataToolParameter
43

5-
# Older py compatibility
6-
try:
7-
set()
8-
except:
9-
from sets import Set as set
104

11-
12-
def validate_input( trans, error_map, param_values, page_param_map ):
5+
def validate_input(trans, error_map, param_values, page_param_map):
136
dbkeys = set()
147
data_param_names = set()
158
data_params = 0
169
for name, param in page_param_map.items():
17-
if isinstance( param, DataToolParameter ):
10+
if isinstance(param, DataToolParameter):
1811
# for each dataset parameter
1912
if param_values.get(name, None) is not None:
20-
dbkeys.add( param_values[name].dbkey )
13+
dbkeys.add(param_values[name].dbkey)
2114
data_params += 1
2215
# check meta data
2316
try:
2417
param = param_values[name]
25-
if isinstance( param.datatype, trans.app.datatypes_registry.get_datatype_by_extension( 'gff' ).__class__ ):
18+
if isinstance(param.datatype, trans.app.datatypes_registry.get_datatype_by_extension('gff').__class__):
2619
# TODO: currently cannot validate GFF inputs b/c they are not derived from interval.
2720
pass
2821
else: # Validate interval datatype.
29-
int( param.metadata.startCol )
30-
int( param.metadata.endCol )
31-
int( param.metadata.chromCol )
22+
int(param.metadata.startCol)
23+
int(param.metadata.endCol)
24+
int(param.metadata.chromCol)
3225
if param.metadata.strandCol is not None:
33-
int( param.metadata.strandCol )
34-
except:
26+
int(param.metadata.strandCol)
27+
except Exception:
3528
error_msg = "The attributes of this dataset are not properly set. " + \
3629
"Click the pencil icon in the history item to set the chrom, start, end and strand columns."
3730
error_map[name] = error_msg
38-
data_param_names.add( name )
39-
if len( dbkeys ) > 1:
31+
data_param_names.add(name)
32+
if len(dbkeys) > 1:
4033
for name in data_param_names:
4134
error_map[name] = "All datasets must belong to same genomic build, " \
4235
"this dataset is linked to build '%s'" % param_values[name].dbkey
@@ -45,22 +38,7 @@ def validate_input( trans, error_map, param_values, page_param_map ):
4538
error_map[name] = "A dataset of the appropriate type is required"
4639

4740

48-
# Commented out by INS, 5/30/2007. What is the PURPOSE of this?
49-
def exec_after_process(app, inp_data, out_data, param_dict, tool=None, stdout=None, stderr=None):
50-
"""Verify the output data after each run"""
51-
for data in out_data.values():
52-
try:
53-
if stderr and len( stderr ) > 0:
54-
raise Exception( stderr )
55-
except Exception:
56-
data.blurb = JOB_ERROR
57-
data.state = JOB_ERROR
58-
59-
6041
def exec_after_merge(app, inp_data, out_data, param_dict, tool=None, stdout=None, stderr=None):
61-
exec_after_process(
62-
app, inp_data, out_data, param_dict, tool=tool, stdout=stdout, stderr=stderr)
63-
6442
# strip strand column if clusters were merged
6543
for data in out_data.values():
6644
if param_dict['returntype'] is True:
@@ -72,9 +50,6 @@ def exec_after_merge(app, inp_data, out_data, param_dict, tool=None, stdout=None
7250

7351

7452
def exec_after_cluster(app, inp_data, out_data, param_dict, tool=None, stdout=None, stderr=None):
75-
exec_after_process(
76-
app, inp_data, out_data, param_dict, tool=tool, stdout=stdout, stderr=stderr)
77-
7853
# strip strand column if clusters were merged
7954
if param_dict["returntype"] == '1':
8055
for data in out_data.values():

tool_collections/taxonomy/kraken2tax/kraken2tax.xml

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,8 @@
1-
<tool id="Kraken2Tax" name="Convert Kraken" version="1.2">
1+
<tool id="Kraken2Tax" name="Convert Kraken" version="1.2+galaxy0">
22
<description>data to Galaxy taxonomy representation</description>
33
<requirements>
4-
<requirement type="package" version="4.1.0">gawk</requirement>
5-
<requirement type="package" version="1.0.0">gb_taxonomy_tools</requirement>
4+
<requirement type="package" version="5.1.0">gawk</requirement>
5+
<requirement type="package" version="1.0.1">gb_taxonomy_tools</requirement>
66
</requirements>
77
<command>
88
<![CDATA[

0 commit comments

Comments
 (0)