Commit

add test
antgonza committed Dec 19, 2023
1 parent 6a7719f commit 4a6e7bc
Showing 2 changed files with 25 additions and 2 deletions.
3 changes: 1 addition & 2 deletions qiita_db/metadata_template/prep_template.py
@@ -890,8 +890,7 @@ def _get_predecessors(workflow, node):

# let's just keep one, let's give it preference to the one with the
# most total_conditions_satisfied
- workflows = sorted(workflows, reverse=True)[:1]
-
+ workflows = sorted(workflows, key=lambda x: x[0], reverse=True)[:1]
missing_artifacts = dict()
for _, wk in workflows:
missing_artifacts[wk] = dict()
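
For context, the explicit key presumably matters because each entry in workflows is a (total_conditions_satisfied, workflow) pair (the for _, wk loop above unpacks them), and a plain sorted(workflows, reverse=True) would fall through to comparing the workflow objects themselves whenever two scores tie. A minimal standalone sketch of the difference (FakeWorkflow is a hypothetical stand-in, not Qiita code):

class FakeWorkflow:
    # hypothetical stand-in for a workflow object that defines no ordering
    def __init__(self, name):
        self.name = name

# two candidates with the same total_conditions_satisfied
workflows = [(3, FakeWorkflow('small')), (3, FakeWorkflow('large'))]

# sorted(workflows, reverse=True) would raise TypeError here, because the tied
# first elements force a comparison of the FakeWorkflow objects themselves.
# Sorting on the score alone avoids that and keeps only the best-matching candidate:
best = sorted(workflows, key=lambda x: x[0], reverse=True)[:1]
print(best[0][1].name)  # 'small' (stable sort keeps the original order on ties)
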
24 changes: 24 additions & 0 deletions qiita_db/metadata_template/test/test_prep_template.py
@@ -1447,6 +1447,30 @@ def test_artifact_setter(self):
'Pick closed-reference OTUs', 'Pick closed-reference OTUs',
'Pick closed-reference OTUs'])

# at this point we can error all the previous steps, add a new smaller
# workflow and make sure you get the same one as before because it will
# have a higher match than the new one
for pj in wk.graph.nodes:
pj._set_error('Killed')
sql = """UPDATE qiita.default_workflow_data_type
SET data_type_id = 1
WHERE default_workflow_id = 2"""
qdb.sql_connection.perform_as_transaction(sql)
wk = pt.add_default_workflow(qdb.user.User('test@foo.bar'))
self.assertEqual(len(wk.graph.nodes), 5)
self.assertEqual(len(wk.graph.edges), 3)
self.assertCountEqual(
[x.command.name for x in wk.graph.nodes],
# we should have 2 split libraries and 3 close reference
['Split libraries FASTQ', 'Split libraries FASTQ',
'Pick closed-reference OTUs', 'Pick closed-reference OTUs',
'Pick closed-reference OTUs'])
# let's return it back
sql = """UPDATE qiita.default_workflow_data_type
SET data_type_id = 2
WHERE default_workflow_id = 2"""
qdb.sql_connection.perform_as_transaction(sql)

# now let's try to generate again and it should fail cause the jobs
# are already created
with self.assertRaisesRegex(ValueError, "Cannot create job because "
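
For readers trying to picture the asserted shape, here is a minimal standalone networkx sketch of one topology consistent with the checks above. The exact wiring is an assumption for illustration only; the test asserts node and edge counts plus command names, not edges between specific jobs.

import networkx as nx

# One hypothetical layout with 5 nodes and 3 edges:
# two 'Split libraries FASTQ' jobs feeding three 'Pick closed-reference OTUs' jobs.
g = nx.DiGraph()
g.add_edge('Split libraries FASTQ (a)', 'Pick closed-reference OTUs (1)')
g.add_edge('Split libraries FASTQ (a)', 'Pick closed-reference OTUs (2)')
g.add_edge('Split libraries FASTQ (b)', 'Pick closed-reference OTUs (3)')
assert len(g.nodes) == 5
assert len(g.edges) == 3
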
