diff --git a/bfabric/__init__.py b/bfabric/__init__.py index 1b753216..ecf2108d 100755 --- a/bfabric/__init__.py +++ b/bfabric/__init__.py @@ -3,7 +3,7 @@ name = "bfabricPy" alias = "suds-py3" -msg = "\033[93m{} version {} (2023-03-31) -- \"{}\"\ +msg = "\033[93m{} version {} (2023-10-11) -- \"{}\"\ \nCopyright (C) 2014-2023 Functional Genomics Center Zurich\033[0m\n\n"\ .format(name, __version__, alias) diff --git a/bfabric/_version.py b/bfabric/_version.py index 7c37594d..e318db39 100644 --- a/bfabric/_version.py +++ b/bfabric/_version.py @@ -1 +1 @@ -__version__ = "0.13.5" +__version__ = "0.13.6" diff --git a/bfabric/scripts/bfabric_feeder_mascot.py b/bfabric/scripts/bfabric_feeder_mascot.py index 87a8b3c2..b599b997 100755 --- a/bfabric/scripts/bfabric_feeder_mascot.py +++ b/bfabric/scripts/bfabric_feeder_mascot.py @@ -12,6 +12,7 @@ 2012-10-10 Christian Panse 2012-10-11 Christian Panse 2021-01-06 Christian Panse - replace multiprocess by caching strategy +2023-10-20 Christian Panse - add timestamp # Usage @@ -31,6 +32,7 @@ import hashlib import getopt from suds.client import Client +from datetime import datetime import json import itertools import http.client @@ -48,7 +50,11 @@ try: DB = json.load(open(DBfilename)) + print("Read {len} data items from {name} using {size:.1f} GBytes.".format(len=len(DB), + name=DBfilename, + size=sum(map(lambda x: int(x['resource']['size']), DB.values())) / (1024 * 1024 * 1024))) except: + print("loading '{}' failed".format(DBfilename)) pass @@ -69,7 +75,7 @@ def query_mascot_result(f): regex2 = re.compile(".*.+/(data/.+\.dat)$") regex2Result = regex2.match(f) if True: - print("input>") + print("{} input>".format(datetime.now())) print("\t{}".format(f)) if f in DB: print("\thit") @@ -110,7 +116,7 @@ def query_mascot_result(f): print("Exception {}".format(ValueError)) raise - print("output>") + print("{} output>".format(datetime.now())) if 'errorreport' in rv: print("\tfound errorreport '{}'.".format(rv['errorreport'])) @@ -180,7 
+186,10 @@ def query_mascot_result(f): def parse_mascot_result_file(f): - print("DEBUG parse_mascot_result_file") + + # Getting the current date and time + print("{} DEBUG parse_mascot_result_file".format(datetime.now())) + regex0 = re.compile("^title=.*(p([0-9]+).+Proteomics.*(raw|RAW|wiff)).*") regex3 = re.compile("^(FILE|COM|release|USERNAME|USERID|TOL|TOLU|ITOL|ITOLU|MODS|IT_MODS|CHARGE|INSTRUMENT|QUANTITATION|DECOY)=(.+)$") @@ -239,7 +248,8 @@ def parse_mascot_result_file(f): ) ) #TODO - print("DEBUG") + + print("{}".format(datetime.now())) print(rv) print("DEBUG END") @@ -255,17 +265,17 @@ def printFrequency(S): count[x] = 1 for key in sorted(count.keys(), key=lambda key: int(key)): - print(key, count[key]) + print("p{}\t{}".format(key, count[key])) def statistics(): - print(len(DB)) + print("Statistics ...") + print("len(DB)\t=\t{}".format(len(DB))) printFrequency(map(lambda x: x['containerid'], DB.values())) - print("{} GBytes".format(sum(map(lambda x: int(x['resource']['size']), DB.values())) / (1024 * 1024 * 1024))) + print("file size\t=\t{} GBytes".format(sum(map(lambda x: int(x['resource']['size']), DB.values())) / (1024 * 1024 * 1024))) # printFrequency(map(lambda x: x['description'].split(";"), DB.values())) - - print(json.dumps(list(DB.values())[100], indent=4)) + # print(json.dumps(list(DB.values())[100], indent=4)) if __name__ == "__main__": @@ -281,10 +291,10 @@ def statistics(): print("reading file names from stdin ...") for f in sys.stdin.readlines(): query_mascot_result(f.strip()) - elif o == "--file" or o == 'f': + elif o == "--file" or o == '-f': print("processesing", value, "...") query_mascot_result(value) - elif o == "--statistics" or o == 's': + elif o == "--statistics" or o == '-s': statistics() sys.exit(0) diff --git a/bfabric/scripts/bfabric_save_link_to_workunit.py b/bfabric/scripts/bfabric_save_link_to_workunit.py new file mode 100755 index 00000000..53b58504 --- /dev/null +++ b/bfabric/scripts/bfabric_save_link_to_workunit.py @@ 
-0,0 +1,32 @@ +#!/usr/bin/env python3 +# -*- coding: latin1 -*- + +""" +Copyright (C) 2023 Functional Genomics Center Zurich ETHZ|UZH. All rights reserved. + +Christian Panse 20231011 +""" + +import sys +import os +from bfabric import Bfabric + +def save_link(wuid=294156, link="", name=""): + B = Bfabric() + + rv = B.save_object('link', + obj={'name': name, + 'parentclassname': 'workunit', + 'parentid': wuid, + 'url': link}) + B.print_json(rv) + +if __name__ == "__main__": + if len(sys.argv) == 4: + save_link(wuid=sys.argv[1], link=sys.argv[2], name=sys.argv[3]) + else: + print ("Usage:") + print ("{} <workunitid> <link> <name>".format(sys.argv[0])) + print ("Example:") + print ("{} 294156 'https://fgcz-shiny.uzh.ch/exploreDE_prot/?data=p3000/bfabric/Proteomics/SummarizedExperiment/2023/2023-09/2023-09-29/workunit_294156/2363303.rds' 'demo1 link'".format(sys.argv[0])) + diff --git a/setup.py b/setup.py index 21c2415b..04d24c1c 100755 --- a/setup.py +++ b/setup.py @@ -45,37 +45,28 @@ python_requires = ">=3.7", install_requires = INSTALL_REQUIRES, scripts = [ - 'bfabric/scripts/bfabric_delete.py', - 'bfabric/scripts/bfabric_read.py', 'bfabric/scripts/bfabric_flask.py', - 'bfabric/scripts/bfabric_save_importresource_sample.py', - 'bfabric/scripts/bfabric_save_resource.py', 'bfabric/scripts/bfabric_feeder_resource_autoQC.py', - 'bfabric/scripts/bfabric_setResourceStatus_available.py', 'bfabric/scripts/bfabric_list_not_existing_storage_directories.py', + 'bfabric/scripts/bfabric_list_not_available_proteomics_workunits.py', + 'bfabric/scripts/bfabric_upload_resource.py', + 'bfabric/scripts/bfabric_logthis.py', + 'bfabric/scripts/bfabric_setResourceStatus_available.py', 'bfabric/scripts/bfabric_setExternalJobStatus_done.py', 'bfabric/scripts/bfabric_setWorkunitStatus_available.py', 'bfabric/scripts/bfabric_setWorkunitStatus_processing.py', 'bfabric/scripts/bfabric_setWorkunitStatus_failed.py', - 'bfabric/scripts/bfabric_upload_resource.py', - 'bfabric/scripts/bfabric_save_fasta.py', - 
'bfabric/scripts/bfabric_save_workunit_attribute.py', - 'bfabric/scripts/bfabric_save_csv2dataset.py', - 'bfabric/scripts/bfabric_save_dataset2csv.py', - 'bfabric/scripts/bfabric_logthis.py', + 'bfabric/scripts/bfabric_delete.py', + 'bfabric/scripts/bfabric_read.py', 'bfabric/scripts/bfabric_read_samples_of_workunit.py', 'bfabric/scripts/bfabric_read_samples_from_dataset.py', - 'bfabric/scripts/bfabric_list_not_available_proteomics_workunits.py', + 'bfabric/scripts/bfabric_save_csv2dataset.py', + 'bfabric/scripts/bfabric_save_dataset2csv.py', + 'bfabric/scripts/bfabric_save_fasta.py', + 'bfabric/scripts/bfabric_save_importresource_sample.py', + 'bfabric/scripts/bfabric_save_link_to_workunit.py', + 'bfabric/scripts/bfabric_save_resource.py', + 'bfabric/scripts/bfabric_save_workunit_attribute.py', 'bfabric/scripts/bfabric_save_workflowstep.py' - #'bfabric/scripts/bfabric_list_executables.py', - #'bfabric/scripts/bfabric_list_proteomics_projects.py', - #'bfabric/scripts/bfabric_upload_wrapper_creator_executable.py', - #'bfabric/scripts/bfabric_upload_submitter_executable.py', - #'bfabric/scripts/bfabric_create_bfabricrc.py', - #'bfabric/scripts/bfabric_save_importresource.py', - ##'bfabric/scripts/bfabric_save_importresource_mascot.py', - #'bfabric/scripts/bfabric_wrapper_creator_yaml.py', - #'bfabric/scripts/bfabric_submitter_yaml.py', - #'bfabric/scripts/fgcz_maxquant_wrapper.py' ], zip_safe=True)