Skip to content

Commit

Permalink
chore(cleanup): clean up the code
Browse files Browse the repository at this point in the history
  • Loading branch information
giangbui committed Mar 23, 2018
1 parent f8ec016 commit 35509dd
Show file tree
Hide file tree
Showing 5 changed files with 165 additions and 179 deletions.
118 changes: 36 additions & 82 deletions peregrine/resources/submission/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -102,28 +102,8 @@ def root_graphql_query():
# if peregrine.utils.contain_node_with_category(data.json,'data_file') == False:
# return flask.jsonify({ 'errors': 'No data_file node'}), 400
res = peregrine.utils.json2tbl(json.loads(data.data),'', "_" )
tsv = """# omw_id ili_id projects\n"""
row = []
for k in sorted(res[0]):
k = k.replace('_data_','')
tsv = tsv + "{}\t".format(k)
tsv = tsv[:-1] + "\n"

nrow = 0
for row_dict in res:
row=[]
for k in sorted(row_dict):
if row_dict[k]:
tsv = tsv + "{}\t".format(row_dict[k])
else:
tsv = tsv + "None\t"
tsv = tsv[:-1] + "\n"
nrow = nrow + 1
if nrow >= 1000:
break

return flask.Response(tsv, mimetype='text/tab-separated-values')

tsv = peregrine.utils.dicts2tsv(res)
return flask.Response(tsv, mimetype='text/tab-separated-values'), code
else:
return data, code
else:
Expand Down Expand Up @@ -157,63 +137,37 @@ def root_graphql_schema_query():
)
)

# @peregrine.blueprints.blueprint.route('/export', methods=['POST'])
# def get_manifest():
# """
# Creates and returns a manifest based on the filters pased on
# to this endpoint
# parameters:
# - name: filters
# in: graphql result in json format
# description: Filters to be applied when generating the manifest
# :return: A manifest that the user can use to download the files in there
# """
# payload = peregrine.utils.parse_request_json()
# export_data = payload.get('export_data')
# bag_path = payload.get('bag_path')

# if(bag_path is None):
# return flask.jsonify({'bag_path': None, 'errors': 'bag_path is required!!!'}), 400

# if peregrine.utils.contain_node_with_category(export_data,'data_file') == False:
# return flask.jsonify({ 'errors': 'No data_file node'}), 400


# res = peregrine.utils.json2tbl(export_data,'', "_" )

# import pdb; pdb.set_trace()

# tsv = """# omw_id ili_id projects\n"""
# row = []
# for k in sorted(res[0]):
# k = k.replace('_data_','')
# tsv = tsv + "{}\t".format(k)
# tsv = tsv[:-1] + "\n"

# nrow = 0
# for row_dict in res:
# row=[]
# for k in sorted(row_dict):
# if row_dict[k]:
# tsv = tsv + "{}\t".format(row_dict[k])
# else:
# tsv = tsv + "None\t"

# tsv = tsv[:-1] + "\n"

# nrow = nrow + 1
# if nrow >= 1000:
# break

# import pdb; pdb.set_trace()

# # bag_info = {'organization': 'CDIS',
# # 'data_type': 'TOPMed',
# # 'date_created': datetime.date.today().isoformat()}
# # args = dict(
# # bag_path=bag_path,
# # bag_info=bag_info,
# # payload=res)
# # peregrine.utils.create_bdbag(**args) # bag is a compressed file

# return flask.jsonify({'data': res}), 200
@peregrine.blueprints.blueprint.route('/export', methods=['POST'])
def get_manifest():
    """
    Create and return a manifest based on the filters passed to
    this endpoint.

    parameters:
        - name: filters
          in: graphql result in json format
          description: Filters to be applied when generating the manifest

    :return: JSON response ``{'data': <flattened table rows>}`` with
        status 200, or an error JSON with status 400 when ``bag_path``
        is missing or no ``data_file`` node is present.
    """
    payload = peregrine.utils.parse_request_json()
    export_data = payload.get('export_data')
    bag_path = payload.get('bag_path')

    # bag_path is required even though bag creation is currently disabled
    # (see TODO below) -- keep validating it so the API contract is stable.
    if bag_path is None:
        return flask.jsonify({'bag_path': None, 'errors': 'bag_path is required!!!'}), 400

    # Only exports containing at least one node of category 'data_file'
    # are meaningful as a download manifest.
    if not peregrine.utils.contain_node_with_category(export_data, 'data_file'):
        return flask.jsonify({'errors': 'No data_file node'}), 400

    # Flatten the nested GraphQL JSON into a list of row dictionaries.
    res = peregrine.utils.json2tbl(export_data, '', "_")

    # TODO(review): bdbag creation is disabled for now. When re-enabled,
    # it should package `res` into a compressed bag at `bag_path`:
    #   bag_info = {'organization': 'CDIS',
    #               'data_type': 'TOPMed',
    #               'date_created': datetime.date.today().isoformat()}
    #   peregrine.utils.create_bdbag(bag_path=bag_path,
    #                                bag_info=bag_info,
    #                                payload=res)

    return flask.jsonify({'data': res}), 200
4 changes: 2 additions & 2 deletions peregrine/utils/__init__.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
from .payload import get_variables,jsonify_check_errors,parse_request_json,get_keys,contain_node_with_category
from .pybdbag import create_bdbag
from .scheduling import AsyncPool
from .json2csv import flatten_obj,json2tbl, to_csv
from .response import format_response
from .json2csv import flatten_obj,json2tbl, dicts2tsv
from .response import format_response
33 changes: 31 additions & 2 deletions peregrine/utils/json2csv.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,37 @@ def to_csv(hits, dialect='excel'):
return s.getvalue()


def dicts2tsv(dict_list, max_rows=1000):
    """
    Convert a list of dictionaries to TSV format.

    Each element of the list becomes one row in the output; the header
    row is built from the sorted keys of the first dictionary, with any
    '_data_' marker stripped from the displayed column names.

    Args:
        dict_list: list of dictionaries, all expected to share the same keys.
        max_rows: maximum number of data rows to emit (default 1000,
            matching the previous hard-coded cap).

    Returns:
        The TSV content as a string (empty string for an empty input list).
    """
    # Guard against empty input: the original indexed dict_list[0]
    # unconditionally and raised IndexError.
    if not dict_list:
        return ""

    # Header: sorted keys with the '_data_' prefix marker removed.
    header = "\t".join(k.replace('_data_', '') for k in sorted(dict_list[0]))

    # Build rows with str.join instead of repeated string concatenation
    # (the old `tsv = tsv + ...` loop was quadratic).
    lines = [header]
    for row_dict in dict_list[:max_rows]:
        # Only a true None becomes "None"; falsy-but-real values such as
        # 0 or "" are rendered as-is (the old truthiness test lost them).
        lines.append("\t".join(
            "None" if row_dict[k] is None else "{}".format(row_dict[k])
            for k in sorted(row_dict)
        ))

    return "\n".join(lines) + "\n"

def join(table_list, L, index, row):
'''
Join sub tables to generate a big table
Expand Down Expand Up @@ -95,5 +126,3 @@ def json2tbl(json,prefix,delem):
else:
L.append({prefix: json})
return L


6 changes: 3 additions & 3 deletions peregrine/utils/payload.py
Original file line number Diff line number Diff line change
Expand Up @@ -260,13 +260,13 @@ def contain_node_with_category(json, category):
get_keys(json, keys_list)
ns_field = get_fields()

dic = {}
category_map = {}
for (k,v) in ns_field.iteritems():
dic[v] = k._dictionary['category']
category_map[v] = k._dictionary['category']

for key in keys_list:
try:
if dic[key] == category:
if category_map[key] == category:
return True
except KeyError:
pass
Expand Down
Loading

0 comments on commit 35509dd

Please sign in to comment.