Skip to content

Commit

Permalink
Devel 1.4.12 (#55)
Browse files Browse the repository at this point in the history
* Cleaner pool iterable

* Added pipeline scripts for fast inspection

* Bugfix: SimpleFastaParser uses the entire header as identifier!

* Version++

* Even better handling of fasta files.

Expanded tests so it doesn't happen again

* Version++

* Threading (#43)

* pathos.multiprocessing -> pathos.ThreadingPool

* typo. Wrong fasta file

* Deleted comments

* Added done message

* version++

* Going back from pathos.ThreadPool to pathos.multiprocessing

* --debug: Improved logging and loggers

* version ++ 1.4.10

* Bugfix: full transition back to pathos.mp

* Removed asserts from input files: now we can use /dev/fd, stdin, stdout, devnull...

* v 1.4.11

* Fixed memory leak when multiprocessing

- Kill each child when it ends its task: maxtasksperchild=1

* Version 1.4.12
  • Loading branch information
jlanga authored May 28, 2018
1 parent 041fe7a commit c005970
Show file tree
Hide file tree
Showing 5 changed files with 8 additions and 8 deletions.
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ Get exons from a transcriptome and raw genomic reads using abyss-bloom and bedto

## Requirements
```
abyss>=2.0.0
abyss==2.0.1 (something is happening with 2.0.2 and abyss-bloom kmers)
bedtools (tested on 2.0)
python3
biopython
Expand Down
2 changes: 1 addition & 1 deletion exfi/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,4 +3,4 @@
filters.
"""

__version__ = '1.4.11'
__version__ = '1.4.12'
4 changes: 2 additions & 2 deletions exfi/build_splice_graph_dict.py
Original file line number Diff line number Diff line change
Expand Up @@ -191,13 +191,13 @@ def build_splice_graph_dict(
bed6df_dict = bed3_records_to_bed6df_dict(bed3records)

# Initialize pool of workers
pool = mp.Pool(args["threads"])
pool = mp.Pool(args["threads"], maxtasksperchild=1)

# Build graphs in parallel and merge results
splice_graphs = pool.map(
build_splice_graph,
bed6df_dict.values(),
chunksize=1000
chunksize=1000 # Number of transcripts to process at once
)
pool.close()
pool.join()
Expand Down
4 changes: 2 additions & 2 deletions exfi/correct.py
Original file line number Diff line number Diff line change
Expand Up @@ -400,14 +400,14 @@ def correct_splice_graph_dict(splice_graph_dict: SpliceGraphDict, args: dict) ->
filled_edges_by_transcript[transcript] = set()

# Initialize pool of workers
pool = mp.Pool(args["threads"])
pool = mp.Pool(args["threads"], maxtasksperchild=1)

# Process each graph in parallel
logging.info("\tCorrecting each splice graph")
corrected_splice_graphs = pool.starmap(
_sculpt_graph,
zip(splice_graph_dict.values(), filled_edges_by_transcript.values()),
chunksize=1000
chunksize=1000 # Number of splice graphs to process at once.
)
pool.close()
pool.join()
Expand Down
4 changes: 2 additions & 2 deletions exfi/polish.py
Original file line number Diff line number Diff line change
Expand Up @@ -153,7 +153,7 @@ def polish_splice_graph_dict(
"""

# Initialize pool of workers
pool = mp.Pool(args["threads"])
pool = mp.Pool(args["threads"], maxtasksperchild=1)

splice_graphs = (splice_graph for splice_graph in splice_graph_dict.values())
fasta_dicts = (
Expand All @@ -163,7 +163,7 @@ def polish_splice_graph_dict(
results = pool.starmap(
polish_splice_graph,
zip(splice_graphs, fasta_dicts),
chunksize=1000
chunksize=1000 # Number of splice_graphs to process at once
)
pool.close()
pool.join()
Expand Down

0 comments on commit c005970

Please sign in to comment.