Merge pull request #260 from cytomining/partition_profiling
Partition column in the metadata during profiling
jccaicedo committed Jun 17, 2021
2 parents 16bc87e + 1a361cc commit e23a66a
Showing 2 changed files with 15 additions and 12 deletions.
8 changes: 4 additions & 4 deletions deepprofiler/__main__.py
@@ -146,7 +146,7 @@ def prepare(context):
 def sample_sc(context):
     if context.parent.obj["config"]["prepare"]["compression"]["implement"]:
         context.parent.obj["config"]["paths"]["images"] = context.obj["config"]["paths"]["compressed_images"]
-    dset = deepprofiler.dataset.image_dataset.read_dataset(context.obj["config"])
+    dset = deepprofiler.dataset.image_dataset.read_dataset(context.obj["config"], mode='train')
     deepprofiler.dataset.sampling.sample_dataset(context.obj["config"], dset)
     print("Single-cell sampling complete.")

@@ -159,7 +159,7 @@ def sample_sc(context):
 def train(context, epoch, seed):
     if context.parent.obj["config"]["prepare"]["compression"]["implement"]:
         context.parent.obj["config"]["paths"]["images"] = context.obj["config"]["paths"]["compressed_images"]
-    dset = deepprofiler.dataset.image_dataset.read_dataset(context.obj["config"])
+    dset = deepprofiler.dataset.image_dataset.read_dataset(context.obj["config"], mode='train')
     deepprofiler.learning.training.learn_model(context.obj["config"], dset, epoch, seed)


@@ -177,8 +177,8 @@ def profile(context, part):
     if part >= 0:
         partfile = "index-{0:03d}.csv".format(part)
         config["paths"]["index"] = context.obj["config"]["paths"]["index"].replace("index.csv", partfile)
-    metadata = deepprofiler.dataset.image_dataset.read_dataset(context.obj["config"])
-    deepprofiler.learning.profiling.profile(context.obj["config"], metadata)
+    dset = deepprofiler.dataset.image_dataset.read_dataset(context.obj["config"], mode='profile')
+    deepprofiler.learning.profiling.profile(context.obj["config"], dset)


 # Auxiliary tool: Split index in multiple parts
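
With this change the profile command hands a dataset object (loaded in 'profile' mode) to the profiling code, optionally restricted to one partition of the index. A minimal sketch of that flow; the standalone helper profile_partition is an assumption for illustration and not part of the repository:

# Sketch only: mirrors the profile command above, outside the click CLI wiring.
import deepprofiler.dataset.image_dataset
import deepprofiler.learning.profiling


def profile_partition(config, part):
    # part >= 0 redirects the index to one partition file,
    # e.g. part 3 turns ".../index.csv" into ".../index-003.csv".
    if part >= 0:
        partfile = "index-{0:03d}.csv".format(part)
        config["paths"]["index"] = config["paths"]["index"].replace("index.csv", partfile)
    # mode='profile' loads the full metadata without splitting it into
    # training/validation partitions or attaching training targets.
    dset = deepprofiler.dataset.image_dataset.read_dataset(config, mode='profile')
    deepprofiler.learning.profiling.profile(config, dset)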
19 changes: 11 additions & 8 deletions deepprofiler/dataset/image_dataset.py
@@ -195,7 +195,7 @@ def number_of_records(self, dataset):
     def add_target(self, new_target):
         self.targets.append(new_target)

-def read_dataset(config):
+def read_dataset(config, mode = 'train'):
     # Read metadata and split dataset in training and validation
     metadata = deepprofiler.dataset.metadata.Metadata(config["paths"]["index"], dtype=None)
     if config["prepare"]["compression"]["implement"]:
@@ -211,10 +211,12 @@ def read_dataset(config):
     print(metadata.data.info())

     # Split training data
-    split_field = config["train"]["partition"]["split_field"]
-    trainingFilter = lambda df: df[split_field].isin(config["train"]["partition"]["training_values"])
-    validationFilter = lambda df: df[split_field].isin(config["train"]["partition"]["validation_values"])
-    metadata.splitMetadata(trainingFilter, validationFilter)
+    if mode == 'train':
+        split_field = config["train"]["partition"]["split_field"]
+        trainingFilter = lambda df: df[split_field].isin(config["train"]["partition"]["training_values"])
+        validationFilter = lambda df: df[split_field].isin(config["train"]["partition"]["validation_values"])
+        metadata.splitMetadata(trainingFilter, validationFilter)
+

     # Create a dataset
     keyGen = lambda r: "{}/{}-{}".format(r["Metadata_Plate"], r["Metadata_Well"], r["Metadata_Site"])
@@ -228,9 +230,10 @@ def read_dataset(config):
     )

     # Add training targets
-    for t in config["train"]["partition"]["targets"]:
-        new_target = deepprofiler.dataset.target.MetadataColumnTarget(t, metadata.data[t].unique())
-        dset.add_target(new_target)
+    if mode == 'train':
+        for t in config["train"]["partition"]["targets"]:
+            new_target = deepprofiler.dataset.target.MetadataColumnTarget(t, metadata.data[t].unique())
+            dset.add_target(new_target)

     # Activate outlines for masking if needed
     if config["dataset"]["locations"]["mask_objects"]:
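
For context on the block that is now gated behind mode == 'train': the training/validation split is a plain pandas isin() filter on the configured split_field. A toy sketch of what those filters select; the column names and values below are assumptions for illustration, not taken from the repository:

# Toy data and config values are assumed for illustration only.
import pandas as pd

config = {"train": {"partition": {
    "split_field": "Split",                  # assumed metadata column
    "training_values": ["Training"],         # assumed values
    "validation_values": ["Validation"],
}}}

df = pd.DataFrame({
    "Metadata_Plate": ["P1", "P1", "P2"],
    "Metadata_Well":  ["A01", "A02", "B03"],
    "Split":          ["Training", "Validation", "Training"],
})

split_field = config["train"]["partition"]["split_field"]
trainingFilter = lambda d: d[split_field].isin(config["train"]["partition"]["training_values"])
validationFilter = lambda d: d[split_field].isin(config["train"]["partition"]["validation_values"])

print(df[trainingFilter(df)])    # rows P1/A01 and P2/B03 (marked Training)
print(df[validationFilter(df)])  # the single Validation row (P1/A02)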
