diff --git a/src/deep_neurographs/machine_learning/feature_generation.py b/src/deep_neurographs/machine_learning/feature_generation.py
index f158299..14692bd 100644
--- a/src/deep_neurographs/machine_learning/feature_generation.py
+++ b/src/deep_neurographs/machine_learning/feature_generation.py
@@ -28,7 +28,7 @@
 WINDOW = [5, 5, 5]
 N_BRANCH_PTS = 50
 N_PROFILE_PTS = 10
-N_SKEL_FEATURES = 19
+N_SKEL_FEATURES = 20
 SUPPORTED_MODELS = [
     "AdaBoost",
     "RandomForest",
@@ -40,8 +40,44 @@
 
 
 # -- Wrappers --
+def run(
+    neurograph,
+    model_type,
+    search_radius,
+    img_path,
+    labels_path=None,
+    proposals=None,
+):
+    if "Graph" in model_type:
+        features = dict()
+        features["branches"] = run_on_branches(neurograph)
+        features["proposals"] = run_on_proposals(
+            neurograph,
+            model_type,
+            search_radius,
+            img_path,
+            labels_path=labels_path,
+            proposals=proposals,
+        )
+    else:
+        features = run_on_proposals(
+            neurograph,
+            model_type,
+            search_radius,
+            img_path,
+            labels_path=labels_path,
+            proposals=proposals,
+        )
+    return features
+
+
 def run_on_proposals(
-    neurograph, model_type, img_path, labels_path=None, proposals=None
+    neurograph,
+    model_type,
+    search_radius,
+    img_path,
+    labels_path=None,
+    proposals=None,
 ):
     """
     Generates feature vectors for every proposal in a neurograph.
@@ -54,6 +90,8 @@ def run_on_proposals(
     model_type : str
         Type of model to be trained. Options include: AdaBoost,
         RandomForest, FeedForwardNet, ConvNet, MultiModalNet.
+    search_radius : float
+        Search radius used to generate proposals.
     img_path : str
         Path to raw image stored in a GCS bucket.
     labels_path : str, optional
@@ -92,7 +130,7 @@ def run_on_proposals(
     return features
 
 
-def run_on_branches(neurograph, branches):
+def run_on_branches(neurograph):
     """
     Generates feature vectors for every edge in a neurograph.
 
@@ -109,7 +147,7 @@ def run_on_branches(neurograph, branches):
         vector and the numerical vector.
 
     """
-    return {"skel": generate_branch_features(neurograph, branches)}
+    return {"skel": generate_branch_features(neurograph)}
 
 
 # -- Proposal Feature Extraction --
@@ -368,15 +406,15 @@ def avg_branch_radii(neurograph, edge):
 def n_nearby_leafs(neurograph, proposal):
     xyz = neurograph.proposal_midpoint(proposal)
     leafs = neurograph.query_kdtree(xyz, 25, node_type="leaf")
-    return len(leafs)
+    return len(leafs) - 2
 
 
 # --- Edge Feature Generation --
-def generate_branch_features(neurograph, edges):
+def generate_branch_features(neurograph):
     features = dict()
-    for (i, j) in edges:
+    for (i, j) in neurograph.edges:
         edge = frozenset((i, j))
-        features[edge] = np.zeros((34))
+        features[edge] = np.zeros((32))
         temp = np.concatenate(
             (
diff --git a/src/deep_neurographs/machine_learning/graph_datasets.py b/src/deep_neurographs/machine_learning/graph_datasets.py
index 4aaf4c0..77f7056 100644
--- a/src/deep_neurographs/machine_learning/graph_datasets.py
+++ b/src/deep_neurographs/machine_learning/graph_datasets.py
@@ -20,7 +20,7 @@
 
 
 # Wrapper
-def init(neurograph, branch_features, proposal_features, heterogeneous=False):
+def init(neurograph, features, heterogeneous=False):
     """
     Initializes a dataset that can be used to train a graph neural network.
 
@@ -28,14 +28,10 @@
     ----------
     neurograph : NeuroGraph
         Graph that dataset is built from.
-    branch_features : dict
-        Feature vectors corresponding to branches such that the keys are a
-        frozenset of the node pair and values are the corresponding feature
-        vectors.
-    proposal_features : dict
-        Feature vectors corresponding to proposals such that the keys are a
-        frozenset of the node pair and values are the corresponding feature
-        vectors.
+    features : dict
+        Feature vectors for branches and proposals. The keys are
+        "branches" and "proposals", and each value is a dictionary
+        containing the different feature types for that group.
     heterogeneous : bool
         Indication of whether dataset should be stored as a heterogeneous
         graph.
@@ -48,10 +44,10 @@
     """
     # Extract features
     x_branches, _, idxs_branches = feature_generation.get_matrix(
-        neurograph, branch_features, "GraphNeuralNet"
+        neurograph, features["branches"], "GraphNeuralNet"
     )
     x_proposals, y_proposals, idxs_proposals = feature_generation.get_matrix(
-        neurograph, proposal_features, "GraphNeuralNet"
+        neurograph, features["proposals"], "GraphNeuralNet"
     )
 
     # Initialize data
diff --git a/src/deep_neurographs/machine_learning/graph_models.py b/src/deep_neurographs/machine_learning/graph_models.py
index 49b74d4..e6149b7 100644
--- a/src/deep_neurographs/machine_learning/graph_models.py
+++ b/src/deep_neurographs/machine_learning/graph_models.py
@@ -10,7 +10,7 @@
 
 import torch
 import torch.nn.functional as F
-from torch.nn import ELU, Dropout, Linear
+from torch.nn import ELU, Dropout, LeakyReLU, Linear
 import torch.nn.init as init
 from torch_geometric.nn import GATv2Conv as GATConv
 from torch_geometric.nn import GCNConv
@@ -24,22 +24,19 @@ def __init__(self, input_channels):
         self.conv2 = GCNConv(2 * input_channels, input_channels)
         self.conv3 = GCNConv(input_channels, input_channels // 2)
         self.dropout = Dropout(0.3)
-        self.ELU = ELU()
+        self.leaky_relu = LeakyReLU()
         self.output = Linear(input_channels // 2, 1)
 
         # Initialize weights
         self.init_weights()
 
     def init_weights(self):
-        layers = [self.conv1, self.conv2, self.conv3]
-        #, self.input, self.output]
+        layers = [self.conv1, self.conv2, self.conv3, self.input, self.output]
         for layer in layers:
             for param in layer.parameters():
                 if len(param.shape) > 1:
-                    # Initialize weights using Glorot uniform initialization
-                    init.xavier_uniform_(param)
+                    init.kaiming_normal_(param)
                 else:
-                    # Initialize biases to zeros
                     init.zeros_(param)
 
     def forward(self, x, edge_index):
@@ -48,16 +45,18 @@ def forward(self, x, edge_index):
 
         # Layer 1
         x = self.conv1(x, edge_index)
-        x = self.ELU(x)
+        x = self.leaky_relu(x)
         x = self.dropout(x)
 
         # Layer 2
         x = self.conv2(x, edge_index)
-        x = self.ELU(x)
+        x = self.leaky_relu(x)
         x = self.dropout(x)
 
         # Layer 3
         x = self.conv3(x, edge_index)
+        x = self.leaky_relu(x)
+        x = self.dropout(x)
 
         # Output
         x = self.output(x)
diff --git a/src/deep_neurographs/machine_learning/graph_trainer.py b/src/deep_neurographs/machine_learning/graph_trainer.py
index dab4df0..8c57340 100644
--- a/src/deep_neurographs/machine_learning/graph_trainer.py
+++ b/src/deep_neurographs/machine_learning/graph_trainer.py
@@ -292,7 +292,7 @@ def train_test_split(graph_ids):
 
     """
     n_test_examples = int(len(graph_ids) * TEST_PERCENT)
-    test_ids = ["block_007", "block_010"]  # sample(graph_ids, n_test_examples)
+    test_ids = ["block_007", "block_010"]  #sample(graph_ids, n_test_examples)
     train_ids = list(set(graph_ids) - set(test_ids))
     return train_ids, test_ids
 
diff --git a/src/deep_neurographs/machine_learning/groundtruth_generation.py b/src/deep_neurographs/machine_learning/groundtruth_generation.py
index 9264673..cf79cdd 100644
--- a/src/deep_neurographs/machine_learning/groundtruth_generation.py
+++ b/src/deep_neurographs/machine_learning/groundtruth_generation.py
@@ -17,7 +17,7 @@
 from deep_neurographs import utils
 from deep_neurographs.geometry import dist as get_dist
 
-ALIGNED_THRESHOLD = 3.5
+ALIGNED_THRESHOLD = 4
 MIN_INTERSECTION = 10
 
 
diff --git a/src/deep_neurographs/machine_learning/inference.py b/src/deep_neurographs/machine_learning/inference.py
index e747c95..bf6efb5 100644
--- a/src/deep_neurographs/machine_learning/inference.py
+++ b/src/deep_neurographs/machine_learning/inference.py
@@ -35,6 +35,7 @@ def run(
     img_path,
     labels_path,
     proposals,
+    search_radius,
     batch_size_proposals=BATCH_SIZE_PROPOSALS,
     confidence_threshold=0.7,
     seeds=None,
@@ -47,6 +48,7 @@
             img_path,
             labels_path,
             proposals,
+            search_radius,
             seeds,
             batch_size_proposals=batch_size_proposals,
             confidence_threshold=confidence_threshold,
@@ -59,6 +61,7 @@
             img_path,
             labels_path,
             proposals,
+            search_radius,
             batch_size_proposals=batch_size_proposals,
             confidence_threshold=confidence_threshold,
         )
@@ -71,6 +74,7 @@ def run_with_seeds(
     img_path,
     labels_path,
     proposals,
+    search_radius,
     seeds,
     batch_size_proposals=BATCH_SIZE_PROPOSALS,
     confidence_threshold=0.7,
@@ -93,6 +97,7 @@ def run_without_seeds(
     img_path,
     labels_path,
     proposals,
+    search_radius,
     batch_size_proposals=BATCH_SIZE_PROPOSALS,
     confidence_threshold=0.7,
     progress_bar=True,
@@ -121,6 +126,7 @@ def run_without_seeds(
             proposals_i,
             model,
             model_type,
+            search_radius,
             confidence_threshold=confidence_threshold,
         )
 
@@ -146,12 +152,14 @@ def predict(
     proposals,
     model,
     model_type,
+    search_radius,
     confidence_threshold=0.7,
 ):
     # Generate features
-    features = feature_generation.run(
+    features = feature_generation.run_on_proposals(
         neurograph,
         model_type,
+        search_radius,
         img_path,
         labels_path=labels_path,
         proposals=proposals,
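
For reference, a minimal usage sketch (not part of the patch) of how the reworked entry points are expected to fit together. The `neurograph`, `search_radius`, `img_path`, `labels_path`, and `proposals` values are placeholders, and using a model type containing "Graph" to request branch features is an assumption based on the `"Graph" in model_type` check in `feature_generation.run`.

    # Sketch only; assumes a NeuroGraph whose proposals were generated at `search_radius`.
    from deep_neurographs.machine_learning import feature_generation, graph_datasets

    # Any model_type containing "Graph" adds branch features alongside proposal features.
    features = feature_generation.run(
        neurograph,          # placeholder NeuroGraph
        "GraphNeuralNet",    # placeholder model type containing "Graph"
        search_radius,       # radius used when the proposals were generated
        img_path,
        labels_path=labels_path,
        proposals=proposals,
    )
    # features["branches"] == {"skel": ...}; features["proposals"] holds the proposal features.
    dataset = graph_datasets.init(neurograph, features)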