Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Refactor general #124

Merged
merged 3 commits into from
Apr 16, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
54 changes: 46 additions & 8 deletions src/deep_neurographs/machine_learning/feature_generation.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@
WINDOW = [5, 5, 5]
N_BRANCH_PTS = 50
N_PROFILE_PTS = 10
N_SKEL_FEATURES = 19
N_SKEL_FEATURES = 20
SUPPORTED_MODELS = [
"AdaBoost",
"RandomForest",
Expand All @@ -40,8 +40,44 @@


# -- Wrappers --
def run(
    neurograph,
    model_type,
    search_radius,
    img_path,
    labels_path=None,
    proposals=None,
):
    """
    Generates feature vectors used to train a model of type "model_type".

    Graph-based models (any "model_type" containing "Graph") receive a
    dictionary with features for both branches and proposals; every other
    model type receives proposal features only.
    """
    if "Graph" in model_type:
        # Branch features are generated first, then proposal features.
        return {
            "branches": run_on_branches(neurograph),
            "proposals": run_on_proposals(
                neurograph,
                model_type,
                search_radius,
                img_path,
                labels_path=labels_path,
                proposals=proposals,
            ),
        }
    return run_on_proposals(
        neurograph,
        model_type,
        search_radius,
        img_path,
        labels_path=labels_path,
        proposals=proposals,
    )


def run_on_proposals(
neurograph, model_type, img_path, labels_path=None, proposals=None
neurograph,
model_type,
search_radius,
img_path,
labels_path=None,
proposals=None,
):
"""
Generates feature vectors for every proposal in a neurograph.
Expand All @@ -54,6 +90,8 @@ def run_on_proposals(
model_type : str
Type of model to be trained. Options include: AdaBoost, RandomForest,
FeedForwardNet, ConvNet, MultiModalNet.
search_radius : float
Search radius used to generate proposals.
img_path : str
Path to raw image stored in a GCS bucket.
labels_path : str, optional
Expand Down Expand Up @@ -92,7 +130,7 @@ def run_on_proposals(
return features


def run_on_branches(neurograph):
    """
    Generates feature vectors for every branch (i.e. edge) in a neurograph.

    Parameters
    ----------
    neurograph : NeuroGraph
        Graph whose branches feature vectors are generated for.

    Returns
    -------
    dict
        Dictionary with the single key "skel" mapping to the skeletal
        features computed for each branch.

    """
    # Stale pre-refactor lines (old signature taking "branches" and the old
    # return) removed; branches are now read directly from the neurograph.
    return {"skel": generate_branch_features(neurograph)}


# -- Proposal Feature Extraction --
Expand Down Expand Up @@ -368,15 +406,15 @@ def avg_branch_radii(neurograph, edge):
def n_nearby_leafs(neurograph, proposal):
    """
    Counts the leaf nodes within a radius of 25 of the midpoint of
    "proposal".

    Parameters
    ----------
    neurograph : NeuroGraph
        Graph that "proposal" belongs to.
    proposal : hashable
        Proposal whose neighborhood is queried.

    Returns
    -------
    int
        Number of nearby leafs minus two.

    """
    xyz = neurograph.proposal_midpoint(proposal)
    leafs = neurograph.query_kdtree(xyz, 25, node_type="leaf")
    # Removed the stale duplicate "return len(leafs)" that shadowed the
    # corrected return below. Subtracting 2 presumably excludes the
    # proposal's own two endpoints -- TODO confirm.
    return len(leafs) - 2


# --- Edge Feature Generation --
def generate_branch_features(neurograph, edges):
def generate_branch_features(neurograph):
features = dict()
for (i, j) in edges:
for (i, j) in neurograph.edges:
edge = frozenset((i, j))
features[edge] = np.zeros((34))
features[edge] = np.zeros((32))

temp = np.concatenate(
(
Expand Down
18 changes: 7 additions & 11 deletions src/deep_neurographs/machine_learning/graph_datasets.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,22 +20,18 @@


# Wrapper
def init(neurograph, branch_features, proposal_features, heterogeneous=False):
def init(neurograph, features, heterogeneous=False):
"""
Initializes a dataset that can be used to train a graph neural network.

Parameters
----------
neurograph : NeuroGraph
Graph that dataset is built from.
branch_features : dict
Feature vectors corresponding to branches such that the keys are a
frozenset of the node pair and values are the corresponding feature
vectors.
proposal_features : dict
Feature vectors corresponding to proposals such that the keys are a
frozenset of the node pair and values are the corresponding feature
vectors.
features : dict
Feature vectors corresponding to branches such that the keys are
"proposals" and "branches". The values are a dictionary containing
different types of features for edges and branches.
heterogeneous : bool
Indication of whether dataset should be stored as a heterogeneous
graph.
Expand All @@ -48,10 +44,10 @@ def init(neurograph, branch_features, proposal_features, heterogeneous=False):
"""
# Extract features
x_branches, _, idxs_branches = feature_generation.get_matrix(
neurograph, branch_features, "GraphNeuralNet"
neurograph, features["branches"], "GraphNeuralNet"
)
x_proposals, y_proposals, idxs_proposals = feature_generation.get_matrix(
neurograph, proposal_features, "GraphNeuralNet"
neurograph, features["proposals"], "GraphNeuralNet"
)

# Initialize data
Expand Down
17 changes: 8 additions & 9 deletions src/deep_neurographs/machine_learning/graph_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@

import torch
import torch.nn.functional as F
from torch.nn import ELU, Dropout, Linear
from torch.nn import ELU, Dropout, LeakyReLU, Linear
import torch.nn.init as init
from torch_geometric.nn import GATv2Conv as GATConv
from torch_geometric.nn import GCNConv
Expand All @@ -24,22 +24,19 @@ def __init__(self, input_channels):
self.conv2 = GCNConv(2 * input_channels, input_channels)
self.conv3 = GCNConv(input_channels, input_channels // 2)
self.dropout = Dropout(0.3)
self.ELU = ELU()
self.leaky_relu = LeakyReLU()
self.output = Linear(input_channels // 2, 1)

# Initialize weights
self.init_weights()

def init_weights(self):
    """
    Initializes the parameters of the network's layers.

    Parameters with rank > 1 (weight matrices) are initialized with Kaiming
    normal initialization; rank-1 parameters (biases) are set to zero.
    """
    # Stale superseded diff lines (old layer list with the commented-out
    # continuation, and the old xavier_uniform_ call) removed; this is the
    # merged post-refactor version.
    # NOTE(review): assumes self.input and self.output layers are defined in
    # __init__ (not visible here) -- confirm.
    layers = [self.conv1, self.conv2, self.conv3, self.input, self.output]
    for layer in layers:
        for param in layer.parameters():
            if len(param.shape) > 1:
                init.kaiming_normal_(param)
            else:
                init.zeros_(param)

def forward(self, x, edge_index):
Expand All @@ -48,16 +45,18 @@ def forward(self, x, edge_index):

# Layer 1
x = self.conv1(x, edge_index)
x = self.ELU(x)
x = self.leaky_relu(x)
x = self.dropout(x)

# Layer 2
x = self.conv2(x, edge_index)
x = self.ELU(x)
x = self.leaky_relu(x)
x = self.dropout(x)

# Layer 3
x = self.conv3(x, edge_index)
x = self.leaky_relu(x)
x = self.dropout(x)

# Output
x = self.output(x)
Expand Down
2 changes: 1 addition & 1 deletion src/deep_neurographs/machine_learning/graph_trainer.py
Original file line number Diff line number Diff line change
Expand Up @@ -292,7 +292,7 @@ def train_test_split(graph_ids):

"""
n_test_examples = int(len(graph_ids) * TEST_PERCENT)
test_ids = ["block_007", "block_010"] # sample(graph_ids, n_test_examples)
test_ids = ["block_007", "block_010"] #sample(graph_ids, n_test_examples)
train_ids = list(set(graph_ids) - set(test_ids))
return train_ids, test_ids

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
from deep_neurographs import utils
from deep_neurographs.geometry import dist as get_dist

ALIGNED_THRESHOLD = 3.5
ALIGNED_THRESHOLD = 4
MIN_INTERSECTION = 10


Expand Down
10 changes: 9 additions & 1 deletion src/deep_neurographs/machine_learning/inference.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@ def run(
img_path,
labels_path,
proposals,
search_radius,
batch_size_proposals=BATCH_SIZE_PROPOSALS,
confidence_threshold=0.7,
seeds=None,
Expand All @@ -47,6 +48,7 @@ def run(
img_path,
labels_path,
proposals,
search_radius,
seeds,
batch_size_proposals=batch_size_proposals,
confidence_threshold=confidence_threshold,
Expand All @@ -59,6 +61,7 @@ def run(
img_path,
labels_path,
proposals,
search_radius,
batch_size_proposals=batch_size_proposals,
confidence_threshold=confidence_threshold,
)
Expand All @@ -71,6 +74,7 @@ def run_with_seeds(
img_path,
labels_path,
proposals,
search_radius,
seeds,
batch_size_proposals=BATCH_SIZE_PROPOSALS,
confidence_threshold=0.7,
Expand All @@ -93,6 +97,7 @@ def run_without_seeds(
img_path,
labels_path,
proposals,
search_radius,
batch_size_proposals=BATCH_SIZE_PROPOSALS,
confidence_threshold=0.7,
progress_bar=True,
Expand Down Expand Up @@ -121,6 +126,7 @@ def run_without_seeds(
proposals_i,
model,
model_type,
search_radius,
confidence_threshold=confidence_threshold,
)

Expand All @@ -146,12 +152,14 @@ def predict(
proposals,
model,
model_type,
search_radius,
confidence_threshold=0.7,
):
# Generate features
features = feature_generation.run(
features = feature_generation.run_on_proposals(
neurograph,
model_type,
search_radius,
img_path,
labels_path=labels_path,
proposals=proposals,
Expand Down
Loading