
Commit

rename transient_size to encoding_length
Jordy Thielen committed Mar 17, 2024
1 parent 52cdfc5 commit 68b1a4a
Showing 20 changed files with 160 additions and 158 deletions.
CHANGELOG.md (3 changes: 2 additions & 1 deletion)
@@ -8,7 +8,8 @@

### Changed

- Variable `codes` is renamed to `stimulus`.
- Variable `codes` is renamed to `stimulus`
- Variable `transient_size` is renamed to `encoding_length`

### Fixed

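For downstream users this rename is a breaking change: `transient_size` no longer exists as a keyword argument. Below is a minimal migration sketch, assuming the rCCA call signature shown in the example diffs further down (only the keyword changes; `stimulus`, `fs`, `event` and `onset_event` are untouched) and using dummy values for `V` and `fs`:

import numpy as np
import pyntbci

fs = 120  # dummy sampling rate in Hz
V = np.random.randint(0, 2, (5, fs))  # dummy stimulus: 5 classes, 1 second of bits

# Before this commit the transient response length was passed as `transient_size`:
# rcca = pyntbci.classifiers.rCCA(stimulus=V, fs=fs, event="duration",
#                                 transient_size=0.3, onset_event=True)

# After this commit the same setting is passed as `encoding_length` (in seconds):
rcca = pyntbci.classifiers.rCCA(stimulus=V, fs=fs, event="duration",
                                encoding_length=0.3, onset_event=True)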
examples/dynamic_stopping.py (30 changes: 15 additions & 15 deletions)
@@ -112,7 +112,7 @@
n_samples = int(trialtime * fs)

# Setup rCCA
transient_size = 0.3
encoding_length = 0.3
onset_event = True

# Set stopping
@@ -139,7 +139,7 @@
target_p = 0.95 ** (1 / n_segments)

# Fit classifier
rcca = pyntbci.classifiers.rCCA(stimulus=V, fs=fs, event="duration", transient_size=transient_size,
rcca = pyntbci.classifiers.rCCA(stimulus=V, fs=fs, event="duration", encoding_length=encoding_length,
onset_event=onset_event, score_metric="correlation")
margin = pyntbci.stopping.MarginStopping(rcca, segmenttime, fs, target_p=target_p, max_time=trialtime)
margin.fit(X, y)
@@ -149,7 +149,7 @@
plt.plot(np.arange(1, 1 + margin.margins_.size) * segmenttime, margin.margins_, c="k")
plt.xlabel("time [sec]")
plt.ylabel("margin")
plt.title("margin dynamic stopping")
plt.title("Margin dynamic stopping")

# Loop folds
accuracy_margin = np.zeros(n_folds)
@@ -161,7 +161,7 @@
X_tst, y_tst = X[folds == i_fold, :, :n_samples], y[folds == i_fold]

# Train template-matching classifier
rcca = pyntbci.classifiers.rCCA(stimulus=V, fs=fs, event="duration", transient_size=transient_size,
rcca = pyntbci.classifiers.rCCA(stimulus=V, fs=fs, event="duration", encoding_length=encoding_length,
onset_event=onset_event, score_metric="correlation")
margin = pyntbci.stopping.MarginStopping(rcca, segmenttime, fs, target_p=target_p)
margin.fit(X_trn, y_trn)
@@ -200,7 +200,7 @@
ax[0].set_ylabel("accuracy")
ax[1].set_ylabel("duration [sec]")
ax[2].set_ylabel("itr [bits/min]")
ax[0].set_title(f"margin dynamic stopping: avg acc {accuracy_margin.mean():.2f} | " +
ax[0].set_title(f"Margin dynamic stopping: avg acc {accuracy_margin.mean():.2f} | " +
f"avg dur {duration_margin.mean():.2f} | avg itr {itr_margin.mean():.1f}")

# Print accuracy (average and standard deviation over folds)
@@ -234,7 +234,7 @@
X_tst, y_tst = X[folds == i_fold, :, :n_samples], y[folds == i_fold]

# Train template-matching classifier
rcca = pyntbci.classifiers.rCCA(stimulus=V, fs=fs, event="duration", transient_size=transient_size,
rcca = pyntbci.classifiers.rCCA(stimulus=V, fs=fs, event="duration", encoding_length=encoding_length,
onset_event=onset_event, score_metric="correlation")
beta = pyntbci.stopping.BetaStopping(rcca, target_p=target_p, fs=fs, max_time=trialtime)
beta.fit(X, y)
@@ -273,7 +273,7 @@
ax[0].set_ylabel("accuracy")
ax[1].set_ylabel("duration [sec]")
ax[2].set_ylabel("itr [bits/min]")
ax[0].set_title(f"beta dynamic stopping: avg acc {accuracy_beta.mean():.2f} | " +
ax[0].set_title(f"Beta dynamic stopping: avg acc {accuracy_beta.mean():.2f} | " +
f"avg dur {duration_beta.mean():.2f} | avg itr {itr_beta.mean():.1f}")

# Print accuracy (average and standard deviation over folds)
@@ -297,7 +297,7 @@
cr = 1.0

# Fit classifier
rcca = pyntbci.classifiers.rCCA(stimulus=V, fs=fs, event="duration", transient_size=transient_size,
rcca = pyntbci.classifiers.rCCA(stimulus=V, fs=fs, event="duration", encoding_length=encoding_length,
onset_event=onset_event, score_metric="inner")
bayes = pyntbci.stopping.BayesStopping(rcca, segmenttime, fs, cr=cr, max_time=trialtime)
bayes.fit(X, y)
@@ -328,7 +328,7 @@
X_tst, y_tst = X[folds == i_fold, :, :n_samples], y[folds == i_fold]

# Train template-matching classifier
rcca = pyntbci.classifiers.rCCA(stimulus=V, fs=fs, event="duration", transient_size=transient_size,
rcca = pyntbci.classifiers.rCCA(stimulus=V, fs=fs, event="duration", encoding_length=encoding_length,
onset_event=onset_event, score_metric="inner")
bayes = pyntbci.stopping.BayesStopping(rcca, segmenttime, fs, method="bes0", cr=cr, max_time=trialtime)
bayes.fit(X_trn, y_trn)
@@ -363,7 +363,7 @@
ax[0].set_ylabel("accuracy")
ax[1].set_ylabel("duration [sec]")
ax[2].set_ylabel("itr [bits/min]")
ax[0].set_title(f"bes0 dynamic stopping: avg acc {accuracy_bes0.mean():.2f} | " +
ax[0].set_title(f"BES0 dynamic stopping: avg acc {accuracy_bes0.mean():.2f} | " +
f"avg dur {duration_bes0.mean():.2f} | avg itr {itr_bes0.mean():.1f}")

# Print accuracy (average and standard deviation over folds)
@@ -398,7 +398,7 @@
X_tst, y_tst = X[folds == i_fold, :, :n_samples], y[folds == i_fold]

# Train template-matching classifier
rcca = pyntbci.classifiers.rCCA(stimulus=V, fs=fs, event="duration", transient_size=transient_size,
rcca = pyntbci.classifiers.rCCA(stimulus=V, fs=fs, event="duration", encoding_length=encoding_length,
onset_event=onset_event, score_metric="inner")
bayes = pyntbci.stopping.BayesStopping(rcca, segmenttime, fs, method="bes1", cr=cr, target_pf=target_pf,
target_pd=target_pd, max_time=trialtime)
@@ -434,7 +434,7 @@
ax[0].set_ylabel("accuracy")
ax[1].set_ylabel("duration [sec]")
ax[2].set_ylabel("itr [bits/min]")
ax[0].set_title(f"bes1 dynamic stopping: avg acc {accuracy_bes1.mean():.2f} | " +
ax[0].set_title(f"BES1 dynamic stopping: avg acc {accuracy_bes1.mean():.2f} | " +
f"avg dur {duration_bes1.mean():.2f} | avg itr {itr_bes1.mean():.1f}")

# Print accuracy (average and standard deviation over folds)
@@ -469,7 +469,7 @@
X_tst, y_tst = X[folds == i_fold, :, :n_samples], y[folds == i_fold]

# Train template-matching classifier
rcca = pyntbci.classifiers.rCCA(stimulus=V, fs=fs, event="duration", transient_size=transient_size,
rcca = pyntbci.classifiers.rCCA(stimulus=V, fs=fs, event="duration", encoding_length=encoding_length,
onset_event=onset_event, score_metric="inner")
bayes = pyntbci.stopping.BayesStopping(rcca, segmenttime, fs, method="bes2", cr=cr, target_pf=target_pf,
target_pd=target_pd, max_time=trialtime)
@@ -505,7 +505,7 @@
ax[0].set_ylabel("accuracy")
ax[1].set_ylabel("duration [sec]")
ax[2].set_ylabel("itr [bits/min]")
ax[0].set_title(f"bes2 dynamic stopping: avg acc {accuracy_bes2.mean():.2f} | " +
ax[0].set_title(f"BES2 dynamic stopping: avg acc {accuracy_bes2.mean():.2f} | " +
f"avg dur {duration_bes2.mean():.2f} | avg itr {itr_bes2.mean():.1f}")

# Print accuracy (average and standard deviation over folds)
@@ -544,6 +544,6 @@
ax[1].set_ylabel("duration [sec]")
ax[2].set_ylabel("itr [bits/min]")
ax[1].legend(bbox_to_anchor=(1.0, 1.0))
ax[0].set_title("comparison of dynamic stopping methods averaged across folds")
ax[0].set_title("Comparison of dynamic stopping methods averaged across folds")

# plt.show()
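The dynamic stopping example above only changes in how the underlying rCCA classifier is configured; the stopping wrappers keep their signatures. A hedged sketch of the updated pattern follows, with dummy data standing in for the `X`, `y`, `V`, `fs`, `segmenttime` and `trialtime` defined earlier in examples/dynamic_stopping.py:

import numpy as np
import pyntbci

fs = 120  # dummy sampling rate in Hz
segmenttime, trialtime = 0.1, 4.2  # dummy segment and trial durations in seconds
V = np.random.randint(0, 2, (5, int(1.0 * fs)))  # dummy stimulus codes
X = np.random.randn(40, 8, int(trialtime * fs))  # dummy EEG: trials x channels x samples
y = np.random.randint(0, V.shape[0], 40)  # dummy labels

# rCCA is now configured with encoding_length (formerly transient_size)
rcca = pyntbci.classifiers.rCCA(stimulus=V, fs=fs, event="duration",
                                encoding_length=0.3, onset_event=True,
                                score_metric="correlation")

# The stopping wrappers are untouched by this commit, e.g. margin stopping:
margin = pyntbci.stopping.MarginStopping(rcca, segmenttime, fs, target_p=0.95,
                                         max_time=trialtime)
margin.fit(X, y)

# The other wrappers in the example change in the same way:
# beta = pyntbci.stopping.BetaStopping(rcca, target_p=0.95, fs=fs, max_time=trialtime)
# bayes = pyntbci.stopping.BayesStopping(rcca, segmenttime, fs, cr=1.0, max_time=trialtime)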
examples/ecca.py (2 changes: 1 addition & 1 deletion)
@@ -120,7 +120,7 @@
# Plot CCA filters
fig, ax = plt.subplots(figsize=(5, 3))
pyntbci.plotting.topoplot(ecca.w_, capfile, ax=ax)
ax.set_title("spatial filter")
ax.set_title("Spatial filter")

# %%
# Cross-validation
examples/epoch_cca_lda.py (42 changes: 21 additions & 21 deletions)
@@ -130,9 +130,9 @@
# specifically, a target class-label (flash versus no-flash) for each epoch in each trial.

# Slice trials to epochs
epoch_size = int(0.3 * fs) # 300 ms
step_size = int(1 / 60 * fs) # 1/60 ms
X_sliced, y_sliced = pyntbci.utilities.trials_to_epochs(X, y, V, epoch_size, step_size)
encoding_length = int(0.3 * fs) # 300 ms
encoding_stride = int(1 / 60 * fs) # 1/60 ms
X_sliced, y_sliced = pyntbci.utilities.trials_to_epochs(X, y, V, encoding_length, encoding_stride)
print("X_sliced: shape:", X_sliced.shape, ", type:", X_sliced.dtype)
print("y_sliced: shape:", y_sliced.shape, ", type:", y_sliced.dtype)

@@ -173,9 +173,9 @@
# Visualize spatial response at a particular time-point
fig, ax = plt.subplots(1, 2, figsize=(15, 3))
pyntbci.plotting.topoplot(erp_flash[:, int(0.150 * fs)], capfile, ax=ax[0]) # 150 ms
ax[0].set_title("target ERP at 150 ms")
ax[0].set_title("Target ERP at 150 ms")
pyntbci.plotting.topoplot(erp_flash[:, int(0.175 * fs)], capfile, ax=ax[1]) # 175 ms
ax[1].set_title("target ERP at 175 ms")
ax[1].set_title("Target ERP at 175 ms")

# %%
# Epoch to trial decoding with LDA
@@ -206,16 +206,16 @@
n_samples = int(4.2 * fs)

# Set epoch size
epoch_size = int(0.3 * fs)
step_size = int(1 / 60 * fs)
encoding_length = int(0.3 * fs)
encoding_stride = int(1 / 60 * fs)

# Setup cross-validation
n_folds = 5
folds = np.repeat(np.arange(n_folds), n_trials / n_folds)

# Set up codebook for trial classification
n = int(np.ceil(n_samples / V.shape[1]))
_V = np.tile(V, (1, n)).astype("float32")[:, :n_samples - epoch_size:step_size]
_V = np.tile(V, (1, n)).astype("float32")[:, :n_samples - encoding_length:encoding_stride]

# Setup pipeline
pipeline = make_pipeline(
@@ -231,20 +231,20 @@
X_tst, y_tst = X[folds == i_fold, :, :n_samples], y[folds == i_fold]

# Slice trials to epochs
X_sliced_trn, y_sliced_trn = pyntbci.utilities.trials_to_epochs(X_trn, y_trn, V, epoch_size, step_size)
X_sliced_tst, y_sliced_tst = pyntbci.utilities.trials_to_epochs(X_tst, y_tst, V, epoch_size, step_size)
X_sliced_trn, y_sliced_trn = pyntbci.utilities.trials_to_epochs(X_trn, y_trn, V, encoding_length, encoding_stride)
X_sliced_tst, y_sliced_tst = pyntbci.utilities.trials_to_epochs(X_tst, y_tst, V, encoding_length, encoding_stride)

# Train pipeline (on epoch level)
pipeline.fit(X_sliced_trn.reshape((-1, n_channels, epoch_size)), y_sliced_trn.flatten())
pipeline.fit(X_sliced_trn.reshape((-1, n_channels, encoding_length)), y_sliced_trn.flatten())

# Apply pipeline (on epoch level)
yh_sliced_tst = pipeline.predict(X_sliced_tst.reshape((-1, n_channels, epoch_size)))
yh_sliced_tst = pipeline.predict(X_sliced_tst.reshape((-1, n_channels, encoding_length)))

# Compute accuracy (on epoch level)
accuracy_epoch[i_fold] = np.mean(yh_sliced_tst == y_sliced_tst.flatten())

# Apply pipeline (on trial level)
ph_tst = pipeline.predict_proba(X_sliced_tst.reshape((-1, n_channels, epoch_size)))[:, 1]
ph_tst = pipeline.predict_proba(X_sliced_tst.reshape((-1, n_channels, encoding_length)))[:, 1]
ph_tst = np.reshape(ph_tst, y_sliced_tst.shape)
rho = pyntbci.utilities.correlation(ph_tst, _V)
yh_tst = np.argmax(rho, axis=1)
@@ -302,16 +302,16 @@
n_samples = int(4.2 * fs)

# Set epoch size
epoch_size = int(0.3 * fs)
step_size = int(1 / 60 * fs)
encoding_length = int(0.3 * fs)
encoding_stride = int(1 / 60 * fs)

# Setup cross-validation
n_folds = 5
folds = np.repeat(np.arange(n_folds), n_trials / n_folds)

# Set up codebook for trial classification
n = int(np.ceil(n_samples / V.shape[1]))
_V = np.tile(V, (1, n)).astype("float32")[:, :n_samples - epoch_size:step_size]
_V = np.tile(V, (1, n)).astype("float32")[:, :n_samples - encoding_length:encoding_stride]

# Setup pipeline
cca = pyntbci.transformers.CCA(n_components=1)
@@ -327,17 +327,17 @@
X_tst, y_tst = X[folds == i_fold, :, :n_samples], y[folds == i_fold]

# Slice trials to epochs
X_sliced_trn, y_sliced_trn = pyntbci.utilities.trials_to_epochs(X_trn, y_trn, V, epoch_size, step_size)
X_sliced_tst, y_sliced_tst = pyntbci.utilities.trials_to_epochs(X_tst, y_tst, V, epoch_size, step_size)
X_sliced_trn, y_sliced_trn = pyntbci.utilities.trials_to_epochs(X_trn, y_trn, V, encoding_length, encoding_stride)
X_sliced_tst, y_sliced_tst = pyntbci.utilities.trials_to_epochs(X_tst, y_tst, V, encoding_length, encoding_stride)

# Train pipeline (on epoch level)
X_ = X_sliced_trn.reshape((-1, n_channels, epoch_size))
X_ = X_sliced_trn.reshape((-1, n_channels, encoding_length))
X_ = cca.fit_transform(X_, y_sliced_trn.flatten())[0]
X_ = vec.fit_transform(X_, y_sliced_trn.flatten())
lda.fit(X_, y_sliced_trn.flatten())

# Apply pipeline (on epoch level)
X_ = X_sliced_tst.reshape((-1, n_channels, epoch_size))
X_ = X_sliced_tst.reshape((-1, n_channels, encoding_length))
X_ = cca.transform(X_)[0]
X_ = vec.transform(X_)
yh_sliced_tst = lda.predict(X_)
Expand All @@ -346,7 +346,7 @@
accuracy_epoch[i_fold] = np.mean(yh_sliced_tst == y_sliced_tst.flatten())

# Apply pipeline (on trial level)
ph_tst = pipeline.predict_proba(X_sliced_tst.reshape((-1, n_channels, epoch_size)))[:, 1]
ph_tst = pipeline.predict_proba(X_sliced_tst.reshape((-1, n_channels, encoding_length)))[:, 1]
ph_tst = np.reshape(ph_tst, y_sliced_tst.shape)
rho = pyntbci.utilities.correlation(ph_tst, _V)
yh_tst = np.argmax(rho, axis=1)
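In examples/epoch_cca_lda.py the rename also covers the local slicing variables: `epoch_size` becomes `encoding_length` and `step_size` becomes `encoding_stride`. A small sketch of the updated call to trials_to_epochs, using dummy arrays in place of the recorded data (the helper itself is unchanged by this commit):

import numpy as np
import pyntbci

fs = 120  # dummy sampling rate in Hz
n_samples = int(2.1 * fs)  # dummy trial length in samples
V = np.random.randint(0, 2, (5, n_samples))  # dummy stimulus codes
X = np.random.randn(10, 8, n_samples)  # dummy EEG: trials x channels x samples
y = np.random.randint(0, V.shape[0], 10)  # dummy labels

encoding_length = int(0.3 * fs)  # epoch length in samples (300 ms)
encoding_stride = int(1 / 60 * fs)  # step between epochs in samples (one 60 Hz frame)

# Slice full trials into overlapping epochs, with a flash/no-flash label per epoch
X_sliced, y_sliced = pyntbci.utilities.trials_to_epochs(X, y, V, encoding_length, encoding_stride)
print("X_sliced:", X_sliced.shape, "y_sliced:", y_sliced.shape)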
examples/etrca.py (4 changes: 2 additions & 2 deletions)
@@ -126,11 +126,11 @@
ax = ax.flatten()
for i_class in range(n_classes):
pyntbci.plotting.topoplot(etrca.w_[:, 0, i_class], capfile, ax=ax[i_class])
ax[i_class].set_title(f"spatial filter class={i_class}")
ax[i_class].set_title(f"Spatial filter class={i_class}")
else:
fig, ax = plt.subplots(figsize=(5, 3))
pyntbci.plotting.topoplot(etrca.w_, capfile, ax=ax)
ax.set_title("spatial filter")
ax.set_title("Spatial filter")

# %%
# Cross-validation
examples/fbrcca.py (6 changes: 3 additions & 3 deletions)
@@ -49,7 +49,7 @@

# Set rCCA (see pyntbci.classifiers.rCCA)
event = "duration" # event definition type
transient_size = 0.3 # length of a transient response in seconds
encoding_length = 0.3 # length of a transient response in seconds
onset_event = True

# Set folds for chronological cross-validation
@@ -92,7 +92,7 @@
X_tst, y_tst = X[folds == i_fold, :, :, :], y[folds == i_fold]

# Setup classifier
rcca = pyntbci.classifiers.rCCA(stimulus=V, fs=fs, event=event, transient_size=transient_size,
rcca = pyntbci.classifiers.rCCA(stimulus=V, fs=fs, event=event, encoding_length=encoding_length,
onset_event=onset_event)
fbrcca = pyntbci.classifiers.FilterBank(estimator=rcca, gating="mean")

@@ -108,7 +108,7 @@
# Loop individual pass-bands
for i_band in range(n_bands):
# Setup classifier
rcca = pyntbci.classifiers.rCCA(stimulus=V, fs=fs, event=event, transient_size=transient_size,
rcca = pyntbci.classifiers.rCCA(stimulus=V, fs=fs, event=event, encoding_length=encoding_length,
onset_event=onset_event)

# Train classifier
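In examples/fbrcca.py only the rCCA configuration changes; the FilterBank wrapper takes the renamed classifier as its estimator and is otherwise untouched. A sketch of the updated construction, assuming a filterbank input with a trailing pass-band dimension as in that example and dummy data throughout:

import numpy as np
import pyntbci

fs = 120  # dummy sampling rate in Hz
n_bands = 3  # dummy number of spectral pass-bands
V = np.random.randint(0, 2, (5, int(1.0 * fs)))  # dummy stimulus codes
X = np.random.randn(40, 8, int(4.2 * fs), n_bands)  # dummy EEG: trials x channels x samples x bands
y = np.random.randint(0, V.shape[0], 40)  # dummy labels

# Base classifier per pass-band, now configured with encoding_length
rcca = pyntbci.classifiers.rCCA(stimulus=V, fs=fs, event="duration",
                                encoding_length=0.3, onset_event=True)

# Filterbank wrapper: one rCCA per band, band scores combined with the "mean" gate
fbrcca = pyntbci.classifiers.FilterBank(estimator=rcca, gating="mean")
fbrcca.fit(X, y)
yh = fbrcca.predict(X)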
(diff truncated: 14 more changed files not shown)
