From 39fdea5d952c1bf33d6402f4639c37c58ff12ed5 Mon Sep 17 00:00:00 2001
From: Jakub
Date: Thu, 19 Dec 2024 18:38:31 +0100
Subject: [PATCH] removed debugging prints

---
 src/layers/normalization.rs | 10 +++++-----
 src/model/encoder.rs        |  4 ++--
 2 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/src/layers/normalization.rs b/src/layers/normalization.rs
index 12e026f..48ff1dc 100644
--- a/src/layers/normalization.rs
+++ b/src/layers/normalization.rs
@@ -19,18 +19,18 @@ pub fn layer_norm(
     // Step 1: Calculate mean and variance across the features (axis=1)
     let mean = x.mean_axis(Axis(1)).unwrap();
     let variance = x.var_axis(Axis(1), 0.0);
-    println!("Mean: {:?}", mean);
-    println!("Variance: {:?}", variance);
+    //println!("Mean: {:?}", mean);
+    // println!("Variance: {:?}", variance);
 
     let expanded_mean = mean.insert_axis(Axis(1)); // Expands [6] to [6, 1]
     let expanded_variance = variance.insert_axis(Axis(1)); // Expands [6] to [6, 1]
-    println!("EXPMean: {:?}", expanded_mean);
-    println!("EXPVariance: {:?}", expanded_variance);
+    // println!("EXPMean: {:?}", expanded_mean);
+    //println!("EXPVariance: {:?}", expanded_variance);
 
     // Add epsilon to expanded variance
     let normalized = (x - &expanded_mean) / (expanded_variance + epsilon).mapv(f32::sqrt);
-    println!("Normalized {}", normalized);
+    // println!("Normalized {}", normalized);
 
     // Step 2: Normalize the input
     //let normalized = (x - &reshaped_mean) / (reshaped_variance + epsilon).mapv(f32::sqrt);
 
diff --git a/src/model/encoder.rs b/src/model/encoder.rs
index 9789089..c8ed88f 100644
--- a/src/model/encoder.rs
+++ b/src/model/encoder.rs
@@ -41,7 +41,7 @@ pub fn encoding(
         dummy_learned_matrices.clone(), // W_O
     );
 
-    println!("Attention1 :{}", attention_output);
+    //println!("Attention1 :{}", attention_output);
     // Add & Normalize (Residual Connection + Layer Norm)
     let attention_residual = attention_output.add(&input); // Residual connection
     let reshaped_attention = attention_residual
@@ -60,7 +60,7 @@
     // Feed-Forward Network
     let feed_forward_output = feed_forward_layer.forward(attention_norm.clone());
-    println!("feed_forward_output :{:?}", feed_forward_output);
+    //println!("feed_forward_output :{:?}", feed_forward_output);
 
     // Add & Normalize (Residual Connection + Layer Norm)
     let feed_forward_residual = feed_forward_output.add(&attention_norm); // Residual connection
     let reshaped_ff_attention = feed_forward_residual
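Note that the debug prints are commented out rather than deleted, so the patched functions still carry `//println!` lines. For reference, here is a minimal sketch of how the touched `layer_norm` body reads with the prints dropped entirely; the function signature and the `ndarray` import are assumptions, since the hunk only shows part of the function:

```rust
use ndarray::{Array2, Axis};

/// Sketch only: the real signature is not visible in the patch.
/// Normalizes each row of `x` to zero mean / unit variance, matching
/// the computation shown in the diff.
pub fn layer_norm(x: &Array2<f32>, epsilon: f32) -> Array2<f32> {
    // Mean and population variance across the feature axis (axis = 1), shape [rows]
    let mean = x.mean_axis(Axis(1)).unwrap();
    let variance = x.var_axis(Axis(1), 0.0);

    // Re-insert the feature axis so the statistics broadcast against x: [rows] -> [rows, 1]
    let expanded_mean = mean.insert_axis(Axis(1));
    let expanded_variance = variance.insert_axis(Axis(1));

    // (x - mean) / sqrt(var + eps)
    (x - &expanded_mean) / (expanded_variance + epsilon).mapv(f32::sqrt)
}
```

If the traces are still useful during development, one option would be gating them behind `log::debug!` or `#[cfg(debug_assertions)]` instead of leaving commented-out `println!` lines in place.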