-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Training Pipeline Implementation
- Loading branch information
Showing
14 changed files
with
540 additions
and
62 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,5 +1,40 @@ | ||
#[allow(dead_code)] | ||
struct Dataset { | ||
inputs: Vec<Vec<usize>>, // Each input is a sequence of token IDs | ||
targets: Vec<Vec<usize>>, // Each target is the corresponding output sequence | ||
use crate::data::generation::{generate_input_target_pairs, generate_staircase_pairs}; | ||
use crate::data::io::get_input; | ||
use crate::data::tokenizer::Tokenizer; | ||
|
||
/// A supervised training dataset of tokenized sequence pairs.
///
/// `inputs[i]` and `targets[i]` form one training example; the two vectors
/// are always the same length (they are filled in lockstep by `gen_data`).
pub struct Dataset {
    /// Each input is a sequence of token IDs.
    pub(crate) inputs: Vec<Vec<usize>>,
    /// Each target is the corresponding output sequence of token IDs.
    pub(crate) targets: Vec<Vec<usize>>,
}
|
||
pub fn gen_data() -> (Tokenizer, Dataset) { | ||
let raw_text = get_input(); | ||
|
||
let tokenizer = Tokenizer::new(raw_text.clone()); | ||
|
||
// Generate input-target pairs | ||
let pairs = generate_input_target_pairs(&tokenizer, raw_text); | ||
|
||
let mut all_inputs = Vec::new(); | ||
let mut all_targets = Vec::new(); | ||
|
||
// For each input-target pair, generate staircase pairs and add to the dataset | ||
for (input, target) in pairs { | ||
let staircase_pairs = generate_staircase_pairs(&input, &target); | ||
|
||
// Add the staircase pairs to the dataset | ||
for (staircase_input, staircase_target) in staircase_pairs { | ||
all_inputs.push(staircase_input); | ||
all_targets.push(staircase_target); | ||
} | ||
} | ||
|
||
// Return tokenizer and the generated dataset | ||
( | ||
tokenizer, | ||
Dataset { | ||
inputs: all_inputs, | ||
targets: all_targets, | ||
}, | ||
) | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.