Skip to content

Commit

Permalink
Modified parse_runner to load specified files, and run lexer over all loaded files
Browse files Browse the repository at this point in the history
  • Loading branch information
Commander-lol committed Apr 12, 2016
1 parent eb8ed3b commit cf3d9e2
Show file tree
Hide file tree
Showing 2 changed files with 23 additions and 8 deletions.
6 changes: 4 additions & 2 deletions src/common/lexer/lexer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -59,11 +59,13 @@ impl Lexer {
self.ignore_whitespace = ignore_whitespace;
}

pub fn tokenise(&mut self, src: &String) -> Result<Vec<Token>, String> {
pub fn tokenise(&mut self, file: (&String, &String)) -> Result<Vec<Token>, String> {
if self.is_verbose {
println!("Tokenising Input");
println!(" :: Ignore Whitespace [{:?}]", self.ignore_whitespace);
}
let filename = file.0;
let src = file.1;

let mut tokens : Vec<Token> = Vec::new();

Expand Down Expand Up @@ -112,7 +114,7 @@ impl Lexer {
let token = Token::new(
rule.clone_ident(),
t_content.into(),
"null".into(),
filename.clone(),
util::get_line_col(&src, index)
);

Expand Down
25 changes: 19 additions & 6 deletions src/parse/parse_runner.rs
Original file line number Diff line number Diff line change
@@ -1,11 +1,9 @@
use common::util::{ConfigurableProgram, ProgramFragment};

use common::util::{ConfigurableProgram, ProgramFragment, load_as_dir_or_file};
use common::module::Module;

use common::lexer::Lexer;
use common::lexer::{Lexer, Token};

use std::option::Option;

use std::path::Path;

pub struct ParseBuilder{
Expand Down Expand Up @@ -96,11 +94,26 @@ impl ProgramFragment for ParseRunner {
module_opts.meta.author, module_opts.meta.license, module_opts.meta.version);
}

let input_files_res = load_as_dir_or_file(Path::new(&self.input_path));
let input_files_vec : Vec<(String, String)> = match input_files_res {
Err(e) => return Err(format!("{}", e)),
Ok(val) => val
};

let input_files : Vec<(&String, &String)> = input_files_vec
.iter()
.map(|file| (&file.0, &file.1))
.collect();

let mut lex = Lexer::from_dir(module_conf.sub_dir("lex"), self.is_verbose);
lex.set_ignore_whitespace(module_opts.options.strip_whitespace.unwrap_or(false));
let s : String = "RIGHTLY data #\nVERILY \"hello\" + data #".into();
// let s : String = "RIGHTLY data #\nVERILY \"hello\" + data #".into();

let mut tokens : Vec<(&String, Vec<Token>)> = Vec::new();

let tokens = try!(lex.tokenise(&s));
for file in input_files {
tokens.push((file.0, try!(lex.tokenise(file))));
}

Result::Ok(())
}
Expand Down

0 comments on commit cf3d9e2

Please sign in to comment.