From cf3d9e2d87e6043666238967ce4a894d823cba97 Mon Sep 17 00:00:00 2001 From: Louis Capitanchik Date: Tue, 12 Apr 2016 01:09:26 +0100 Subject: [PATCH] Modified parse_runner to load specified files, and run lexer over all loaded files --- src/common/lexer/lexer.rs | 6 ++++-- src/parse/parse_runner.rs | 25 +++++++++++++++++++------ 2 files changed, 23 insertions(+), 8 deletions(-) diff --git a/src/common/lexer/lexer.rs b/src/common/lexer/lexer.rs index f3aee0a..a4c4618 100644 --- a/src/common/lexer/lexer.rs +++ b/src/common/lexer/lexer.rs @@ -59,11 +59,13 @@ impl Lexer { self.ignore_whitespace = ignore_whitespace; } - pub fn tokenise(&mut self, src: &String) -> Result<Vec<Token>, String> { + pub fn tokenise(&mut self, file: (&String, &String)) -> Result<Vec<Token>, String> { if self.is_verbose { println!("Tokenising Input"); println!(" :: Ignore Whitespace [{:?}]", self.ignore_whitespace); } + let filename = file.0; + let src = file.1; let mut tokens : Vec<Token> = Vec::new(); @@ -112,7 +114,7 @@ impl Lexer { let token = Token::new( rule.clone_ident(), t_content.into(), - "null".into(), + filename.clone(), util::get_line_col(&src, index) ); diff --git a/src/parse/parse_runner.rs b/src/parse/parse_runner.rs index 42a9cb6..912a845 100644 --- a/src/parse/parse_runner.rs +++ b/src/parse/parse_runner.rs @@ -1,11 +1,9 @@ -use common::util::{ConfigurableProgram, ProgramFragment}; - +use common::util::{ConfigurableProgram, ProgramFragment, load_as_dir_or_file}; use common::module::Module; -use common::lexer::Lexer; +use common::lexer::{Lexer, Token}; use std::option::Option; - use std::path::Path; pub struct ParseBuilder{ @@ -96,11 +94,26 @@ impl ProgramFragment for ParseRunner { module_opts.meta.author, module_opts.meta.license, module_opts.meta.version); } + let input_files_res = load_as_dir_or_file(Path::new(&self.input_path)); + let input_files_vec : Vec<(String, String)> = match input_files_res { Err(e) => return Err(format!("{}", e)), Ok(val) => val }; + + let input_files : Vec<(&String, 
&String)> = input_files_vec .iter() .map(|file| (&file.0, &file.1)) .collect(); let mut lex = Lexer::from_dir(module_conf.sub_dir("lex"), self.is_verbose); lex.set_ignore_whitespace(module_opts.options.strip_whitespace.unwrap_or(false)); - let s : String = "RIGHTLY data #\nVERILY \"hello\" + data #".into(); + // let s : String = "RIGHTLY data #\nVERILY \"hello\" + data #".into(); + + let mut tokens : Vec<(&String, Vec<Token>)> = Vec::new(); - let tokens = try!(lex.tokenise(&s)); + for file in input_files { tokens.push((file.0, try!(lex.tokenise(file)))); } Result::Ok(()) }