Allow interpreting raw code from command line
RocketRace committed Apr 21, 2020
1 parent 5e03850 commit 79f9ff0
Showing 2 changed files with 42 additions and 18 deletions.
39 changes: 23 additions & 16 deletions src/lexer.rs
@@ -31,21 +31,28 @@ enum State {
 ///
 /// * `HashMap<usize, String>` - A mapping between IDs and their
 ///   corresponding identifiers (e.g. "baba").
-pub fn tokenize(path: &str) -> (Vec<Token>, HashMap<usize, String>) {
-    let mut file = match File::open(path) {
-        Ok(f) => f,
-        Err(_) => {
-            throw_error(
-                ErrorType::FileError,
-                format!("Could not open file at `{}`", path),
-                None
-            );
-            panic!() // necessary for match arms to match
-        }
-    };
-
+pub fn tokenize(path: Option<String>, source: Option<&mut Vec<u8>>) -> (Vec<Token>, HashMap<usize, String>) {
     let mut buffer = Vec::new();
-    file.read_to_end(&mut buffer).unwrap();
+
+    if let Some(p) = path {
+        let mut file = match File::open(&p) {
+            Ok(f) => f,
+            Err(_) => {
+                throw_error(
+                    ErrorType::FileError,
+                    format!("Could not open file at `{}`", p),
+                    None
+                );
+                panic!() // necessary for match arms to match
+            }
+        };
+        file.read_to_end(&mut buffer).unwrap();
+    }
+    else {
+        if let Some(bytes) = source {
+            buffer.append(bytes);
+        }
+    }
 
     let mut out: Vec<Token> = Vec::new();
     let mut identifiers: HashMap<usize, String> = HashMap::new();
@@ -153,8 +160,8 @@ mod tests {
 
     #[test]
     fn tokenize_alnum() {
-        let path = "tests/tokenize_alnum.baba";
-        let (tokens, _identifiers) = tokenize(path);
+        let path = String::from("tests/tokenize_alnum.baba");
+        let (tokens, _identifiers) = tokenize(Some(path), None);
 
         assert_eq!(
             tokens,
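
For reference, here is a quick sketch (not part of the commit) of how the two input modes of the new `tokenize` signature are used. `demo` is a hypothetical helper inside the crate, and "BABA IS YOU" is just illustrative program text borrowed from the comments in src/main.rs:

    fn demo() {
        // Mode 1: read and tokenize a file on disk.
        let (_tokens, _ids) = tokenize(Some(String::from("tests/tokenize_alnum.baba")), None);

        // Mode 2: tokenize an in-memory buffer, e.g. raw source text.
        // `Vec::append` moves the bytes out, leaving `raw` empty afterwards.
        let mut raw = b"BABA IS YOU".to_vec();
        let (_tokens, _ids) = tokenize(None, Some(&mut raw));
    }

Since the path arm is checked first, passing both a path and a buffer ignores the buffer, and passing neither leaves the buffer empty, so the lexer simply produces no tokens.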
21 changes: 19 additions & 2 deletions src/main.rs
@@ -13,8 +13,18 @@ use std::env;
 /// Babalang interpreter
 fn main() -> std::io::Result<()> {
     // Get path of source file
+    let mut raw_content = None;
     let file_path = match env::args().skip(1).next() {
-        Some(x) => x,
+        Some(x) => {
+            let option = String::from("-c");
+            if x == option {
+                raw_content = env::args().skip(2).next();
+                None
+            }
+            else {
+                Some(x)
+            }
+        }
         None => {
             error_handler::throw_error_str(
                 error_handler::ErrorType::FileError,
@@ -24,10 +34,17 @@ fn main() -> std::io::Result<()> {
         }
     };
 
+    let (tokens, identifiers) = if let Some(content) = raw_content {
+        let mut raw_bytes = content.bytes().collect::<Vec<u8>>();
+        lexer::tokenize(None, Some(&mut raw_bytes))
+    }
+    else {
+        lexer::tokenize(file_path, None)
+    };
     // Tokenize the source file and return a vector of tokens
-    let (tokens, identifiers) = lexer::tokenize(&file_path);
     // println!("Successfully tokenized program at `{}`", file_path);
 
+
     // A vector of Statements (e.g. BABA IS YOU)
     let statements = statement_parser::parse(&tokens, &identifiers);
     // println!("Successfully parsed program into statements");
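
With this change, a program can be supplied directly on the command line rather than through a file. A hypothetical invocation, assuming the usual `cargo run` wrapper (the quoted program text is a placeholder):

    cargo run -- -c "BABA IS YOU"
    cargo run -- tests/tokenize_alnum.baba

One edge case worth noting: if `-c` is passed with no argument after it, both `raw_content` and `file_path` end up `None`, so `tokenize(None, None)` quietly lexes an empty buffer instead of reporting a missing argument.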
