diff options
| author | Mustafa Quraish <[email protected]> | 2022-02-06 21:59:21 -0500 |
|---|---|---|
| committer | Mustafa Quraish <[email protected]> | 2022-02-07 03:18:08 -0500 |
| commit | 198e4a9983e13f02261b6a34afc536e22e1ce410 (patch) | |
| tree | 477f0a7537452ec380511b129c1ea7c00e3da281 /compiler | |
| parent | [C]: Add `here` keyword that evaluates to a string with its location (diff) | |
| download | cup-198e4a9983e13f02261b6a34afc536e22e1ce410.tar.xz cup-198e4a9983e13f02261b6a34afc536e22e1ce410.zip | |
[cup] Add ability to import files
Diffstat (limited to 'compiler')
| -rw-r--r-- | compiler/lexer.cup | 13 | ||||
| -rw-r--r-- | compiler/main.cup | 11 | ||||
| -rw-r--r-- | compiler/parser.cup | 67 |
3 files changed, 76 insertions, 15 deletions
diff --git a/compiler/lexer.cup b/compiler/lexer.cup index a7fd032..1bcbb35 100644 --- a/compiler/lexer.cup +++ b/compiler/lexer.cup @@ -16,6 +16,19 @@ fn lexer_new(filename: char*, src: char*, len: int): Lexer* { lexer.src = src; lexer.len = len; return lexer; +} + +fn lexer_new_open_file(filename: char*): Lexer* { + let input_file = fopen(filename, 'r'); + defer fclose(input_file); + + // using `fmap` here doesn't work on linux, for some reason. + let file_size = fsize(input_file); + let src: char* = malloc(file_size+1); + fread(input_file, src, file_size); + src[file_size] = '\0'; + + return lexer_new(filename, src, file_size); } fn lexer_loc(lexer: Lexer*, loc: Location*) { diff --git a/compiler/main.cup b/compiler/main.cup index 6d7c3d8..fa7db31 100644 --- a/compiler/main.cup +++ b/compiler/main.cup @@ -9,16 +9,7 @@ fn main(argc: int, argv: char **): int { if (argc != 2) die("Usage: cupcc <input_file>"); - let input_file = fopen(argv[1], 'r'); - defer fclose(input_file); - - // using `fmap` here doesn't work on linux, for some reason. 
- let file_size = fsize(input_file); - let src: char* = malloc(file_size+1); - fread(input_file, src, file_size); - src[file_size] = '\0'; - - let lexer = lexer_new(argv[1], src, file_size); + let lexer = lexer_new_open_file(argv[1]); let ast = parse_program(lexer); dump_ast(ast, 0); diff --git a/compiler/parser.cup b/compiler/parser.cup index 208f49c..a6a4f7b 100644 --- a/compiler/parser.cup +++ b/compiler/parser.cup @@ -136,8 +136,32 @@ fn eval_constexp(node: Node*): int { return 0; } +fn parse_expression(lexer: Lexer*): Node*; + fn parse_constant_declaration(lexer: Lexer*): Node* { - die("parse_constant_declaration is not implemented yet."); + let token: Token; + lexer_next_assert(lexer, &token, TOKEN_CONST); + + lexer_next_assert(lexer, &token, TOKEN_IDENTIFIER); + if (identifier_exists(&token)) + die_loc2(here, &token.loc, "Identifier already exists: ", token.value.as_string); + let constant_name = token.value.as_string; + + lexer_peek(lexer, &token); + // All constants are implicitly `int`, but we'll allow it for consistency + if (token.typ == TOKEN_COLON) { + lexer_next(lexer, &token); + if (token.typ != TOKEN_INT) + die_loc(here, &token.loc, "Expected 'int' type for constant"); + lexer_peek(lexer, &token); + } + + lexer_next_assert(lexer, &token, TOKEN_ASSIGN); + let expr = parse_expression(lexer); + let value = eval_constexp(expr); + constant_push(constant_name, value); + + lexer_next_assert(lexer, &token, TOKEN_SEMICOLON); } fn parse_literal(lexer: Lexer*): Node* { @@ -216,8 +240,6 @@ fn parse_type(lexer: Lexer*): Type* { return typ; } -fn parse_expression(lexer: Lexer*): Node*; - fn parse_function_call_args(lexer: Lexer*, func: Node*): Node* { let token: Token; @@ -609,8 +631,11 @@ fn parse_var_declaration(lexer: Lexer*): Node* { if (token.typ == TOKEN_ASSIGN) { lexer_next(lexer, &token); decl.init = parse_expression(lexer); - } else if (!has_type) { - die_loc(&token.loc, "Expected ':' or '=' after variable declaration"); + } + + // FIXME: This will need 
to be enabled at some point + if (false) { + die_loc(here, &token.loc, "Expected ':' or '=' after variable declaration"); } if (is_global) { @@ -822,6 +847,26 @@ fn parse_function(lexer: Lexer*): Node* { return node; } +let p_opened_files = vector_new(); + +fn parser_open_new_file(path: char*) { + for (let i = 0; i < p_lexer_stack.size; i = i + 1) { + let lex: Lexer* = p_lexer_stack.data[i]; + if (streq(lex.filename, path)) { + puts("Found a circular import dependency in: "); puts(path); putsln(": Exiting."); + exit(1); + } + } + for (let i = 0; i < p_opened_files.size; i = i + 1) { + if (streq(p_opened_files.data[i], path)) { + puts("Already opened file: "); puts(path); putsln(": Ignoring."); + return; + } + } + vector_push(p_opened_files, path); + vector_push(p_lexer_stack, lexer_new_open_file(path)); +} + fn parse_program(lexer: Lexer*): Node* { initialize_builtins(); @@ -830,6 +875,7 @@ fn parse_program(lexer: Lexer*): Node* { let token: Token; lexer_peek(lexer, &token); + vector_push(p_lexer_stack, lexer); while (token.typ != TOKEN_EOF) { if (token.typ == TOKEN_FN) { @@ -840,11 +886,22 @@ fn parse_program(lexer: Lexer*): Node* { lexer_next(lexer, &token); } else if (token.typ == TOKEN_CONST) { parse_constant_declaration(lexer); + } else if (token.typ == TOKEN_IMPORT) { + lexer_next(lexer, &token); + lexer_next_assert(lexer, &token, TOKEN_STRINGLIT); + let path = token.value.as_string; + parser_open_new_file(path); + lexer = vector_top(p_lexer_stack); } else { die_loc2(here, &token.loc, "unexpected token in parse_program", token_type_to_string(token.typ)); } lexer_peek(lexer, &token); + while (token.typ == TOKEN_EOF && p_lexer_stack.size > 1) { + vector_pop(p_lexer_stack); + lexer = vector_top(p_lexer_stack); + lexer_peek(lexer, &token); + } } return node; }
\ No newline at end of file |