about summary refs log tree commit diff
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rw-r--r--src/lexer.c10
-rw-r--r--src/parser.c6
-rw-r--r--src/types.c24
3 files changed, 19 insertions, 21 deletions
diff --git a/src/lexer.c b/src/lexer.c
index f02ef94..83bd976 100644
--- a/src/lexer.c
+++ b/src/lexer.c
@@ -132,19 +132,19 @@ Token Lexer_next(Lexer *lexer)
case ',': return Lexer_make_token(lexer, TOKEN_COMMA, 1);
case '*': return Lexer_make_token(lexer, TOKEN_STAR, 1);
case '%': return Lexer_make_token(lexer, TOKEN_PERCENT, 1);
-
+
case '&': {
if (peek(lexer, 1) == '&')
return Lexer_make_token(lexer, TOKEN_AND, 2);
return Lexer_make_token(lexer, TOKEN_AMPERSAND, 1);
}
-
+
case '!': {
if (peek(lexer, 1) == '=')
return Lexer_make_token(lexer, TOKEN_NEQ, 2);
return Lexer_make_token(lexer, TOKEN_EXCLAMATION, 1);
}
-
+
case '<': {
if (peek(lexer, 1) == '=')
return Lexer_make_token(lexer, TOKEN_LEQ, 2);
@@ -185,7 +185,7 @@ Token Lexer_next(Lexer *lexer)
return Lexer_make_token(lexer, TOKEN_MINUSEQUALS, 2);
return Lexer_make_token(lexer, TOKEN_MINUS, 1);
}
-
+
case '/': {
if (peek(lexer, 1) == '/') {
lexer->pos += 2; // skip the '//'
@@ -206,7 +206,7 @@ Token Lexer_next(Lexer *lexer)
if (isdigit(lexer->src[lexer->pos])) {
// TODO: Parse hex and octal numbers
i64 pos = lexer->pos;
- while (pos < lexer->len && isdigit(lexer->src[pos]))
+ while (pos < lexer->len && isdigit(lexer->src[pos]))
pos++;
Token token = Token_from_int(atoi(lexer->src + lexer->pos), Lexer_loc(lexer));
advance(lexer, pos - lexer->pos);
diff --git a/src/parser.c b/src/parser.c
index 30aa256..30d080b 100644
--- a/src/parser.c
+++ b/src/parser.c
@@ -174,7 +174,7 @@ bool identifier_exists(Token *token) {
void push_constant(Node *node) {
assert(constants_count < TOTAL_CONSTANTS_SIZE);
all_constants[constants_count++] = node;
-}
+}
void add_global_variable(Variable *var)
{
@@ -297,7 +297,7 @@ i64 eval_constexp(Node *expr)
case OP_MOD: return eval_constexp(expr->binary.left) % eval_constexp(expr->binary.right);
case OP_NEG: return -eval_constexp(expr->unary_expr);
case OP_NOT: return !eval_constexp(expr->unary_expr);
-
+
default:
die("Unsupported constant expression type %s\n", node_type_to_str(expr->type));
}
@@ -918,12 +918,10 @@ Type *parse_struct_union_declaration(Lexer *lexer, bool is_global) {
// But if they do provide one, we'll add it to the list of defined structs so that
// it can be referenced internally.
- bool has_name = false;
if (token.type == TOKEN_IDENTIFIER) {
struct_type->struct_name = token.value.as_string;
push_struct_definition(struct_type);
Lexer_next(lexer);
- has_name = true;
}
assert_token(Lexer_next(lexer), TOKEN_OPEN_BRACE);
diff --git a/src/types.c b/src/types.c
index 008af10..d004634 100644
--- a/src/types.c
+++ b/src/types.c
@@ -54,7 +54,7 @@ Type *type_new(DataType type)
if (type == TYPE_INT) return &type_int;
if (type == TYPE_CHAR) return &type_char;
if (type == TYPE_ANY) return &type_any;
-
+
Type *self = calloc(sizeof(Type), 1);
self->type = type;
return self;
@@ -70,9 +70,9 @@ Type *type_new_ptr(DataType type)
bool is_string_type(Type *type)
{
- return type
- && type->type == TYPE_PTR
- && type->ptr->type == TYPE_CHAR;
+ return type
+ && type->type == TYPE_PTR
+ && type->ptr->type == TYPE_CHAR;
}
bool is_int_type(Type *type)
@@ -115,15 +115,15 @@ static char *data_type_to_str(Type *type)
char *type_to_str(Type *type)
{
// FIXME: Handle array types.
-
+
// TODO: This allocates memory and we probably don't want to do that.
// TODO: Probably want to increase this size once we have longer types
char *str = calloc(sizeof(char), 32);
int ptr_count = 0;
for (; type->type == TYPE_PTR; type = type->ptr)
ptr_count++;
-
-
+
+
// FIXME: This is inefficient as all hell but this will only really be
// used for error reporting.
strcat(str, data_type_to_str(type));
@@ -140,11 +140,11 @@ i64 push_field(Type *type, char *field_name, Type *field_type)
type->fields.type = realloc(type->fields.type, sizeof(Type *) * (type->fields.num_fields + 1));
type->fields.offset = realloc(type->fields.offset, sizeof(i64) * (type->fields.num_fields + 1));
type->fields.name = realloc(type->fields.name, sizeof(char *) * (type->fields.num_fields + 1));
-
+
i64 field_size = size_for_type(field_type);
i64 offset_factor = i64min(field_size, 8);
i64 offset = is_union ? 0 : align_up(type->fields.size, offset_factor);
-
+
type->fields.type[type->fields.num_fields] = field_type;
type->fields.offset[type->fields.num_fields] = offset;
type->fields.name[type->fields.num_fields] = field_name;
@@ -168,7 +168,7 @@ i64 find_field_index(Type *type, char *field_name)
Node *handle_unary_expr_types(Node *node, Token *token)
{
Type *old_type = node->unary_expr->expr_type;
-
+
if (node->type != OP_ADDROF && old_type->type == TYPE_STRUCT)
die_location(token->loc, "Performing invalid unary operation on struct type");
@@ -205,8 +205,8 @@ Node *handle_binary_expr_types(Node *node, Token *token)
if (left->type == TYPE_STRUCT || right->type == TYPE_STRUCT)
die_location(token->loc, "Performing invalid binary operation on struct type");
-
- switch (node->type)
+
+ switch (node->type)
{
case OP_PLUS: {
if (is_int_type(left) && is_int_type(right)) {