aboutsummaryrefslogtreecommitdiff
path: root/src/comp/front/parser.rs
diff options
context:
space:
mode:
Diffstat (limited to 'src/comp/front/parser.rs')
-rw-r--r--src/comp/front/parser.rs1052
1 files changed, 738 insertions, 314 deletions
diff --git a/src/comp/front/parser.rs b/src/comp/front/parser.rs
index e629683c..bb7b8558 100644
--- a/src/comp/front/parser.rs
+++ b/src/comp/front/parser.rs
@@ -1,4 +1,4 @@
-import std._io;
+import std.io;
import std._vec;
import std._str;
import std.option;
@@ -8,6 +8,7 @@ import std.map.hashmap;
import driver.session;
import util.common;
+import util.common.filename;
import util.common.append;
import util.common.span;
import util.common.new_str_hash;
@@ -17,6 +18,11 @@ tag restriction {
RESTRICT_NO_CALL_EXPRS;
}
+tag file_type {
+ CRATE_FILE;
+ SOURCE_FILE;
+}
+
state type parser =
state obj {
fn peek() -> token.token;
@@ -24,28 +30,37 @@ state type parser =
impure fn err(str s);
impure fn restrict(restriction r);
fn get_restriction() -> restriction;
+ fn get_file_type() -> file_type;
+ fn get_env() -> eval.env;
fn get_session() -> session.session;
fn get_span() -> common.span;
fn next_def_id() -> ast.def_id;
+ fn get_prec_table() -> vec[op_spec];
};
impure fn new_parser(session.session sess,
- ast.crate_num crate, str path) -> parser {
+ eval.env env,
+ ast.crate_num crate,
+ str path) -> parser {
state obj stdio_parser(session.session sess,
+ eval.env env,
+ file_type ftype,
mutable token.token tok,
mutable common.pos lo,
mutable common.pos hi,
mutable ast.def_num def,
mutable restriction res,
ast.crate_num crate,
- lexer.reader rdr)
+ lexer.reader rdr,
+ vec[op_spec] precs)
{
fn peek() -> token.token {
- // log token.to_str(tok);
ret tok;
}
impure fn bump() {
+ // log rdr.get_filename()
+ // + ":" + common.istr(lo.line as int);
tok = lexer.next_token(rdr);
lo = rdr.get_mark_pos();
hi = rdr.get_curr_pos();
@@ -78,12 +93,29 @@ impure fn new_parser(session.session sess,
def += 1;
ret tup(crate, def);
}
+
+ fn get_file_type() -> file_type {
+ ret ftype;
+ }
+
+ fn get_env() -> eval.env {
+ ret env;
+ }
+
+ fn get_prec_table() -> vec[op_spec] {
+ ret precs;
+ }
}
- auto srdr = _io.new_stdio_reader(path);
+ auto ftype = SOURCE_FILE;
+ if (_str.ends_with(path, ".rc")) {
+ ftype = CRATE_FILE;
+ }
+ auto srdr = io.new_stdio_reader(path);
auto rdr = lexer.new_reader(srdr, path);
auto npos = rdr.get_curr_pos();
- ret stdio_parser(sess, lexer.next_token(rdr),
- npos, npos, 0, UNRESTRICTED, crate, rdr);
+ ret stdio_parser(sess, env, ftype, lexer.next_token(rdr),
+ npos, npos, 0, UNRESTRICTED, crate, rdr,
+ prec_table());
}
impure fn unexpected(parser p, token.token t) {
@@ -121,9 +153,23 @@ impure fn parse_ident(parser p) -> ast.ident {
}
-impure fn parse_str_lit(parser p) -> ast.ident {
+/* FIXME: gross hack copied from rustboot to make certain configuration-based
+ * decisions work at build-time. We should probably change it to use a
+ * lexical syntax-extension or something similar. For now we just imitate
+ * rustboot.
+ */
+impure fn parse_str_lit_or_env_ident(parser p) -> ast.ident {
alt (p.peek()) {
case (token.LIT_STR(?s)) { p.bump(); ret s; }
+ case (token.IDENT(?i)) {
+ auto v = eval.lookup(p.get_session(), p.get_env(),
+ p.get_span(), i);
+ if (!eval.val_is_str(v)) {
+ p.err("expecting string-valued variable");
+ }
+ p.bump();
+ ret eval.val_as_str(v);
+ }
case (_) {
p.err("expecting string literal");
fail;
@@ -132,7 +178,8 @@ impure fn parse_str_lit(parser p) -> ast.ident {
}
-impure fn parse_ty_fn(parser p, ast.span lo) -> ast.ty_ {
+impure fn parse_ty_fn(ast.proto proto, parser p,
+ ast.span lo) -> ast.ty_ {
impure fn parse_fn_input_ty(parser p) -> rec(ast.mode mode, @ast.ty ty) {
auto mode;
if (p.peek() == token.BINOP(token.AND)) {
@@ -158,6 +205,10 @@ impure fn parse_ty_fn(parser p, ast.span lo) -> ast.ty_ {
auto inputs = parse_seq[rec(ast.mode mode, @ast.ty ty)](token.LPAREN,
token.RPAREN, some(token.COMMA), f, p);
+ // FIXME: dropping constrs on the floor at the moment.
+ // pick them up when they're used by typestate pass.
+ parse_constrs(p);
+
let @ast.ty output;
if (p.peek() == token.RARROW) {
p.bump();
@@ -166,20 +217,33 @@ impure fn parse_ty_fn(parser p, ast.span lo) -> ast.ty_ {
output = @spanned(lo, inputs.span, ast.ty_nil);
}
- ret ast.ty_fn(inputs.node, output);
+ ret ast.ty_fn(proto, inputs.node, output);
+}
+
+impure fn parse_proto(parser p) -> ast.proto {
+ alt (p.peek()) {
+ case (token.ITER) { p.bump(); ret ast.proto_iter; }
+ case (token.FN) { p.bump(); ret ast.proto_fn; }
+ case (?t) { unexpected(p, t); }
+ }
+ fail;
}
impure fn parse_ty_obj(parser p, &mutable ast.span hi) -> ast.ty_ {
expect(p, token.OBJ);
impure fn parse_method_sig(parser p) -> ast.ty_method {
auto flo = p.get_span();
- expect(p, token.FN);
+
+ // FIXME: do something with this, currently it's dropped on the floor.
+ let ast.effect eff = parse_effect(p);
+ let ast.proto proto = parse_proto(p);
auto ident = parse_ident(p);
- auto f = parse_ty_fn(p, flo);
+ auto f = parse_ty_fn(proto, p, flo);
expect(p, token.SEMI);
alt (f) {
- case (ast.ty_fn(?inputs, ?output)) {
- ret rec(ident=ident, inputs=inputs, output=output);
+ case (ast.ty_fn(?proto, ?inputs, ?output)) {
+ ret rec(proto=proto, ident=ident,
+ inputs=inputs, output=output);
}
}
fail;
@@ -200,10 +264,72 @@ impure fn parse_ty_field(parser p) -> ast.ty_field {
ret rec(ident=id, ty=ty);
}
+impure fn parse_constr_arg(parser p) -> @ast.constr_arg {
+ auto lo = p.get_span();
+ auto carg = ast.carg_base;
+ if (p.peek() == token.BINOP(token.STAR)) {
+ p.bump();
+ } else {
+ carg = ast.carg_ident(parse_ident(p));
+ }
+ ret @spanned(lo, lo, carg);
+}
+
+impure fn parse_ty_constr(parser p) -> @ast.constr {
+ auto lo = p.get_span();
+ auto path = parse_path(p, GREEDY);
+ auto pf = parse_constr_arg;
+ auto args = parse_seq[@ast.constr_arg](token.LPAREN,
+ token.RPAREN,
+ some(token.COMMA), pf, p);
+ auto hi = args.span;
+ ret @spanned(lo, hi, rec(path=path, args=args.node));
+}
+
+impure fn parse_constrs(parser p) -> common.spanned[vec[@ast.constr]] {
+ auto lo = p.get_span();
+ auto hi = lo;
+ let vec[@ast.constr] constrs = vec();
+ if (p.peek() == token.COLON) {
+ p.bump();
+ let bool more = true;
+ while (more) {
+ alt (p.peek()) {
+ case (token.IDENT(_)) {
+ auto constr = parse_ty_constr(p);
+ hi = constr.span;
+ append[@ast.constr](constrs, constr);
+ if (p.peek() == token.COMMA) {
+ p.bump();
+ more = false;
+ }
+ }
+ case (_) { more = false; }
+ }
+ }
+ }
+ ret spanned(lo, hi, constrs);
+}
+
+impure fn parse_ty_constrs(@ast.ty t, parser p) -> @ast.ty {
+ if (p.peek() == token.COLON) {
+ auto constrs = parse_constrs(p);
+ ret @spanned(t.span, constrs.span,
+ ast.ty_constr(t, constrs.node));
+ }
+ ret t;
+}
+
impure fn parse_ty(parser p) -> @ast.ty {
auto lo = p.get_span();
auto hi = lo;
let ast.ty_ t;
+
+ // FIXME: do something with these; currently they're
+ // dropped on the floor.
+ let ast.effect eff = parse_effect(p);
+ let ast.layer lyr = parse_layer(p);
+
alt (p.peek()) {
case (token.BOOL) { p.bump(); t = ast.ty_bool; }
case (token.INT) { p.bump(); t = ast.ty_int; }
@@ -275,9 +401,20 @@ impure fn parse_ty(parser p) -> @ast.ty {
case (token.FN) {
auto flo = p.get_span();
p.bump();
- t = parse_ty_fn(p, flo);
+ t = parse_ty_fn(ast.proto_fn, p, flo);
+ alt (t) {
+ case (ast.ty_fn(_, _, ?out)) {
+ hi = out.span;
+ }
+ }
+ }
+
+ case (token.ITER) {
+ auto flo = p.get_span();
+ p.bump();
+ t = parse_ty_fn(ast.proto_iter, p, flo);
alt (t) {
- case (ast.ty_fn(_, ?out)) {
+ case (ast.ty_fn(_, _, ?out)) {
hi = out.span;
}
}
@@ -297,7 +434,8 @@ impure fn parse_ty(parser p) -> @ast.ty {
fail;
}
}
- ret @spanned(lo, hi, t);
+
+ ret parse_ty_constrs(@spanned(lo, hi, t), p);
}
impure fn parse_arg(parser p) -> ast.arg {
@@ -341,9 +479,9 @@ impure fn parse_seq[T](token.token bra,
ret spanned(lo, hi, v);
}
-impure fn parse_lit(parser p) -> option.t[ast.lit] {
+impure fn parse_lit(parser p) -> ast.lit {
auto lo = p.get_span();
- let ast.lit_ lit;
+ let ast.lit_ lit = ast.lit_nil;
alt (p.peek()) {
case (token.LIT_INT(?i)) {
p.bump();
@@ -369,12 +507,11 @@ impure fn parse_lit(parser p) -> option.t[ast.lit] {
p.bump();
lit = ast.lit_str(s);
}
- case (_) {
- lit = ast.lit_nil; // FIXME: typestate bug requires this
- ret none[ast.lit];
+ case (?t) {
+ unexpected(p, t);
}
}
- ret some(spanned(lo, lo, lit));
+ ret spanned(lo, lo, lit);
}
fn is_ident(token.token t) -> bool {
@@ -520,14 +657,37 @@ impure fn parse_bottom_expr(parser p) -> @ast.expr {
case (token.REC) {
p.bump();
- auto pf = parse_field;
- auto fs =
- parse_seq[ast.field](token.LPAREN,
- token.RPAREN,
- some(token.COMMA),
- pf, p);
- hi = fs.span;
- ex = ast.expr_rec(fs.node, ast.ann_none);
+ expect(p, token.LPAREN);
+ auto fields = vec(parse_field(p));
+
+ auto more = true;
+ auto base = none[@ast.expr];
+ while (more) {
+ alt (p.peek()) {
+ case (token.RPAREN) {
+ hi = p.get_span();
+ p.bump();
+ more = false;
+ }
+ case (token.WITH) {
+ p.bump();
+ base = some[@ast.expr](parse_expr(p));
+ hi = p.get_span();
+ expect(p, token.RPAREN);
+ more = false;
+ }
+ case (token.COMMA) {
+ p.bump();
+ fields += parse_field(p);
+ }
+ case (?t) {
+ unexpected(p, t);
+ }
+ }
+
+ }
+
+ ex = ast.expr_rec(fields, base, ast.ann_none);
}
case (token.BIND) {
@@ -554,22 +714,124 @@ impure fn parse_bottom_expr(parser p) -> @ast.expr {
ex = ast.expr_bind(e, es.node, ast.ann_none);
}
- case (_) {
- alt (parse_lit(p)) {
- case (some[ast.lit](?lit)) {
- hi = lit.span;
- ex = ast.expr_lit(@lit, ast.ann_none);
+ case (token.POUND) {
+ p.bump();
+ auto pth = parse_path(p, GREEDY);
+ auto pf = parse_expr;
+ auto es = parse_seq[@ast.expr](token.LPAREN,
+ token.RPAREN,
+ some(token.COMMA),
+ pf, p);
+ hi = es.span;
+ ex = expand_syntax_ext(p, es.span, pth, es.node,
+ none[@ast.expr]);
+ }
+
+ case (token.FAIL) {
+ p.bump();
+ ex = ast.expr_fail;
+ }
+
+ case (token.LOG) {
+ p.bump();
+ auto e = parse_expr(p);
+ auto hi = e.span;
+ ex = ast.expr_log(e);
+ }
+
+ case (token.CHECK) {
+ p.bump();
+ alt (p.peek()) {
+ case (token.LPAREN) {
+ auto e = parse_expr(p);
+ auto hi = e.span;
+ ex = ast.expr_check_expr(e);
}
- case (none[ast.lit]) {
- p.err("expecting expression");
+ case (_) {
+ p.get_session().unimpl("constraint-check stmt");
}
}
}
+
+ case (token.RET) {
+ p.bump();
+ alt (p.peek()) {
+ case (token.SEMI) {
+ ex = ast.expr_ret(none[@ast.expr]);
+ }
+ case (_) {
+ auto e = parse_expr(p);
+ hi = e.span;
+ ex = ast.expr_ret(some[@ast.expr](e));
+ }
+ }
+ }
+
+ case (token.PUT) {
+ p.bump();
+ alt (p.peek()) {
+ case (token.SEMI) {
+ ex = ast.expr_put(none[@ast.expr]);
+ }
+ case (_) {
+ auto e = parse_expr(p);
+ hi = e.span;
+ ex = ast.expr_put(some[@ast.expr](e));
+ }
+ }
+ }
+
+ case (token.BE) {
+ p.bump();
+ auto e = parse_expr(p);
+ // FIXME: Is this the right place for this check?
+ if /*check*/ (ast.is_call_expr(e)) {
+ hi = e.span;
+ ex = ast.expr_be(e);
+ }
+ else {
+ p.err("Non-call expression in tail call");
+ }
+ }
+
+ case (_) {
+ auto lit = parse_lit(p);
+ hi = lit.span;
+ ex = ast.expr_lit(@lit, ast.ann_none);
+ }
}
ret @spanned(lo, hi, ex);
}
+/*
+ * FIXME: This is a crude approximation of the syntax-extension system,
+ * for purposes of prototyping and/or hard-wiring any extensions we
+ * wish to use while bootstrapping. The eventual aim is to permit
+ * loading rust crates to process extensions, but this will likely
+ * require a rust-based frontend, or an ocaml-FFI-based connection to
+ * rust crates. At the moment we have neither.
+ */
+
+impure fn expand_syntax_ext(parser p, ast.span sp,
+ &ast.path path, vec[@ast.expr] args,
+ option.t[@ast.expr] body) -> ast.expr_ {
+
+ check (_vec.len[ast.ident](path.node.idents) > 0u);
+ auto extname = path.node.idents.(0);
+ if (_str.eq(extname, "fmt")) {
+ auto expanded = extfmt.expand_syntax_ext(args, body);
+ auto newexpr = ast.expr_ext(path, args, body,
+ expanded,
+ ast.ann_none);
+
+ ret newexpr;
+ } else {
+ p.err("unknown syntax extension");
+ fail;
+ }
+}
+
impure fn extend_expr_by_ident(parser p, span lo, span hi,
@ast.expr e, ast.ident i) -> @ast.expr {
auto e_ = e.node;
@@ -705,6 +967,13 @@ impure fn parse_prefix_expr(parser p) -> @ast.expr {
ex = ast.expr_unary(ast.box, e, ast.ann_none);
}
+ case (token.MUTABLE) {
+ p.bump();
+ auto e = parse_prefix_expr(p);
+ hi = e.span;
+ ex = ast.expr_unary(ast._mutable, e, ast.ann_none);
+ }
+
case (_) {
ret parse_dot_or_call_expr(p);
}
@@ -712,144 +981,73 @@ impure fn parse_prefix_expr(parser p) -> @ast.expr {
ret @spanned(lo, hi, ex);
}
-impure fn parse_binops(parser p,
- (impure fn(parser) -> @ast.expr) sub,
- vec[tup(token.binop, ast.binop)] ops)
+type op_spec = rec(token.token tok, ast.binop op, int prec);
+
+// FIXME make this a const, don't store it in parser state
+fn prec_table() -> vec[op_spec] {
+ ret vec(rec(tok=token.BINOP(token.STAR), op=ast.mul, prec=11),
+ rec(tok=token.BINOP(token.SLASH), op=ast.div, prec=11),
+ rec(tok=token.BINOP(token.PERCENT), op=ast.rem, prec=11),
+ rec(tok=token.BINOP(token.PLUS), op=ast.add, prec=10),
+ rec(tok=token.BINOP(token.MINUS), op=ast.sub, prec=10),
+ rec(tok=token.BINOP(token.LSL), op=ast.lsl, prec=9),
+ rec(tok=token.BINOP(token.LSR), op=ast.lsr, prec=9),
+ rec(tok=token.BINOP(token.ASR), op=ast.asr, prec=9),
+ rec(tok=token.BINOP(token.AND), op=ast.bitand, prec=8),
+ rec(tok=token.BINOP(token.CARET), op=ast.bitxor, prec=6),
+ rec(tok=token.BINOP(token.OR), op=ast.bitor, prec=6),
+ // ast.mul is a bogus placeholder here, AS is special
+ // cased in parse_more_binops
+ rec(tok=token.AS, op=ast.mul, prec=5),
+ rec(tok=token.LT, op=ast.lt, prec=4),
+ rec(tok=token.LE, op=ast.le, prec=4),
+ rec(tok=token.GE, op=ast.ge, prec=4),
+ rec(tok=token.GT, op=ast.gt, prec=4),
+ rec(tok=token.EQEQ, op=ast.eq, prec=3),
+ rec(tok=token.NE, op=ast.ne, prec=3),
+ rec(tok=token.ANDAND, op=ast.and, prec=2),
+ rec(tok=token.OROR, op=ast.or, prec=1));
+}
+
+impure fn parse_binops(parser p) -> @ast.expr {
+ ret parse_more_binops(p, parse_prefix_expr(p), 0);
+}
+
+impure fn parse_more_binops(parser p, @ast.expr lhs, int min_prec)
-> @ast.expr {
- auto lo = p.get_span();
- auto hi = lo;
- auto e = sub(p);
- auto more = true;
- while (more) {
- more = false;
- for (tup(token.binop, ast.binop) pair in ops) {
- alt (p.peek()) {
- case (token.BINOP(?op)) {
- if (pair._0 == op) {
- p.bump();
- auto rhs = sub(p);
- hi = rhs.span;
- auto exp = ast.expr_binary(pair._1, e, rhs,
- ast.ann_none);
- e = @spanned(lo, hi, exp);
- more = true;
- }
- }
- case (_) { /* fall through */ }
- }
- }
+ // Magic nonsense to work around rustboot bug
+ fn op_eq(token.token a, token.token b) -> bool {
+ if (a == b) {ret true;}
+ else {ret false;}
}
- ret e;
-}
-
-impure fn parse_binary_exprs(parser p,
- (impure fn(parser) -> @ast.expr) sub,
- vec[tup(token.token, ast.binop)] ops)
- -> @ast.expr {
- auto lo = p.get_span();
- auto hi = lo;
- auto e = sub(p);
- auto more = true;
- while (more) {
- more = false;
- for (tup(token.token, ast.binop) pair in ops) {
- if (pair._0 == p.peek()) {
- p.bump();
- auto rhs = sub(p);
- hi = rhs.span;
- auto exp = ast.expr_binary(pair._1, e, rhs, ast.ann_none);
- e = @spanned(lo, hi, exp);
- more = true;
- }
- }
- }
- ret e;
-}
-
-impure fn parse_factor_expr(parser p) -> @ast.expr {
- auto sub = parse_prefix_expr;
- ret parse_binops(p, sub, vec(tup(token.STAR, ast.mul),
- tup(token.SLASH, ast.div),
- tup(token.PERCENT, ast.rem)));
-}
-
-impure fn parse_term_expr(parser p) -> @ast.expr {
- auto sub = parse_factor_expr;
- ret parse_binops(p, sub, vec(tup(token.PLUS, ast.add),
- tup(token.MINUS, ast.sub)));
-}
-
-impure fn parse_shift_expr(parser p) -> @ast.expr {
- auto sub = parse_term_expr;
- ret parse_binops(p, sub, vec(tup(token.LSL, ast.lsl),
- tup(token.LSR, ast.lsr),
- tup(token.ASR, ast.asr)));
-}
-
-impure fn parse_bitand_expr(parser p) -> @ast.expr {
- auto sub = parse_shift_expr;
- ret parse_binops(p, sub, vec(tup(token.AND, ast.bitand)));
-}
-
-impure fn parse_bitxor_expr(parser p) -> @ast.expr {
- auto sub = parse_bitand_expr;
- ret parse_binops(p, sub, vec(tup(token.CARET, ast.bitxor)));
-}
-
-impure fn parse_bitor_expr(parser p) -> @ast.expr {
- auto sub = parse_bitxor_expr;
- ret parse_binops(p, sub, vec(tup(token.OR, ast.bitor)));
-}
-
-impure fn parse_cast_expr(parser p) -> @ast.expr {
- auto lo = p.get_span();
- auto e = parse_bitor_expr(p);
- auto hi = e.span;
- while (true) {
- alt (p.peek()) {
- case (token.AS) {
- p.bump();
- auto t = parse_ty(p);
- hi = t.span;
- e = @spanned(lo, hi, ast.expr_cast(e, t, ast.ann_none));
- }
-
- case (_) {
- ret e;
+ auto peeked = p.peek();
+ for (op_spec cur in p.get_prec_table()) {
+ if (cur.prec > min_prec && op_eq(cur.tok, peeked)) {
+ p.bump();
+ alt (cur.tok) {
+ case (token.AS) {
+ auto rhs = parse_ty(p);
+ auto _as = ast.expr_cast(lhs, rhs, ast.ann_none);
+ auto span = @spanned(lhs.span, rhs.span, _as);
+ ret parse_more_binops(p, span, min_prec);
+ }
+ case (_) {
+ auto rhs = parse_more_binops(p, parse_prefix_expr(p),
+ cur.prec);
+ auto bin = ast.expr_binary(cur.op, lhs, rhs,
+ ast.ann_none);
+ auto span = @spanned(lhs.span, rhs.span, bin);
+ ret parse_more_binops(p, span, min_prec);
+ }
}
}
}
- ret e;
-}
-
-impure fn parse_relational_expr(parser p) -> @ast.expr {
- auto sub = parse_cast_expr;
- ret parse_binary_exprs(p, sub, vec(tup(token.LT, ast.lt),
- tup(token.LE, ast.le),
- tup(token.GE, ast.ge),
- tup(token.GT, ast.gt)));
-}
-
-
-impure fn parse_equality_expr(parser p) -> @ast.expr {
- auto sub = parse_relational_expr;
- ret parse_binary_exprs(p, sub, vec(tup(token.EQEQ, ast.eq),
- tup(token.NE, ast.ne)));
-}
-
-impure fn parse_and_expr(parser p) -> @ast.expr {
- auto sub = parse_equality_expr;
- ret parse_binary_exprs(p, sub, vec(tup(token.ANDAND, ast.and)));
-}
-
-impure fn parse_or_expr(parser p) -> @ast.expr {
- auto sub = parse_and_expr;
- ret parse_binary_exprs(p, sub, vec(tup(token.OROR, ast.or)));
+ ret lhs;
}
impure fn parse_assign_expr(parser p) -> @ast.expr {
auto lo = p.get_span();
- auto lhs = parse_or_expr(p);
+ auto lhs = parse_binops(p);
alt (p.peek()) {
case (token.EQ) {
p.bump();
@@ -901,6 +1099,7 @@ impure fn parse_if_expr(parser p) -> @ast.expr {
}
case (_) { /* fall through */ }
}
+
ret @spanned(lo, hi, ast.expr_if(cond, thn, els, ast.ann_none));
}
@@ -935,8 +1134,14 @@ impure fn parse_head_local(parser p) -> @ast.decl {
impure fn parse_for_expr(parser p) -> @ast.expr {
auto lo = p.get_span();
auto hi = lo;
+ auto is_each = false;
expect(p, token.FOR);
+ if (p.peek() == token.EACH) {
+ is_each = true;
+ p.bump();
+ }
+
expect (p, token.LPAREN);
auto decl = parse_head_local(p);
@@ -946,9 +1151,16 @@ impure fn parse_for_expr(parser p) -> @ast.expr {
expect(p, token.RPAREN);
auto body = parse_block(p);
hi = body.span;
- ret @spanned(lo, hi, ast.expr_for(decl, seq, body, ast.ann_none));
+ if (is_each) {
+ ret @spanned(lo, hi, ast.expr_for_each(decl, seq, body,
+ ast.ann_none));
+ } else {
+ ret @spanned(lo, hi, ast.expr_for(decl, seq, body,
+ ast.ann_none));
+ }
}
+
impure fn parse_while_expr(parser p) -> @ast.expr {
auto lo = p.get_span();
auto hi = lo;
@@ -996,6 +1208,23 @@ impure fn parse_alt_expr(parser p) -> @ast.expr {
auto block = parse_block(p);
arms += vec(rec(pat=pat, block=block, index=index));
}
+
+ // FIXME: this is a vestigial form left over from
+ // rustboot, we're keeping it here for source-compat
+ // for the time being but it should be flushed out
+ // once we've bootstrapped. When we see 'else {' here,
+ // we pretend we saw 'case (_) {'. It has the same
+ // meaning, and only exists due to the cexp/pexp split
+ // in rustboot, which we're not maintaining.
+
+ case (token.ELSE) {
+ p.bump();
+ auto hi = p.get_span();
+ auto pat = @spanned(lo, hi, ast.pat_wild(ast.ann_none));
+ auto index = index_arm(pat);
+ auto block = parse_block(p);
+ arms += vec(rec(pat=pat, block=block, index=index));
+ }
case (token.RBRACE) { /* empty */ }
case (?tok) {
p.err("expected 'case' or '}' when parsing 'alt' statement " +
@@ -1062,10 +1291,12 @@ impure fn parse_initializer(parser p) -> option.t[@ast.expr] {
impure fn parse_pat(parser p) -> @ast.pat {
auto lo = p.get_span();
+ auto hi = lo;
+ auto pat;
- auto pat = ast.pat_wild(ast.ann_none); // FIXME: typestate bug
alt (p.peek()) {
case (token.UNDERSCORE) {
+ hi = p.get_span();
p.bump();
pat = ast.pat_wild(ast.ann_none);
}
@@ -1073,6 +1304,7 @@ impure fn parse_pat(parser p) -> @ast.pat {
p.bump();
alt (p.peek()) {
case (token.IDENT(?id)) {
+ hi = p.get_span();
p.bump();
pat = ast.pat_bind(id, p.next_def_id(), ast.ann_none);
}
@@ -1085,13 +1317,16 @@ impure fn parse_pat(parser p) -> @ast.pat {
}
case (token.IDENT(?id)) {
auto tag_path = parse_path(p, GREEDY);
+ hi = tag_path.span;
let vec[@ast.pat] args;
alt (p.peek()) {
case (token.LPAREN) {
auto f = parse_pat;
- args = parse_seq[@ast.pat](token.LPAREN, token.RPAREN,
- some(token.COMMA), f, p).node;
+ auto a = parse_seq[@ast.pat](token.LPAREN, token.RPAREN,
+ some(token.COMMA), f, p);
+ args = a.node;
+ hi = a.span;
}
case (_) { args = vec(); }
}
@@ -1099,13 +1334,13 @@ impure fn parse_pat(parser p) -> @ast.pat {
pat = ast.pat_tag(tag_path, args, none[ast.variant_def],
ast.ann_none);
}
- case (?tok) {
- p.err("expected pattern but found " + token.to_str(tok));
- fail;
+ case (_) {
+ auto lit = parse_lit(p);
+ hi = lit.span;
+ pat = ast.pat_lit(@lit, ast.ann_none);
}
}
- auto hi = p.get_span();
ret @spanned(lo, hi, pat);
}
@@ -1147,49 +1382,22 @@ impure fn parse_auto(parser p) -> @ast.decl {
}
impure fn parse_stmt(parser p) -> @ast.stmt {
- auto lo = p.get_span();
- alt (p.peek()) {
-
- case (token.LOG) {
- p.bump();
- auto e = parse_expr(p);
- auto hi = p.get_span();
- ret @spanned(lo, hi, ast.stmt_log(e));
- }
-
- case (token.CHECK) {
- p.bump();
- alt (p.peek()) {
- case (token.LPAREN) {
- auto e = parse_expr(p);
- auto hi = p.get_span();
- ret @spanned(lo, hi, ast.stmt_check_expr(e));
- }
- case (_) {
- p.get_session().unimpl("constraint-check stmt");
- }
- }
- }
+ if (p.get_file_type() == SOURCE_FILE) {
+ ret parse_source_stmt(p);
+ } else {
+ ret parse_crate_stmt(p);
+ }
+}
- case (token.FAIL) {
- p.bump();
- ret @spanned(lo, p.get_span(), ast.stmt_fail);
- }
+impure fn parse_crate_stmt(parser p) -> @ast.stmt {
+ auto cdir = parse_crate_directive(p);
+ ret @spanned(cdir.span, cdir.span,
+ ast.stmt_crate_directive(@cdir));
+}
- case (token.RET) {
- p.bump();
- alt (p.peek()) {
- case (token.SEMI) {
- ret @spanned(lo, p.get_span(),
- ast.stmt_ret(none[@ast.expr]));
- }
- case (_) {
- auto e = parse_expr(p);
- ret @spanned(lo, e.span,
- ast.stmt_ret(some[@ast.expr](e)));
- }
- }
- }
+impure fn parse_source_stmt(parser p) -> @ast.stmt {
+ auto lo = p.get_span();
+ alt (p.peek()) {
case (token.LET) {
auto decl = parse_let(p);
@@ -1260,31 +1468,28 @@ fn index_block(vec[@ast.stmt] stmts, option.t[@ast.expr] expr) -> ast.block_ {
auto index = new_str_hash[uint]();
auto u = 0u;
for (@ast.stmt s in stmts) {
- // FIXME: typestate bug requires we do this up top, not
- // down below loop. Sigh.
- u += 1u;
alt (s.node) {
case (ast.stmt_decl(?d)) {
alt (d.node) {
case (ast.decl_local(?loc)) {
- index.insert(loc.ident, u-1u);
+ index.insert(loc.ident, u);
}
case (ast.decl_item(?it)) {
alt (it.node) {
case (ast.item_fn(?i, _, _, _, _)) {
- index.insert(i, u-1u);
+ index.insert(i, u);
}
case (ast.item_mod(?i, _, _)) {
- index.insert(i, u-1u);
+ index.insert(i, u);
}
case (ast.item_ty(?i, _, _, _, _)) {
- index.insert(i, u-1u);
+ index.insert(i, u);
}
case (ast.item_tag(?i, _, _, _)) {
- index.insert(i, u-1u);
+ index.insert(i, u);
}
case (ast.item_obj(?i, _, _, _, _)) {
- index.insert(i, u-1u);
+ index.insert(i, u);
}
}
}
@@ -1292,6 +1497,7 @@ fn index_block(vec[@ast.stmt] stmts, option.t[@ast.expr] expr) -> ast.block_ {
}
case (_) { /* fall through */ }
}
+ u += 1u;
}
ret rec(stmts=stmts, expr=expr, index=index);
}
@@ -1301,6 +1507,7 @@ fn index_arm(@ast.pat pat) -> hashmap[ast.ident,ast.def_id] {
alt (pat.node) {
case (ast.pat_bind(?i, ?def_id, _)) { index.insert(i, def_id); }
case (ast.pat_wild(_)) { /* empty */ }
+ case (ast.pat_lit(_, _)) { /* empty */ }
case (ast.pat_tag(_, ?pats, _, _)) {
for (@ast.pat p in pats) {
do_index_arm(index, p);
@@ -1330,15 +1537,11 @@ fn stmt_ends_with_semi(@ast.stmt stmt) -> bool {
case (ast.decl_item(_)) { ret false; }
}
}
- case (ast.stmt_ret(_)) { ret true; }
- case (ast.stmt_log(_)) { ret true; }
- case (ast.stmt_check_expr(_)) { ret true; }
- case (ast.stmt_fail) { ret true; }
case (ast.stmt_expr(?e)) {
alt (e.node) {
case (ast.expr_vec(_,_)) { ret true; }
case (ast.expr_tup(_,_)) { ret true; }
- case (ast.expr_rec(_,_)) { ret true; }
+ case (ast.expr_rec(_,_,_)) { ret true; }
case (ast.expr_call(_,_,_)) { ret true; }
case (ast.expr_binary(_,_,_,_)) { ret true; }
case (ast.expr_unary(_,_,_)) { ret true; }
@@ -1346,18 +1549,28 @@ fn stmt_ends_with_semi(@ast.stmt stmt) -> bool {
case (ast.expr_cast(_,_,_)) { ret true; }
case (ast.expr_if(_,_,_,_)) { ret false; }
case (ast.expr_for(_,_,_,_)) { ret false; }
+ case (ast.expr_for_each(_,_,_,_))
+ { ret false; }
case (ast.expr_while(_,_,_)) { ret false; }
case (ast.expr_do_while(_,_,_)) { ret false; }
case (ast.expr_alt(_,_,_)) { ret false; }
case (ast.expr_block(_,_)) { ret false; }
case (ast.expr_assign(_,_,_)) { ret true; }
case (ast.expr_assign_op(_,_,_,_))
- { ret true; }
+ { ret true; }
case (ast.expr_field(_,_,_)) { ret true; }
case (ast.expr_index(_,_,_)) { ret true; }
case (ast.expr_path(_,_,_)) { ret true; }
+ case (ast.expr_fail) { ret true; }
+ case (ast.expr_ret(_)) { ret true; }
+ case (ast.expr_put(_)) { ret true; }
+ case (ast.expr_be(_)) { ret true; }
+ case (ast.expr_log(_)) { ret true; }
+ case (ast.expr_check_expr(_)) { ret true; }
}
}
+ // We should not be calling this on a cdir.
+ case (ast.stmt_crate_directive(?cdir)) { fail; }
}
}
@@ -1401,8 +1614,13 @@ impure fn parse_block(parser p) -> ast.block {
case (none[@ast.expr]) {
// Not an expression statement.
stmts += vec(stmt);
- if (stmt_ends_with_semi(stmt)) {
- expect(p, token.SEMI);
+ // FIXME: crazy differentiation between conditions
+ // used in branches and binary expressions in rustboot
+ // means we cannot use && here. I know, right?
+ if (p.get_file_type() == SOURCE_FILE) {
+ if (stmt_ends_with_semi(stmt)) {
+ expect(p, token.SEMI);
+ }
}
}
}
@@ -1432,7 +1650,7 @@ impure fn parse_ty_params(parser p) -> vec[ast.ty_param] {
ret ty_params;
}
-impure fn parse_fn(parser p, ast.effect eff, bool is_iter) -> ast._fn {
+impure fn parse_fn_decl(parser p, ast.effect eff) -> ast.fn_decl {
auto pf = parse_arg;
let util.common.spanned[vec[ast.arg]] inputs =
// FIXME: passing parse_arg as an lval doesn't work at the
@@ -1444,34 +1662,41 @@ impure fn parse_fn(parser p, ast.effect eff, bool is_iter) -> ast._fn {
pf, p);
let @ast.ty output;
+
+ // FIXME: dropping constrs on the floor at the moment.
+ // pick them up when they're used by typestate pass.
+ parse_constrs(p);
+
if (p.peek() == token.RARROW) {
p.bump();
output = parse_ty(p);
} else {
output = @spanned(inputs.span, inputs.span, ast.ty_nil);
}
+ ret rec(effect=eff, inputs=inputs.node, output=output);
+}
+impure fn parse_fn(parser p, ast.effect eff, ast.proto proto) -> ast._fn {
+ auto decl = parse_fn_decl(p, eff);
auto body = parse_block(p);
-
- ret rec(effect = eff,
- is_iter = is_iter,
- inputs = inputs.node,
- output = output,
+ ret rec(decl = decl,
+ proto = proto,
body = body);
}
-impure fn parse_item_fn_or_iter(parser p, ast.effect eff,
- bool is_iter) -> @ast.item {
- auto lo = p.get_span();
- if (is_iter) {
- expect(p, token.ITER);
- } else {
- expect(p, token.FN);
- }
+impure fn parse_fn_header(parser p)
+ -> tup(ast.ident, vec[ast.ty_param]) {
auto id = parse_ident(p);
auto ty_params = parse_ty_params(p);
- auto f = parse_fn(p, eff, is_iter);
- auto item = ast.item_fn(id, f, ty_params,
+ ret tup(id, ty_params);
+}
+
+impure fn parse_item_fn_or_iter(parser p, ast.effect eff) -> @ast.item {
+ auto lo = p.get_span();
+ auto proto = parse_proto(p);
+ auto t = parse_fn_header(p);
+ auto f = parse_fn(p, eff, proto);
+ auto item = ast.item_fn(t._0, f, t._1,
p.next_def_id(), ast.ann_none);
ret @spanned(lo, f.body.span, item);
}
@@ -1486,14 +1711,9 @@ impure fn parse_obj_field(parser p) -> ast.obj_field {
impure fn parse_method(parser p) -> @ast.method {
auto lo = p.get_span();
auto eff = parse_effect(p);
- auto is_iter = false;
- alt (p.peek()) {
- case (token.FN) { p.bump(); }
- case (token.ITER) { p.bump(); is_iter = true; }
- case (?t) { unexpected(p, t); }
- }
+ auto proto = parse_proto(p);
auto ident = parse_ident(p);
- auto f = parse_fn(p, eff, is_iter);
+ auto f = parse_fn(p, eff, proto);
auto meth = rec(ident=ident, meth=f,
id=p.next_def_id(), ann=ast.ann_none);
ret @spanned(lo, f.body.span, meth);
@@ -1512,21 +1732,33 @@ impure fn parse_item_obj(parser p, ast.layer lyr) -> @ast.item {
some(token.COMMA),
pf, p);
- auto pm = parse_method;
- let util.common.spanned[vec[@ast.method]] meths =
- parse_seq[@ast.method]
- (token.LBRACE,
- token.RBRACE,
- none[token.token],
- pm, p);
+ let vec[@ast.method] meths = vec();
+ let option.t[ast.block] dtor = none[ast.block];
+
+ expect(p, token.LBRACE);
+ while (p.peek() != token.RBRACE) {
+ alt (p.peek()) {
+ case (token.DROP) {
+ p.bump();
+ dtor = some[ast.block](parse_block(p));
+ }
+ case (_) {
+ append[@ast.method](meths,
+ parse_method(p));
+ }
+ }
+ }
+ auto hi = p.get_span();
+ expect(p, token.RBRACE);
let ast._obj ob = rec(fields=fields.node,
- methods=meths.node);
+ methods=meths,
+ dtor=dtor);
auto item = ast.item_obj(ident, ob, ty_params,
p.next_def_id(), ast.ann_none);
- ret @spanned(lo, meths.span, item);
+ ret @spanned(lo, hi, item);
}
impure fn parse_mod_items(parser p, token.token term) -> ast._mod {
@@ -1568,18 +1800,127 @@ impure fn parse_item_mod(parser p) -> @ast.item {
ret @spanned(lo, hi, item);
}
-impure fn parse_item_type(parser p) -> @ast.item {
+impure fn parse_item_native_type(parser p) -> @ast.native_item {
+ auto t = parse_type_decl(p);
+ auto hi = p.get_span();
+ expect(p, token.SEMI);
+ auto item = ast.native_item_ty(t._1, p.next_def_id());
+ ret @spanned(t._0, hi, item);
+}
+
+impure fn parse_item_native_fn(parser p, ast.effect eff) -> @ast.native_item {
+ auto lo = p.get_span();
+ expect(p, token.FN);
+ auto t = parse_fn_header(p);
+ auto decl = parse_fn_decl(p, eff);
+ auto hi = p.get_span();
+ expect(p, token.SEMI);
+ auto item = ast.native_item_fn(t._0, decl, t._1, p.next_def_id(),
+ ast.ann_none);
+ ret @spanned(lo, hi, item);
+}
+
+impure fn parse_native_item(parser p) -> @ast.native_item {
+ let ast.effect eff = parse_effect(p);
+ let ast.opacity opa = parse_opacity(p);
+ let ast.layer lyr = parse_layer(p);
+ alt (p.peek()) {
+ case (token.TYPE) {
+ ret parse_item_native_type(p);
+ }
+ case (token.FN) {
+ ret parse_item_native_fn(p, eff);
+ }
+ case (?t) {
+ unexpected(p, t);
+ fail;
+ }
+ }
+}
+
+impure fn parse_native_mod_items(parser p,
+ str native_name,
+ ast.native_abi abi) -> ast.native_mod {
+ auto index = new_str_hash[ast.native_mod_index_entry]();
+ let vec[@ast.native_item] items = vec();
+
+ auto view_items = parse_native_view(p, index);
+
+ while (p.peek() != token.RBRACE) {
+ auto item = parse_native_item(p);
+ items += vec(item);
+
+ // Index the item.
+ ast.index_native_item(index, item);
+ }
+ ret rec(native_name=native_name, abi=abi,
+ view_items=view_items,
+ items=items,
+ index=index);
+}
+
+fn default_native_name(session.session sess, str id) -> str {
+ alt (sess.get_targ_cfg().os) {
+ case (session.os_win32) {
+ ret id + ".dll";
+ }
+ case (session.os_macos) {
+ ret "lib" + id + ".dylib";
+ }
+ case (session.os_linux) {
+ ret "lib" + id + ".so";
+ }
+ }
+}
+
+impure fn parse_item_native_mod(parser p) -> @ast.item {
+ auto lo = p.get_span();
+ expect(p, token.NATIVE);
+ auto abi = ast.native_abi_cdecl;
+ if (p.peek() != token.MOD) {
+ auto t = parse_str_lit_or_env_ident(p);
+ if (_str.eq(t, "cdecl")) {
+ } else if (_str.eq(t, "rust")) {
+ abi = ast.native_abi_rust;
+ } else {
+ p.err("unsupported abi: " + t);
+ fail;
+ }
+ }
+ expect(p, token.MOD);
+ auto id = parse_ident(p);
+ auto native_name;
+ if (p.peek() == token.EQ) {
+ expect(p, token.EQ);
+ native_name = parse_str_lit_or_env_ident(p);
+ } else {
+ native_name = default_native_name(p.get_session(), id);
+ }
+ expect(p, token.LBRACE);
+ auto m = parse_native_mod_items(p, native_name, abi);
+ auto hi = p.get_span();
+ expect(p, token.RBRACE);
+ auto item = ast.item_native_mod(id, m, p.next_def_id());
+ ret @spanned(lo, hi, item);
+}
+
+impure fn parse_type_decl(parser p) -> tup(span, ast.ident) {
auto lo = p.get_span();
expect(p, token.TYPE);
auto id = parse_ident(p);
+ ret tup(lo, id);
+}
+
+impure fn parse_item_type(parser p) -> @ast.item {
+ auto t = parse_type_decl(p);
auto tps = parse_ty_params(p);
expect(p, token.EQ);
auto ty = parse_ty(p);
auto hi = p.get_span();
expect(p, token.SEMI);
- auto item = ast.item_ty(id, ty, tps, p.next_def_id(), ast.ann_none);
- ret @spanned(lo, hi, item);
+ auto item = ast.item_ty(t._1, ty, tps, p.next_def_id(), ast.ann_none);
+ ret @spanned(t._0, hi, item);
}
impure fn parse_item_tag(parser p) -> @ast.item {
@@ -1631,6 +1972,19 @@ impure fn parse_item_tag(parser p) -> @ast.item {
ret @spanned(lo, hi, item);
}
+impure fn parse_opacity(parser p) -> ast.opacity {
+ alt (p.peek()) {
+ case (token.ABS) {
+ p.bump();
+ ret ast.op_abstract;
+ }
+ case (_) {
+ ret ast.op_transparent;
+ }
+ }
+ fail;
+}
+
impure fn parse_layer(parser p) -> ast.layer {
alt (p.peek()) {
case (token.STATE) {
@@ -1686,6 +2040,7 @@ fn peeking_at_item(parser p) -> bool {
impure fn parse_item(parser p) -> @ast.item {
let ast.effect eff = parse_effect(p);
+ let ast.opacity opa = parse_opacity(p);
let ast.layer lyr = parse_layer(p);
alt (p.peek()) {
@@ -1697,17 +2052,22 @@ impure fn parse_item(parser p) -> @ast.item {
case (token.FN) {
check (lyr == ast.layer_value);
- ret parse_item_fn_or_iter(p, eff, false);
+ ret parse_item_fn_or_iter(p, eff);
}
case (token.ITER) {
check (lyr == ast.layer_value);
- ret parse_item_fn_or_iter(p, eff, true);
+ ret parse_item_fn_or_iter(p, eff);
}
case (token.MOD) {
check (eff == ast.eff_pure);
check (lyr == ast.layer_value);
ret parse_item_mod(p);
}
+ case (token.NATIVE) {
+ check (eff == ast.eff_pure);
+ check (lyr == ast.layer_value);
+ ret parse_item_native_mod(p);
+ }
case (token.TYPE) {
check (eff == ast.eff_pure);
ret parse_item_type(p);
@@ -1840,7 +2200,16 @@ impure fn parse_import(parser p) -> @ast.view_item {
fail;
}
-impure fn parse_use_or_import(parser p) -> @ast.view_item {
+impure fn parse_export(parser p) -> @ast.view_item {
+ auto lo = p.get_span();
+ expect(p, token.EXPORT);
+ auto id = parse_ident(p);
+ auto hi = p.get_span();
+ expect(p, token.SEMI);
+ ret @spanned(lo, hi, ast.view_item_export(id));
+}
+
+impure fn parse_view_item(parser p) -> @ast.view_item {
alt (p.peek()) {
case (token.USE) {
ret parse_use(p);
@@ -1848,23 +2217,26 @@ impure fn parse_use_or_import(parser p) -> @ast.view_item {
case (token.IMPORT) {
ret parse_import(p);
}
+ case (token.EXPORT) {
+ ret parse_export(p);
+ }
}
}
-fn is_use_or_import(token.token t) -> bool {
- if (t == token.USE) {
- ret true;
- }
- if (t == token.IMPORT) {
- ret true;
+fn is_view_item(token.token t) -> bool {
+ alt (t) {
+ case (token.USE) { ret true; }
+ case (token.IMPORT) { ret true; }
+ case (token.EXPORT) { ret true; }
+ case (_) {}
}
ret false;
}
impure fn parse_view(parser p, ast.mod_index index) -> vec[@ast.view_item] {
let vec[@ast.view_item] items = vec();
- while (is_use_or_import(p.peek())) {
- auto item = parse_use_or_import(p);
+ while (is_view_item(p.peek())) {
+ auto item = parse_view_item(p);
items += vec(item);
ast.index_view_item(index, item);
@@ -1872,6 +2244,19 @@ impure fn parse_view(parser p, ast.mod_index index) -> vec[@ast.view_item] {
ret items;
}
+impure fn parse_native_view(parser p, ast.native_mod_index index)
+ -> vec[@ast.view_item] {
+ let vec[@ast.view_item] items = vec();
+ while (is_view_item(p.peek())) {
+ auto item = parse_view_item(p);
+ items += vec(item);
+
+ ast.index_native_view_item(index, item);
+ }
+ ret items;
+}
+
+
impure fn parse_crate_from_source_file(parser p) -> @ast.crate {
auto lo = p.get_span();
auto hi = lo;
@@ -1885,33 +2270,46 @@ impure fn parse_crate_from_source_file(parser p) -> @ast.crate {
//
// Each directive imperatively extends its environment with 0 or more items.
-impure fn parse_crate_directive(str prefix, parser p,
- &mutable vec[@ast.item] items,
- hashmap[ast.ident,ast.mod_index_entry] index)
+impure fn parse_crate_directive(parser p) -> ast.crate_directive
{
auto lo = p.get_span();
auto hi = lo;
alt (p.peek()) {
- case (token.CONST) {
- auto c = parse_item_const(p);
- ast.index_item(index, c);
- append[@ast.item](items, c);
- }
+ case (token.AUTH) {
+ // FIXME: currently dropping auth clauses on the floor,
+ // as there is no effect-checking pass.
+ p.bump();
+ auto n = parse_path(p, GREEDY);
+ expect(p, token.EQ);
+ auto e = parse_effect(p);
+ hi = p.get_span();
+ expect(p, token.SEMI);
+ ret spanned(lo, hi, ast.cdir_auth(n, e));
+ }
+
+ case (token.META) {
+ // FIXME: currently dropping meta clauses on the floor,
+ // as there is no crate metadata system
+ p.bump();
+ auto mis = parse_meta(p);
+ hi = p.get_span();
+ expect(p, token.SEMI);
+ ret spanned(lo, hi, ast.cdir_meta(mis));
+ }
+
case (token.MOD) {
p.bump();
auto id = parse_ident(p);
- auto file_path = id;
+ auto file_opt = none[filename];
alt (p.peek()) {
case (token.EQ) {
p.bump();
// FIXME: turn this into parse+eval expr
- file_path = parse_str_lit(p);
+ file_opt = some[filename](parse_str_lit_or_env_ident(p));
}
case (_) {}
}
- // dir-qualify file path.
- auto full_path = prefix + std.os.path_sep() + file_path;
alt (p.peek()) {
@@ -1920,29 +2318,18 @@ impure fn parse_crate_directive(str prefix, parser p,
case (token.SEMI) {
hi = p.get_span();
p.bump();
- if (!_str.ends_with(full_path, ".rs")) {
- full_path += ".rs";
- }
- auto p0 = new_parser(p.get_session(), 0, full_path);
- auto m0 = parse_mod_items(p0, token.EOF);
- auto im = ast.item_mod(id, m0, p.next_def_id());
- auto i = @spanned(lo, hi, im);
- ast.index_item(index, i);
- append[@ast.item](items, i);
+ ret spanned(lo, hi, ast.cdir_src_mod(id, file_opt));
}
// mod x = "foo_dir" { ...directives... }
case (token.LBRACE) {
p.bump();
- auto m0 = parse_crate_directives(full_path, p,
- token.RBRACE);
+ auto cdirs = parse_crate_directives(p, token.RBRACE);
hi = p.get_span();
expect(p, token.RBRACE);
- auto im = ast.item_mod(id, m0, p.next_def_id());
- auto i = @spanned(lo, hi, im);
- ast.index_item(index, i);
- append[@ast.item](items, i);
+ ret spanned(lo, hi,
+ ast.cdir_dir_mod(id, file_opt, cdirs));
}
case (?t) {
@@ -1950,28 +2337,65 @@ impure fn parse_crate_directive(str prefix, parser p,
}
}
}
+
+ case (token.LET) {
+ p.bump();
+ expect(p, token.LPAREN);
+ auto id = parse_ident(p);
+ expect(p, token.EQ);
+ auto x = parse_expr(p);
+ expect(p, token.RPAREN);
+ expect(p, token.LBRACE);
+ auto v = parse_crate_directives(p, token.RBRACE);
+ hi = p.get_span();
+ expect(p, token.RBRACE);
+ ret spanned(lo, hi, ast.cdir_let(id, x, v));
+ }
+
+ case (token.USE) {
+ auto vi = parse_view_item(p);
+ ret spanned(lo, vi.span, ast.cdir_view_item(vi));
+ }
+
+ case (token.IMPORT) {
+ auto vi = parse_view_item(p);
+ ret spanned(lo, vi.span, ast.cdir_view_item(vi));
+ }
+
+ case (token.EXPORT) {
+ auto vi = parse_view_item(p);
+ ret spanned(lo, vi.span, ast.cdir_view_item(vi));
+ }
+
+ case (_) {
+ auto x = parse_expr(p);
+ ret spanned(lo, x.span, ast.cdir_expr(x));
+ }
}
+ fail;
}
-impure fn parse_crate_directives(str prefix, parser p,
- token.token term) -> ast._mod {
- auto index = new_str_hash[ast.mod_index_entry]();
- auto view_items = parse_view(p, index);
- let vec[@ast.item] items = vec();
+impure fn parse_crate_directives(parser p, token.token term)
+ -> vec[@ast.crate_directive] {
+
+ let vec[@ast.crate_directive] cdirs = vec();
while (p.peek() != term) {
- parse_crate_directive(prefix, p, items, index);
+ auto cdir = @parse_crate_directive(p);
+ append[@ast.crate_directive](cdirs, cdir);
}
- ret rec(view_items=view_items, items=items, index=index);
+ ret cdirs;
}
impure fn parse_crate_from_crate_file(parser p) -> @ast.crate {
auto lo = p.get_span();
auto hi = lo;
auto prefix = std.path.dirname(lo.filename);
- auto m = parse_crate_directives(prefix, p, token.EOF);
+ auto cdirs = parse_crate_directives(p, token.EOF);
+ auto m = eval.eval_crate_directives_to_mod(p, p.get_env(),
+ cdirs, prefix);
hi = p.get_span();
expect(p, token.EOF);
ret @spanned(lo, hi, rec(module=m));