aboutsummaryrefslogtreecommitdiff
path: root/src/comp/front
diff options
context:
space:
mode:
Diffstat (limited to 'src/comp/front')
-rw-r--r--src/comp/front/ast.rs179
-rw-r--r--src/comp/front/eval.rs436
-rw-r--r--src/comp/front/extfmt.rs553
-rw-r--r--src/comp/front/lexer.rs24
-rw-r--r--src/comp/front/parser.rs1052
-rw-r--r--src/comp/front/pretty.rs87
6 files changed, 1998 insertions, 333 deletions
diff --git a/src/comp/front/ast.rs b/src/comp/front/ast.rs
index fb068dba..f9d609d1 100644
--- a/src/comp/front/ast.rs
+++ b/src/comp/front/ast.rs
@@ -5,6 +5,7 @@ import std._vec;
import util.common.span;
import util.common.spanned;
import util.common.ty_mach;
+import util.common.filename;
type ident = str;
@@ -36,11 +37,29 @@ tag def {
def_ty_arg(def_id);
def_binding(def_id);
def_use(def_id);
+ def_native_ty(def_id);
+ def_native_fn(def_id);
}
type crate = spanned[crate_];
type crate_ = rec(_mod module);
+tag crate_directive_ {
+ cdir_expr(@expr);
+ // FIXME: cdir_let should be eliminated
+ // and redirected to the use of const stmt_decls inside
+ // crate directive blocks.
+ cdir_let(ident, @expr, vec[@crate_directive]);
+ cdir_src_mod(ident, option.t[filename]);
+ cdir_dir_mod(ident, option.t[filename], vec[@crate_directive]);
+ cdir_view_item(@view_item);
+ cdir_meta(vec[@meta_item]);
+ cdir_syntax(path);
+ cdir_auth(path, effect);
+}
+type crate_directive = spanned[crate_directive_];
+
+
type meta_item = spanned[meta_item_];
type meta_item_ = rec(ident name, str value);
@@ -55,6 +74,7 @@ type pat = spanned[pat_];
tag pat_ {
pat_wild(ann);
pat_bind(ident, def_id, ann);
+ pat_lit(@lit, ann);
pat_tag(path, vec[@pat], option.t[variant_def], ann);
}
@@ -63,6 +83,11 @@ tag mutability {
imm;
}
+tag opacity {
+ op_abstract;
+ op_transparent;
+}
+
tag layer {
layer_value;
layer_state;
@@ -75,6 +100,11 @@ tag effect {
eff_unsafe;
}
+tag proto {
+ proto_iter;
+ proto_fn;
+}
+
tag binop {
add;
sub;
@@ -97,12 +127,49 @@ tag binop {
gt;
}
+fn binop_to_str(binop op) -> str {
+ alt (op) {
+ case (add) {ret "+";}
+ case (sub) {ret "-";}
+ case (mul) {ret "*";}
+ case (div) {ret "/";}
+ case (rem) {ret "%";}
+ case (and) {ret "&&";}
+ case (or) {ret "||";}
+ case (bitxor) {ret "^";}
+ case (bitand) {ret "&";}
+ case (bitor) {ret "|";}
+ case (lsl) {ret "<<";}
+ case (lsr) {ret ">>";}
+ case (asr) {ret ">>>";}
+ case (eq) {ret "==";}
+ case (lt) {ret "<";}
+ case (le) {ret "<=";}
+ case (ne) {ret "!=";}
+ case (ge) {ret ">=";}
+ case (gt) {ret ">";}
+ }
+}
+
+
tag unop {
box;
deref;
bitnot;
not;
neg;
+ _mutable;
+}
+
+fn unop_to_str(unop op) -> str {
+ alt (op) {
+ case (box) {ret "@";}
+ case (deref) {ret "*";}
+ case (bitnot) {ret "~";}
+ case (not) {ret "!";}
+ case (neg) {ret "-";}
+ case (_mutable) {ret "mutable";}
+ }
}
tag mode {
@@ -113,11 +180,9 @@ tag mode {
type stmt = spanned[stmt_];
tag stmt_ {
stmt_decl(@decl);
- stmt_ret(option.t[@expr]);
- stmt_log(@expr);
- stmt_check_expr(@expr);
- stmt_fail;
stmt_expr(@expr);
+ // These only exist in crate-level blocks.
+ stmt_crate_directive(@crate_directive);
}
type local = rec(option.t[@ty] ty,
@@ -142,7 +207,7 @@ type expr = spanned[expr_];
tag expr_ {
expr_vec(vec[@expr], ann);
expr_tup(vec[elt], ann);
- expr_rec(vec[field], ann);
+ expr_rec(vec[field], option.t[@expr], ann);
expr_call(@expr, vec[@expr], ann);
expr_bind(@expr, vec[option.t[@expr]], ann);
expr_binary(binop, @expr, @expr, ann);
@@ -152,6 +217,7 @@ tag expr_ {
expr_if(@expr, block, option.t[@expr], ann);
expr_while(@expr, block, ann);
expr_for(@decl, @expr, block, ann);
+ expr_for_each(@decl, @expr, block, ann);
expr_do_while(block, @expr, ann);
expr_alt(@expr, vec[arm], ann);
expr_block(block, ann);
@@ -160,6 +226,13 @@ tag expr_ {
expr_field(@expr, ident, ann);
expr_index(@expr, @expr, ann);
expr_path(path, option.t[def], ann);
+ expr_ext(path, vec[@expr], option.t[@expr], @expr, ann);
+ expr_fail;
+ expr_ret(option.t[@expr]);
+ expr_put(option.t[@expr]);
+ expr_be(@expr);
+ expr_log(@expr);
+ expr_check_expr(@expr);
}
type lit = spanned[lit_];
@@ -179,7 +252,8 @@ tag lit_ {
type ty_field = rec(ident ident, @ty ty);
type ty_arg = rec(mode mode, @ty ty);
// TODO: effect
-type ty_method = rec(ident ident, vec[ty_arg] inputs, @ty output);
+type ty_method = rec(proto proto, ident ident,
+ vec[ty_arg] inputs, @ty output);
type ty = spanned[ty_];
tag ty_ {
ty_nil;
@@ -193,17 +267,28 @@ tag ty_ {
ty_vec(@ty);
ty_tup(vec[@ty]);
ty_rec(vec[ty_field]);
- ty_fn(vec[ty_arg], @ty); // TODO: effect
+ ty_fn(proto, vec[ty_arg], @ty); // TODO: effect
ty_obj(vec[ty_method]);
ty_path(path, option.t[def]);
ty_mutable(@ty);
+ ty_type;
+ ty_constr(@ty, vec[@constr]);
+}
+
+tag constr_arg_ {
+ carg_base;
+ carg_ident(ident);
}
+type constr_arg = spanned[constr_arg_];
+type constr_ = rec(path path, vec[@constr_arg] args);
+type constr = spanned[constr_];
type arg = rec(mode mode, @ty ty, ident ident, def_id id);
-type _fn = rec(effect effect,
- bool is_iter,
- vec[arg] inputs,
- @ty output,
+type fn_decl = rec(effect effect,
+ vec[arg] inputs,
+ @ty output);
+type _fn = rec(fn_decl decl,
+ proto proto,
block body);
@@ -212,8 +297,8 @@ type method = spanned[method_];
type obj_field = rec(@ty ty, ident ident, def_id id, ann ann);
type _obj = rec(vec[obj_field] fields,
- vec[@method] methods);
-
+ vec[@method] methods,
+ option.t[block] dtor);
tag mod_index_entry {
mie_view_item(@view_item);
@@ -221,11 +306,28 @@ tag mod_index_entry {
mie_tag_variant(@item /* tag item */, uint /* variant index */);
}
+tag native_mod_index_entry {
+ nmie_view_item(@view_item);
+ nmie_item(@native_item);
+}
+
type mod_index = hashmap[ident,mod_index_entry];
type _mod = rec(vec[@view_item] view_items,
vec[@item] items,
mod_index index);
+tag native_abi {
+ native_abi_rust;
+ native_abi_cdecl;
+}
+
+type native_mod = rec(str native_name,
+ native_abi abi,
+ vec[@view_item] view_items,
+ vec[@native_item] items,
+ native_mod_index index);
+type native_mod_index = hashmap[ident,native_mod_index_entry];
+
type variant_arg = rec(@ty ty, def_id id);
type variant = rec(str name, vec[variant_arg] args, def_id id, ann ann);
@@ -233,6 +335,7 @@ type view_item = spanned[view_item_];
tag view_item_ {
view_item_use(ident, vec[@meta_item], def_id);
view_item_import(ident, vec[ident], def_id, option.t[def]);
+ view_item_export(ident);
}
type item = spanned[item_];
@@ -240,11 +343,18 @@ tag item_ {
item_const(ident, @ty, @expr, def_id, ann);
item_fn(ident, _fn, vec[ty_param], def_id, ann);
item_mod(ident, _mod, def_id);
+ item_native_mod(ident, native_mod, def_id);
item_ty(ident, @ty, vec[ty_param], def_id, ann);
item_tag(ident, vec[variant], vec[ty_param], def_id);
item_obj(ident, _obj, vec[ty_param], def_id, ann);
}
+type native_item = spanned[native_item_];
+tag native_item_ {
+ native_item_ty(ident, def_id);
+ native_item_fn(ident, fn_decl, vec[ty_param], def_id, ann);
+}
+
fn index_view_item(mod_index index, @view_item it) {
alt (it.node) {
case(ast.view_item_use(?id, _, _)) {
@@ -253,6 +363,11 @@ fn index_view_item(mod_index index, @view_item it) {
case(ast.view_item_import(?def_ident,_,_,_)) {
index.insert(def_ident, ast.mie_view_item(it));
}
+ case(ast.view_item_export(_)) {
+ // NB: don't index these, they might collide with
+ // the import or use that they're exporting. Have
+ // to do linear search for exports.
+ }
}
}
@@ -267,6 +382,9 @@ fn index_item(mod_index index, @item it) {
case (ast.item_mod(?id, _, _)) {
index.insert(id, ast.mie_item(it));
}
+ case (ast.item_native_mod(?id, _, _)) {
+ index.insert(id, ast.mie_item(it));
+ }
case (ast.item_ty(?id, _, _, _, _)) {
index.insert(id, ast.mie_item(it));
}
@@ -285,6 +403,41 @@ fn index_item(mod_index index, @item it) {
}
}
+fn index_native_item(native_mod_index index, @native_item it) {
+ alt (it.node) {
+ case (ast.native_item_ty(?id, _)) {
+ index.insert(id, ast.nmie_item(it));
+ }
+ case (ast.native_item_fn(?id, _, _, _, _)) {
+ index.insert(id, ast.nmie_item(it));
+ }
+ }
+}
+
+fn index_native_view_item(native_mod_index index, @view_item it) {
+ alt (it.node) {
+ case(ast.view_item_import(?def_ident,_,_,_)) {
+ index.insert(def_ident, ast.nmie_view_item(it));
+ }
+ case(ast.view_item_export(_)) {
+ // NB: don't index these, they might collide with
+ // the import or use that they're exporting. Have
+ // to do linear search for exports.
+ }
+ }
+}
+
+fn is_call_expr(@expr e) -> bool {
+ alt (e.node) {
+ case (expr_call(_, _, _)) {
+ ret true;
+ }
+ case (_) {
+ ret false;
+ }
+ }
+}
+
//
// Local Variables:
// mode: rust
diff --git a/src/comp/front/eval.rs b/src/comp/front/eval.rs
new file mode 100644
index 00000000..881797c9
--- /dev/null
+++ b/src/comp/front/eval.rs
@@ -0,0 +1,436 @@
+import std._vec;
+import std._str;
+import std.option;
+import std.option.some;
+import std.option.none;
+import std.map.hashmap;
+
+import driver.session;
+import ast.ident;
+import front.parser.parser;
+import front.parser.spanned;
+import front.parser.new_parser;
+import front.parser.parse_mod_items;
+import util.common;
+import util.common.filename;
+import util.common.append;
+import util.common.span;
+import util.common.new_str_hash;
+
+
+// Simple dynamic-typed value type for eval_expr.
+tag val {
+ val_bool(bool);
+ val_int(int);
+ val_str(str);
+}
+
+type env = vec[tup(ident, val)];
+
+fn mk_env() -> env {
+ let env e = vec();
+ ret e;
+}
+
+fn val_is_bool(val v) -> bool {
+ alt (v) {
+ case (val_bool(_)) { ret true; }
+ case (_) { }
+ }
+ ret false;
+}
+
+fn val_is_int(val v) -> bool {
+ alt (v) {
+ case (val_bool(_)) { ret true; }
+ case (_) { }
+ }
+ ret false;
+}
+
+fn val_is_str(val v) -> bool {
+ alt (v) {
+ case (val_str(_)) { ret true; }
+ case (_) { }
+ }
+ ret false;
+}
+
+fn val_as_bool(val v) -> bool {
+ alt (v) {
+ case (val_bool(?b)) { ret b; }
+ case (_) { }
+ }
+ fail;
+}
+
+fn val_as_int(val v) -> int {
+ alt (v) {
+ case (val_int(?i)) { ret i; }
+ case (_) { }
+ }
+ fail;
+}
+
+fn val_as_str(val v) -> str {
+ alt (v) {
+ case (val_str(?s)) { ret s; }
+ case (_) { }
+ }
+ fail;
+}
+
+fn lookup(session.session sess, env e, span sp, ident i) -> val {
+ for (tup(ident, val) pair in e) {
+ if (_str.eq(i, pair._0)) {
+ ret pair._1;
+ }
+ }
+ sess.span_err(sp, "unknown variable: " + i);
+ fail;
+}
+
+fn eval_lit(session.session sess, env e, span sp, @ast.lit lit) -> val {
+ alt (lit.node) {
+ case (ast.lit_bool(?b)) { ret val_bool(b); }
+ case (ast.lit_int(?i)) { ret val_int(i); }
+ case (ast.lit_str(?s)) { ret val_str(s); }
+ case (_) {
+ sess.span_err(sp, "evaluating unsupported literal");
+ }
+ }
+ fail;
+}
+
+fn eval_expr(session.session sess, env e, @ast.expr x) -> val {
+ alt (x.node) {
+ case (ast.expr_path(?pth, _, _)) {
+ if (_vec.len[ident](pth.node.idents) == 1u &&
+ _vec.len[@ast.ty](pth.node.types) == 0u) {
+ ret lookup(sess, e, x.span, pth.node.idents.(0));
+ }
+ sess.span_err(x.span, "evaluating structured path-name");
+ }
+
+ case (ast.expr_lit(?lit, _)) {
+ ret eval_lit(sess, e, x.span, lit);
+ }
+
+ case (ast.expr_unary(?op, ?a, _)) {
+ auto av = eval_expr(sess, e, a);
+ alt (op) {
+ case (ast.not) {
+ if (val_is_bool(av)) {
+ ret val_bool(!val_as_bool(av));
+ }
+ sess.span_err(x.span, "bad types in '!' expression");
+ }
+ case (_) {
+ sess.span_err(x.span, "evaluating unsupported unop");
+ }
+ }
+ }
+
+ case (ast.expr_binary(?op, ?a, ?b, _)) {
+ auto av = eval_expr(sess, e, a);
+ auto bv = eval_expr(sess, e, b);
+ alt (op) {
+ case (ast.add) {
+ if (val_is_int(av) && val_is_int(bv)) {
+ ret val_int(val_as_int(av) + val_as_int(bv));
+ }
+ if (val_is_str(av) && val_is_str(bv)) {
+ ret val_str(val_as_str(av) + val_as_str(bv));
+ }
+ sess.span_err(x.span, "bad types in '+' expression");
+ }
+
+ case (ast.sub) {
+ if (val_is_int(av) && val_is_int(bv)) {
+ ret val_int(val_as_int(av) - val_as_int(bv));
+ }
+ sess.span_err(x.span, "bad types in '-' expression");
+ }
+
+ case (ast.mul) {
+ if (val_is_int(av) && val_is_int(bv)) {
+ ret val_int(val_as_int(av) * val_as_int(bv));
+ }
+ sess.span_err(x.span, "bad types in '*' expression");
+ }
+
+ case (ast.div) {
+ if (val_is_int(av) && val_is_int(bv)) {
+ ret val_int(val_as_int(av) / val_as_int(bv));
+ }
+ sess.span_err(x.span, "bad types in '/' expression");
+ }
+
+ case (ast.rem) {
+ if (val_is_int(av) && val_is_int(bv)) {
+ ret val_int(val_as_int(av) % val_as_int(bv));
+ }
+ sess.span_err(x.span, "bad types in '%' expression");
+ }
+
+ case (ast.and) {
+ if (val_is_bool(av) && val_is_bool(bv)) {
+ ret val_bool(val_as_bool(av) && val_as_bool(bv));
+ }
+ sess.span_err(x.span, "bad types in '&&' expression");
+ }
+
+ case (ast.or) {
+ if (val_is_bool(av) && val_is_bool(bv)) {
+ ret val_bool(val_as_bool(av) || val_as_bool(bv));
+ }
+ sess.span_err(x.span, "bad types in '||' expression");
+ }
+
+ case (ast.eq) {
+ ret val_bool(val_eq(sess, x.span, av, bv));
+ }
+
+ case (ast.ne) {
+ ret val_bool(! val_eq(sess, x.span, av, bv));
+ }
+
+ case (_) {
+ sess.span_err(x.span, "evaluating unsupported binop");
+ }
+ }
+ }
+ case (_) {
+ sess.span_err(x.span, "evaluating unsupported expression");
+ }
+ }
+ fail;
+}
+
+fn val_eq(session.session sess, span sp, val av, val bv) -> bool {
+ if (val_is_bool(av) && val_is_bool(bv)) {
+ ret val_as_bool(av) == val_as_bool(bv);
+ }
+ if (val_is_int(av) && val_is_int(bv)) {
+ ret val_as_int(av) == val_as_int(bv);
+ }
+ if (val_is_str(av) && val_is_str(bv)) {
+ ret _str.eq(val_as_str(av),
+ val_as_str(bv));
+ }
+ sess.span_err(sp, "bad types in comparison");
+ fail;
+}
+
+impure fn eval_crate_directives(parser p,
+ env e,
+ vec[@ast.crate_directive] cdirs,
+ str prefix,
+ &mutable vec[@ast.view_item] view_items,
+ &mutable vec[@ast.item] items,
+ hashmap[ast.ident,
+ ast.mod_index_entry] index) {
+
+ for (@ast.crate_directive sub_cdir in cdirs) {
+ eval_crate_directive(p, e, sub_cdir, prefix,
+ view_items, items, index);
+ }
+}
+
+
+impure fn eval_crate_directives_to_mod(parser p,
+ env e,
+ vec[@ast.crate_directive] cdirs,
+ str prefix) -> ast._mod {
+ let vec[@ast.view_item] view_items = vec();
+ let vec[@ast.item] items = vec();
+ auto index = new_str_hash[ast.mod_index_entry]();
+
+ eval_crate_directives(p, e, cdirs, prefix,
+ view_items, items, index);
+
+ ret rec(view_items=view_items, items=items, index=index);
+}
+
+
+impure fn eval_crate_directive_block(parser p,
+ env e,
+ &ast.block blk,
+ str prefix,
+ &mutable vec[@ast.view_item] view_items,
+ &mutable vec[@ast.item] items,
+ hashmap[ast.ident,
+ ast.mod_index_entry] index) {
+
+ for (@ast.stmt s in blk.node.stmts) {
+ alt (s.node) {
+ case (ast.stmt_crate_directive(?cdir)) {
+ eval_crate_directive(p, e, cdir, prefix,
+ view_items, items, index);
+ }
+ case (_) {
+ auto sess = p.get_session();
+ sess.span_err(s.span,
+ "unsupported stmt in crate-directive block");
+ }
+ }
+ }
+}
+
+impure fn eval_crate_directive_expr(parser p,
+ env e,
+ @ast.expr x,
+ str prefix,
+ &mutable vec[@ast.view_item] view_items,
+ &mutable vec[@ast.item] items,
+ hashmap[ast.ident,
+ ast.mod_index_entry] index) {
+ auto sess = p.get_session();
+
+ alt (x.node) {
+
+ case (ast.expr_if(?cond, ?thn, ?elopt, _)) {
+ auto cv = eval_expr(sess, e, cond);
+ if (!val_is_bool(cv)) {
+ sess.span_err(x.span, "bad cond type in 'if'");
+ }
+
+ if (val_as_bool(cv)) {
+ ret eval_crate_directive_block(p, e, thn, prefix,
+ view_items, items,
+ index);
+ }
+
+ alt (elopt) {
+ case (some[@ast.expr](?els)) {
+ ret eval_crate_directive_expr(p, e, els, prefix,
+ view_items, items,
+ index);
+ }
+ case (_) {
+ // Absent-else is ok.
+ }
+ }
+ }
+
+ case (ast.expr_alt(?v, ?arms, _)) {
+ auto vv = eval_expr(sess, e, v);
+ for (ast.arm arm in arms) {
+ alt (arm.pat.node) {
+ case (ast.pat_lit(?lit, _)) {
+ auto pv = eval_lit(sess, e,
+ arm.pat.span, lit);
+ if (val_eq(sess, arm.pat.span, vv, pv)) {
+ ret eval_crate_directive_block
+ (p, e, arm.block, prefix,
+ view_items, items, index);
+ }
+ }
+ case (ast.pat_wild(_)) {
+ ret eval_crate_directive_block
+ (p, e, arm.block, prefix,
+ view_items, items, index);
+ }
+ case (_) {
+ sess.span_err(arm.pat.span,
+ "bad pattern type in 'alt'");
+ }
+ }
+ }
+ sess.span_err(x.span, "no cases matched in 'alt'");
+ }
+
+ case (ast.expr_block(?block, _)) {
+ ret eval_crate_directive_block(p, e, block, prefix,
+ view_items, items,
+ index);
+ }
+
+ case (_) {
+ sess.span_err(x.span, "unsupported expr type");
+ }
+ }
+}
+
+impure fn eval_crate_directive(parser p,
+ env e,
+ @ast.crate_directive cdir,
+ str prefix,
+ &mutable vec[@ast.view_item] view_items,
+ &mutable vec[@ast.item] items,
+ hashmap[ast.ident,
+ ast.mod_index_entry] index) {
+ alt (cdir.node) {
+
+ case (ast.cdir_let(?id, ?x, ?cdirs)) {
+ auto v = eval_expr(p.get_session(), e, x);
+ auto e0 = vec(tup(id, v)) + e;
+ eval_crate_directives(p, e0, cdirs, prefix,
+ view_items, items, index);
+ }
+
+ case (ast.cdir_expr(?x)) {
+ eval_crate_directive_expr(p, e, x, prefix,
+ view_items, items, index);
+ }
+
+ case (ast.cdir_src_mod(?id, ?file_opt)) {
+
+ auto file_path = id + ".rs";
+ alt (file_opt) {
+ case (some[filename](?f)) {
+ file_path = f;
+ }
+ case (none[filename]) {}
+ }
+
+ auto full_path = prefix + std.os.path_sep() + file_path;
+
+ auto p0 = new_parser(p.get_session(), e, 0, full_path);
+ auto m0 = parse_mod_items(p0, token.EOF);
+ auto im = ast.item_mod(id, m0, p.next_def_id());
+ auto i = @spanned(cdir.span, cdir.span, im);
+ ast.index_item(index, i);
+ append[@ast.item](items, i);
+ }
+
+ case (ast.cdir_dir_mod(?id, ?dir_opt, ?cdirs)) {
+
+ auto path = id;
+ alt (dir_opt) {
+ case (some[filename](?d)) {
+ path = d;
+ }
+ case (none[filename]) {}
+ }
+
+ auto full_path = prefix + std.os.path_sep() + path;
+ auto m0 = eval_crate_directives_to_mod(p, e, cdirs, full_path);
+ auto im = ast.item_mod(id, m0, p.next_def_id());
+ auto i = @spanned(cdir.span, cdir.span, im);
+ ast.index_item(index, i);
+ append[@ast.item](items, i);
+ }
+
+ case (ast.cdir_view_item(?vi)) {
+ append[@ast.view_item](view_items, vi);
+ ast.index_view_item(index, vi);
+ }
+
+ case (ast.cdir_meta(?mi)) {}
+ case (ast.cdir_syntax(?pth)) {}
+ case (ast.cdir_auth(?pth, ?eff)) {}
+ }
+}
+
+
+//
+// Local Variables:
+// mode: rust
+// fill-column: 78;
+// indent-tabs-mode: nil
+// c-basic-offset: 4
+// buffer-file-coding-system: utf-8-unix
+// compile-command: "make -k -C ../.. 2>&1 | sed -e 's/\\/x\\//x:\\//g'";
+// End:
+//
diff --git a/src/comp/front/extfmt.rs b/src/comp/front/extfmt.rs
new file mode 100644
index 00000000..255614d0
--- /dev/null
+++ b/src/comp/front/extfmt.rs
@@ -0,0 +1,553 @@
+/* The 'fmt' extension is modeled on the posix printf system.
+ *
+ * A posix conversion ostensibly looks like this:
+ *
+ * %[parameter][flags][width][.precision][length]type
+ *
+ * Given the different numeric type bestiary we have, we omit the 'length'
+ * parameter and support slightly different conversions for 'type':
+ *
+ * %[parameter][flags][width][.precision]type
+ *
+ * we also only support translating-to-rust a tiny subset of the possible
+ * combinations at the moment.
+ */
+
+import util.common;
+
+import std._str;
+import std._vec;
+import std.option;
+import std.option.none;
+import std.option.some;
+
+export expand_syntax_ext;
+
+tag signedness {
+ signed;
+ unsigned;
+}
+
+tag caseness {
+ case_upper;
+ case_lower;
+}
+
+tag ty {
+ ty_bool;
+ ty_str;
+ ty_char;
+ ty_int(signedness);
+ ty_bits;
+ ty_hex(caseness);
+ // FIXME: More types
+}
+
+tag flag {
+ flag_left_justify;
+ flag_left_zero_pad;
+ flag_left_space_pad;
+ flag_plus_if_positive;
+ flag_alternate;
+}
+
+tag count {
+ count_is(int);
+ count_is_param(int);
+ count_is_next_param;
+ count_implied;
+}
+
+// A formatted conversion from an expression to a string
+type conv = rec(option.t[int] param,
+ vec[flag] flags,
+ count width,
+ count precision,
+ ty ty);
+
+// A fragment of the output sequence
+tag piece {
+ piece_string(str);
+ piece_conv(conv);
+}
+
+// TODO: Need to thread parser through here to handle errors correctly
+fn expand_syntax_ext(vec[@ast.expr] args,
+ option.t[@ast.expr] body) -> @ast.expr {
+
+ if (_vec.len[@ast.expr](args) == 0u) {
+ log "malformed #fmt call";
+ fail;
+ }
+
+ auto fmt = expr_to_str(args.(0));
+
+ // log "Format string:";
+ // log fmt;
+
+ auto pieces = parse_fmt_string(fmt);
+ auto args_len = _vec.len[@ast.expr](args);
+ auto fmt_args = _vec.slice[@ast.expr](args, 1u, args_len - 1u);
+ ret pieces_to_expr(pieces, args);
+}
+
+fn expr_to_str(@ast.expr expr) -> str {
+ alt (expr.node) {
+ case (ast.expr_lit(?l, _)) {
+ alt (l.node) {
+ case (ast.lit_str(?s)) {
+ ret s;
+ }
+ }
+ }
+ }
+ log "malformed #fmt call";
+ fail;
+}
+
+fn parse_fmt_string(str s) -> vec[piece] {
+ let vec[piece] pieces = vec();
+ auto lim = _str.byte_len(s);
+ auto buf = "";
+
+ fn flush_buf(str buf, &vec[piece] pieces) -> str {
+ if (_str.byte_len(buf) > 0u) {
+ auto piece = piece_string(buf);
+ pieces += piece;
+ }
+ ret "";
+ }
+
+ auto i = 0u;
+ while (i < lim) {
+ auto curr = _str.substr(s, i, 1u);
+ if (_str.eq(curr, "%")) {
+ i += 1u;
+ if (i >= lim) {
+ log "unterminated conversion at end of string";
+ fail;
+ }
+ auto curr2 = _str.substr(s, i, 1u);
+ if (_str.eq(curr2, "%")) {
+ i += 1u;
+ } else {
+ buf = flush_buf(buf, pieces);
+ auto res = parse_conversion(s, i, lim);
+ pieces += res._0;
+ i = res._1;
+ }
+ } else {
+ buf += curr;
+ i += 1u;
+ }
+ }
+ buf = flush_buf(buf, pieces);
+ ret pieces;
+}
+
+fn peek_num(str s, uint i, uint lim) -> option.t[tup(uint, uint)] {
+ if (i >= lim) {
+ ret none[tup(uint, uint)];
+ }
+
+ auto c = s.(i);
+ if (!('0' as u8 <= c && c <= '9' as u8)) {
+ ret option.none[tup(uint, uint)];
+ }
+
+ auto n = (c - ('0' as u8)) as uint;
+ alt (peek_num(s, i + 1u, lim)) {
+ case (none[tup(uint, uint)]) {
+ ret some[tup(uint, uint)](tup(n, i + 1u));
+ }
+ case (some[tup(uint, uint)](?next)) {
+ auto m = next._0;
+ auto j = next._1;
+ ret some[tup(uint, uint)](tup(n * 10u + m, j));
+ }
+ }
+
+}
+
+fn parse_conversion(str s, uint i, uint lim) -> tup(piece, uint) {
+ auto parm = parse_parameter(s, i, lim);
+ auto flags = parse_flags(s, parm._1, lim);
+ auto width = parse_count(s, flags._1, lim);
+ auto prec = parse_precision(s, width._1, lim);
+ auto ty = parse_type(s, prec._1, lim);
+ ret tup(piece_conv(rec(param = parm._0,
+ flags = flags._0,
+ width = width._0,
+ precision = prec._0,
+ ty = ty._0)),
+ ty._1);
+}
+
+fn parse_parameter(str s, uint i, uint lim) -> tup(option.t[int], uint) {
+ if (i >= lim) {
+ ret tup(none[int], i);
+ }
+
+ auto num = peek_num(s, i, lim);
+ alt (num) {
+ case (none[tup(uint, uint)]) {
+ ret tup(none[int], i);
+ }
+ case (some[tup(uint, uint)](?t)) {
+ auto n = t._0;
+ auto j = t._1;
+ if (j < lim && s.(j) == '$' as u8) {
+ ret tup(some[int](n as int), j + 1u);
+ }
+ else {
+ ret tup(none[int], i);
+ }
+ }
+ }
+}
+
+fn parse_flags(str s, uint i, uint lim) -> tup(vec[flag], uint) {
+ let vec[flag] noflags = vec();
+
+ if (i >= lim) {
+ ret tup(noflags, i);
+ }
+
+ fn more_(flag f, str s, uint i, uint lim) -> tup(vec[flag], uint) {
+ auto next = parse_flags(s, i + 1u, lim);
+ auto rest = next._0;
+ auto j = next._1;
+ let vec[flag] curr = vec(f);
+ ret tup(curr + rest, j);
+ }
+
+ auto more = bind more_(_, s, i, lim);
+
+ auto f = s.(i);
+ if (f == ('-' as u8)) {
+ ret more(flag_left_justify);
+ } else if (f == ('0' as u8)) {
+ ret more(flag_left_zero_pad);
+ } else if (f == (' ' as u8)) {
+ ret more(flag_left_space_pad);
+ } else if (f == ('+' as u8)) {
+ ret more(flag_plus_if_positive);
+ } else if (f == ('#' as u8)) {
+ ret more(flag_alternate);
+ } else {
+ ret tup(noflags, i);
+ }
+}
+
+fn parse_count(str s, uint i, uint lim) -> tup(count, uint) {
+ if (i >= lim) {
+ ret tup(count_implied, i);
+ }
+
+ if (s.(i) == ('*' as u8)) {
+ auto param = parse_parameter(s, i + 1u, lim);
+ auto j = param._1;
+ alt (param._0) {
+ case (none[int]) {
+ ret tup(count_is_next_param, j);
+ }
+ case (some[int](?n)) {
+ ret tup(count_is_param(n), j);
+ }
+ }
+ } else {
+ auto num = peek_num(s, i, lim);
+ alt (num) {
+ case (none[tup(uint, uint)]) {
+ ret tup(count_implied, i);
+ }
+ case (some[tup(uint, uint)](?num)) {
+ ret tup(count_is(num._0 as int), num._1);
+ }
+ }
+ }
+}
+
+fn parse_precision(str s, uint i, uint lim) -> tup(count, uint) {
+ if (i >= lim) {
+ ret tup(count_implied, i);
+ }
+
+ if (s.(i) == '.' as u8) {
+ ret parse_count(s, i + 1u, lim);
+ } else {
+ ret tup(count_implied, i);
+ }
+}
+
+fn parse_type(str s, uint i, uint lim) -> tup(ty, uint) {
+ if (i >= lim) {
+ log "missing type in conversion";
+ fail;
+ }
+
+ auto t;
+ auto tstr = _str.substr(s, i, 1u);
+ if (_str.eq(tstr, "b")) {
+ t = ty_bool;
+ } else if (_str.eq(tstr, "s")) {
+ t = ty_str;
+ } else if (_str.eq(tstr, "c")) {
+ t = ty_char;
+ } else if (_str.eq(tstr, "d")
+ || _str.eq(tstr, "i")) {
+ // TODO: Do we really want two signed types here?
+ // How important is it to be printf compatible?
+ t = ty_int(signed);
+ } else if (_str.eq(tstr, "u")) {
+ t = ty_int(unsigned);
+ } else if (_str.eq(tstr, "x")) {
+ t = ty_hex(case_lower);
+ } else if (_str.eq(tstr, "X")) {
+ t = ty_hex(case_upper);
+ } else if (_str.eq(tstr, "t")) {
+ t = ty_bits;
+ } else {
+ log "unknown type in conversion";
+ fail;
+ }
+
+ ret tup(t, i + 1u);
+}
+
+fn pieces_to_expr(vec[piece] pieces, vec[@ast.expr] args) -> @ast.expr {
+
+ fn make_new_lit(common.span sp, ast.lit_ lit) -> @ast.expr {
+ auto sp_lit = @parser.spanned[ast.lit_](sp, sp, lit);
+ auto expr = ast.expr_lit(sp_lit, ast.ann_none);
+ ret @parser.spanned[ast.expr_](sp, sp, expr);
+ }
+
+ fn make_new_str(common.span sp, str s) -> @ast.expr {
+ auto lit = ast.lit_str(s);
+ ret make_new_lit(sp, lit);
+ }
+
+ fn make_new_uint(common.span sp, uint u) -> @ast.expr {
+ auto lit = ast.lit_uint(u);
+ ret make_new_lit(sp, lit);
+ }
+
+ fn make_add_expr(common.span sp,
+ @ast.expr lhs, @ast.expr rhs) -> @ast.expr {
+ auto binexpr = ast.expr_binary(ast.add, lhs, rhs, ast.ann_none);
+ ret @parser.spanned[ast.expr_](sp, sp, binexpr);
+ }
+
+ fn make_call(common.span sp, vec[ast.ident] fn_path,
+ vec[@ast.expr] args) -> @ast.expr {
+ let vec[ast.ident] path_idents = fn_path;
+ let vec[@ast.ty] path_types = vec();
+ auto path = rec(idents = path_idents, types = path_types);
+ auto sp_path = parser.spanned[ast.path_](sp, sp, path);
+ auto pathexpr = ast.expr_path(sp_path, none[ast.def], ast.ann_none);
+ auto sp_pathexpr = @parser.spanned[ast.expr_](sp, sp, pathexpr);
+ auto callexpr = ast.expr_call(sp_pathexpr, args, ast.ann_none);
+ auto sp_callexpr = @parser.spanned[ast.expr_](sp, sp, callexpr);
+ ret sp_callexpr;
+ }
+
+ fn make_new_conv(conv cnv, @ast.expr arg) -> @ast.expr {
+
+ auto unsupported = "conversion not supported in #fmt string";
+
+ alt (cnv.param) {
+ case (option.none[int]) {
+ }
+ case (_) {
+ log unsupported;
+ fail;
+ }
+ }
+
+ if (_vec.len[flag](cnv.flags) != 0u) {
+ log unsupported;
+ fail;
+ }
+
+ alt (cnv.width) {
+ case (count_implied) {
+ }
+ case (_) {
+ log unsupported;
+ fail;
+ }
+ }
+
+ alt (cnv.precision) {
+ case (count_implied) {
+ }
+ case (_) {
+ log unsupported;
+ fail;
+ }
+ }
+
+ alt (cnv.ty) {
+ case (ty_str) {
+ ret arg;
+ }
+ case (ty_int(?sign)) {
+ alt (sign) {
+ case (signed) {
+ let vec[str] path = vec("std", "_int", "to_str");
+ auto radix_expr = make_new_uint(arg.span, 10u);
+ let vec[@ast.expr] args = vec(arg, radix_expr);
+ ret make_call(arg.span, path, args);
+ }
+ case (unsigned) {
+ let vec[str] path = vec("std", "_uint", "to_str");
+ auto radix_expr = make_new_uint(arg.span, 10u);
+ let vec[@ast.expr] args = vec(arg, radix_expr);
+ ret make_call(arg.span, path, args);
+ }
+ }
+ }
+ case (_) {
+ log unsupported;
+ fail;
+ }
+ }
+ }
+
+ fn log_conv(conv c) {
+ alt (c.param) {
+ case (some[int](?p)) {
+ log "param: " + std._int.to_str(p, 10u);
+ }
+ case (_) {
+ log "param: none";
+ }
+ }
+ for (flag f in c.flags) {
+ alt (f) {
+ case (flag_left_justify) {
+ log "flag: left justify";
+ }
+ case (flag_left_zero_pad) {
+ log "flag: left zero pad";
+ }
+ case (flag_left_space_pad) {
+ log "flag: left space pad";
+ }
+ case (flag_plus_if_positive) {
+ log "flag: plus if positive";
+ }
+ case (flag_alternate) {
+ log "flag: alternate";
+ }
+ }
+ }
+ alt (c.width) {
+ case (count_is(?i)) {
+ log "width: count is " + std._int.to_str(i, 10u);
+ }
+ case (count_is_param(?i)) {
+ log "width: count is param " + std._int.to_str(i, 10u);
+ }
+ case (count_is_next_param) {
+ log "width: count is next param";
+ }
+ case (count_implied) {
+ log "width: count is implied";
+ }
+ }
+ alt (c.precision) {
+ case (count_is(?i)) {
+ log "prec: count is " + std._int.to_str(i, 10u);
+ }
+ case (count_is_param(?i)) {
+ log "prec: count is param " + std._int.to_str(i, 10u);
+ }
+ case (count_is_next_param) {
+ log "prec: count is next param";
+ }
+ case (count_implied) {
+ log "prec: count is implied";
+ }
+ }
+ alt (c.ty) {
+ case (ty_bool) {
+ log "type: bool";
+ }
+ case (ty_str) {
+ log "type: str";
+ }
+ case (ty_char) {
+ log "type: char";
+ }
+ case (ty_int(?s)) {
+ alt (s) {
+ case (signed) {
+ log "type: signed";
+ }
+ case (unsigned) {
+ log "type: unsigned";
+ }
+ }
+ }
+ case (ty_bits) {
+ log "type: bits";
+ }
+ case (ty_hex(?cs)) {
+ alt (cs) {
+ case (case_upper) {
+ log "type: uhex";
+ }
+ case (case_lower) {
+ log "type: lhex";
+ }
+ }
+ }
+ }
+ }
+
+ auto sp = args.(0).span;
+ auto n = 0u;
+ auto tmp_expr = make_new_str(sp, "");
+
+ for (piece p in pieces) {
+ alt (p) {
+ case (piece_string(?s)) {
+ auto s_expr = make_new_str(sp, s);
+ tmp_expr = make_add_expr(sp, tmp_expr, s_expr);
+ }
+ case (piece_conv(?conv)) {
+ if (n >= _vec.len[@ast.expr](args)) {
+ log "too many conversions in #fmt string";
+ fail;
+ }
+
+ // TODO: Remove debug logging
+ // log "Building conversion:";
+ // log_conv(conv);
+
+ n += 1u;
+ auto arg_expr = args.(n);
+ auto c_expr = make_new_conv(conv, arg_expr);
+ tmp_expr = make_add_expr(sp, tmp_expr, c_expr);
+ }
+ }
+ }
+
+ // TODO: Remove this debug logging
+ // log "dumping expanded ast:";
+ // log pretty.print_expr(tmp_expr);
+ ret tmp_expr;
+}
+
+//
+// Local Variables:
+// mode: rust
+// fill-column: 78;
+// indent-tabs-mode: nil
+// c-basic-offset: 4
+// buffer-file-coding-system: utf-8-unix
+// compile-command: "make -k -C ../.. 2>&1 | sed -e 's/\\/x\\//x:\\//g'";
+// End:
+//
diff --git a/src/comp/front/lexer.rs b/src/comp/front/lexer.rs
index a45b1113..0e15e3d8 100644
--- a/src/comp/front/lexer.rs
+++ b/src/comp/front/lexer.rs
@@ -1,4 +1,4 @@
-import std._io.stdio_reader;
+import std.io.stdio_reader;
import std._str;
import std.map;
import std.map.hashmap;
@@ -90,7 +90,6 @@ fn new_reader(stdio_reader rdr, str filename) -> reader
}
auto keywords = new_str_hash[token.token]();
- auto reserved = new_str_hash[()]();
keywords.insert("mod", token.MOD);
keywords.insert("use", token.USE);
@@ -191,6 +190,16 @@ fn new_reader(stdio_reader rdr, str filename) -> reader
keywords.insert("f32", token.MACH(common.ty_f32));
keywords.insert("f64", token.MACH(common.ty_f64));
+ auto reserved = new_str_hash[()]();
+
+ reserved.insert("f16", ()); // IEEE 754-2008 'binary16' interchange fmt
+ reserved.insert("f80", ()); // IEEE 754-1985 'extended'
+ reserved.insert("f128", ()); // IEEE 754-2008 'binary128'
+ reserved.insert("m32", ()); // IEEE 754-2008 'decimal32'
+ reserved.insert("m64", ()); // IEEE 754-2008 'decimal64'
+ reserved.insert("m128", ()); // IEEE 754-2008 'decimal128'
+ reserved.insert("dec", ()); // One of m32, m64, m128
+
ret reader(rdr, filename, rdr.getc() as char, rdr.getc() as char,
1u, 0u, 1u, 0u, keywords, reserved);
}
@@ -425,6 +434,12 @@ impure fn next_token(reader rdr) -> token.token {
ret kwds.get(accum_str);
}
+ auto rsvd = rdr.get_reserved();
+ if (rsvd.contains_key(accum_str)) {
+ log "reserved keyword";
+ fail;
+ }
+
ret token.IDENT(accum_str);
}
@@ -650,12 +665,9 @@ impure fn next_token(reader rdr) -> token.token {
case ('%') {
ret binop(rdr, token.PERCENT);
}
-
}
- log "lexer stopping at ";
- log c;
- ret token.EOF;
+ fail;
}
diff --git a/src/comp/front/parser.rs b/src/comp/front/parser.rs
index e629683c..bb7b8558 100644
--- a/src/comp/front/parser.rs
+++ b/src/comp/front/parser.rs
@@ -1,4 +1,4 @@
-import std._io;
+import std.io;
import std._vec;
import std._str;
import std.option;
@@ -8,6 +8,7 @@ import std.map.hashmap;
import driver.session;
import util.common;
+import util.common.filename;
import util.common.append;
import util.common.span;
import util.common.new_str_hash;
@@ -17,6 +18,11 @@ tag restriction {
RESTRICT_NO_CALL_EXPRS;
}
+tag file_type {
+ CRATE_FILE;
+ SOURCE_FILE;
+}
+
state type parser =
state obj {
fn peek() -> token.token;
@@ -24,28 +30,37 @@ state type parser =
impure fn err(str s);
impure fn restrict(restriction r);
fn get_restriction() -> restriction;
+ fn get_file_type() -> file_type;
+ fn get_env() -> eval.env;
fn get_session() -> session.session;
fn get_span() -> common.span;
fn next_def_id() -> ast.def_id;
+ fn get_prec_table() -> vec[op_spec];
};
impure fn new_parser(session.session sess,
- ast.crate_num crate, str path) -> parser {
+ eval.env env,
+ ast.crate_num crate,
+ str path) -> parser {
state obj stdio_parser(session.session sess,
+ eval.env env,
+ file_type ftype,
mutable token.token tok,
mutable common.pos lo,
mutable common.pos hi,
mutable ast.def_num def,
mutable restriction res,
ast.crate_num crate,
- lexer.reader rdr)
+ lexer.reader rdr,
+ vec[op_spec] precs)
{
fn peek() -> token.token {
- // log token.to_str(tok);
ret tok;
}
impure fn bump() {
+ // log rdr.get_filename()
+ // + ":" + common.istr(lo.line as int);
tok = lexer.next_token(rdr);
lo = rdr.get_mark_pos();
hi = rdr.get_curr_pos();
@@ -78,12 +93,29 @@ impure fn new_parser(session.session sess,
def += 1;
ret tup(crate, def);
}
+
+ fn get_file_type() -> file_type {
+ ret ftype;
+ }
+
+ fn get_env() -> eval.env {
+ ret env;
+ }
+
+ fn get_prec_table() -> vec[op_spec] {
+ ret precs;
+ }
}
- auto srdr = _io.new_stdio_reader(path);
+ auto ftype = SOURCE_FILE;
+ if (_str.ends_with(path, ".rc")) {
+ ftype = CRATE_FILE;
+ }
+ auto srdr = io.new_stdio_reader(path);
auto rdr = lexer.new_reader(srdr, path);
auto npos = rdr.get_curr_pos();
- ret stdio_parser(sess, lexer.next_token(rdr),
- npos, npos, 0, UNRESTRICTED, crate, rdr);
+ ret stdio_parser(sess, env, ftype, lexer.next_token(rdr),
+ npos, npos, 0, UNRESTRICTED, crate, rdr,
+ prec_table());
}
impure fn unexpected(parser p, token.token t) {
@@ -121,9 +153,23 @@ impure fn parse_ident(parser p) -> ast.ident {
}
-impure fn parse_str_lit(parser p) -> ast.ident {
+/* FIXME: gross hack copied from rustboot to make certain configuration-based
+ * decisions work at build-time. We should probably change it to use a
+ * lexical sytnax-extension or something similar. For now we just imitate
+ * rustboot.
+ */
+impure fn parse_str_lit_or_env_ident(parser p) -> ast.ident {
alt (p.peek()) {
case (token.LIT_STR(?s)) { p.bump(); ret s; }
+ case (token.IDENT(?i)) {
+ auto v = eval.lookup(p.get_session(), p.get_env(),
+ p.get_span(), i);
+ if (!eval.val_is_str(v)) {
+ p.err("expecting string-valued variable");
+ }
+ p.bump();
+ ret eval.val_as_str(v);
+ }
case (_) {
p.err("expecting string literal");
fail;
@@ -132,7 +178,8 @@ impure fn parse_str_lit(parser p) -> ast.ident {
}
-impure fn parse_ty_fn(parser p, ast.span lo) -> ast.ty_ {
+impure fn parse_ty_fn(ast.proto proto, parser p,
+ ast.span lo) -> ast.ty_ {
impure fn parse_fn_input_ty(parser p) -> rec(ast.mode mode, @ast.ty ty) {
auto mode;
if (p.peek() == token.BINOP(token.AND)) {
@@ -158,6 +205,10 @@ impure fn parse_ty_fn(parser p, ast.span lo) -> ast.ty_ {
auto inputs = parse_seq[rec(ast.mode mode, @ast.ty ty)](token.LPAREN,
token.RPAREN, some(token.COMMA), f, p);
+ // FIXME: dropping constrs on the floor at the moment.
+ // pick them up when they're used by typestate pass.
+ parse_constrs(p);
+
let @ast.ty output;
if (p.peek() == token.RARROW) {
p.bump();
@@ -166,20 +217,33 @@ impure fn parse_ty_fn(parser p, ast.span lo) -> ast.ty_ {
output = @spanned(lo, inputs.span, ast.ty_nil);
}
- ret ast.ty_fn(inputs.node, output);
+ ret ast.ty_fn(proto, inputs.node, output);
+}
+
+impure fn parse_proto(parser p) -> ast.proto {
+ alt (p.peek()) {
+ case (token.ITER) { p.bump(); ret ast.proto_iter; }
+ case (token.FN) { p.bump(); ret ast.proto_fn; }
+ case (?t) { unexpected(p, t); }
+ }
+ fail;
}
impure fn parse_ty_obj(parser p, &mutable ast.span hi) -> ast.ty_ {
expect(p, token.OBJ);
impure fn parse_method_sig(parser p) -> ast.ty_method {
auto flo = p.get_span();
- expect(p, token.FN);
+
+ // FIXME: do something with this, currently it's dropped on the floor.
+ let ast.effect eff = parse_effect(p);
+ let ast.proto proto = parse_proto(p);
auto ident = parse_ident(p);
- auto f = parse_ty_fn(p, flo);
+ auto f = parse_ty_fn(proto, p, flo);
expect(p, token.SEMI);
alt (f) {
- case (ast.ty_fn(?inputs, ?output)) {
- ret rec(ident=ident, inputs=inputs, output=output);
+ case (ast.ty_fn(?proto, ?inputs, ?output)) {
+ ret rec(proto=proto, ident=ident,
+ inputs=inputs, output=output);
}
}
fail;
@@ -200,10 +264,72 @@ impure fn parse_ty_field(parser p) -> ast.ty_field {
ret rec(ident=id, ty=ty);
}
+impure fn parse_constr_arg(parser p) -> @ast.constr_arg {
+ auto lo = p.get_span();
+ auto carg = ast.carg_base;
+ if (p.peek() == token.BINOP(token.STAR)) {
+ p.bump();
+ } else {
+ carg = ast.carg_ident(parse_ident(p));
+ }
+ ret @spanned(lo, lo, carg);
+}
+
+impure fn parse_ty_constr(parser p) -> @ast.constr {
+ auto lo = p.get_span();
+ auto path = parse_path(p, GREEDY);
+ auto pf = parse_constr_arg;
+ auto args = parse_seq[@ast.constr_arg](token.LPAREN,
+ token.RPAREN,
+ some(token.COMMA), pf, p);
+ auto hi = args.span;
+ ret @spanned(lo, hi, rec(path=path, args=args.node));
+}
+
+impure fn parse_constrs(parser p) -> common.spanned[vec[@ast.constr]] {
+ auto lo = p.get_span();
+ auto hi = lo;
+ let vec[@ast.constr] constrs = vec();
+ if (p.peek() == token.COLON) {
+ p.bump();
+ let bool more = true;
+ while (more) {
+ alt (p.peek()) {
+ case (token.IDENT(_)) {
+ auto constr = parse_ty_constr(p);
+ hi = constr.span;
+ append[@ast.constr](constrs, constr);
+ if (p.peek() == token.COMMA) {
+ p.bump();
+ more = false;
+ }
+ }
+ case (_) { more = false; }
+ }
+ }
+ }
+ ret spanned(lo, hi, constrs);
+}
+
+impure fn parse_ty_constrs(@ast.ty t, parser p) -> @ast.ty {
+ if (p.peek() == token.COLON) {
+ auto constrs = parse_constrs(p);
+ ret @spanned(t.span, constrs.span,
+ ast.ty_constr(t, constrs.node));
+ }
+ ret t;
+}
+
impure fn parse_ty(parser p) -> @ast.ty {
auto lo = p.get_span();
auto hi = lo;
let ast.ty_ t;
+
+ // FIXME: do something with these; currently they're
+ // dropped on the floor.
+ let ast.effect eff = parse_effect(p);
+ let ast.layer lyr = parse_layer(p);
+
alt (p.peek()) {
case (token.BOOL) { p.bump(); t = ast.ty_bool; }
case (token.INT) { p.bump(); t = ast.ty_int; }
@@ -275,9 +401,20 @@ impure fn parse_ty(parser p) -> @ast.ty {
case (token.FN) {
auto flo = p.get_span();
p.bump();
- t = parse_ty_fn(p, flo);
+ t = parse_ty_fn(ast.proto_fn, p, flo);
+ alt (t) {
+ case (ast.ty_fn(_, _, ?out)) {
+ hi = out.span;
+ }
+ }
+ }
+
+ case (token.ITER) {
+ auto flo = p.get_span();
+ p.bump();
+ t = parse_ty_fn(ast.proto_iter, p, flo);
alt (t) {
- case (ast.ty_fn(_, ?out)) {
+ case (ast.ty_fn(_, _, ?out)) {
hi = out.span;
}
}
@@ -297,7 +434,8 @@ impure fn parse_ty(parser p) -> @ast.ty {
fail;
}
}
- ret @spanned(lo, hi, t);
+
+ ret parse_ty_constrs(@spanned(lo, hi, t), p);
}
impure fn parse_arg(parser p) -> ast.arg {
@@ -341,9 +479,9 @@ impure fn parse_seq[T](token.token bra,
ret spanned(lo, hi, v);
}
-impure fn parse_lit(parser p) -> option.t[ast.lit] {
+impure fn parse_lit(parser p) -> ast.lit {
auto lo = p.get_span();
- let ast.lit_ lit;
+ let ast.lit_ lit = ast.lit_nil;
alt (p.peek()) {
case (token.LIT_INT(?i)) {
p.bump();
@@ -369,12 +507,11 @@ impure fn parse_lit(parser p) -> option.t[ast.lit] {
p.bump();
lit = ast.lit_str(s);
}
- case (_) {
- lit = ast.lit_nil; // FIXME: typestate bug requires this
- ret none[ast.lit];
+ case (?t) {
+ unexpected(p, t);
}
}
- ret some(spanned(lo, lo, lit));
+ ret spanned(lo, lo, lit);
}
fn is_ident(token.token t) -> bool {
@@ -520,14 +657,37 @@ impure fn parse_bottom_expr(parser p) -> @ast.expr {
case (token.REC) {
p.bump();
- auto pf = parse_field;
- auto fs =
- parse_seq[ast.field](token.LPAREN,
- token.RPAREN,
- some(token.COMMA),
- pf, p);
- hi = fs.span;
- ex = ast.expr_rec(fs.node, ast.ann_none);
+ expect(p, token.LPAREN);
+ auto fields = vec(parse_field(p));
+
+ auto more = true;
+ auto base = none[@ast.expr];
+ while (more) {
+ alt (p.peek()) {
+ case (token.RPAREN) {
+ hi = p.get_span();
+ p.bump();
+ more = false;
+ }
+ case (token.WITH) {
+ p.bump();
+ base = some[@ast.expr](parse_expr(p));
+ hi = p.get_span();
+ expect(p, token.RPAREN);
+ more = false;
+ }
+ case (token.COMMA) {
+ p.bump();
+ fields += parse_field(p);
+ }
+ case (?t) {
+ unexpected(p, t);
+ }
+ }
+
+ }
+
+ ex = ast.expr_rec(fields, base, ast.ann_none);
}
case (token.BIND) {
@@ -554,22 +714,124 @@ impure fn parse_bottom_expr(parser p) -> @ast.expr {
ex = ast.expr_bind(e, es.node, ast.ann_none);
}
- case (_) {
- alt (parse_lit(p)) {
- case (some[ast.lit](?lit)) {
- hi = lit.span;
- ex = ast.expr_lit(@lit, ast.ann_none);
+ case (token.POUND) {
+ p.bump();
+ auto pth = parse_path(p, GREEDY);
+ auto pf = parse_expr;
+ auto es = parse_seq[@ast.expr](token.LPAREN,
+ token.RPAREN,
+ some(token.COMMA),
+ pf, p);
+ hi = es.span;
+ ex = expand_syntax_ext(p, es.span, pth, es.node,
+ none[@ast.expr]);
+ }
+
+ case (token.FAIL) {
+ p.bump();
+ ex = ast.expr_fail;
+ }
+
+ case (token.LOG) {
+ p.bump();
+ auto e = parse_expr(p);
+ auto hi = e.span;
+ ex = ast.expr_log(e);
+ }
+
+ case (token.CHECK) {
+ p.bump();
+ alt (p.peek()) {
+ case (token.LPAREN) {
+ auto e = parse_expr(p);
+ auto hi = e.span;
+ ex = ast.expr_check_expr(e);
}
- case (none[ast.lit]) {
- p.err("expecting expression");
+ case (_) {
+ p.get_session().unimpl("constraint-check stmt");
}
}
}
+
+ case (token.RET) {
+ p.bump();
+ alt (p.peek()) {
+ case (token.SEMI) {
+ ex = ast.expr_ret(none[@ast.expr]);
+ }
+ case (_) {
+ auto e = parse_expr(p);
+ hi = e.span;
+ ex = ast.expr_ret(some[@ast.expr](e));
+ }
+ }
+ }
+
+ case (token.PUT) {
+ p.bump();
+ alt (p.peek()) {
+ case (token.SEMI) {
+ ex = ast.expr_put(none[@ast.expr]);
+ }
+ case (_) {
+ auto e = parse_expr(p);
+ hi = e.span;
+ ex = ast.expr_put(some[@ast.expr](e));
+ }
+ }
+ }
+
+ case (token.BE) {
+ p.bump();
+ auto e = parse_expr(p);
+ // FIXME: Is this the right place for this check?
+ if /*check*/ (ast.is_call_expr(e)) {
+ hi = e.span;
+ ex = ast.expr_be(e);
+ }
+ else {
+ p.err("Non-call expression in tail call");
+ }
+ }
+
+ case (_) {
+ auto lit = parse_lit(p);
+ hi = lit.span;
+ ex = ast.expr_lit(@lit, ast.ann_none);
+ }
}
ret @spanned(lo, hi, ex);
}
+/*
+ * FIXME: This is a crude approximation of the syntax-extension system,
+ * for purposes of prototyping and/or hard-wiring any extensions we
+ * wish to use while bootstrapping. The eventual aim is to permit
+ * loading rust crates to process extensions, but this will likely
+ * require a rust-based frontend, or an ocaml-FFI-based connection to
+ * rust crates. At the moment we have neither.
+ */
+
+impure fn expand_syntax_ext(parser p, ast.span sp,
+ &ast.path path, vec[@ast.expr] args,
+ option.t[@ast.expr] body) -> ast.expr_ {
+
+ check (_vec.len[ast.ident](path.node.idents) > 0u);
+ auto extname = path.node.idents.(0);
+ if (_str.eq(extname, "fmt")) {
+ auto expanded = extfmt.expand_syntax_ext(args, body);
+ auto newexpr = ast.expr_ext(path, args, body,
+ expanded,
+ ast.ann_none);
+
+ ret newexpr;
+ } else {
+ p.err("unknown syntax extension");
+ fail;
+ }
+}
+
impure fn extend_expr_by_ident(parser p, span lo, span hi,
@ast.expr e, ast.ident i) -> @ast.expr {
auto e_ = e.node;
@@ -705,6 +967,13 @@ impure fn parse_prefix_expr(parser p) -> @ast.expr {
ex = ast.expr_unary(ast.box, e, ast.ann_none);
}
+ case (token.MUTABLE) {
+ p.bump();
+ auto e = parse_prefix_expr(p);
+ hi = e.span;
+ ex = ast.expr_unary(ast._mutable, e, ast.ann_none);
+ }
+
case (_) {
ret parse_dot_or_call_expr(p);
}
@@ -712,144 +981,73 @@ impure fn parse_prefix_expr(parser p) -> @ast.expr {
ret @spanned(lo, hi, ex);
}
-impure fn parse_binops(parser p,
- (impure fn(parser) -> @ast.expr) sub,
- vec[tup(token.binop, ast.binop)] ops)
+type op_spec = rec(token.token tok, ast.binop op, int prec);
+
+// FIXME make this a const, don't store it in parser state
+fn prec_table() -> vec[op_spec] {
+ ret vec(rec(tok=token.BINOP(token.STAR), op=ast.mul, prec=11),
+ rec(tok=token.BINOP(token.SLASH), op=ast.div, prec=11),
+ rec(tok=token.BINOP(token.PERCENT), op=ast.rem, prec=11),
+ rec(tok=token.BINOP(token.PLUS), op=ast.add, prec=10),
+ rec(tok=token.BINOP(token.MINUS), op=ast.sub, prec=10),
+ rec(tok=token.BINOP(token.LSL), op=ast.lsl, prec=9),
+ rec(tok=token.BINOP(token.LSR), op=ast.lsr, prec=9),
+ rec(tok=token.BINOP(token.ASR), op=ast.asr, prec=9),
+ rec(tok=token.BINOP(token.AND), op=ast.bitand, prec=8),
+ rec(tok=token.BINOP(token.CARET), op=ast.bitxor, prec=6),
+ rec(tok=token.BINOP(token.OR), op=ast.bitor, prec=6),
+ // ast.mul is a bogus placeholder here, AS is special
+ // cased in parse_more_binops
+ rec(tok=token.AS, op=ast.mul, prec=5),
+ rec(tok=token.LT, op=ast.lt, prec=4),
+ rec(tok=token.LE, op=ast.le, prec=4),
+ rec(tok=token.GE, op=ast.ge, prec=4),
+ rec(tok=token.GT, op=ast.gt, prec=4),
+ rec(tok=token.EQEQ, op=ast.eq, prec=3),
+ rec(tok=token.NE, op=ast.ne, prec=3),
+ rec(tok=token.ANDAND, op=ast.and, prec=2),
+ rec(tok=token.OROR, op=ast.or, prec=1));
+}
+
+impure fn parse_binops(parser p) -> @ast.expr {
+ ret parse_more_binops(p, parse_prefix_expr(p), 0);
+}
+
+impure fn parse_more_binops(parser p, @ast.expr lhs, int min_prec)
-> @ast.expr {
- auto lo = p.get_span();
- auto hi = lo;
- auto e = sub(p);
- auto more = true;
- while (more) {
- more = false;
- for (tup(token.binop, ast.binop) pair in ops) {
- alt (p.peek()) {
- case (token.BINOP(?op)) {
- if (pair._0 == op) {
- p.bump();
- auto rhs = sub(p);
- hi = rhs.span;
- auto exp = ast.expr_binary(pair._1, e, rhs,
- ast.ann_none);
- e = @spanned(lo, hi, exp);
- more = true;
- }
- }
- case (_) { /* fall through */ }
- }
- }
+ // Magic nonsense to work around rustboot bug
+ fn op_eq(token.token a, token.token b) -> bool {
+ if (a == b) {ret true;}
+ else {ret false;}
}
- ret e;
-}
-
-impure fn parse_binary_exprs(parser p,
- (impure fn(parser) -> @ast.expr) sub,
- vec[tup(token.token, ast.binop)] ops)
- -> @ast.expr {
- auto lo = p.get_span();
- auto hi = lo;
- auto e = sub(p);
- auto more = true;
- while (more) {
- more = false;
- for (tup(token.token, ast.binop) pair in ops) {
- if (pair._0 == p.peek()) {
- p.bump();
- auto rhs = sub(p);
- hi = rhs.span;
- auto exp = ast.expr_binary(pair._1, e, rhs, ast.ann_none);
- e = @spanned(lo, hi, exp);
- more = true;
- }
- }
- }
- ret e;
-}
-
-impure fn parse_factor_expr(parser p) -> @ast.expr {
- auto sub = parse_prefix_expr;
- ret parse_binops(p, sub, vec(tup(token.STAR, ast.mul),
- tup(token.SLASH, ast.div),
- tup(token.PERCENT, ast.rem)));
-}
-
-impure fn parse_term_expr(parser p) -> @ast.expr {
- auto sub = parse_factor_expr;
- ret parse_binops(p, sub, vec(tup(token.PLUS, ast.add),
- tup(token.MINUS, ast.sub)));
-}
-
-impure fn parse_shift_expr(parser p) -> @ast.expr {
- auto sub = parse_term_expr;
- ret parse_binops(p, sub, vec(tup(token.LSL, ast.lsl),
- tup(token.LSR, ast.lsr),
- tup(token.ASR, ast.asr)));
-}
-
-impure fn parse_bitand_expr(parser p) -> @ast.expr {
- auto sub = parse_shift_expr;
- ret parse_binops(p, sub, vec(tup(token.AND, ast.bitand)));
-}
-
-impure fn parse_bitxor_expr(parser p) -> @ast.expr {
- auto sub = parse_bitand_expr;
- ret parse_binops(p, sub, vec(tup(token.CARET, ast.bitxor)));
-}
-
-impure fn parse_bitor_expr(parser p) -> @ast.expr {
- auto sub = parse_bitxor_expr;
- ret parse_binops(p, sub, vec(tup(token.OR, ast.bitor)));
-}
-
-impure fn parse_cast_expr(parser p) -> @ast.expr {
- auto lo = p.get_span();
- auto e = parse_bitor_expr(p);
- auto hi = e.span;
- while (true) {
- alt (p.peek()) {
- case (token.AS) {
- p.bump();
- auto t = parse_ty(p);
- hi = t.span;
- e = @spanned(lo, hi, ast.expr_cast(e, t, ast.ann_none));
- }
-
- case (_) {
- ret e;
+ auto peeked = p.peek();
+ for (op_spec cur in p.get_prec_table()) {
+ if (cur.prec > min_prec && op_eq(cur.tok, peeked)) {
+ p.bump();
+ alt (cur.tok) {
+ case (token.AS) {
+ auto rhs = parse_ty(p);
+ auto _as = ast.expr_cast(lhs, rhs, ast.ann_none);
+ auto span = @spanned(lhs.span, rhs.span, _as);
+ ret parse_more_binops(p, span, min_prec);
+ }
+ case (_) {
+ auto rhs = parse_more_binops(p, parse_prefix_expr(p),
+ cur.prec);
+ auto bin = ast.expr_binary(cur.op, lhs, rhs,
+ ast.ann_none);
+ auto span = @spanned(lhs.span, rhs.span, bin);
+ ret parse_more_binops(p, span, min_prec);
+ }
}
}
}
- ret e;
-}
-
-impure fn parse_relational_expr(parser p) -> @ast.expr {
- auto sub = parse_cast_expr;
- ret parse_binary_exprs(p, sub, vec(tup(token.LT, ast.lt),
- tup(token.LE, ast.le),
- tup(token.GE, ast.ge),
- tup(token.GT, ast.gt)));
-}
-
-
-impure fn parse_equality_expr(parser p) -> @ast.expr {
- auto sub = parse_relational_expr;
- ret parse_binary_exprs(p, sub, vec(tup(token.EQEQ, ast.eq),
- tup(token.NE, ast.ne)));
-}
-
-impure fn parse_and_expr(parser p) -> @ast.expr {
- auto sub = parse_equality_expr;
- ret parse_binary_exprs(p, sub, vec(tup(token.ANDAND, ast.and)));
-}
-
-impure fn parse_or_expr(parser p) -> @ast.expr {
- auto sub = parse_and_expr;
- ret parse_binary_exprs(p, sub, vec(tup(token.OROR, ast.or)));
+ ret lhs;
}
impure fn parse_assign_expr(parser p) -> @ast.expr {
auto lo = p.get_span();
- auto lhs = parse_or_expr(p);
+ auto lhs = parse_binops(p);
alt (p.peek()) {
case (token.EQ) {
p.bump();
@@ -901,6 +1099,7 @@ impure fn parse_if_expr(parser p) -> @ast.expr {
}
case (_) { /* fall through */ }
}
+
ret @spanned(lo, hi, ast.expr_if(cond, thn, els, ast.ann_none));
}
@@ -935,8 +1134,14 @@ impure fn parse_head_local(parser p) -> @ast.decl {
impure fn parse_for_expr(parser p) -> @ast.expr {
auto lo = p.get_span();
auto hi = lo;
+ auto is_each = false;
expect(p, token.FOR);
+ if (p.peek() == token.EACH) {
+ is_each = true;
+ p.bump();
+ }
+
expect (p, token.LPAREN);
auto decl = parse_head_local(p);
@@ -946,9 +1151,16 @@ impure fn parse_for_expr(parser p) -> @ast.expr {
expect(p, token.RPAREN);
auto body = parse_block(p);
hi = body.span;
- ret @spanned(lo, hi, ast.expr_for(decl, seq, body, ast.ann_none));
+ if (is_each) {
+ ret @spanned(lo, hi, ast.expr_for_each(decl, seq, body,
+ ast.ann_none));
+ } else {
+ ret @spanned(lo, hi, ast.expr_for(decl, seq, body,
+ ast.ann_none));
+ }
}
+
impure fn parse_while_expr(parser p) -> @ast.expr {
auto lo = p.get_span();
auto hi = lo;
@@ -996,6 +1208,23 @@ impure fn parse_alt_expr(parser p) -> @ast.expr {
auto block = parse_block(p);
arms += vec(rec(pat=pat, block=block, index=index));
}
+
+ // FIXME: this is a vestigial form left over from
+ // rustboot, we're keeping it here for source-compat
+ // for the time being but it should be flushed out
+ // once we've bootstrapped. When we see 'else {' here,
+ // we pretend we saw 'case (_) {'. It has the same
+ // meaning, and only exists due to the cexp/pexp split
+ // in rustboot, which we're not maintaining.
+
+ case (token.ELSE) {
+ p.bump();
+ auto hi = p.get_span();
+ auto pat = @spanned(lo, hi, ast.pat_wild(ast.ann_none));
+ auto index = index_arm(pat);
+ auto block = parse_block(p);
+ arms += vec(rec(pat=pat, block=block, index=index));
+ }
case (token.RBRACE) { /* empty */ }
case (?tok) {
p.err("expected 'case' or '}' when parsing 'alt' statement " +
@@ -1062,10 +1291,12 @@ impure fn parse_initializer(parser p) -> option.t[@ast.expr] {
impure fn parse_pat(parser p) -> @ast.pat {
auto lo = p.get_span();
+ auto hi = lo;
+ auto pat;
- auto pat = ast.pat_wild(ast.ann_none); // FIXME: typestate bug
alt (p.peek()) {
case (token.UNDERSCORE) {
+ hi = p.get_span();
p.bump();
pat = ast.pat_wild(ast.ann_none);
}
@@ -1073,6 +1304,7 @@ impure fn parse_pat(parser p) -> @ast.pat {
p.bump();
alt (p.peek()) {
case (token.IDENT(?id)) {
+ hi = p.get_span();
p.bump();
pat = ast.pat_bind(id, p.next_def_id(), ast.ann_none);
}
@@ -1085,13 +1317,16 @@ impure fn parse_pat(parser p) -> @ast.pat {
}
case (token.IDENT(?id)) {
auto tag_path = parse_path(p, GREEDY);
+ hi = tag_path.span;
let vec[@ast.pat] args;
alt (p.peek()) {
case (token.LPAREN) {
auto f = parse_pat;
- args = parse_seq[@ast.pat](token.LPAREN, token.RPAREN,
- some(token.COMMA), f, p).node;
+ auto a = parse_seq[@ast.pat](token.LPAREN, token.RPAREN,
+ some(token.COMMA), f, p);
+ args = a.node;
+ hi = a.span;
}
case (_) { args = vec(); }
}
@@ -1099,13 +1334,13 @@ impure fn parse_pat(parser p) -> @ast.pat {
pat = ast.pat_tag(tag_path, args, none[ast.variant_def],
ast.ann_none);
}
- case (?tok) {
- p.err("expected pattern but found " + token.to_str(tok));
- fail;
+ case (_) {
+ auto lit = parse_lit(p);
+ hi = lit.span;
+ pat = ast.pat_lit(@lit, ast.ann_none);
}
}
- auto hi = p.get_span();
ret @spanned(lo, hi, pat);
}
@@ -1147,49 +1382,22 @@ impure fn parse_auto(parser p) -> @ast.decl {
}
impure fn parse_stmt(parser p) -> @ast.stmt {
- auto lo = p.get_span();
- alt (p.peek()) {
-
- case (token.LOG) {
- p.bump();
- auto e = parse_expr(p);
- auto hi = p.get_span();
- ret @spanned(lo, hi, ast.stmt_log(e));
- }
-
- case (token.CHECK) {
- p.bump();
- alt (p.peek()) {
- case (token.LPAREN) {
- auto e = parse_expr(p);
- auto hi = p.get_span();
- ret @spanned(lo, hi, ast.stmt_check_expr(e));
- }
- case (_) {
- p.get_session().unimpl("constraint-check stmt");
- }
- }
- }
+ if (p.get_file_type() == SOURCE_FILE) {
+ ret parse_source_stmt(p);
+ } else {
+ ret parse_crate_stmt(p);
+ }
+}
- case (token.FAIL) {
- p.bump();
- ret @spanned(lo, p.get_span(), ast.stmt_fail);
- }
+impure fn parse_crate_stmt(parser p) -> @ast.stmt {
+ auto cdir = parse_crate_directive(p);
+ ret @spanned(cdir.span, cdir.span,
+ ast.stmt_crate_directive(@cdir));
+}
- case (token.RET) {
- p.bump();
- alt (p.peek()) {
- case (token.SEMI) {
- ret @spanned(lo, p.get_span(),
- ast.stmt_ret(none[@ast.expr]));
- }
- case (_) {
- auto e = parse_expr(p);
- ret @spanned(lo, e.span,
- ast.stmt_ret(some[@ast.expr](e)));
- }
- }
- }
+impure fn parse_source_stmt(parser p) -> @ast.stmt {
+ auto lo = p.get_span();
+ alt (p.peek()) {
case (token.LET) {
auto decl = parse_let(p);
@@ -1260,31 +1468,28 @@ fn index_block(vec[@ast.stmt] stmts, option.t[@ast.expr] expr) -> ast.block_ {
auto index = new_str_hash[uint]();
auto u = 0u;
for (@ast.stmt s in stmts) {
- // FIXME: typestate bug requires we do this up top, not
- // down below loop. Sigh.
- u += 1u;
alt (s.node) {
case (ast.stmt_decl(?d)) {
alt (d.node) {
case (ast.decl_local(?loc)) {
- index.insert(loc.ident, u-1u);
+ index.insert(loc.ident, u);
}
case (ast.decl_item(?it)) {
alt (it.node) {
case (ast.item_fn(?i, _, _, _, _)) {
- index.insert(i, u-1u);
+ index.insert(i, u);
}
case (ast.item_mod(?i, _, _)) {
- index.insert(i, u-1u);
+ index.insert(i, u);
}
case (ast.item_ty(?i, _, _, _, _)) {
- index.insert(i, u-1u);
+ index.insert(i, u);
}
case (ast.item_tag(?i, _, _, _)) {
- index.insert(i, u-1u);
+ index.insert(i, u);
}
case (ast.item_obj(?i, _, _, _, _)) {
- index.insert(i, u-1u);
+ index.insert(i, u);
}
}
}
@@ -1292,6 +1497,7 @@ fn index_block(vec[@ast.stmt] stmts, option.t[@ast.expr] expr) -> ast.block_ {
}
case (_) { /* fall through */ }
}
+ u += 1u;
}
ret rec(stmts=stmts, expr=expr, index=index);
}
@@ -1301,6 +1507,7 @@ fn index_arm(@ast.pat pat) -> hashmap[ast.ident,ast.def_id] {
alt (pat.node) {
case (ast.pat_bind(?i, ?def_id, _)) { index.insert(i, def_id); }
case (ast.pat_wild(_)) { /* empty */ }
+ case (ast.pat_lit(_, _)) { /* empty */ }
case (ast.pat_tag(_, ?pats, _, _)) {
for (@ast.pat p in pats) {
do_index_arm(index, p);
@@ -1330,15 +1537,11 @@ fn stmt_ends_with_semi(@ast.stmt stmt) -> bool {
case (ast.decl_item(_)) { ret false; }
}
}
- case (ast.stmt_ret(_)) { ret true; }
- case (ast.stmt_log(_)) { ret true; }
- case (ast.stmt_check_expr(_)) { ret true; }
- case (ast.stmt_fail) { ret true; }
case (ast.stmt_expr(?e)) {
alt (e.node) {
case (ast.expr_vec(_,_)) { ret true; }
case (ast.expr_tup(_,_)) { ret true; }
- case (ast.expr_rec(_,_)) { ret true; }
+ case (ast.expr_rec(_,_,_)) { ret true; }
case (ast.expr_call(_,_,_)) { ret true; }
case (ast.expr_binary(_,_,_,_)) { ret true; }
case (ast.expr_unary(_,_,_)) { ret true; }
@@ -1346,18 +1549,28 @@ fn stmt_ends_with_semi(@ast.stmt stmt) -> bool {
case (ast.expr_cast(_,_,_)) { ret true; }
case (ast.expr_if(_,_,_,_)) { ret false; }
case (ast.expr_for(_,_,_,_)) { ret false; }
+ case (ast.expr_for_each(_,_,_,_))
+ { ret false; }
case (ast.expr_while(_,_,_)) { ret false; }
case (ast.expr_do_while(_,_,_)) { ret false; }
case (ast.expr_alt(_,_,_)) { ret false; }
case (ast.expr_block(_,_)) { ret false; }
case (ast.expr_assign(_,_,_)) { ret true; }
case (ast.expr_assign_op(_,_,_,_))
- { ret true; }
+ { ret true; }
case (ast.expr_field(_,_,_)) { ret true; }
case (ast.expr_index(_,_,_)) { ret true; }
case (ast.expr_path(_,_,_)) { ret true; }
+ case (ast.expr_fail) { ret true; }
+ case (ast.expr_ret(_)) { ret true; }
+ case (ast.expr_put(_)) { ret true; }
+ case (ast.expr_be(_)) { ret true; }
+ case (ast.expr_log(_)) { ret true; }
+ case (ast.expr_check_expr(_)) { ret true; }
}
}
+ // We should not be calling this on a cdir.
+ case (ast.stmt_crate_directive(?cdir)) { fail; }
}
}
@@ -1401,8 +1614,13 @@ impure fn parse_block(parser p) -> ast.block {
case (none[@ast.expr]) {
// Not an expression statement.
stmts += vec(stmt);
- if (stmt_ends_with_semi(stmt)) {
- expect(p, token.SEMI);
+ // FIXME: crazy differentiation between conditions
+ // used in branches and binary expressions in rustboot
+ // means we cannot use && here. I know, right?
+ if (p.get_file_type() == SOURCE_FILE) {
+ if (stmt_ends_with_semi(stmt)) {
+ expect(p, token.SEMI);
+ }
}
}
}
@@ -1432,7 +1650,7 @@ impure fn parse_ty_params(parser p) -> vec[ast.ty_param] {
ret ty_params;
}
-impure fn parse_fn(parser p, ast.effect eff, bool is_iter) -> ast._fn {
+impure fn parse_fn_decl(parser p, ast.effect eff) -> ast.fn_decl {
auto pf = parse_arg;
let util.common.spanned[vec[ast.arg]] inputs =
// FIXME: passing parse_arg as an lval doesn't work at the
@@ -1444,34 +1662,41 @@ impure fn parse_fn(parser p, ast.effect eff, bool is_iter) -> ast._fn {
pf, p);
let @ast.ty output;
+
+ // FIXME: dropping constrs on the floor at the moment.
+ // pick them up when they're used by typestate pass.
+ parse_constrs(p);
+
if (p.peek() == token.RARROW) {
p.bump();
output = parse_ty(p);
} else {
output = @spanned(inputs.span, inputs.span, ast.ty_nil);
}
+ ret rec(effect=eff, inputs=inputs.node, output=output);
+}
+impure fn parse_fn(parser p, ast.effect eff, ast.proto proto) -> ast._fn {
+ auto decl = parse_fn_decl(p, eff);
auto body = parse_block(p);
-
- ret rec(effect = eff,
- is_iter = is_iter,
- inputs = inputs.node,
- output = output,
+ ret rec(decl = decl,
+ proto = proto,
body = body);
}
-impure fn parse_item_fn_or_iter(parser p, ast.effect eff,
- bool is_iter) -> @ast.item {
- auto lo = p.get_span();
- if (is_iter) {
- expect(p, token.ITER);
- } else {
- expect(p, token.FN);
- }
+impure fn parse_fn_header(parser p)
+ -> tup(ast.ident, vec[ast.ty_param]) {
auto id = parse_ident(p);
auto ty_params = parse_ty_params(p);
- auto f = parse_fn(p, eff, is_iter);
- auto item = ast.item_fn(id, f, ty_params,
+ ret tup(id, ty_params);
+}
+
+impure fn parse_item_fn_or_iter(parser p, ast.effect eff) -> @ast.item {
+ auto lo = p.get_span();
+ auto proto = parse_proto(p);
+ auto t = parse_fn_header(p);
+ auto f = parse_fn(p, eff, proto);
+ auto item = ast.item_fn(t._0, f, t._1,
p.next_def_id(), ast.ann_none);
ret @spanned(lo, f.body.span, item);
}
@@ -1486,14 +1711,9 @@ impure fn parse_obj_field(parser p) -> ast.obj_field {
impure fn parse_method(parser p) -> @ast.method {
auto lo = p.get_span();
auto eff = parse_effect(p);
- auto is_iter = false;
- alt (p.peek()) {
- case (token.FN) { p.bump(); }
- case (token.ITER) { p.bump(); is_iter = true; }
- case (?t) { unexpected(p, t); }
- }
+ auto proto = parse_proto(p);
auto ident = parse_ident(p);
- auto f = parse_fn(p, eff, is_iter);
+ auto f = parse_fn(p, eff, proto);
auto meth = rec(ident=ident, meth=f,
id=p.next_def_id(), ann=ast.ann_none);
ret @spanned(lo, f.body.span, meth);
@@ -1512,21 +1732,33 @@ impure fn parse_item_obj(parser p, ast.layer lyr) -> @ast.item {
some(token.COMMA),
pf, p);
- auto pm = parse_method;
- let util.common.spanned[vec[@ast.method]] meths =
- parse_seq[@ast.method]
- (token.LBRACE,
- token.RBRACE,
- none[token.token],
- pm, p);
+ let vec[@ast.method] meths = vec();
+ let option.t[ast.block] dtor = none[ast.block];
+
+ expect(p, token.LBRACE);
+ while (p.peek() != token.RBRACE) {
+ alt (p.peek()) {
+ case (token.DROP) {
+ p.bump();
+ dtor = some[ast.block](parse_block(p));
+ }
+ case (_) {
+ append[@ast.method](meths,
+ parse_method(p));
+ }
+ }
+ }
+ auto hi = p.get_span();
+ expect(p, token.RBRACE);
let ast._obj ob = rec(fields=fields.node,
- methods=meths.node);
+ methods=meths,
+ dtor=dtor);
auto item = ast.item_obj(ident, ob, ty_params,
p.next_def_id(), ast.ann_none);
- ret @spanned(lo, meths.span, item);
+ ret @spanned(lo, hi, item);
}
impure fn parse_mod_items(parser p, token.token term) -> ast._mod {
@@ -1568,18 +1800,127 @@ impure fn parse_item_mod(parser p) -> @ast.item {
ret @spanned(lo, hi, item);
}
-impure fn parse_item_type(parser p) -> @ast.item {
+impure fn parse_item_native_type(parser p) -> @ast.native_item {
+ auto t = parse_type_decl(p);
+ auto hi = p.get_span();
+ expect(p, token.SEMI);
+ auto item = ast.native_item_ty(t._1, p.next_def_id());
+ ret @spanned(t._0, hi, item);
+}
+
+impure fn parse_item_native_fn(parser p, ast.effect eff) -> @ast.native_item {
+ auto lo = p.get_span();
+ expect(p, token.FN);
+ auto t = parse_fn_header(p);
+ auto decl = parse_fn_decl(p, eff);
+ auto hi = p.get_span();
+ expect(p, token.SEMI);
+ auto item = ast.native_item_fn(t._0, decl, t._1, p.next_def_id(),
+ ast.ann_none);
+ ret @spanned(lo, hi, item);
+}
+
+impure fn parse_native_item(parser p) -> @ast.native_item {
+ let ast.effect eff = parse_effect(p);
+ let ast.opacity opa = parse_opacity(p);
+ let ast.layer lyr = parse_layer(p);
+ alt (p.peek()) {
+ case (token.TYPE) {
+ ret parse_item_native_type(p);
+ }
+ case (token.FN) {
+ ret parse_item_native_fn(p, eff);
+ }
+ case (?t) {
+ unexpected(p, t);
+ fail;
+ }
+ }
+}
+
+impure fn parse_native_mod_items(parser p,
+ str native_name,
+ ast.native_abi abi) -> ast.native_mod {
+ auto index = new_str_hash[ast.native_mod_index_entry]();
+ let vec[@ast.native_item] items = vec();
+
+ auto view_items = parse_native_view(p, index);
+
+ while (p.peek() != token.RBRACE) {
+ auto item = parse_native_item(p);
+ items += vec(item);
+
+ // Index the item.
+ ast.index_native_item(index, item);
+ }
+ ret rec(native_name=native_name, abi=abi,
+ view_items=view_items,
+ items=items,
+ index=index);
+}
+
+fn default_native_name(session.session sess, str id) -> str {
+ alt (sess.get_targ_cfg().os) {
+ case (session.os_win32) {
+ ret id + ".dll";
+ }
+ case (session.os_macos) {
+ ret "lib" + id + ".dylib";
+ }
+ case (session.os_linux) {
+ ret "lib" + id + ".so";
+ }
+ }
+}
+
+impure fn parse_item_native_mod(parser p) -> @ast.item {
+ auto lo = p.get_span();
+ expect(p, token.NATIVE);
+ auto abi = ast.native_abi_cdecl;
+ if (p.peek() != token.MOD) {
+ auto t = parse_str_lit_or_env_ident(p);
+ if (_str.eq(t, "cdecl")) {
+ } else if (_str.eq(t, "rust")) {
+ abi = ast.native_abi_rust;
+ } else {
+ p.err("unsupported abi: " + t);
+ fail;
+ }
+ }
+ expect(p, token.MOD);
+ auto id = parse_ident(p);
+ auto native_name;
+ if (p.peek() == token.EQ) {
+ expect(p, token.EQ);
+ native_name = parse_str_lit_or_env_ident(p);
+ } else {
+ native_name = default_native_name(p.get_session(), id);
+ }
+ expect(p, token.LBRACE);
+ auto m = parse_native_mod_items(p, native_name, abi);
+ auto hi = p.get_span();
+ expect(p, token.RBRACE);
+ auto item = ast.item_native_mod(id, m, p.next_def_id());
+ ret @spanned(lo, hi, item);
+}
+
+// Consumes `type ID` and returns the starting span together with the
+// identifier; shared prefix of type-item parsing.
+impure fn parse_type_decl(parser p) -> tup(span, ast.ident) {
     auto lo = p.get_span();
     expect(p, token.TYPE);
     auto id = parse_ident(p);
+    ret tup(lo, id);
+}
+
+// Parses `type ID[TPS] = TY;` into an item_ty. The item's span runs from
+// the `type` keyword (t._0) through the closing semicolon.
+impure fn parse_item_type(parser p) -> @ast.item {
+    auto t = parse_type_decl(p);
     auto tps = parse_ty_params(p);
     expect(p, token.EQ);
     auto ty = parse_ty(p);
     auto hi = p.get_span();
     expect(p, token.SEMI);
-    auto item = ast.item_ty(id, ty, tps, p.next_def_id(), ast.ann_none);
-    ret @spanned(lo, hi, item);
+    auto item = ast.item_ty(t._1, ty, tps, p.next_def_id(), ast.ann_none);
+    ret @spanned(t._0, hi, item);
 }
impure fn parse_item_tag(parser p) -> @ast.item {
@@ -1631,6 +1972,19 @@ impure fn parse_item_tag(parser p) -> @ast.item {
ret @spanned(lo, hi, item);
}
+// Parses an optional opacity qualifier: a leading ABS token is consumed
+// and yields op_abstract; anything else yields op_transparent without
+// consuming input.
+impure fn parse_opacity(parser p) -> ast.opacity {
+    alt (p.peek()) {
+        case (token.ABS) {
+            p.bump();
+            ret ast.op_abstract;
+        }
+        case (_) {
+            ret ast.op_transparent;
+        }
+    }
+    // NOTE(review): unreachable — both alt arms return. Presumably kept to
+    // satisfy the compiler's return-path analysis; confirm before removing.
+    fail;
+}
+
impure fn parse_layer(parser p) -> ast.layer {
alt (p.peek()) {
case (token.STATE) {
@@ -1686,6 +2040,7 @@ fn peeking_at_item(parser p) -> bool {
impure fn parse_item(parser p) -> @ast.item {
let ast.effect eff = parse_effect(p);
+ let ast.opacity opa = parse_opacity(p);
let ast.layer lyr = parse_layer(p);
alt (p.peek()) {
@@ -1697,17 +2052,22 @@ impure fn parse_item(parser p) -> @ast.item {
case (token.FN) {
check (lyr == ast.layer_value);
- ret parse_item_fn_or_iter(p, eff, false);
+ ret parse_item_fn_or_iter(p, eff);
}
case (token.ITER) {
check (lyr == ast.layer_value);
- ret parse_item_fn_or_iter(p, eff, true);
+ ret parse_item_fn_or_iter(p, eff);
}
case (token.MOD) {
check (eff == ast.eff_pure);
check (lyr == ast.layer_value);
ret parse_item_mod(p);
}
+ case (token.NATIVE) {
+ check (eff == ast.eff_pure);
+ check (lyr == ast.layer_value);
+ ret parse_item_native_mod(p);
+ }
case (token.TYPE) {
check (eff == ast.eff_pure);
ret parse_item_type(p);
@@ -1840,7 +2200,16 @@ impure fn parse_import(parser p) -> @ast.view_item {
fail;
}
-impure fn parse_use_or_import(parser p) -> @ast.view_item {
+// Parses `export ID;` into a view_item_export spanning the whole clause.
+impure fn parse_export(parser p) -> @ast.view_item {
+    auto lo = p.get_span();
+    expect(p, token.EXPORT);
+    auto id = parse_ident(p);
+    auto hi = p.get_span();
+    expect(p, token.SEMI);
+    ret @spanned(lo, hi, ast.view_item_export(id));
+}
+
+impure fn parse_view_item(parser p) -> @ast.view_item {
alt (p.peek()) {
case (token.USE) {
ret parse_use(p);
@@ -1848,23 +2217,26 @@ impure fn parse_use_or_import(parser p) -> @ast.view_item {
case (token.IMPORT) {
ret parse_import(p);
}
+ case (token.EXPORT) {
+ ret parse_export(p);
+ }
}
}
-fn is_use_or_import(token.token t) -> bool {
- if (t == token.USE) {
- ret true;
- }
- if (t == token.IMPORT) {
- ret true;
+// True iff the token can begin a view item (use / import / export).
+fn is_view_item(token.token t) -> bool {
+    alt (t) {
+        case (token.USE) { ret true; }
+        case (token.IMPORT) { ret true; }
+        case (token.EXPORT) { ret true; }
+        case (_) {}
+    }
     ret false;
 }
impure fn parse_view(parser p, ast.mod_index index) -> vec[@ast.view_item] {
let vec[@ast.view_item] items = vec();
- while (is_use_or_import(p.peek())) {
- auto item = parse_use_or_import(p);
+ while (is_view_item(p.peek())) {
+ auto item = parse_view_item(p);
items += vec(item);
ast.index_view_item(index, item);
@@ -1872,6 +2244,19 @@ impure fn parse_view(parser p, ast.mod_index index) -> vec[@ast.view_item] {
ret items;
}
+// Parses the run of view items at the head of a native mod body,
+// indexing each into the native-mod index as it is parsed. Stops at the
+// first token that cannot begin a view item.
+impure fn parse_native_view(parser p, ast.native_mod_index index)
+    -> vec[@ast.view_item] {
+    let vec[@ast.view_item] items = vec();
+    while (is_view_item(p.peek())) {
+        auto item = parse_view_item(p);
+        items += vec(item);
+
+        ast.index_native_view_item(index, item);
+    }
+    ret items;
+}
+
+
impure fn parse_crate_from_source_file(parser p) -> @ast.crate {
auto lo = p.get_span();
auto hi = lo;
@@ -1885,33 +2270,46 @@ impure fn parse_crate_from_source_file(parser p) -> @ast.crate {
//
// Each directive imperatively extends its environment with 0 or more items.
-impure fn parse_crate_directive(str prefix, parser p,
- &mutable vec[@ast.item] items,
- hashmap[ast.ident,ast.mod_index_entry] index)
+impure fn parse_crate_directive(parser p) -> ast.crate_directive
{
auto lo = p.get_span();
auto hi = lo;
alt (p.peek()) {
- case (token.CONST) {
- auto c = parse_item_const(p);
- ast.index_item(index, c);
- append[@ast.item](items, c);
- }
+ case (token.AUTH) {
+ // FIXME: currently dropping auth clauses on the floor,
+ // as there is no effect-checking pass.
+ p.bump();
+ auto n = parse_path(p, GREEDY);
+ expect(p, token.EQ);
+ auto e = parse_effect(p);
+ hi = p.get_span();
+ expect(p, token.SEMI);
+ ret spanned(lo, hi, ast.cdir_auth(n, e));
+ }
+
+ case (token.META) {
+ // FIXME: currently dropping meta clauses on the floor,
+ // as there is no crate metadata system
+ p.bump();
+ auto mis = parse_meta(p);
+ hi = p.get_span();
+ expect(p, token.SEMI);
+ ret spanned(lo, hi, ast.cdir_meta(mis));
+ }
+
case (token.MOD) {
p.bump();
auto id = parse_ident(p);
- auto file_path = id;
+ auto file_opt = none[filename];
alt (p.peek()) {
case (token.EQ) {
p.bump();
// FIXME: turn this into parse+eval expr
- file_path = parse_str_lit(p);
+ file_opt = some[filename](parse_str_lit_or_env_ident(p));
}
case (_) {}
}
- // dir-qualify file path.
- auto full_path = prefix + std.os.path_sep() + file_path;
alt (p.peek()) {
@@ -1920,29 +2318,18 @@ impure fn parse_crate_directive(str prefix, parser p,
case (token.SEMI) {
hi = p.get_span();
p.bump();
- if (!_str.ends_with(full_path, ".rs")) {
- full_path += ".rs";
- }
- auto p0 = new_parser(p.get_session(), 0, full_path);
- auto m0 = parse_mod_items(p0, token.EOF);
- auto im = ast.item_mod(id, m0, p.next_def_id());
- auto i = @spanned(lo, hi, im);
- ast.index_item(index, i);
- append[@ast.item](items, i);
+ ret spanned(lo, hi, ast.cdir_src_mod(id, file_opt));
}
// mod x = "foo_dir" { ...directives... }
case (token.LBRACE) {
p.bump();
- auto m0 = parse_crate_directives(full_path, p,
- token.RBRACE);
+ auto cdirs = parse_crate_directives(p, token.RBRACE);
hi = p.get_span();
expect(p, token.RBRACE);
- auto im = ast.item_mod(id, m0, p.next_def_id());
- auto i = @spanned(lo, hi, im);
- ast.index_item(index, i);
- append[@ast.item](items, i);
+ ret spanned(lo, hi,
+ ast.cdir_dir_mod(id, file_opt, cdirs));
}
case (?t) {
@@ -1950,28 +2337,65 @@ impure fn parse_crate_directive(str prefix, parser p,
}
}
}
+
+ case (token.LET) {
+ p.bump();
+ expect(p, token.LPAREN);
+ auto id = parse_ident(p);
+ expect(p, token.EQ);
+ auto x = parse_expr(p);
+ expect(p, token.RPAREN);
+ expect(p, token.LBRACE);
+ auto v = parse_crate_directives(p, token.RBRACE);
+ hi = p.get_span();
+ expect(p, token.RBRACE);
+ ret spanned(lo, hi, ast.cdir_let(id, x, v));
+ }
+
+ case (token.USE) {
+ auto vi = parse_view_item(p);
+ ret spanned(lo, vi.span, ast.cdir_view_item(vi));
+ }
+
+ case (token.IMPORT) {
+ auto vi = parse_view_item(p);
+ ret spanned(lo, vi.span, ast.cdir_view_item(vi));
+ }
+
+ case (token.EXPORT) {
+ auto vi = parse_view_item(p);
+ ret spanned(lo, vi.span, ast.cdir_view_item(vi));
+ }
+
+ case (_) {
+ auto x = parse_expr(p);
+ ret spanned(lo, x.span, ast.cdir_expr(x));
+ }
}
+ fail;
}
-impure fn parse_crate_directives(str prefix, parser p,
- token.token term) -> ast._mod {
- auto index = new_str_hash[ast.mod_index_entry]();
- auto view_items = parse_view(p, index);
- let vec[@ast.item] items = vec();
+// Reads crate directives until the terminator token (not consumed),
+// boxing each into the returned vector. Evaluation of the directives into
+// a module happens later (see eval.eval_crate_directives_to_mod).
+impure fn parse_crate_directives(parser p, token.token term)
+    -> vec[@ast.crate_directive] {
+
+    let vec[@ast.crate_directive] cdirs = vec();
     while (p.peek() != term) {
-        parse_crate_directive(prefix, p, items, index);
+        auto cdir = @parse_crate_directive(p);
+        append[@ast.crate_directive](cdirs, cdir);
     }
-    ret rec(view_items=view_items, items=items, index=index);
+    ret cdirs;
 }
+// Parses a crate file: collects its directives to EOF, then evaluates
+// them into the crate's root module.
 impure fn parse_crate_from_crate_file(parser p) -> @ast.crate {
     auto lo = p.get_span();
     auto hi = lo;
+    // Source-module paths in directives are resolved relative to this dir.
     auto prefix = std.path.dirname(lo.filename);
-    auto m = parse_crate_directives(prefix, p, token.EOF);
+    auto cdirs = parse_crate_directives(p, token.EOF);
+    auto m = eval.eval_crate_directives_to_mod(p, p.get_env(),
+                                               cdirs, prefix);
     hi = p.get_span();
     expect(p, token.EOF);
     ret @spanned(lo, hi, rec(module=m));
diff --git a/src/comp/front/pretty.rs b/src/comp/front/pretty.rs
new file mode 100644
index 00000000..2fd58126
--- /dev/null
+++ b/src/comp/front/pretty.rs
@@ -0,0 +1,87 @@
+import std._int;
+import std._str;
+import std._uint;
+import std._vec;
+
+export print_expr;
+
+// FIXME this is superseded by ../pretty/pprust.rs. can it be dropped?
+
+// Placeholder rendering for AST nodes the printer does not handle.
+fn unknown() -> str {
+    ret "<unknown ast node>";
+}
+
+// Renders an expression to a string. Handles literals, binary operations,
+// calls and paths; every other expression form prints as the unknown()
+// placeholder.
+fn print_expr(@ast.expr expr) -> str {
+    alt (expr.node) {
+        case (ast.expr_lit(?lit, _)) {
+            ret print_lit(lit);
+        }
+        case (ast.expr_binary(?op, ?lhs, ?rhs, _)) {
+            ret print_expr_binary(op, lhs, rhs);
+        }
+        case (ast.expr_call(?path, ?args, _)) {
+            ret print_expr_call(path, args);
+        }
+        case (ast.expr_path(?path, _, _)) {
+            ret print_path(path);
+        }
+        case (_) {
+            ret unknown();
+        }
+    }
+}
+
+// Renders a literal: strings are double-quoted, ints and uints printed in
+// base 10; any other literal form prints as the unknown() placeholder.
+fn print_lit(@ast.lit lit) -> str {
+    alt (lit.node) {
+        case (ast.lit_str(?s)) {
+            ret "\"" + s + "\"";
+        }
+        case (ast.lit_int(?i)) {
+            ret _int.to_str(i, 10u);
+        }
+        case (ast.lit_uint(?u)) {
+            ret _uint.to_str(u, 10u);
+        }
+        case (_) {
+            ret unknown();
+        }
+    }
+}
+
+// Renders a binary expression. Currently only `+` is rendered precisely;
+// all other operators fall back to the unknown() placeholder, matching
+// the defaulting style of print_expr and print_lit.
+fn print_expr_binary(ast.binop op, @ast.expr lhs, @ast.expr rhs) -> str {
+    alt (op) {
+        case (ast.add) {
+            auto l = print_expr(lhs);
+            auto r = print_expr(rhs);
+            ret l + " + " + r;
+        }
+        case (_) {
+            // Fix: the alt previously had no default arm, so any operator
+            // other than `add` had no matching case at runtime.
+            ret unknown();
+        }
+    }
+}
+
+// Renders a call as "<callee>(<arg>, <arg>, ...)", with each argument
+// rendered by print_expr and joined with ", ".
+fn print_expr_call(@ast.expr path_expr, vec[@ast.expr] args) -> str {
+    auto s = print_expr(path_expr);
+
+    s += "(";
+    // Local wrapper taking the element by reference, as _vec.map expects.
+    fn print_expr_ref(&@ast.expr e) -> str { ret print_expr(e); }
+    auto mapfn = print_expr_ref;
+    auto argstrs = _vec.map[@ast.expr, str](mapfn, args);
+    s += _str.connect(argstrs, ", ");
+    s += ")";
+
+    ret s;
+}
+
+// Renders a path by joining its identifier components with ".".
+fn print_path(ast.path path) -> str {
+    ret _str.connect(path.node.idents, ".");
+}
+
+//
+// Local Variables:
+// mode: rust
+// fill-column: 78;
+// indent-tabs-mode: nil
+// c-basic-offset: 4
+// buffer-file-coding-system: utf-8-unix
+// compile-command: "make -k -C ../.. 2>&1 | sed -e 's/\\/x\\//x:\\//g'";
+// End:
+//