commit 05681f7b44582e8d7a3f2fd88f106a51d04bcf7c
parent 957cdf0e7229eb3b3e968d9e7260ac85659ed4dd
Author: Drew DeVault <sir@cmpwn.com>
Date: Mon, 22 Feb 2021 14:51:06 -0500
hare::parse: parse import list
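This adds imports(), which consumes the leading "use" statements of a
sub-unit and returns them as a []ast::import. A rough sketch of the
intended calling pattern, mirroring the new tests; the input, the
"<example>" source name, and the abort-on-error handling are
illustrative only, not part of this change:

	let buf = bufio::fixed(strings::to_utf8("use foo;\nuse bar;"));
	let lexer = lex::lexer_init(buf, "<example>");
	let mods = match (imports(&lexer)) {
		err: error => abort("invalid import list"),
		mods: []ast::import => mods,
	};
	// So far every entry is an ast::import_module holding the name
	for (let i = 0z; i < len(mods); i += 1) {
		let mod = mods[i] as ast::import_module;
		assert(mod.name == "foo" || mod.name == "bar");
	};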
Diffstat:
3 files changed, 92 insertions(+), 0 deletions(-)
diff --git a/hare/parse/+test.ha b/hare/parse/+test.ha
@@ -2,6 +2,7 @@ use bufio;
use hare::ast;
use hare::lex;
use strings;
+use fmt;
@test fn ident() void = {
	{
@@ -73,3 +74,40 @@ use strings;
		assert(tok.0 as lex::btoken == hare::lex::btoken::SEMICOLON);
	};
};
+
+@test fn imports() void = {
+	{
+		const in = "use foo;";
+		let buf = bufio::fixed(strings::to_utf8(in));
+		let lexer = lex::lexer_init(buf, "<test>");
+		let mods = imports(&lexer) as []ast::import;
+		assert(len(mods) == 1);
+		assert(mods[0] is ast::import_module);
+		let mod = mods[0] as ast::import_module;
+		assert(mod.name == "foo");
+		assert(lex::lex(&lexer) is io::EOF);
+	};
+
+	{
+		const in =
+			"use foo;\n"
+			"use bar;\n"
+			"use baz::bat;\n\n"
+			"export fn main() void = void;";
+		let buf = bufio::fixed(strings::to_utf8(in));
+		let lexer = lex::lexer_init(buf, "<test>");
+		let mods = imports(&lexer) as []ast::import;
+
+		assert(len(mods) == 3);
+		let expected = ["foo", "bar", "baz"];
+
+		for (let i = 0z; i < len(mods); i += 1) {
+			assert(mods[i] is ast::import_module);
+			let mod = mods[i] as ast::import_module;
+			assert(mod.name == expected[i]);
+		};
+
+		let tok = lex::lex(&lexer) as (lex::token, lex::location);
+		assert(tok.0 as lex::btoken == hare::lex::btoken::EXPORT);
+	};
+};
diff --git a/hare/parse/parse.ha b/hare/parse/parse.ha
@@ -3,6 +3,7 @@ use hare::lex;
// Parses a single identifier, i.e. foo::bar::baz
export fn ident(lexer: *lex::lexer) (ast::ident | error) = {
+	// TODO: This is actually wrong, it builds the idents backwards. Whoops
	let ident = ast::ident { ... }, cur = &ident;
	for (true) {
		cur.name = match (want_name(lexer)) {
@@ -22,3 +23,29 @@ export fn ident(lexer: *lex::lexer) (ast::ident | error) = {
	};
	return ident;
};
+
+// Parses the import list for a sub-unit
+export fn imports(lexer: *lex::lexer) ([]ast::import | error) = {
+	let imports: []ast::import = [];
+	for (true) {
+		match (try_btoken(lexer, hare::lex::btoken::USE)) {
+			err: error => return err,
+			void => break,
+			* => void,
+		};
+
+		let name = match (ident(lexer)) {
+			err: error => return err,
+			ident: ast::ident => ident,
+		};
+
+		// TODO: Parse alternate import syntaxes
+		match (want_btoken(lexer, hare::lex::btoken::SEMICOLON)) {
+			err: error => return err,
+			lex::btoken => void,
+		};
+
+		append(imports, name: ast::import_module);
+	};
+	return imports;
+};
diff --git a/hare/parse/util.ha b/hare/parse/util.ha
@@ -16,6 +16,33 @@ fn want_name(lexer: *lex::lexer) (lex::name | error) = {
	};
};
+// Requires the next token to be one of the given btokens, or returns an error.
+fn want_btoken(
+	lexer: *lex::lexer,
+	want: lex::btoken...
+) (lex::btoken | error) = {
+	match (lex::lex(lexer)) {
+		io::EOF => return syntaxerr(mkloc(lexer),
+			"Expected token, found EOF"),
+		err: lex::error => return err,
+		t: (lex::token, lex::location) => match (t.0) {
+			b: lex::btoken => {
+				for (let i = 0z; i < len(want); i += 1) {
+					if (b == want[i]) {
+						return b;
+					};
+				};
+				// TODO: Use fmt+lex::tokstr here:
+				return syntaxerr(mkloc(lexer),
+					"Expected <something>, got <something else>");
+			},
+			// TODO: Use fmt+lex::tokstr here:
+			* => return syntaxerr(mkloc(lexer),
+				"Expected <something>, got <something else>"),
+		},
+	};
+};
+
// Looks for a matching btoken from the lexer, and if not present, unlexes the
// token and returns void. If found, the token is consumed from the lexer and is
// returned.