hare

The Hare programming language
git clone https://git.torresjrjr.com/hare.git

commit f5c13ad6db272a8b4976ba9966b30cf2d283f347
parent 7dc94d38d40651c471932a186a3a2a4eeb1f6227
Author: Eyal Sawady <ecs@d2evs.net>
Date:   Thu, 25 Feb 2021 08:34:26 -0500

hare::parse: implement alternative import syntaxes
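
In brief, the two import forms added here (as exercised by the new tests below)
look like this; the names foo, bar, bat, baz, and qux are taken from the test
inputs and are purely illustrative:

	// alias import: bind the module bar to the local name foo
	use foo = bar;

	// member import: pull the names bat and qux out of module baz
	use baz::{bat, qux};

The plain form (use foo::bar;) keeps its previous behaviour. The member form
requires a trailing :: before the brace, which is what the new ident_trailing
helper reports.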

Diffstat:
M hare/parse/+test.ha | 65 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
M hare/parse/parse.ha | 69 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++---------
M hare/parse/util.ha  | 21 +++++++++++++++++++++
3 files changed, 146 insertions(+), 9 deletions(-)

diff --git a/hare/parse/+test.ha b/hare/parse/+test.ha
@@ -99,4 +99,69 @@ use strings;
 		let tok = lex::lex(&lexer) as (lex::token, lex::location);
 		assert(tok.0 as lex::btoken == lex::btoken::EXPORT);
 	};
+
+	{
+		const in =
+			"use foo = bar;\n"
+			"use baz = bat;\n"
+			"use qux = quux::corge;\n"
+			"export fn main() void = void;";
+		let buf = bufio::fixed(strings::to_utf8(in), mode::READ);
+		let lexer = lex::lexer_init(buf, "<test>");
+		let mods = imports(&lexer) as []ast::import;
+		defer for (let i = 0z; i < len(mods); i += 1) {
+			ast::import_free(mods[i]);
+		};
+
+		assert(len(mods) == 3);
+		let expected: [_](str, []str) = [
+			("foo", ["bar"]),
+			("baz", ["bat"]),
+			("qux", ["corge", "quux"])
+		];
+
+		for (let i = 0z; i < len(mods); i += 1) {
+			assert(mods[i] is ast::import_alias);
+			let mod = mods[i] as ast::import_alias;
+			assert(mod.alias == expected[i].0);
+			assert(len(mod.ident) == len(expected[i].1));
+			for (let j = 0z; j < len(expected[i].1); j += 1z) {
+				assert(mod.ident[j] == expected[i].1[j]);
+			};
+		};
+	};
+
+	{
+		const in =
+			"use foo::{bar};\n"
+			"use baz::{bat, qux};\n"
+			"use quux::corge::{grault, garply};\n"
+			"export fn main() void = void;";
+		let buf = bufio::fixed(strings::to_utf8(in), mode::READ);
+		let lexer = lex::lexer_init(buf, "<test>");
+		let mods = imports(&lexer) as []ast::import;
+		defer for (let i = 0z; i < len(mods); i += 1) {
+			ast::import_free(mods[i]);
+		};
+
+		assert(len(mods) == 3);
+		let expected: [_]([]str, []str) = [
+			(["foo"], ["bar"]),
+			(["baz"], ["bat", "qux"]),
+			(["corge", "quux"], ["grault", "garply"])
+		];
+
+		for (let i = 0z; i < len(mods); i += 1) {
+			assert(mods[i] is ast::import_objects);
+			let mod = mods[i] as ast::import_objects;
+			assert(len(mod.objects) == len(expected[i].1));
+			for (let j = 0z; j < len(expected[i].0); j += 1z) {
+				assert(mod.objects[j] == expected[i].1[j]);
+			};
+			assert(len(mod.ident) == len(expected[i].0));
+			for (let j = 0z; j < len(expected[i].0); j += 1z) {
+				assert(mod.ident[j] == expected[i].0[j]);
+			};
+		};
+	};
 };
diff --git a/hare/parse/parse.ha b/hare/parse/parse.ha
@@ -3,11 +3,16 @@ use hare::lex;
 use hare::lex::{btoken};
 use slice;
 
-// Parses a single identifier, i.e. foo::bar::baz
-export fn ident(lexer: *lex::lexer) (ast::ident | error) = {
+fn ident_trailing(lexer: *lex::lexer) ((ast::ident, bool) | error) = {
 	let ident: []str = [];
 	for (true) {
-		let name = want_name(lexer)?;
+		let name = match (try_name(lexer)?) {
+			n: lex::name => n,
+			void => {
+				slice::reverse(ident, size(ast::ident));
+				return (ident: ast::ident, true);
+			},
+		};
 		append(ident, name: str);
 		match (try_btoken(lexer, btoken::DOUBLE_COLON)?) {
 			void => break,
@@ -15,7 +20,27 @@ export fn ident(lexer: *lex::lexer) (ast::ident | error) = {
 		};
 	};
 	slice::reverse(ident, size(ast::ident));
-	return ident;
+	return (ident: ast::ident, false);
+};
+
+// Parses a single identifier, i.e. foo::bar::baz
+export fn ident(lexer: *lex::lexer) (ast::ident | error) = {
+	let ident = ident_trailing(lexer)?;
+	synassert(mkloc(lexer), !ident.1, "Unexpected trailing :: in ident");
+	return ident.0;
+};
+
+fn parse_name_list(lexer: *lex::lexer) ([]str | error) = {
+	let names: []str = [];
+	for (true) {
+		append(names, want_name(lexer)?: str);
+		switch (want_btoken(lexer, btoken::COMMA, btoken::RBRACE)?) {
+			btoken::COMMA => void,
+			btoken::RBRACE => return names,
+			* => abort(), // Unreachable
+		};
+	};
+	abort();
 };
 
 // Parses the import list for a sub-unit
@@ -27,12 +52,38 @@ export fn imports(lexer: *lex::lexer) ([]ast::import | error) = {
 			* => void,
 		};
 
-		let name = ident(lexer)?;
-
-		// TODO: Parse alternate import syntaxes
-		want_btoken(lexer, btoken::SEMICOLON)?;
+		let name = ident_trailing(lexer)?;
 
-		append(imports, name: ast::import_module);
+		switch (want_btoken(lexer, btoken::SEMICOLON, btoken::LBRACE,
+				btoken::EQUAL)?) {
+			btoken::SEMICOLON => {
+				synassert(mkloc(lexer), !name.1,
+					"Unexpected trailing :: in ident")?;
+				append(imports, name.0: ast::import_module);
+			},
+			btoken::LBRACE => {
+				synassert(mkloc(lexer), name.1,
+					"Expected trailing :: in ident")?;
+				let objects = parse_name_list(lexer)?;
+				append(imports, ast::import_objects {
+					ident = name.0,
+					objects = objects,
+				});
+				want_btoken(lexer, btoken::SEMICOLON)?;
+			},
+			btoken::EQUAL => {
+				synassert(mkloc(lexer),
+					len(name.0) == 1 && !name.1,
+					"Expected name, not ident")?;
+				let ident = ident(lexer)?;
+				append(imports, ast::import_alias {
+					ident = ident,
+					alias = name.0[0],
+				});
+				want_btoken(lexer, btoken::SEMICOLON)?;
+			},
+			* => abort(), // Unreachable
+		};
 	};
 	return imports;
 };
diff --git a/hare/parse/util.ha b/hare/parse/util.ha
@@ -15,6 +15,20 @@ fn want_name(lexer: *lex::lexer) (lex::name | error) = {
 	};
 };
 
+// Looks for a matching name from the lexer, and if not present, unlexes the
+// token and returns void. If found, the token is consumed from the lexer and is
+// returned.
+fn try_name(lexer: *lex::lexer) (lex::name | error | void) = {
+	let tuple = match (lex::lex(lexer)?) {
+		io::EOF => return,
+		t: (lex::token, lex::location) => match (t.0) {
+			n: lex::name => return n,
+			* => t,
+		},
+	};
+	lex::unlex(lexer, tuple);
+};
+
 // Requires the next token to be a name. Returns that name, or an error.
 fn want_btoken(
 	lexer: *lex::lexer,
@@ -66,3 +80,10 @@ fn try_btoken(
 	};
 	lex::unlex(lexer, tuple);
 };
+
+// Returns a syntax error if cond is false and void otherwise
+fn synassert(loc: lex::location, cond: bool, msg: str) (void | error) = {
+	if (!cond) {
+		return syntaxerr(loc, msg);
+	};
+};
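
As a rough usage sketch, mirroring the test setup above, the new forms can be
fed through the exported imports() entry point like so. The use list and the
io::mode spelling are assumptions about this era of the standard library, not
taken from the diff; the module names in the input are the same placeholders
used by the tests:

	// NOTE: stdlib imports below are assumed, not taken from the diff.
	use bufio;
	use hare::ast;
	use hare::lex;
	use hare::parse;
	use io;
	use strings;

	export fn main() void = {
		// Placeholder source combining both new import forms.
		const src =
			"use foo = bar;\n"
			"use baz::{bat, qux};\n"
			"export fn main() void = void;";
		let buf = bufio::fixed(strings::to_utf8(src), io::mode::READ);
		let lexer = lex::lexer_init(buf, "<example>");
		// imports() consumes the leading use directives and yields the
		// parsed []ast::import, or a syntax error.
		let mods = parse::imports(&lexer) as []ast::import;
		defer for (let i = 0z; i < len(mods); i += 1) {
			ast::import_free(mods[i]);
		};
	};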