commit 74138f7c068c675feee42967151085089c41c62a
parent aff104e64c42129c2a7eef3680840049c775618e
Author: Drew DeVault <sir@cmpwn.com>
Date: Wed, 11 May 2022 14:34:25 +0200
encoding::json: add iter(object)
Signed-off-by: Drew DeVault <sir@cmpwn.com>
Diffstat:
4 files changed, 60 insertions(+), 25 deletions(-)
diff --git a/encoding/json/+test/lexer.ha b/encoding/json/+test/lexer.ha
@@ -21,16 +21,16 @@ use io;
for (let i = 0z; i < len(cases); i += 1) {
const src = strings::toutf8(cases[i].0);
const src = bufio::fixed(src, io::mode::READ);
- const lexer = lex(&src);
+ const lexer = newlexer(&src);
defer close(&lexer);
for (let j = 0z; j < len(cases[i].1); j += 1) {
const want = cases[i].1[j];
- const have = next(&lexer)! as token;
+ const have = lex(&lexer)! as token;
assert(tokeq(want, have));
};
- assert(next(&lexer) is io::EOF);
+ assert(lex(&lexer) is io::EOF);
};
};
diff --git a/encoding/json/lex.ha b/encoding/json/lex.ha
@@ -16,9 +16,9 @@ export type lexer = struct {
un: (token | void),
};
-// Creates a new JSON lexer. The caller can obtain tokens with [[next]] and
+// Creates a new JSON lexer. The caller may obtain tokens with [[lex]] and
// should pass the result to [[close]] when they're done with it.
-export fn lex(src: io::handle) lexer = {
+export fn newlexer(src: io::handle) lexer = {
let buf: []u8 = alloc([0...], os::BUFSIZ);
return lexer {
src = bufio::buffered(src, buf, []),
@@ -36,7 +36,7 @@ export fn close(lex: *lexer) void = {
// Returns the next token from a JSON lexer. The return value is borrowed from
// the lexer and will be overwritten on subsequent calls.
-export fn next(lex: *lexer) (token | io::EOF | error) = {
+export fn lex(lex: *lexer) (token | io::EOF | error) = {
match (lex.un) {
case void => void;
case let tok: token =>
@@ -91,7 +91,7 @@ export fn next(lex: *lexer) (token | io::EOF | error) = {
};
};
-// "Unlexes" a token from the lexer, such that the next call to [[next]] will
+// "Unlexes" a token from the lexer, such that the next call to [[lex]] will
// return that token again. Only one token can be unlexed at a time, otherwise
// the program will abort.
fn unlex(lex: *lexer, tok: token) void = {
diff --git a/encoding/json/load.ha b/encoding/json/load.ha
@@ -10,13 +10,13 @@ use strings;
// [[io::limitreader]] or similar, or use the JSON lexer ([[lex]]) directly into
// your program if dealing with potentially malicious inputs.
export fn load(src: io::handle) (value | error) = {
- const lex = lex(src);
+ const lex = newlexer(src);
defer close(&lex);
return _load(&lex);
};
-fn _load(lex: *lexer) (value | error) = {
- const tok = mustscan(lex)?;
+fn _load(lexer: *lexer) (value | error) = {
+ const tok = mustscan(lexer)?;
match (tok) {
case _null =>
return _null;
@@ -27,28 +27,28 @@ fn _load(lex: *lexer) (value | error) = {
case let s: str =>
return s;
case arraystart =>
- return _load_array(lex);
+ return _load_array(lexer);
case objstart =>
- return _load_obj(lex);
+ return _load_obj(lexer);
case (arrayend | objend | colon | comma) =>
return invalid;
};
};
-fn _load_array(lex: *lexer) (value | error) = {
+fn _load_array(lexer: *lexer) (value | error) = {
let array: []value = [];
for (true) {
- let tok = mustscan(lex)?;
+ let tok = mustscan(lexer)?;
match (tok) {
case arrayend =>
break;
case =>
- unlex(lex, tok);
+ unlex(lexer, tok);
};
- append(array, _load(lex)?);
+ append(array, _load(lexer)?);
- tok = mustscan(lex)?;
+ tok = mustscan(lexer)?;
match (tok) {
case comma => void;
case arrayend => break;
@@ -58,10 +58,10 @@ fn _load_array(lex: *lexer) (value | error) = {
return array;
};
-fn _load_obj(lex: *lexer) (value | error) = {
+fn _load_obj(lexer: *lexer) (value | error) = {
let obj = newobject();
for (true) {
- let tok = mustscan(lex)?;
+ let tok = mustscan(lexer)?;
const key = match (tok) {
case let s: str =>
yield strings::dup(s);
@@ -70,15 +70,15 @@ fn _load_obj(lex: *lexer) (value | error) = {
};
defer free(key);
- tok = mustscan(lex)?;
+ tok = mustscan(lexer)?;
if (!(tok is colon)) {
return invalid;
};
- const val = _load(lex)?;
+ const val = _load(lexer)?;
set(&obj, key, val);
- tok = mustscan(lex)?;
+ tok = mustscan(lexer)?;
match (tok) {
case comma => void;
case objend => break;
@@ -89,8 +89,8 @@ fn _load_obj(lex: *lexer) (value | error) = {
return obj;
};
-fn mustscan(lex: *lexer) (token | error) = {
- match (next(lex)?) {
+fn mustscan(lexer: *lexer) (token | error) = {
+ match (lex(lexer)?) {
case io::EOF =>
return invalid;
case let tok: token =>
diff --git a/encoding/json/value.ha b/encoding/json/value.ha
@@ -59,6 +59,30 @@ export fn del(obj: *object, key: const str) void = {
};
};
+export type iterator = struct {
+ obj: *object,
+ i: size,
+ j: size,
+};
+
+// Creates an iterator that enumerates over the key/value pairs in an
+// [[object]].
+export fn iter(obj: *object) iterator = {
+ return iterator { obj = obj, ... };
+};
+
+// Returns the next key/value pair from this iterator, or void if none remain.
+export fn next(iter: *iterator) ((const str, const *value) | void) = {
+	for (iter.i < len(iter.obj.buckets); iter.i += 1) {
+		const bucket = &iter.obj.buckets[iter.i];
+		// Advance j before returning: the loop afterthought does not
+		// run on return, so without this every call would yield the
+		// same pair again and callers (e.g. dup) would never finish.
+		for (iter.j < len(bucket)) {
+			const key = bucket[iter.j].0;
+			const val = &bucket[iter.j].1;
+			iter.j += 1;
+			return (key, val);
+		};
+		// Bucket exhausted: restart the index for the next bucket,
+		// otherwise leading entries of later buckets are skipped.
+		iter.j = 0;
+	};
+};
+
// Duplicates a JSON value. The caller must pass the return value to [[finish]]
// to free associated resources when they're done using it.
export fn dup(val: value) value = {
@@ -72,7 +96,18 @@ export fn dup(val: value) value = {
};
return new;
case let o: object =>
- abort(); // TODO: Implement me after iter
+ let new = newobject();
+ const i = iter(&o);
+ for (true) {
+ const pair = match (next(&i)) {
+ case void =>
+ break;
+ case let pair: (const str, const *value) =>
+ yield pair;
+ };
+ set(&new, pair.0, *pair.1);
+ };
+ return new;
case =>
return val;
};