hare

The Hare programming language
git clone https://git.torresjrjr.com/hare.git

commit e8655af4fc9dfbe8407e7c5477d8ec3a286a006a
parent ec8a9995ecfc171720ec8f5e742e1704535712b3
Author: Drew DeVault <sir@cmpwn.com>
Date:   Wed, 21 Apr 2021 11:47:49 -0400

bytes: docs improvements

Diffstat:
A bytes/README      | 2 ++
M bytes/copy.ha     | 2 +-
M bytes/index.ha    | 4 ++--
M bytes/tokenize.ha | 7 +++----
4 files changed, 8 insertions(+), 7 deletions(-)

diff --git a/bytes/README b/bytes/README
@@ -0,0 +1,2 @@
+The bytes modules provides support functions for working with slices of bytes
+([]u8).
diff --git a/bytes/copy.ha b/bytes/copy.ha
@@ -1,4 +1,4 @@
-// Copies bytes from src to dest. dest must have the same length as src.
+// Copies bytes from "src" to "dest". "dest" must have the same length as "src".
 export fn copy(dest: []u8, src: []u8) void = {
 	assert(len(dest) == len(src),
 		"Destination slice must have same length as source slice");
diff --git a/bytes/index.ha b/bytes/index.ha
@@ -1,4 +1,4 @@
-// Returns the offset of the first instance of 'needle' in a 'haystack' of
+// Returns the offset of the first instance of "needle" in a "haystack" of
 // bytes, or void if it is not found.
 export fn index(haystack: []u8, needle: (u8 | []u8)) (size | void) = {
 	return match (needle) {
@@ -22,7 +22,7 @@ fn index_slice(haystack: []u8, b: []u8) (size | void) = switch (len(b)) {
 	* => index_tw(haystack, b),
 };
 
-// Returns the offset of the last instance of 'needle' in a 'haystack' of
+// Returns the offset of the last instance of "needle" in a "haystack" of
 // bytes, or void if it is not found.
 export fn rindex(haystack: []u8, needle: (u8 | []u8)) (size | void) = {
 	return match (needle) {
diff --git a/bytes/tokenize.ha b/bytes/tokenize.ha
@@ -1,12 +1,11 @@
 use types;
 
-// The state for a tokenizer.
 export type tokenizer = struct { s: []u8, d: []u8, p: size };
 
-// Returns a tokenizer which yields sub-slices tokenized by a delimiter.
-// Caller should ensure delim is not an empty slice
+// Returns a tokenizer which yields sub-slices tokenized by a delimiter. The
+// caller must ensure "delim" is not an empty slice.
 export fn tokenize(s: []u8, delim: []u8) tokenizer = {
-	assert(len(delim) > 0);
+	assert(len(delim) > 0, "bytes::tokenize called with empty slice");
 	if (len(s) == 0) {
 		delim = [];
 	};
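
Not part of the commit: a minimal usage sketch of the behaviour these doc comments
describe, written against the bytes API as it stands at this commit. It assumes the
match-arm syntax of this era of Hare (later releases spell arms with "case"), and the
values, the "needle"/"delim" bindings, and the main function are illustrative only.

use bytes;

export fn main() void = {
	let src: [4]u8 = [1, 2, 3, 4];
	let dest: [4]u8 = [0, 0, 0, 0];

	// copy: "dest" must have the same length as "src".
	bytes::copy(dest[..], src[..]);

	// index: yields the offset of the needle, or void when it is absent.
	const needle: u8 = 3;
	match (bytes::index(dest[..], needle)) {
		i: size => assert(i == 2),
		void => abort("needle not found"),
	};

	// tokenize: the delimiter must not be empty, per the new assertion.
	const delim: [1]u8 = [3];
	let tok = bytes::tokenize(src[..], delim[..]);
	// Sub-slices are then drawn from the tokenizer one at a time.
};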