diff --git a/src/errors.fnk b/src/errors.fnk index 6f18fd0..a79c2ed 100644 --- a/src/errors.fnk +++ b/src/errors.fnk @@ -1,10 +1,16 @@ {highlight_code_loc} = import '@fink/snippet' +token_error = fn msg, token, {tokenizer: {code, filename}}: + {loc} = token + {start: {line, column}} = loc -token_error = fn msg, token, {tokenizer: {code}}: - snippet = highlight_code_loc(code, token.loc) - # TODO: remove first line of err.stack - new Error(`${msg}\n${snippet}`) + snippet = ` + ${filename}:${line}:${column} + ${highlight_code_loc(code, loc)} + ${msg} + ` + # TODO: remove first line of err.stack + new SyntaxError(snippet) diff --git a/src/index.fnk b/src/index.fnk index 6494d25..7db3e9a 100644 @@ -42,7 +42,7 @@ assert_token = fn token, expected, ctx: match true: token.value != expected: throw token_error( - `Expected ${inspect(expected)} but found ${inspect(token.value)}:`, + `Expected ${inspect(expected)} but found ${inspect(token.value)}.`, token, ctx ) @@ -60,7 +60,7 @@ assert_curr = fn ctx, expected: assert_not_end = fn ctx: match true: next_is_end(ctx): - throw token_error(`Unexpected end of code:`, ctx.curr_token, ctx) + throw token_error(`Unexpected end of code.`, ctx.curr_token, ctx) advance = fn ctx: @@ -143,7 +143,7 @@ expression = fn ctx, rbp: [left, next_ctx] -init_parser = fn {code}: +init_parser = fn {code, filename}: { curr_token: null, next_token: { @@ -154,7 +154,7 @@ init_parser = fn {code}: }, ignored_tokens: [], - tokenizer: init_tokenizer(code), + tokenizer: init_tokenizer(code, filename), symbols: init_symbols() } diff --git a/src/index.test.fnk b/src/index.test.fnk index f47ac4c..382076f 100644 @@ -51,7 +51,7 @@ init_test_lang = fn ctx: parse = fn code: - ctx = pipe {code}: + ctx = pipe {code, filename: 'test.fnk'}: init_parser init_test_lang start_parser @@ -121,9 +121,12 @@ describe:: 'curr token tests, values, and assertions', fn: (fn: assert_curr(ctx, 
'123')) not_to_throw anything (fn: assert_curr(ctx, 'foobar')) to_throw ` - Expected 'foobar' but found '123': + test.fnk:1:9 1| foobar = 123 - ^` + ^ + + Expected 'foobar' but found '123'. + ` describe:: 'next token tests and assertions', fn: @@ -154,9 +157,12 @@ describe:: 'next token tests and assertions', fn: (fn: assert_next(ctx, end_token)) not_to_throw anything (fn: assert_next(ctx, '123')) to_throw ` - Expected '123' but found Symbol(end): + test.fnk:1:12 1| foobar = 123 - ^` + ^ + + Expected '123' but found Symbol(end). + ` describe:: 'advance to next token or throw', fn: @@ -173,52 +179,59 @@ describe:: 'advance to next token or throw', fn: [, ctx] = parse(`foobar = 123; shrub = ni`) (fn: assert_advance(ctx, '==')) to_throw ` - Expected '==' but found ';': + test.fnk:1:12 1| foobar = 123; shrub = ni - ^` + ^ + + Expected '==' but found ';'. + ` describe:: 'parse exceptions', fn: it:: 'throws unexpected end of code', fn: - test_parse = fn: parse(` - foobar = - `) + test_parse = fn: parse(`foobar =`) test_parse to_throw ` - Unexpected end of code: + test.fnk:1:7 1| foobar = - ^` + ^ + + Unexpected end of code. + ` it:: 'throws unexpected infix', fn: - test_parse = fn: parse(` - = 123`) + test_parse = fn: parse(`= 123`) test_parse to_throw ` - Cannot use '123' as an infix operator: + test.fnk:1:2 1| = 123 - ^` + ^ + + Cannot use '123' as an infix operator. + ` it:: 'throws non infix operator', fn: - test_parse = fn: parse(` - foo = spam ni - `) + test_parse = fn: parse(`foo = spam ni`) test_parse to_throw ` - Cannot use 'ni' as an infix operator: + test.fnk:1:11 1| foo = spam ni ^ - 2| ` + + Cannot use 'ni' as an infix operator. + ` it:: 'throws collecting text when not finding expected end', fn: - test_parse = fn: parse(` - 'foo bar` - ) + test_parse = fn: parse(`'foo bar`) test_parse to_throw ` - Unexpected end of code: + test.fnk:1:5 1| 'foo bar - ^` + ^ + + Unexpected end of code. 
+ ` diff --git a/src/symbols.fnk b/src/symbols.fnk index 0779628..53b921f 100644 --- a/src/symbols.fnk +++ b/src/symbols.fnk @@ -104,7 +104,7 @@ led = fn ctx, left: match !led_fn: true: throw token_error( - `Cannot use ${inspect(curr_token.value)} as an infix operator:`, + `Cannot use ${inspect(curr_token.value)} as an infix operator.`, curr_token, ctx ) else: led_fn(ctx, left) diff --git a/src/tokenizer.fnk b/src/tokenizer.fnk index db31cc7..b5a6cbe 100644 --- a/src/tokenizer.fnk +++ b/src/tokenizer.fnk @@ -69,9 +69,10 @@ get_next_token = fn ctx: [foo, next_ctx] -init_tokenizer = fn code: +init_tokenizer = fn code, filename: { code, + filename, partials: {}, separators: {}, partial_token: { diff --git a/src/tokenizer.test.fnk b/src/tokenizer.test.fnk index f09b723..db74db5 100644 --- a/src/tokenizer.test.fnk +++ b/src/tokenizer.test.fnk @@ -6,7 +6,7 @@ describe:: 'tokenizer', fn: tokenize = fn code: - ctx = pipe init_tokenizer(code): + ctx = pipe init_tokenizer(code, 'test.fnk'): add_token(' ') add_token('\n') add_token('=')