Compare commits
No commits in common. "1c5e487e6125766a6699d9019f77fb2f4195823d" and "c36f1a348c937432b5bc0aeed6e205f25fab6de7" have entirely different histories.
1c5e487e61
...
c36f1a348c
7 changed files with 66 additions and 129 deletions
|
@ -1,4 +1,3 @@
|
|||
# Akhamoth
|
||||
|
||||
An attempt at making something very much like Elixir, but with syntax I prefer
|
||||
and static typing as a mandatory part of the language from the beginning.
|
||||
statically typed functional language
|
||||
|
|
|
@ -1,66 +0,0 @@
|
|||
# Syntax Reference
|
||||
|
||||
In Akhamoth, everything is an expression.
|
||||
|
||||
## Literals
|
||||
|
||||
### Identifiers
|
||||
|
||||
Identifiers are composed of ASCII letters, digits, and underscores and may not
|
||||
begin with a digit. The preferred naming convention is `UpperCamelCase` for type
|
||||
names, `snake_case` for variables, functions, and modules, and
|
||||
`SCREAMING_SNAKE_CASE` for global constants. Identifiers beginning with an
|
||||
underscore are typically intentionally unused variables and will silence
|
||||
warnings about this from the compiler.
|
||||
|
||||
> #### Unicode Identifiers {: .info}
|
||||
>
|
||||
> For version 1.0 of Akhamoth, the goal is only to support ASCII identifiers in
|
||||
> order to make parsing easier. In the future however, it would be good to look
|
||||
> at implementing [UAX #31](https://www.unicode.org/reports/tr31/) Unicode
|
||||
> identifiers.
|
||||
|
||||
### Atoms
|
||||
|
||||
Atoms have the exact same syntax as identifiers, but are prefixed with `:`, e.g.
|
||||
`:akhamoth`.
|
||||
|
||||
### Strings
|
||||
|
||||
A string literal consists of `"` followed by any number of other characters and
|
||||
then another `"`. Currently there is no support for character escapes of any
|
||||
kind. This will be rectified before version 1.0.
|
||||
|
||||
### Integers
|
||||
|
||||
Integer literals must begin with a digit, followed by any number of digits and
|
||||
underscores. Underscores are intended to be used for grouping digits in long
|
||||
numbers, e.g. `1_000_000_000`.
|
||||
|
||||
## Expressions
|
||||
|
||||
### Operators
|
||||
|
||||
The following is a list of all operators in Akhamoth, ordered from highest
|
||||
precedence to lowest, along with their associativity:
|
||||
|
||||
Operator | Associativity
|
||||
-------- | -------------
|
||||
`.` | left
|
||||
`-` | unary
|
||||
`*` `/` | left
|
||||
`+` `-` | left
|
||||
`\|>` | left
|
||||
`==` | requires parens
|
||||
`..` | requires parens
|
||||
`=>` | right
|
||||
`->` | right
|
||||
`=` | right
|
||||
|
||||
### Function Calls
|
||||
|
||||
## AST
|
||||
|
||||
The design of Akhamoth's AST is essentially the same as [Elixir's][1].
|
||||
|
||||
[1]: https://hexdocs.pm/elixir/main/syntax-reference.html#the-elixir-ast
|
|
@ -1,12 +1,3 @@
|
|||
{erl_opts, [debug_info]}.
|
||||
{project_plugins, [rebar3_ex_doc]}.
|
||||
{deps, []}.
|
||||
{ex_doc, [
|
||||
{extras, [
|
||||
{"README.md", #{title => <<"Overview">>}},
|
||||
"pages/syntax_reference.md"
|
||||
]},
|
||||
{main, "README.md"},
|
||||
{source_url, "https://git.wires.systems/wires/akhamoth"},
|
||||
{api_reference, false}
|
||||
]}.
|
||||
|
|
|
@ -1,64 +1,54 @@
|
|||
-module(akh_lexer).
|
||||
-moduledoc """
|
||||
This module contains functions for tokenizing Akhamoth source code.
|
||||
""".
|
||||
-include("akhamoth.hrl").
|
||||
-export([
|
||||
new/1,
|
||||
next/1
|
||||
]).
|
||||
|
||||
-export([new/1, next/1]).
|
||||
-type token_kind() ::
|
||||
binary()
|
||||
| '{'
|
||||
| '}'
|
||||
| '['
|
||||
| ']'
|
||||
| '('
|
||||
| ')'
|
||||
| ','
|
||||
| ';'
|
||||
| ':'
|
||||
| '.'
|
||||
| '->'
|
||||
| '+'.
|
||||
|
||||
-define(is_digit(C), C >= $0, C =< $9).
|
||||
-define(is_id_start(C), C >= $a, C =< $z; C >= $A, C =< $Z; C =:= $_).
|
||||
-type token() :: {token_kind(), non_neg_integer() | inserted}.
|
||||
|
||||
-doc """
|
||||
A token in the input stream
|
||||
""".
|
||||
-type token() :: any().
|
||||
-define(is_id(C), (C >= $A andalso C =< $Z); (C >= $a andalso C =< $z); C =:= $_).
|
||||
|
||||
-record(lexer, {
|
||||
source :: binary(),
|
||||
offset = 0 :: non_neg_integer()
|
||||
}).
|
||||
|
||||
%%% exports
|
||||
|
||||
-doc """
|
||||
Initializes a lexer to tokenize the given binary.
|
||||
""".
|
||||
-spec new(binary()) -> #lexer{}.
|
||||
new(Source) -> #lexer{source = Source}.
|
||||
|
||||
-doc """
|
||||
Attempts to get the next token in the input.
|
||||
""".
|
||||
-spec next(#lexer{}) -> none | {ok, token(), #lexer{}}.
|
||||
next(#lexer{source = <<C, _/bytes>>} = Lx) when ?is_id_start(C) ->
|
||||
lex_id(Lx, 1);
|
||||
next(#lexer{source = <<C, _/bytes>>} = Lx) when ?is_digit(C) ->
|
||||
lex_number(Lx, 1, C - $0);
|
||||
next(#lexer{source = <<$\n, Rest/bytes>>, offset = Offset} = Lexer) ->
|
||||
next(Lexer#lexer{source = Rest, offset = Offset + 1});
|
||||
next(#lexer{source = <<C, _/bytes>>} = Lexer) when ?is_id(C) ->
|
||||
lex_id(Lexer, 1);
|
||||
next(#lexer{source = <<>>}) ->
|
||||
none.
|
||||
|
||||
%%% local functions
|
||||
|
||||
lex_id(#lexer{source = Source, offset = Offset} = Lx, Len) when Len < byte_size(Source) ->
|
||||
-spec lex_id(#lexer{}, pos_integer()) -> {ok, token(), #lexer{}}.
|
||||
lex_id(#lexer{source = Source, offset = Offset} = Lexer, Len) when Len < byte_size(Source) ->
|
||||
C = binary:at(Source, Len),
|
||||
if
|
||||
?is_id_start(C); ?is_digit(C) ->
|
||||
lex_id(Lx, Len + 1);
|
||||
?is_id(C) ->
|
||||
lex_id(Lexer, Len + 1);
|
||||
true ->
|
||||
{Id, Rest} = split_binary(Source, Len),
|
||||
{ok, {id, Offset, Id}, Lx#lexer{source = Rest, offset = Offset + Len}}
|
||||
{ok, {Id, Offset}, Lexer#lexer{source = Rest, offset = Offset + Len}}
|
||||
end;
|
||||
lex_id(#lexer{source = Source, offset = Offset} = Lx, Len) ->
|
||||
{ok, {id, Offset, Source}, Lx#lexer{source = <<>>, offset = Offset + Len}}.
|
||||
|
||||
lex_number(#lexer{source = Source, offset = Offset} = Lx, Len, Acc) when Len < byte_size(Source) ->
|
||||
C = binary:at(Source, Len),
|
||||
if
|
||||
?is_digit(C) ->
|
||||
lex_number(Lx, Len + 1, Acc * 10 + C - $0);
|
||||
true ->
|
||||
{_, Rest} = split_binary(Source, Len),
|
||||
{ok, {number, Offset, Acc}, Lx#lexer{source = Rest, offset = Offset + Len}}
|
||||
end;
|
||||
lex_number(#lexer{offset = Offset} = Lx, Len, Acc) ->
|
||||
{ok, {number, Offset, Acc}, Lx#lexer{source = <<>>, offset = Offset + Len}}.
|
||||
lex_id(#lexer{source = Source, offset = Offset} = Lexer, Len) ->
|
||||
{ok, {Source, Offset}, Lexer#lexer{source = <<>>, offset = Offset + Len}}.
|
||||
|
|
21
src/akh_source_map.erl
Normal file
21
src/akh_source_map.erl
Normal file
|
@ -0,0 +1,21 @@
|
|||
-module(akh_source_map).
|
||||
-include("akh_source_map.hrl").
|
||||
-export([
|
||||
empty/0,
|
||||
insert/3,
|
||||
get_loc/2
|
||||
]).
|
||||
|
||||
-spec empty() -> source_map().
|
||||
empty() -> gb_trees:empty().
|
||||
|
||||
-spec insert(Offset :: non_neg_integer(), Line :: pos_integer(), source_map()) ->
|
||||
source_map().
|
||||
insert(Offset, Line, SourceMap) -> gb_trees:insert(Offset, Line, SourceMap).
|
||||
|
||||
-spec get_loc(Offset :: non_neg_integer(), source_map()) -> #loc{}.
|
||||
get_loc(Offset, SourceMap) ->
|
||||
case gb_trees:smaller(Offset, SourceMap) of
|
||||
{Start, Line} -> #loc{line = Line, col = Offset - Start - 1};
|
||||
none -> #loc{line = 0, col = Offset}
|
||||
end.
|
5
src/akh_source_map.hrl
Normal file
5
src/akh_source_map.hrl
Normal file
|
@ -0,0 +1,5 @@
|
|||
-type source_map() :: gb_trees:tree(non_neg_integer(), pos_integer()).
|
||||
|
||||
-type span() :: {non_neg_integer(), pos_integer()}.
|
||||
|
||||
-record(loc, {line :: pos_integer(), col :: non_neg_integer()}).
|
|
@ -1,17 +1,14 @@
|
|||
-module(akhamoth).
|
||||
|
||||
-export([compile_file/1, compile_binary/1, collect/2]).
|
||||
-export([compile/1, collect/2]).
|
||||
|
||||
compile_file(Path) ->
|
||||
{ok, Source} = file:read_file(Path),
|
||||
compile_binary(Source).
|
||||
compile(Path) ->
|
||||
{ok, Src} = file:read_file(Path),
|
||||
Lexer = akh_lexer:new(Src),
|
||||
collect(Lexer, []).
|
||||
|
||||
compile_binary(Source) ->
|
||||
Lx = akh_lexer:new(Source),
|
||||
collect(Lx, []).
|
||||
|
||||
collect(Lx, Acc) ->
|
||||
case akh_lexer:next(Lx) of
|
||||
collect(Lexer, Acc) ->
|
||||
case akh_lexer:next(Lexer) of
|
||||
none -> Acc;
|
||||
{ok, T, L} -> collect(L, [T | Acc])
|
||||
end.
|
||||
|
|
Loading…
Add table
Reference in a new issue