Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@ __pycache__
.envrc
*.idea
*.prof
.ruff_cache/
.mypy_cache/

aaa-stdlib/target/*
aaa-stdlib-user/target/*
16 changes: 11 additions & 5 deletions aaa/parser/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -341,12 +341,18 @@ def __init__(
def get_source_file(self) -> Path:
source_path = Path(self.source.value)

if source_path.is_file() and self.source.value.endswith(".aaa"):
return source_path
if source_path.is_absolute():
if source_path.is_file() and self.source.value.endswith(".aaa"):
return source_path

else:
return self.position.file.parent / (
self.source.value.replace(".", os.sep) + ".aaa"
)
source_path = (self.position.file.parent / source_path).resolve()
if source_path.is_file() and self.source.value.endswith(".aaa"):
return source_path

return self.position.file.parent / (
self.source.value.replace(".", os.sep) + ".aaa"
)

@classmethod
def load(cls, children: list[AaaParseModel | Token]) -> Import:
Expand Down
8 changes: 8 additions & 0 deletions aaa/type_checker/type_checker.py
Original file line number Diff line number Diff line change
Expand Up @@ -256,6 +256,10 @@ def _confirm_return_types(
if not isinstance(computed_value, VariableType):
return False

# A struct is never equal to an enum
if not isinstance(computed_value.type, type(expected_value.type)):
return False

if computed_value.type != expected_value.type:
return False

Expand Down Expand Up @@ -312,6 +316,10 @@ def _match_signature_items(
if not isinstance(var_type, VariableType):
raise SignatureItemMismatch

# prevent comparing enum with struct
if not isinstance(expected_var_type.type, type(var_type.type)):
raise SignatureItemMismatch

if expected_var_type.type != var_type.type:
raise SignatureItemMismatch

Expand Down
19 changes: 19 additions & 0 deletions examples/selfhosting/main.aaa
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
from "syntax_loader" import
syntax_loader_new_from_file,
SyntaxLoaderResult,

// Entry point of the selfhosting example: load the syntax definition
// from "syntax.json" and keep the resulting SyntaxLoader on the stack.
fn main {
"syntax.json" syntax_loader_new_from_file
match {
// Failure: print the error message and a newline, then exit with status 1.
// NOTE(review): `.` appears to print the top of the stack — confirm.
case SyntaxLoaderResult:error as error {
error .
"\n" .
1 exit
}
// Success: leave the SyntaxLoader on the stack for later use.
case SyntaxLoaderResult:ok as syntax_loader {
syntax_loader
}
}

drop // TODO do something with SyntaxLoader
}
200 changes: 200 additions & 0 deletions examples/selfhosting/syntax_loader.aaa
Original file line number Diff line number Diff line change
@@ -0,0 +1,200 @@
from "../json/parser.aaa" import json_from_str, Json, JsonResult, JsonError

// Result of constructing a SyntaxLoader: either the loader itself,
// or a human-readable error message.
enum SyntaxLoaderResult {
ok as SyntaxLoader,
error as str,
}

// In-memory representation of a loaded syntax definition.
// Only `tokens` is populated so far (see syntax_loader_new_from_str);
// the remaining fields are TODO there.
struct SyntaxLoader {
tokens as map[str, regex], // token name -> regex that matches it
nodes as map[str, str], // node name -> definition (semantics TBD; load_nodes is a stub)
filtered_tokens as vec[str], // token names to filter out (presumably whitespace/comments — TODO confirm)
root_node as str, // name of the grammar's root node
}

// Why reading a file failed; the payload is the offending path.
enum FileReadError {
open_error as str,
read_error as str,
}

// Convert a FileReadError into a human-readable message that
// includes the path that could not be opened or read.
fn FileReadError:to_str args error as FileReadError return str {
error
match {
case FileReadError:open_error as path { "Could not open " path str:append }
case FileReadError:read_error as path { "Could not read " path str:append }
}
}

// Result of read_file: the full file contents, or why reading failed.
enum FileReadResult {
ok as str,
error as FileReadError,
}

// Read the entire contents of the file at `path`.
// Returns FileReadResult:ok with the contents, or FileReadResult:error
// distinguishing open failures from read failures.
fn read_file args path as const str, return FileReadResult {
// NOTE(review): the two `0` arguments are presumably open flags/mode
// (read-only) — confirm against the stdlib's `open`.
path 0 0 open

use fd, open_ok {
if open_ok not {
path
copy swap drop // TODO make ticket for const arguments of enum-ctors
FileReadError:open_error FileReadResult:error return
}

""
use content {
// Accumulate fixed-size chunks until EOF.
while true {
fd 4096 read

use buff, read_ok {
if read_ok not {
path
copy swap drop
FileReadError:read_error FileReadResult:error return
}

// An empty chunk signals end-of-file: return what we collected.
if buff "" = {
content FileReadResult:ok return
}

content <- { content buff str:append }
}
}
}
}
}

// Read the file at `path` and build a SyntaxLoader from its contents.
// File-system failures are converted into SyntaxLoaderResult:error messages.
fn syntax_loader_new_from_file args path as str return SyntaxLoaderResult {
path read_file
match {
case FileReadResult:ok as text { text syntax_loader_new_from_str }
case FileReadResult:error as error { error FileReadError:to_str SyntaxLoaderResult:error }
}
}

// Per-section load results for the four parts of the syntax JSON.
// Each carries the loaded value on success, or an error message.

// Result of loading the token-name -> regex map.
enum LoadTokensResult {
ok as map[str, regex],
error as str,
}

// Result of loading the node-name -> definition map.
enum LoadNodesResult {
ok as map[str, str],
error as str,
}

// Result of loading the list of token names to filter out.
enum LoadFilteredTokensResult {
ok as vec[str],
error as str,
}

// Result of loading the root node's name.
enum LoadRootNodeResult {
ok as str,
error as str,
}

// Extract the regex-based (non-keyword) tokens from the JSON root object.
// Not implemented yet: `todo` aborts at runtime.
fn load_regular_tokens args root_object as map[str, Json] return LoadTokensResult {
todo // TODO
}

// Extract the keyword tokens from the JSON root object.
// Not implemented yet: `todo` aborts at runtime.
fn load_keyword_tokens args root_object as map[str, Json] return LoadTokensResult {
todo // TODO
}

// Result of merge_maps: the merged map, or the list of keys that
// appeared in both inputs.
enum MergeMapsResult {
ok as map[str, regex],
error as vec[str],
}

// Merge maps but fail if maps have overlapping keys
fn merge_maps args lhs as map[str, regex], rhs as map[str, regex] return MergeMapsResult {
// Push an empty vec (overlapping keys found) and an empty map (the result).
vec[str] map[str, regex]
use overlap, merged {
// Copy lhs into merged, recording every key that also appears in rhs.
lhs foreach {
use key, value {
if rhs key map:has_key {
overlap key vec:push
}
merged key value map:set
}
}

// Any shared key is an error; report all offending keys at once.
if overlap vec:empty not {
overlap MergeMapsResult:error return
}

// No overlap: copy rhs into merged as well.
rhs foreach {
use key, value {
merged key value map:set
}
}

merged MergeMapsResult:ok
}
}

// Load keyword tokens and regular tokens from the JSON root object and
// merge them into one token map. Fails if either load fails; keyword
// and regular token names must not overlap (overlap is TODO-unhandled).
fn load_tokens args root_object as map[str, Json] return LoadTokensResult {
root_object load_keyword_tokens
match {
case LoadTokensResult:error as error { error LoadTokensResult:error return }
case LoadTokensResult:ok as keyword_tokens {
root_object load_regular_tokens
match {
case LoadTokensResult:error as error { error LoadTokensResult:error return }
// Success: leave both maps on the stack for the merge below.
case LoadTokensResult:ok as regular_tokens { keyword_tokens regular_tokens }
}
}
}

use keyword_tokens, regular_tokens {
keyword_tokens regular_tokens merge_maps
match {
// Overlapping token names: error path not implemented yet.
case MergeMapsResult:error { todo }
case MergeMapsResult:ok as tokens { tokens LoadTokensResult:ok }
}
}
}

// Extract the node definitions from the JSON root object.
// Not implemented yet: `todo` aborts at runtime.
fn load_nodes args root_object as map[str, Json] return LoadNodesResult {
todo // TODO
}

// Extract the list of token names to filter out of the token stream.
// Not implemented yet: `todo` aborts at runtime.
fn load_filtered_tokens args root_object as map[str, Json] return LoadFilteredTokensResult {
todo // TODO
}

// Extract the name of the grammar's root node.
// Not implemented yet: `todo` aborts at runtime.
fn load_root_node args root_object as map[str, Json] return LoadRootNodeResult {
todo // TODO
}

// Build a SyntaxLoader from the JSON text of a syntax definition.
// Fails with SyntaxLoaderResult:error when the text is not valid JSON,
// when the JSON root is not an object, or when a section fails to load.
// Only the token section is loaded so far; the rest is TODO.
fn syntax_loader_new_from_str args text as str return SyntaxLoaderResult {
text json_from_str
match {
case JsonResult:error as error { error JsonError:to_str SyntaxLoaderResult:error return }
case JsonResult:ok as json { json }
}

use json {
// The top-level JSON value must be an object.
json
match {
case Json:object as object { object }
default { "json root should be an object" SyntaxLoaderResult:error return }
}
}

// Push a fresh SyntaxLoader to be filled in field by field.
SyntaxLoader
use root_object, syntax_loader {
root_object load_tokens
match {
// NOTE(review): `"tokens" { tokens } !` presumably assigns the
// struct's "tokens" field — confirm against the language docs.
case LoadTokensResult:ok as tokens { syntax_loader "tokens" { tokens } ! }
case LoadTokensResult:error as error { error SyntaxLoaderResult:error return }
}

todo
// TODO filtered tokens
// TODO load nodes
// TODO root node
// TODO check for extra values in root_dict
}

// TODO run equivalent of `_check_values()`
// TODO run equivalent of `_load_parsers()`
}
Loading