Fix closing over adt variables, starting work on ast_transformation
@@ -1076,9 +1076,11 @@ std::set<NodeTree<ASTData>*> ASTTransformation::findVariablesToClose(NodeTree<AS
         return closed;
     }
     // if it's an identifier and not in the scope chain, and isn't an enum name
-    if (stat->getDataRef()->type == identifier && !inScopeChain(stat, func) &&
-        (!stat->getDataRef()->valueType->typeDefinition ||
-        stat->getDataRef()->valueType->typeDefinition->getDataRef()->type != adt_def) )
+    if (stat->getDataRef()->type == identifier && !inScopeChain(stat, func))
+    // used to be this because of C-style enums, but now those are functions and we should definitely close over variables of type adt...
+    //if (stat->getDataRef()->type == identifier && !inScopeChain(stat, func)
+    //    && (!stat->getDataRef()->valueType->typeDefinition ||
+    //    stat->getDataRef()->valueType->typeDefinition->getDataRef()->type != adt_def) )
         closed.insert(stat);
     for (auto child: stat->getChildren()) {
         auto recClosed = findVariablesToClose(func, child, scope);
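To summarize the rule this hunk settles on: every identifier in the statement tree that is not found in the enclosing function's scope chain gets closed over, and ADT-typed values are no longer excluded. The following is a minimal standalone C++ sketch of that rule; the Node and Kind types and the inScopeChain declaration are illustrative stand-ins, not the compiler's actual NodeTree<ASTData> machinery.

#include <set>
#include <string>
#include <vector>

enum class Kind { identifier, other };

struct Node {
    Kind kind = Kind::other;
    std::string name;
    std::vector<Node*> children;
};

// Assumed helper: true if the identifier is declared somewhere in func's scope chain.
bool inScopeChain(const Node* id, const Node* func);

// Collect every out-of-scope identifier below stat; no special case for ADTs.
std::set<Node*> findVariablesToClose(Node* func, Node* stat) {
    std::set<Node*> closed;
    if (!stat)
        return closed;
    if (stat->kind == Kind::identifier && !inScopeChain(stat, func))
        closed.insert(stat);
    for (Node* child : stat->children) {
        auto recClosed = findVariablesToClose(func, child);
        closed.insert(recClosed.begin(), recClosed.end());
    }
    return closed;
}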
stdlib/ast_transformation.krak (new file, 71 lines)
@@ -0,0 +1,71 @@
import symbol:*
import tree:*
import vector:*
import stack:*
import map:*
import util:*
import string:*
import mem:*
import io:*

// placeholder AST node type; only the undef variant exists so far
adt ast_node {
    undef
}

obj ast_transformation (Object) {
    fun construct(): *ast_transformation {
        return this
    }
    fun copy_construct(old: *ast_transformation) {
    }
    fun operator=(old: ref ast_transformation) {
        destruct()
        copy_construct(&old)
    }
    fun destruct() {
    }

    // stub: the parse-tree to AST transformation will be filled in here
    fun transform(parse_tree: *tree<symbol>): *ast_node {
        return null<ast_node>()
    }
}

fun get_ast_children(node: *ast_node): vector<*ast_node> {
    return vector<*ast_node>()
}
fun get_ast_name(node: *ast_node): string {
    return string("ast_node")
}

// render the AST as a Graphviz dot graph
fun ast_to_dot(root: *ast_node): string {
    var ret = string("digraph Kraken {\n")
    var counter = 0
    var node_name_map = map<*ast_node, string>()
    // memoize a unique, quote-escaped dot name for each node
    var get_name = fun(node: *ast_node): string {
        if (node_name_map.contains_key(node))
            return node_name_map[node]
        var escaped = string("")
        get_ast_name(node).data.for_each(fun(c: char) {
            if (c != '"')
                escaped += c
            else
                escaped += "\\\""
        })
        escaped += to_string(counter++)
        node_name_map.set(node, escaped)
        return escaped
    }
    var helper: fun(*ast_node):void = fun(node: *ast_node) {
        get_ast_children(node).for_each(fun(child: *ast_node) {
            if (!child)
                return; // where on earth does the null come from
            ret += string("\"") + get_name(node) + "\" -> \"" + get_name(child) + "\"\n";
            helper(child)
        })
    }
    if (root)
        helper(root)
    return ret + "}"
}
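The ast_to_dot routine above follows a standard pattern for emitting Graphviz output: memoize a unique, quote-escaped display name per node, then recursively print one "parent" -> "child" edge per link, skipping null children. Below is a minimal C++ rendering of the same pattern for reference; AstNode and its fields are assumptions made for the sketch, not the Kraken stdlib's types.

#include <functional>
#include <map>
#include <string>
#include <vector>

struct AstNode {
    std::string label;
    std::vector<AstNode*> children;
};

std::string astToDot(AstNode* root) {
    std::string out = "digraph Kraken {\n";
    std::map<AstNode*, std::string> names;
    int counter = 0;

    // Escape embedded quotes and append a counter so equal labels stay distinct.
    auto getName = [&](AstNode* node) -> std::string {
        auto it = names.find(node);
        if (it != names.end())
            return it->second;
        std::string escaped;
        for (char c : node->label)
            escaped += (c == '"') ? std::string("\\\"") : std::string(1, c);
        escaped += std::to_string(counter++);
        names[node] = escaped;
        return escaped;
    };

    // Depth-first edge emission, skipping null children.
    std::function<void(AstNode*)> emit = [&](AstNode* node) {
        for (AstNode* child : node->children) {
            if (!child)
                continue;
            out += "\"" + getName(node) + "\" -> \"" + getName(child) + "\"\n";
            emit(child);
        }
    };
    if (root)
        emit(root);
    return out + "}";
}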
73
tests/test_ast.krak
Normal file
73
tests/test_ast.krak
Normal file
@@ -0,0 +1,73 @@
import io:*
import grammer:*
import parser:*
import lexer:*
import ast_transformation:*
import string:*
import util:*
import symbol:*
import tree:*
import serialize:*

fun main():int {
    var gram.construct(): grammer

    var file_name = string("../krakenGrammer.kgm")
    var compiled_name = file_name + string(".comp_new")
    var file_contents = read_file(file_name)
    var loaded_and_valid = false

    // reuse the serialized grammar only if the cached copy of the grammar source
    // matches the file on disk
    if (file_exists(compiled_name)) {
        println("cached file exists")
        var pos = 0
        var binary = read_file_binary(compiled_name)
        println("read file!")
        var cached_contents = string()
        unpack(cached_contents, pos) = unserialize<string>(binary, pos)
        if (cached_contents == file_contents) {
            println("loaded_and_valid, using cached version!")
            loaded_and_valid = true
            unpack(gram, pos) = unserialize<grammer>(binary, pos)
            println("finished unserializing!!")
        } else {
            println("file contents do not match:")
            println("CACHED:")
            println(cached_contents)
            println("REAL:")
            println(file_contents)
            println("END")
        }
    } else {
        println("cached file does not exist")
    }
    if (!loaded_and_valid) {
        println("Not loaded_and_valid, re-generating and writing out")
        gram = load_grammer(file_contents)
        println("grammer loaded, calculate_first_set")
        gram.calculate_first_set()
        println("grammer loaded, calculate_state_automaton")
        gram.calculate_state_automaton()
        println("calculated, writing out")
        write_file_binary(compiled_name, serialize(file_contents) + serialize(gram))
        println("done writing")
    }
    println(gram.to_string())

    // parse the input and dump the raw parse tree
    var parse.construct(gram): parser
    var parse_tree = parse.parse_input(read_file(string("to_parse.krak")), string("fun name"))
    println("the tree")
    println(syntax_tree_to_dot(parse_tree))
    write_file(string("syntax_tree.dot"), syntax_tree_to_dot(parse_tree))

    // run the (so far stubbed) AST transformation and dump its output
    var ast_pass.construct(): ast_transformation
    var ast = ast_pass.transform(parse_tree)
    println("the AST")
    println(ast_to_dot(ast))
    write_file(string("ast.dot"), ast_to_dot(ast))

    return 0
}
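The caching scheme in this test is content-keyed: the serialized grammar is stored together with a verbatim copy of the grammar source, and on the next run the stored copy is compared against the file on disk, so the cache is only trusted when the source is byte-for-byte unchanged. A minimal C++ sketch of that scheme follows; the length-prefixed layout and helper names are assumptions for illustration, not the format used by the Kraken serialize library.

#include <fstream>
#include <optional>
#include <sstream>
#include <string>

static std::string readFile(const std::string& path) {
    std::ifstream in(path, std::ios::binary);
    std::ostringstream buf;
    buf << in.rdbuf();
    return buf.str();
}

// Returns the cached payload only if the stored source text still matches the file.
std::optional<std::string> loadCachedPayload(const std::string& sourcePath,
                                             const std::string& cachePath) {
    std::ifstream cache(cachePath, std::ios::binary);
    if (!cache)
        return std::nullopt;               // no cache yet
    size_t len = 0;
    cache.read(reinterpret_cast<char*>(&len), sizeof(len));
    std::string storedSource(len, '\0');
    cache.read(&storedSource[0], static_cast<std::streamsize>(len));
    if (storedSource != readFile(sourcePath))
        return std::nullopt;               // source changed, cache is stale
    std::ostringstream rest;
    rest << cache.rdbuf();                 // remaining bytes: the compiled payload
    return rest.str();
}

// Writes the source copy (length-prefixed) followed by the compiled payload.
void writeCachedPayload(const std::string& sourcePath, const std::string& cachePath,
                        const std::string& payload) {
    std::string source = readFile(sourcePath);
    std::ofstream cache(cachePath, std::ios::binary);
    size_t len = source.size();
    cache.write(reinterpret_cast<const char*>(&len), sizeof(len));
    cache.write(source.data(), static_cast<std::streamsize>(len));
    cache.write(payload.data(), static_cast<std::streamsize>(payload.size()));
}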