Fix case_statement/lambda close-over-variables bug, rename the ast_node file to make ast_node:: unambiguous, rename test_ast to test_compiler, and add a small skeleton c_generator file
89  tests/test_compiler.krak  Normal file
@@ -0,0 +1,89 @@
import io:*
import grammer:*
import parser:*
import ast_transformation:*
import string:*
import util:*
import symbol:*
import tree:*
import serialize:*
import c_generator:*

fun main():int {

    /*var gram.construct(): grammer*/
    // delay construction until we either load it or copy construct it
    var gram: grammer

    var file_name = string("../krakenGrammer.kgm")

    var compiled_name = file_name + string(".comp_new")
    var file_contents = read_file(file_name)
    var loaded_and_valid = false

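    // reuse the cached, pre-serialized grammer when the stored source text matches the current grammer file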
    if (file_exists(compiled_name)) {
        println("cached file exists")
        var pos = 0
        var binary = read_file_binary(compiled_name)
        println("read file!")
        var cached_contents = string()
        unpack(cached_contents, pos) = unserialize<string>(binary, pos)
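        // the cache stores the grammer source text first, then the serialized grammer,
        // so the stored text can be compared with the current file to detect staleness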
        if (cached_contents == file_contents) {
            println("loaded_and_valid, using cached version!")
            loaded_and_valid = true
            /*unpack(gram, pos) = unserialize<grammer>(binary, pos)*/
            // skip unnecessary copies this way
            pos = gram.unserialize(binary, pos)
            println("finished unserializing!!")
        } else {
            println("file contents do not match:")
            println("CACHED:")
            println(cached_contents)
            println("REAL:")
            println(file_contents)
            println("END")
        }
    } else {
        println("cached file does not exist")
    }
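    // cache missing or stale: rebuild the grammer tables and write out a fresh cache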
    if (!loaded_and_valid) {
        println("Not loaded_and_valid, re-generating and writing out")
        /*gram = load_grammer(file_contents)*/
        // since we no longer construct gram beforehand
        gram.copy_construct(&load_grammer(file_contents))
        println("grammer loaded, calculate_first_set")
        gram.calculate_first_set()
        println("grammer loaded, calculate_state_automaton")
        gram.calculate_state_automaton()
        println("calculated, writing out")
        write_file_binary(compiled_name, serialize(file_contents) + serialize(gram))
        println("done writing")
    }
    /*println(gram.to_string())*/

    /*var parse.construct(gram): parser*/
    /*var parse_tree = parse.parse_input(read_file(string("to_parse.krak")), string("fun name"))*/
    /*println("the tree")*/
    /*println(syntax_tree_to_dot(parse_tree))*/
    /*write_file(string("syntax_tree.dot"), syntax_tree_to_dot(parse_tree))*/

    /*var ast_pass.construct(): ast_transformation*/
    /*var ast = ast_pass.transform(parse_tree)*/
    /*println("the AST")*/
    /*println(ast_to_dot(ast))*/
    /*write_file(string("ast.dot"), ast_to_dot(ast))*/

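    // current pipeline: the parser and ast_transformation passes feed the importer,
    // and the resulting per-file ASTs are handed to the c_generator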
    var kraken_file_name = string("to_parse.krak")
    var parse.construct(gram): parser
    var ast_pass.construct(): ast_transformation
    var importer.construct(parse, ast_pass): importer
    importer.import(kraken_file_name)
    var c_generator.construct(): c_generator
    var c_output_pair = c_generator.generate(importer.name_ast_map)
    write_file(kraken_file_name + ".c", c_output_pair.first)
    println(string("linker string: ") + c_output_pair.second)

    return 0
}