import io:*
import grammer:*
import parser:*
import ast_transformation:*
import string:*
import util:*
import symbol:*
import tree:*
import serialize:*
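
// Compiler driver: load the Kraken grammar from a serialized cache when it is
// still valid, otherwise rebuild it from krakenGrammer.kgm, then parse and
// AST-transform to_parse.krak through the importer.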
fun main():int {
    /*var gram.construct(): grammer*/
    // delay construction until we either load it or copy construct it
    var gram: grammer

    var file_name = string("../krakenGrammer.kgm")
    var compiled_name = file_name + string(".comp_new")
    var file_contents = read_file(file_name)
    var loaded_and_valid = false
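
    // A previously compiled grammar may be cached next to the grammar file
    // (krakenGrammer.kgm.comp_new); only reuse it if it is still up to date.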
    if (file_exists(compiled_name)) {
        println("cached file exists")
        var pos = 0
        var binary = read_file_binary(compiled_name)
        println("read file!")
        var cached_contents = string()
        unpack(cached_contents, pos) = unserialize<string>(binary, pos)
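        // the cache begins with the grammar source it was built from; it is only
        // valid if that source still matches the current grammar file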
        if (cached_contents == file_contents) {
            println("loaded_and_valid, using cached version!")
            loaded_and_valid = true
            /*unpack(gram, pos) = unserialize<grammer>(binary, pos)*/
            // unserialize in place to skip unnecessary copies
            pos = gram.unserialize(binary, pos)
            println("finished unserializing!!")
        } else {
            println("file contents do not match:")
            println("CACHED:")
            println(cached_contents)
            println("REAL:")
            println(file_contents)
            println("END")
        }
    } else {
        println("cached file does not exist")
    }
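
    // no usable cache: rebuild the grammar's first sets and state automaton from
    // source, then write a fresh cache (grammar source followed by the grammar)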
    if (!loaded_and_valid) {
        println("Not loaded_and_valid, re-generating and writing out")
        /*gram = load_grammer(file_contents)*/
        // copy construct because gram is no longer constructed beforehand
        gram.copy_construct(&load_grammer(file_contents))
        println("grammer loaded, calculate_first_set")
        gram.calculate_first_set()
        println("grammer loaded, calculate_state_automaton")
        gram.calculate_state_automaton()
        println("calculated, writing out")
        write_file_binary(compiled_name, serialize(file_contents) + serialize(gram))
        println("done writing")
    }
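
    // single-file debug pipeline (parse to_parse.krak and dump the syntax tree
    // and AST as Graphviz dot files), left commented out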
    /*println(gram.to_string())*/

    /*var parse.construct(gram): parser*/
    /*var parse_tree = parse.parse_input(read_file(string("to_parse.krak")), string("fun name"))*/
    /*println("the tree")*/
    /*println(syntax_tree_to_dot(parse_tree))*/
    /*write_file(string("syntax_tree.dot"), syntax_tree_to_dot(parse_tree))*/

    /*var ast_pass.construct(): ast_transformation*/
    /*var ast = ast_pass.transform(parse_tree)*/
    /*println("the AST")*/
    /*println(ast_to_dot(ast))*/
    /*write_file(string("ast.dot"), ast_to_dot(ast))*/
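
    // construct the parser from the grammar and an AST transformation pass, then
    // let the importer drive parsing and AST transformation of to_parse.krak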
    var parse.construct(gram): parser
    var ast_pass.construct(): ast_transformation
    var importer.construct(parse, ast_pass): importer
    importer.import(string("to_parse.krak"))

    return 0
}