kraken/tests/test_grammer.krak

import io:*
import grammer:*
import parser:*
import lexer:*
import regex:* /* assumed missing: the qualified regex::regex type is referenced in the first-set dump below */
import string:*
import util:*
import symbol:*
import tree:*
fun main():int {
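    // load the grammar definition from a .kgm file and print it back out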
    /*var a = load_grammer(read_file(string("../krakenGrammer.kgm")))*/
    /*var a = load_grammer(read_file(string("grammer.kgm")))*/
    var a = load_grammer(read_file(string("grammer2.kgm")))
    println(a.to_string())
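    // compute FIRST sets and print the set for every terminal and non-terminal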
    var doFirstSet = fun() {
        a.calculate_first_set()
        println("///////////////////START FIRST SET/////////////")
        println("//TERMINALS//")
        a.terminals.for_each( fun(terminal: util::pair<symbol::symbol, regex::regex>) {
            var set_str = string::string("{ ")
            a.first_set_map[terminal.first].for_each( fun(sym: symbol::symbol) {
                set_str += sym.to_string() + ", "
            })
            set_str += "}"
            print(terminal.first.to_string() + " first: " + set_str + "\n")
        })
        println("//NON TERMINALS//")
        a.non_terminals.for_each( fun(non_terminal: symbol::symbol) {
            var set_str = string::string("{ ")
            a.first_set_map[non_terminal].for_each( fun(sym: symbol::symbol) {
                set_str += sym.to_string() + ", "
            })
            set_str += "}"
            print(non_terminal.to_string() + " first: " + set_str + "\n")
            println()
        })
        println("///////////////////END FIRST SET/////////////")
    }
    doFirstSet()
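    // build a lexer from the grammar's terminal definitions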
    var lex = lexer(a.terminals)
    /*lex.set_input(read_file(string("test_grammer.krak")))*/
    /*lex.set_input(string("ccdahas spacedhas*/
    /*returndaaaaaaaaaaaaaa"))*/
    //lex.set_input(string("hibyed"))
    println("woo lexing:")
    /*range(8).for_each(fun(i: int) { println(lex.next().to_string()); } )*/
    /*range(80).for_each(fun(i: int) { println(lex.next().to_string()); } )*/
    println(a.to_string())
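    // build the state automaton, construct a parser from the grammar, and parse a small test input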
    a.calculate_state_automaton()
    var parse.construct(a): parser
    var result = parse.parse_input(string("ad"), string("fun name"))
    /*var parse.construct(): parser*/
    return 0
}