/* kraken/tests/test_grammer.krak */

import io:*
import grammer:*
import lexer:*
import string:*
import util:*
import symbol:*
import regex:* /* assumed to be needed for the regex::regex type referenced below */
fun main():int {
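    /* Load the grammar description from grammer.kgm and print it back, so the output shows what was parsed. */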
    /*var a = load_grammer(read_file(string("../krakenGrammer.kgm")))*/
    var a = load_grammer(read_file(string("grammer.kgm")))
    println(a.to_string())
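
    /* Print the FIRST set of every terminal and non-terminal as "<symbol> first: { ... }". */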
    /*a.calculate_first_set()*/
    println("///////////////////START FIRST SET/////////////")
    println("//TERMINALS//")
    a.terminals.for_each( fun(terminal: util::pair<symbol::symbol, regex::regex>) {
        var set_str = string::string("{ ")
        a.first_set_map[terminal.first].for_each( fun(sym: symbol::symbol) {
            set_str += sym.to_string() + " "
        })
        set_str += "}"
        print(terminal.first.to_string() + " first: " + set_str + "\n")
    })
println("//NON TERMINALS//")
a.non_terminals.for_each( fun(non_terminal: symbol::symbol) {
var set_str = string::string("{ ")
a.first_set_map[non_terminal].for_each( fun(sym: symbol::symbol) {
set_str += sym.to_string() + " "
})
set_str += "}"
print(non_terminal.to_string() + " first: " + set_str + "\n")
println()
})
println("///////////////////END FIRST SET/////////////")
    var lex = lexer(a.terminals)
    /*lex.set_input(read_file(string("test_grammer.krak")))*/
    lex.set_input(string("ccdahas spacedhas\nreturndaaaaaaaaaaaaaa"))
    println("woo lexing:")
    range(8).for_each(fun(i: int) { println(lex.next().to_string()); } )
    /*range(80).for_each(fun(i: int) { println(lex.next().to_string()); } )*/
    return 0
}