Grammar loading works and the test file can lex itself. Removed regex memory safety for speed; will reintroduce it next

This commit is contained in:
Nathan Braswell
2015-07-06 13:48:19 -04:00
parent 501331e37b
commit 92f5c63c9a
7 changed files with 61 additions and 10 deletions
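For context, a rough sketch of how the changed pieces are presumably meant to be used together, going by the diffs below: load_grammer now takes the grammar text itself instead of a path, and grammer.regexs is now a vector so it can be handed straight to the new lexer() convenience constructor. The driver lines here are hypothetical and not part of this commit; only the called functions appear in the diffs.

    /* hypothetical driver, not in this commit; "path" is assumed to be a
       string::string naming the grammar file */
    var gram = load_grammer(io::read_file(path))
    var lex = lexer(gram.regexs)   /* regexs is now vector<regex::regex>, matching lexer()'s parameter */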

View File

@@ -46,7 +46,7 @@ fun split_into_words(gram_str: string::string): vector::vector<string::string> {
     return out
 }
-fun load_grammer(path: string::string): grammer {
+fun load_grammer(gram_str: string::string): grammer {
     var gram.construct(): grammer
     var leftSide = symbol::symbol("", false)
     var doLeftSide = true
@@ -55,7 +55,7 @@ fun load_grammer(path: string::string): grammer {
     /*io::print("word: "); io::println(word);*/
     /*})*/
     /*return gram*/
-    split_into_words(io::read_file(path)).for_each(fun(word: string::string) {
+    split_into_words(gram_str).for_each(fun(word: string::string) {
         if (word == "=") {
             // do nothing
         } else if (word == "|") {
@@ -69,7 +69,13 @@ fun load_grammer(path: string::string): grammer {
             if (doLeftSide)
                 leftSide = symbol::symbol(word, true)
             else
-                rightSide.add(symbol::symbol(word, word[0] == '"'))
+                if (word[0] == '"') {
+                    rightSide.add(symbol::symbol(word.slice(1,-2), true))
+                    /*gram.regexs.add_unique(regex::regex(word.slice(1,-2)))*/
+                    gram.regexs.add(regex::regex(word.slice(1,-2)))
+                } else {
+                    rightSide.add(symbol::symbol(word, false))
+                }
             doLeftSide = false
         }
     })
@@ -78,7 +84,7 @@ fun load_grammer(path: string::string): grammer {
 obj grammer (Object) {
     var rules: vector::vector<rule>
-    var regexs: set::set<regex::regex>
+    var regexs: vector::vector<regex::regex>
     fun construct(): *grammer {
         rules.construct()
@@ -143,7 +149,7 @@ obj rule (Object) {
     fun to_string(): string::string {
         var result = lhs.name + " -> "
-        rhs.for_each( fun(i : symbol::symbol) { result += i.name + ", "; } )
+        rhs.for_each( fun(i : symbol::symbol) { result += i.to_string() + ", "; } )
         return result
     }
 }

View File

@@ -4,6 +4,12 @@ import string
 import vector
 import util
+fun lexer(regs: vector::vector<regex::regex>): lexer {
+    var toRet.construct() :lexer
+    regs.for_each( fun(reg: regex::regex) toRet.add_regex(reg); )
+    return toRet
+}
 obj lexer (Object) {
     var regs: vector::vector<regex::regex>
     var input: string::string

View File

@@ -56,14 +56,14 @@ obj regex (Object) {
     }
     fun copy_construct(old:*regex):void {
-        //begin = old->begin
-        //regexString.copy_construct(&old->regexString)
-        construct(old->regexString)
+        begin = old->begin
+        regexString.copy_construct(&old->regexString)
+        /*construct(old->regexString)*/
     }
     fun destruct():void {
         regexString.destruct()
-        mem::safe_recursive_delete(begin, fun(it: *regexState): set::set<*regexState> { return set::from_vector(it->next_states); } )
+        /*mem::safe_recursive_delete(begin, fun(it: *regexState): set::set<*regexState> { return set::from_vector(it->next_states); } )*/
     }
     fun operator==(other: regex):bool {

View File

@@ -127,6 +127,10 @@ obj vector<T> (Object) {
             return i;
         return -1;
     }
+    // ditto
+    fun contains<U>(item: U): bool {
+        return find(item) != -1
+    }
     fun operator[]=(index: int, dataIn: T) {
         set(index, dataIn)
@@ -140,6 +144,11 @@ obj vector<T> (Object) {
         for (var i = 0; i < dataIn.size; i++;)
             addEnd(dataIn[i]);
     }
+    // same darn trick
+    fun add_unique<U>(dataIn: U): void {
+        if (!contains(dataIn))
+            addEnd(dataIn)
+    }
     fun add(dataIn: T): void { addEnd(dataIn); }
     fun addEnd(dataIn: T): void {
         if (size+1 >= available)