3 sec laptop speed improvement in hash_map
@@ -1,6 +1,7 @@
import io:*
import mem:*
import map:*
import hash_map:*
import stack:*
import string:*
import util:*
@@ -53,13 +53,13 @@ obj hash_map<T,U> (Object, Serializable) {
if (!data[(key_hash%data.size) cast int].contains_key(key)) {
size++
if (size > data.size) {
var new_data = vector::vector<map::map<T,U>>()
var new_data.construct(size*2): vector::vector<map::map<T,U>>
for (var i = 0; i < size*2; i++;)
new_data.addEnd(map::map<T,U>())
for_each(fun(key: T, value: U) {
new_data[(util::hash(key)%new_data.size) cast int].set(key, value)
})
data = new_data
data.swap(new_data)
}
}
data[(key_hash%data.size) cast int].set(key, value)
@@ -67,6 +67,9 @@ obj hash_map<T,U> (Object, Serializable) {
fun get(key: ref T): ref U {
return data[(util::hash(key)%data.size) cast int].get(key)
}
fun get_ptr_or_null(key: ref T): *U {
return data[(util::hash(key)%data.size) cast int].get_ptr_or_null(key)
}
fun contains_key(key: ref T): bool {
return data[(util::hash(key)%data.size) cast int].contains_key(key)
}
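The hunk above is the growth path of hash_map::set: once the element count passes the bucket count, it builds a bucket vector of twice the size, reinserts every key at util::hash(key) % new_data.size, and then hands the storage over. The hunk shows both the copy-assignment data = new_data and data.swap(new_data); given the commit title, the swap presumably replaces the assignment, which is where the speed win would come from. A rough C++ sketch of the same pattern follows; the names (chained_map, rehash_grow, put) and the growth policy are illustrative assumptions, not taken from this repository.

    // Sketch only: a minimal separate-chaining table that doubles and rehashes,
    // then swaps the new bucket vector into place instead of copy-assigning it.
    #include <cstddef>
    #include <functional>
    #include <iostream>
    #include <list>
    #include <string>
    #include <utility>
    #include <vector>

    template <typename K, typename V>
    class chained_map {
        using Bucket = std::list<std::pair<K, V>>;
        std::vector<Bucket> buckets_ = std::vector<Bucket>(8);
        std::size_t size_ = 0;

        Bucket& bucket_for(const K& key, std::vector<Bucket>& table) {
            return table[std::hash<K>{}(key) % table.size()];
        }

        void rehash_grow() {
            std::vector<Bucket> bigger(buckets_.size() * 2);
            for (auto& b : buckets_)
                for (auto& kv : b)
                    bucket_for(kv.first, bigger).push_back(std::move(kv));
            buckets_.swap(bigger);   // O(1): trade pointers, not elements
        }

    public:
        void put(const K& key, const V& value) {
            for (auto& kv : bucket_for(key, buckets_))
                if (kv.first == key) { kv.second = value; return; }
            if (++size_ > buckets_.size()) rehash_grow();      // grow once load factor passes 1
            bucket_for(key, buckets_).push_back({key, value}); // re-index: bucket count may have changed
        }

        const V* get_ptr_or_null(const K& key) {
            for (auto& kv : bucket_for(key, buckets_))
                if (kv.first == key) return &kv.second;
            return nullptr;
        }
    };

    int main() {
        chained_map<std::string, int> m;
        for (int i = 0; i < 100; ++i) m.put("k" + std::to_string(i), i);
        if (const int* v = m.get_ptr_or_null("k42")) std::cout << *v << "\n";  // prints 42
    }

Recomputing the bucket index after a possible resize, as put does here, mirrors the diffed code, which re-indexes data with the new data.size before the final set.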
@@ -69,7 +69,6 @@ obj parser (Object) {
// if the zero state contains any reductions for state 0 and eof, then
// it must be reducing to the goal state
/*println("checking the bidness")*/
if (inputStr == "" && gram->parse_table.get(0, eof_symbol()).contains(action(action_type::reduce(), 0))) {
println("Accept on no input for ")
println(name)
@@ -80,8 +79,6 @@ obj parser (Object) {
lex.set_input(inputStr)
var current_symbol.construct(): symbol
for (current_symbol = lex.next(); current_symbol != eof_symbol() && current_symbol != invalid_symbol(); current_symbol = lex.next();) {
/*println("current_symbol is ")*/
/*println(current_symbol.to_string())*/
if (current_symbol != eof_symbol() && current_symbol != invalid_symbol())
current_symbol.source = name
input.addEnd(current_symbol)
@@ -100,16 +97,9 @@ obj parser (Object) {
var null_symbol_tree = null<tree<symbol>>()

gram->parse_table.get(0, input[0]).for_each(fun(act: action) {
/*println("for each action")
act.print()
*/
if (act.act == action_type::push())
to_shift.push(make_pair(v0, act.state_or_rule))
/*else if (act.act == reduce && fully_reduces_to_null(gram.rules[act.state_or_rule])) {*/
else if (act.act == action_type::reduce() && act.rule_position == 0) {
/*print("act == reduce() && == 0 Adding reduction from state: ")
println(v0->data)
*/
to_reduce.push(reduction(v0, gram->rules[act.state_or_rule].lhs, 0, null_symbol_tree, null_symbol_tree))
}
})
@@ -128,19 +118,12 @@ obj parser (Object) {
return null<tree<symbol>>()
}
SPPFStepNodes.clear()
/*
print("to_reduce size: ")
println(to_reduce.size())
print("to_shift size: ")
println(to_shift.size())
*/
while (to_reduce.size())
reducer(i)
shifter(i)
}
var acc_state = gss.frontier_get_acc_state(input.size-1)
if (acc_state) {
/*println("ACCEPTED!")*/
return gss.get_edge(acc_state, v0)
}
@@ -157,37 +140,13 @@ obj parser (Object) {
}
fun reducer(i: int) {
var curr_reduction = to_reduce.pop()
/*print("reducing from state: ")
println(curr_reduction.from->data)
print("curr_reduction.length (not length-1) is: ")
println(curr_reduction.length)
*/
gss.get_reachable_paths(curr_reduction.from, max(0, curr_reduction.length-1)).
for_each(fun(path: ref vector<*tree<int>>) {
/*println("in get_reachable_paths for_each loop")*/
var path_edges = range(path.size-1).map(fun(indx: int): *tree<symbol> { return gss.get_edge(path[indx], path[indx+1]);}).reverse()
/*print("path ")
path.for_each(fun(part: *tree<int>) {
print(part->data)
print(" ")
})
println()
println("got path edges")
println("there are this many:")
println(path_edges.size)
*/
if (curr_reduction.length != 0) {
path_edges.addEnd(curr_reduction.label)
/*println("also adding the one from the reduction")
println(curr_reduction.label->data.to_string())
*/
}
var curr_reached = path.last()
/*print("checking shift for state ")
print(curr_reached->data)
print(" and ")
println(curr_reduction.sym.to_string())
*/
// if this is the Goal = a type reduction, then skip the actual reduction part.
// the shift lookup will fail, and likely other things, and this is our accept
// criteria anyway
@@ -197,7 +156,6 @@ obj parser (Object) {
return;
}
var shift_to = gram->parse_table.get_shift(curr_reached->data, curr_reduction.sym).state_or_rule
/*println("got shift to")*/
var new_label = null<tree<symbol>>()
if (curr_reduction.length == 0) {
new_label = curr_reduction.nullable_parts
@@ -222,16 +180,12 @@ obj parser (Object) {
// do non-null reductions
if (curr_reduction.length) {
gram->parse_table.get(shift_to, input[i]).for_each(fun(act: action) {
/*if (act.act == reduce && !fully_reduces_to_null(reduce_rule)) {*/
if (act.act == action_type::reduce() && act.rule_position != 0) {
var reduce_rule = gram->rules[act.state_or_rule]
to_reduce.push(reduction(curr_reached, reduce_rule.lhs,
act.rule_position,
get_nullable_parts(reduce_rule),
new_label))
/*print("(non null) Adding reduction from state: ")
println(curr_reached->data)
*/
}
})
}
@@ -251,16 +205,10 @@ obj parser (Object) {
to_reduce.push(reduction(shift_to_node, action_rule.lhs, 0,
get_nullable_parts(action_rule),
null<tree<symbol>>() ))
/*print("null reduces Adding reduction from state: ")
println(shift_to_node->data)
*/
} else if (curr_reduction.length != 0) {
to_reduce.push(reduction(curr_reached, action_rule.lhs, act.rule_position,
get_nullable_parts(action_rule),
new_label ))
/*print("null does not reduce Adding reduction from state: ")
println(curr_reached->data)
*/
}
}
})
@@ -270,73 +218,40 @@ obj parser (Object) {
})
}
fun shifter(i: int) {
/*println("shifting")*/
if (i >= input.size-1)
return; // darn ambiguity
/*print("shifting on ")
println(input[i].to_string())
*/
var next_shifts = stack< pair<*tree<int>, int> >()
var new_label = new<tree<symbol>>()->construct(input[i])
while (!to_shift.empty()) {
/*println("to_shift not empty")*/
var shift = to_shift.pop()
/*println("post pop")*/
var shift_to_node = gss.in_frontier(i+1, shift.second)
/*println("post in_frontier")*/
if (shift_to_node) {
/*print("already in frontier ")
println(i+1)
*/
gss.add_edge(shift_to_node, shift.first, new_label)
gram->parse_table.get_reduces(shift.second, input[i+1]).for_each(fun(action: action) {
var reduce_rule = gram->rules[action.state_or_rule]
/*if (!fully_reduces_to_null(reduce_rule)) {*/
if (action.rule_position != 0) {
to_reduce.push(reduction(shift.first, reduce_rule.lhs, action.rule_position,
get_nullable_parts(reduce_rule),
new_label ))
/*print("if shift to node Adding reduction from state: ")
println(shift.first->data)
*/
}
})
} else {
/*print("adding to frontier ")
println(i+1)
*/
shift_to_node = gss.new_node(shift.second)
gss.add_to_frontier(i+1, shift_to_node)
/*println("post add to frontier")*/
gss.add_edge(shift_to_node, shift.first, new_label)
/*println("post add edger")*/
gram->parse_table.get(shift.second, input[i+1]).for_each(fun(action: action) {
/*println("looking at an action")*/
if (action.act == action_type::push()) {
/*println("is push")*/
next_shifts.push(make_pair(shift_to_node, action.state_or_rule))
} else {
/*println("is reduce")*/
var action_rule = gram->rules[action.state_or_rule]
/*if (!fully_reduces_to_null(action_rule)) {*/
if (action.rule_position != 0) {
/*println("does not reduce to null")*/
to_reduce.push(reduction(shift.first, action_rule.lhs, action.rule_position,
get_nullable_parts(action_rule),
new_label ))
/*print("not shift to, reduce, != 0 Adding reduction from state: ")
println(shift.first->data)
print("added ruduction rule+position: ")
println(action.rule_position)
*/
} else {
/*println("does reduce to null")*/
to_reduce.push(reduction(shift_to_node, action_rule.lhs, 0,
get_nullable_parts(action_rule),
null<tree<symbol>>() ))
/*print("not shift to, reduce, == 0 Adding reduction from state: ")
println(shift_to_node->data)
*/
}
}
})
@@ -413,7 +328,6 @@ obj parser (Object) {
obj gss (Object) {
var data: vector<vector<*tree<int>>>
/*var edges: map< pair<*tree<int>, *tree<int>>, *tree<symbol> >*/
var edges: hash_map< pair<*tree<int>, *tree<int>>, *tree<symbol> >

fun construct(): *gss {
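The gss object keeps its edge labels in a hash_map keyed by a pair of node pointers; the older map declaration survives only as a comment. A small C++ analogue is sketched below, on the assumption that an unordered hash table is the right comparison; std::unordered_map has no default hash for a std::pair key, so one is supplied. Node, Label, and PtrPairHash are illustrative names, not from the repository.

    // Sketch only: an edge table keyed by a pair of node pointers.
    #include <cstddef>
    #include <functional>
    #include <iostream>
    #include <unordered_map>
    #include <utility>

    struct Node  { int state; };
    struct Label { const char* text; };

    struct PtrPairHash {
        std::size_t operator()(const std::pair<const Node*, const Node*>& p) const {
            std::size_t a = std::hash<const Node*>{}(p.first);
            std::size_t b = std::hash<const Node*>{}(p.second);
            return a ^ (b + 0x9e3779b9u + (a << 6) + (a >> 2));  // boost-style combine
        }
    };

    int main() {
        std::unordered_map<std::pair<const Node*, const Node*>, Label*, PtrPairHash> edges;
        Node u{0}, v{1};
        Label lab{"S -> a"};
        edges[{&u, &v}] = &lab;          // add an edge label
        auto it = edges.find({&u, &v});  // look it up by the endpoint pair
        if (it != edges.end()) std::cout << it->second->text << "\n";
    }

Keeping edge lookup at expected O(1) matters here because get_edge is called for every edge on every reduction path the reducer walks.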
@@ -43,6 +43,17 @@ obj vector<T> (Object, Serializable) {
memmove((data) cast *void, (old->data) cast *void, size * #sizeof<T>)
}
}
fun swap(other: ref vector<T>) {
var temp_data = data
var temp_size = size
var temp_available = available
data = other.data
size = other.size
available = other.available
other.data = temp_data
other.size = temp_size
other.available = temp_available
}
fun serialize(): vector<char> {
var toRet = serialize(size)
for (var i = 0; i < size; i++;)
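The new vector::swap above is the other half of the change: it exchanges only the data pointer, the size, and the available count with the other vector, so handing a freshly built table back to hash_map costs a constant amount of work regardless of how many elements it holds, whereas the earlier assignment presumably copied the backing store. A stripped-down C++ sketch of the same field-level swap follows; raw_vec and its members are illustrative names, not the repository's types.

    // Sketch only: constant-time swap of a growable buffer's bookkeeping fields.
    #include <algorithm>
    #include <cstddef>
    #include <iostream>

    template <typename T>
    struct raw_vec {
        T* data = nullptr;
        std::size_t size = 0;
        std::size_t available = 0;

        void swap(raw_vec& other) {
            std::swap(data, other.data);            // exchange buffer pointers
            std::swap(size, other.size);            // and the element count
            std::swap(available, other.available);  // and the capacity
        }
    };

    int main() {
        raw_vec<int> a, b;
        a.data = new int[4]{1, 2, 3, 4};
        a.size = 4;
        a.available = 4;
        a.swap(b);                                        // O(1), regardless of length
        std::cout << b.size << " " << b.data[2] << "\n";  // prints "4 3"
        delete[] b.data;
    }

std::vector::swap behaves the same way, which is why the usual C++ idiom for replacing or shrinking a vector is to build a new one and swap it in.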