Work on multithreading and the interpreter, and prototype a #line-via-simple-passthrough AST-rewriting pass, enabled with -g

This commit is contained in:
Nathan Braswell
2016-06-14 02:14:25 -07:00
parent 1318e71efd
commit 27fea0e90c
12 changed files with 243 additions and 112 deletions

View File

@@ -8,6 +8,7 @@ import symbol:*
import tree:*
import serialize:*
import c_generator:*
import c_line_control:*
import interpreter:*
import os:*
import compiler_version
@@ -24,12 +25,16 @@ fun main(argc: int, argv: **char):int {
var interpret_instead = false
var argv1_str = string(argv[1])
var opt_str = string("-O3")
var line_ctrl = false
if (argv1_str == "-i") {
interpret_instead = true
input_file_offset++
} else if (argv1_str.length() > 2 && argv1_str.slice(0,2) == "-O") {
opt_str = argv1_str
input_file_offset++
} else if (argv1_str == "-g") {
line_ctrl = true
input_file_offset++
}
var kraken_file_name = string(argv[input_file_offset])
var executable_name = string(".").join(kraken_file_name.split('.').slice(0,-2))
@@ -77,15 +82,31 @@ fun main(argc: int, argv: **char):int {
println("done writing")
}
var parse.construct(gram): parser
var parse1.construct(&gram): parser
/*var parse2.construct(&gram): parser*/
/*var parse3.construct(&gram): parser*/
/*var parse4.construct(&gram): parser*/
/*var parse5.construct(&gram): parser*/
/*var parse6.construct(&gram): parser*/
/*var parse7.construct(&gram): parser*/
/*var parse8.construct(&gram): parser*/
var ast_pass.construct(): ast_transformation
var importer.construct(parse, ast_pass, vector(string(), base_dir + "/stdlib/")): importer
var parsers = vector(parse1)
/*var parsers = vector(parse1,parse2,parse3,parse4)*/
/*var parsers = vector(parse1,parse2,parse3,parse4,parse5,parse6)*/
/*var parsers = vector(parse1,parse2,parse3,parse4,parse5,parse6,parse7,parse8)*/
var importer.construct(parsers, ast_pass, vector(string(), base_dir + "/stdlib/")): importer
importer.import(kraken_file_name)
if (interpret_instead) {
printlnerr("Interpreting!")
var interpret.construct(importer.name_ast_map, importer.ast_pass.ast_to_syntax): interpreter
interpret.call_main()
} else {
if (line_ctrl) {
printlnerr("running C-specific passes")
printlnerr("running #line")
c_line_control(&importer.name_ast_map, &importer.ast_pass.ast_to_syntax)
}
printlnerr("Generating C")
var c_generator.construct(): c_generator
var c_output_pair = c_generator.generate_c(importer.name_ast_map, importer.ast_pass.ast_to_syntax)

View File

@@ -96,8 +96,8 @@ obj translation_unit (Object) {
return children == other.children && name == other.name && lambdas == other.lambdas
}
}
fun ast_import_ptr(name: string, translation_unit: *ast_node): *ast_node {
var to_ret.construct(name, translation_unit): import
fun ast_import_ptr(name: string, containing_tu: *ast_node): *ast_node {
var to_ret.construct(name, containing_tu): import
var ptr = new<ast_node>()
ptr->copy_construct(&ast_node::import(to_ret))
return ptr
@@ -111,14 +111,16 @@ fun is_import(node: *ast_node): bool {
obj import (Object) {
var scope: map<string, vector<*ast_node>>
var imported: set<string>
var containing_translation_unit: *ast_node
var translation_unit: *ast_node
var name: string
var starred: bool
fun construct(nameIn: string, translation_unit_in: *ast_node): *import {
fun construct(nameIn: string, containing_tu: *ast_node): *import {
scope.construct()
imported.construct()
name.copy_construct(&nameIn)
translation_unit = translation_unit_in
containing_translation_unit = containing_tu
translation_unit = null<ast_node>()
starred = false
return this
}
@@ -126,6 +128,7 @@ obj import (Object) {
scope.copy_construct(&old->scope)
imported.copy_construct(&old->imported)
name.copy_construct(&old->name)
containing_translation_unit = old->containing_translation_unit
translation_unit = old->translation_unit
starred = old->starred
}
@@ -139,7 +142,7 @@ obj import (Object) {
copy_construct(&other)
}
fun operator==(other: ref import): bool {
return imported == other.imported && name == other.name && translation_unit == other.translation_unit && starred == other.starred
return imported == other.imported && name == other.name && containing_translation_unit == other.containing_translation_unit && translation_unit == other.translation_unit && starred == other.starred
}
}
fun ast_identifier_ptr(name: *char, type: *type, enclosing_scope: *ast_node): *ast_node {

View File

@@ -42,9 +42,8 @@ obj ast_transformation (Object) {
fourth_pass_worklist.destruct()
}
// first pass defines all type_defs (objects and aliases), ADTs, and top-level if-comps/passthroughs
fun first_pass(file_name: string, parse_tree: *tree<symbol>, importer: *importer): *ast_node {
fun first_pass(file_name: string, parse_tree: *tree<symbol>, importer: *importer): pair<*ast_node, vector<*ast_node>> {
var translation_unit = ast_translation_unit_ptr(file_name)
importer->register(file_name, parse_tree, translation_unit)
parse_tree->children.for_each(fun(child: *tree<symbol>) {
if (child->data.name == "type_def") {
translation_unit->translation_unit.children.add(first_pass_type_def(child, translation_unit, false))
@@ -63,15 +62,16 @@ obj ast_transformation (Object) {
}
})
// now do all imports (done second so that if it imports this translation_unit,
// this one already has all its types defined
// now do all imports
// re return a vector of them so importer can fix them (and our translation unit scope)
// up with actual pointers to the other ASTs
var imports = vector<*ast_node>()
parse_tree->children.for_each(fun(child: *tree<symbol>) {
if (child->data.name == "import") {
var import_identifier_children = get_nodes("identifier", child)
var name = concat_symbol_tree(import_identifier_children[0])
var outside_translation_unit = importer->import_first_pass(name + ".krak")
add_to_scope(name, outside_translation_unit, translation_unit)
var import_node = ast_import_ptr(name, outside_translation_unit)
var import_node = ast_import_ptr(name, translation_unit)
imports.add(import_node)
translation_unit->translation_unit.children.add(import_node)
ast_to_syntax.set(import_node, child)
add_to_scope("~enclosing_scope", translation_unit, import_node)
@@ -80,7 +80,7 @@ obj ast_transformation (Object) {
import_node->import.starred = true
}
})
return translation_unit
return make_pair(translation_unit, imports)
}
fun transform_traits(traits_node: *tree<symbol>): set<string> {
if (!traits_node)
@@ -600,7 +600,11 @@ obj ast_transformation (Object) {
new_passthrough->simple_passthrough.linker_str = concat_symbol_tree(linker_str).slice(1,-2)
return new_passthrough
}
fun transform_statement(node: *tree<symbol>, scope: *ast_node, template_replacements: map<string, *type>): *ast_node return ast_statement_ptr(transform(node->children[0], scope, template_replacements));
fun transform_statement(node: *tree<symbol>, scope: *ast_node, template_replacements: map<string, *type>): *ast_node {
var to_ret = ast_statement_ptr(transform(node->children[0], scope, template_replacements));
ast_to_syntax.set(to_ret, node)
return to_ret
}
fun transform_declaration_statement(node: *tree<symbol>, scope: *ast_node, template_replacements: map<string, *type>): *ast_node {
// this might have an init position method call
var identifiers = get_nodes("identifier", node)

View File

@@ -995,7 +995,7 @@ obj c_generator (Object) {
ast_node::value(backing) return generate_value(node, need_variable)
ast_node::identifier(backing) return generate_identifier(node, enclosing_object, enclosing_func)
}
error("/* COULD NOT GENERATE */")
error(string("COULD NOT GENERATE ") + get_ast_name(node))
return code_triple("/* COULD NOT GENERATE */")
}
fun type_decoration(type: *type): string {
@@ -1053,8 +1053,8 @@ obj c_generator (Object) {
base_type::adt() return get_name(type->type_def) + indirection
base_type::function() {
// maybe disregard indirection in the future?
if (function_type_map.contains_key(*type))
return function_type_map[*type]
type = type->clone_with_indirection(0,false)
if (!function_type_map.contains_key(*type)) {
var temp_name = string("function_struct") + get_id()
var temp = string()
type->parameter_types.for_each(fun(parameter_type: *type) temp += string(", ") + type_to_c(parameter_type) + " ";)
@@ -1063,9 +1063,11 @@ obj c_generator (Object) {
function_typedef_string += with_data
function_typedef_string += without_data
function_typedef_string += string("typedef struct {\nvoid* data;\n") + temp_name + "_with_data func;\n} " + temp_name + ";\n"
function_typedef_string += string("/* ") + type->to_string() + " */\n"
// again, the indirection
function_type_map[*type] = temp_name+indirection
return temp_name + indirection
function_type_map[*type] = temp_name
}
return function_type_map[*type] + indirection
}
}
return string("impossible type") + indirection

View File

@@ -0,0 +1,76 @@
import symbol:*
import tree:*
import vector:*
import map:*
import util:*
import string:*
import mem:*
import io:*
import ast_nodes:*
import ast_transformation:*
// Build a simple_passthrough AST node whose passthrough string is a C `#line`
// directive pointing back at the Kraken source of `node` in file `name`.
// Emitted text: \n#line <position> "<name>"\n
// NOTE(review): assumes get_first_terminal(node)->data.position formats as a
// line number suitable for #line — TODO confirm against the lexer.
fun get_line(node: *tree<symbol>, name: string): *ast_node {
var to_ret = ast_simple_passthrough_ptr()
to_ret->simple_passthrough.passthrough_str = string("\n#line ") + get_first_terminal(node)->data.position + " \"" + name + "\"\n"
return to_ret
}
// Insert `to_add` immediately before the existing child `before` inside the
// child list of `in`. Only translation_unit and code_block nodes have an
// insertable child list; any other node kind falls through to error().
// NOTE(review): if `before` is not among the children of a valid container,
// this also falls through to the same error message — the two failure modes
// are indistinguishable to the caller.
fun add_before_in(to_add: *ast_node, before: *ast_node, in: *ast_node) {
// bc points at the owning child vector, or stays null for unsupported kinds
var bc = null<vector<*ast_node>>()
match(*in) {
ast_node::translation_unit(backing) bc = &in->translation_unit.children
ast_node::code_block(backing) bc = &in->code_block.children
}
if (bc) {
for (var i = 0; i < bc->size; i++;) {
if ((*bc)[i] == before) {
/*println("\nbefore\n")*/
/*(*bc).for_each(fun(n:*ast_node) println(get_ast_name(n));)*/
/*(*bc).for_each(fun(n:*ast_node) println(n);)*/
// vector::add(item, index) shifts the tail up by one slot
(*bc).add(to_add, i)
/*println("\nafter\n")*/
/*(*bc).for_each(fun(n:*ast_node) println(get_ast_name(n));)*/
/*(*bc).for_each(fun(n:*ast_node) println(n);)*/
/*println("\n")*/
return
}
}
}
error(string("cannot add_before_in to ") + get_ast_name(in))
}
// AST pass behind the -g flag: walk each translation unit and insert `#line`
// passthrough nodes in front of statements so the generated C maps back to
// the original Kraken source.
// NOTE(review): prototype — the `first` flag means only the FIRST entry
// iterated from name_ast_map is processed; the other files get no #line
// directives. TODO confirm whether that is the intended scope for now.
fun c_line_control(name_ast_map: *map<string, pair<*tree<symbol>,*ast_node>>, ast_to_syntax: *map<*ast_node, *tree<symbol>>) {
var first = true
name_ast_map->for_each(fun(name: string, syntax_ast_pair: pair<*tree<symbol>,*ast_node>) {
// recursive walker; `parent` is the node whose child list owns `node`
var helper: fun(*ast_node, *ast_node):void = fun(node: *ast_node, parent: *ast_node) {
if (!node) return
match(*node) {
ast_node::translation_unit(backing) get_ast_children(node).for_each(fun(n: *ast_node) helper(n, node);)
ast_node::type_def(backing) {
// type_defs are deliberately skipped for now (directives for them
// are commented out below)
/*println(string("adding ") + get_ast_name(node) + " to " + get_ast_name(parent))*/
/*add_before_in(get_line(), node, parent)*/
/*get_ast_children(node).for_each(fun(n: *ast_node) helper(n, node);)*/
}
ast_node::function(backing) get_ast_children(node).for_each(fun(n: *ast_node) helper(n, node);)
ast_node::code_block(backing) get_ast_children(node).for_each(fun(n: *ast_node) helper(n, node);)
ast_node::statement(backing) {
// only statements sitting directly inside a code block get a
// directive, and only when the first AST pass recorded their
// originating syntax node in ast_to_syntax
if (is_code_block(parent) && ast_to_syntax->contains_key(node)) {
/*println(string("adding ") + get_ast_name(node) + " to " + get_ast_name(parent))*/
add_before_in(get_line(ast_to_syntax->get(node), name), node, parent)
}
get_ast_children(node).for_each(fun(n: *ast_node) helper(n, node);)
}
ast_node::if_statement(backing) get_ast_children(node).for_each(fun(n: *ast_node) helper(n, node);)
ast_node::match_statement(backing) get_ast_children(node).for_each(fun(n: *ast_node) helper(n, node);)
ast_node::case_statement(backing) get_ast_children(node).for_each(fun(n: *ast_node) helper(n, node);)
ast_node::while_loop(backing) get_ast_children(node).for_each(fun(n: *ast_node) helper(n, node);)
ast_node::for_loop(backing) get_ast_children(node).for_each(fun(n: *ast_node) helper(n, node);)
}
}
if (first)
helper(syntax_ast_pair.second, null<ast_node>())
first = false
})
}

View File

@@ -10,21 +10,22 @@ import io:*
import ast_nodes:*
import ast_transformation:*
import parser:*
import thread:*
obj importer (Object) {
var parse: parser
var parsers: vector<parser>
var ast_pass: ast_transformation
var name_ast_map: map<string, pair<*tree<symbol>,*ast_node>>
var import_paths: vector<string>
fun construct(parseIn: ref parser, ast_passIn: ref ast_transformation, import_paths_in: vector<string>): *importer {
parse.copy_construct(&parseIn)
fun construct(parsersIn: ref vector<parser>, ast_passIn: ref ast_transformation, import_paths_in: vector<string>): *importer {
parsers.copy_construct(&parsersIn)
ast_pass.copy_construct(&ast_passIn)
name_ast_map.construct()
import_paths.copy_construct(&import_paths_in)
return this
}
fun copy_construct(old: *importer) {
parse.copy_construct(&old->parse)
parsers.copy_construct(&old->parsers)
ast_pass.copy_construct(&old->ast_pass)
name_ast_map.copy_construct(&old->name_ast_map)
import_paths.copy_construct(&old->import_paths)
@@ -34,15 +35,66 @@ obj importer (Object) {
copy_construct(&old)
}
fun destruct() {
parse.destruct()
parsers.destruct()
ast_pass.destruct()
name_ast_map.destruct()
import_paths.destruct()
}
fun import(file_name: string): *ast_node {
fun import(file_name: string) {
// lambda closes over our fix-up list
var imports_to_fix = vector<*ast_node>()
var import_first_pass = fun(file_name_idx: pair<string,int>) {
var file_name = file_name_idx.first
var file = string()
import_paths.for_each(fun(path: string) {
if (file_exists(path + file_name)) {
file = read_file(path + file_name)
} else {
}
})
printerr(file_name + ", ")
var parse_tree = parsers[file_name_idx.second].parse_input(file, file_name)
trim(parse_tree)
var ast_and_imports = ast_pass.first_pass(file_name, parse_tree, this)
imports_to_fix += ast_and_imports.second
name_ast_map[file_name] = make_pair(parse_tree, ast_and_imports.first)
}
printlnerr("**First Pass**")
printerr("parsing: ")
var to_ret = import_first_pass(file_name)
import_first_pass(make_pair(file_name,0))
for (var i = 0; i < imports_to_fix.size; i++;) {
/*println(string("iteration of imports to fix: ") + i)*/
var threads = vector<*ulong>()
var num_threads = min(imports_to_fix.size - i, parsers.size)
for (var j = i; j < i+num_threads; j++;) {
var import_name = imports_to_fix[j]->import.name
var file_name = import_name + ".krak"
if (!name_ast_map.contains_key(file_name)) {
/*import_first_pass(file_name)*/
/*join(run(import_first_pass, file_name))*/
if (num_threads > 1)
threads.add(run(import_first_pass, make_pair(file_name, j-i)))
else
import_first_pass(make_pair(file_name,0))
} else {
threads.add(null<ulong>())
}
}
/*println(string("iteration of imports to fix: ") + i + " made")*/
for (var j = i; j < i+num_threads; j++;) {
if (num_threads > 1) {
if (threads[j-i])
join(threads[j-i])
}
var im = imports_to_fix[j]
var import_name = im->import.name
var file_name = import_name + ".krak"
im->import.translation_unit = name_ast_map[file_name].second
add_to_scope(import_name, im->import.translation_unit, im->import.containing_translation_unit)
}
/*println(string("iteration of imports to fix: ") + i + " done")*/
i += num_threads-1
}
printlnerr()
printlnerr("**Second Pass**")
name_ast_map.for_each(fun(name: string, tree_pair: pair<*tree<symbol>, *ast_node>) ast_pass.second_pass(tree_pair.first, tree_pair.second);)
@@ -51,49 +103,6 @@ obj importer (Object) {
// this needs to be modified to do chaotic iteration on instantiating template classes, based on what I see in the C++ version
printlnerr("**Fourth Pass**")
name_ast_map.for_each(fun(name: string, tree_pair: pair<*tree<symbol>, *ast_node>) ast_pass.fourth_pass(tree_pair.first, tree_pair.second);)
/*
name_ast_map.for_each(fun(name: string, tree_pair: pair<*tree<symbol>, *ast_node>) {
print("writing ast for: "); println(name)
write_file(name + ".ast.dot", ast_to_dot(tree_pair.second))
})
*/
return to_ret
}
// (Pre-commit version, removed by this commit in favor of the threaded
// lambda inside import().)
// Parse `file_name` — searched for along import_paths — and run the AST
// first pass on it, returning its translation unit. Already-imported files
// are served from the name_ast_map cache without re-parsing.
// NOTE(review): if several import paths contain the file, every match is
// read and the last one wins — the `return` below only exits that single
// for_each iteration. If no path matches, an empty string is parsed.
fun import_first_pass(file_name: string): *ast_node {
if (name_ast_map.contains_key(file_name))
return name_ast_map[file_name].second
/*print("pre-parse: "); println(file_name)*/
var file = string()
import_paths.for_each(fun(path: string) {
/*println(string("Checking ") + path + " for " + file_name)*/
if (file_exists(path + file_name)) {
/*println("Found it!")*/
file = read_file(path + file_name)
return
} else {
/*println("did not find it")*/
}
})
printerr(file_name + ", ")
var parse_tree = parse.parse_input(file, file_name)
/*print("post-parse: "); println(file_name)*/
/*write_file(file_name + ".parse.dot", syntax_tree_to_dot(parse_tree))*/
/*print("pre-trim: "); println(file_name)*/
trim(parse_tree)
/*print("post-trim: "); println(file_name)*/
/*write_file(file_name + ".trimmed_parse.dot", syntax_tree_to_dot(parse_tree))*/
/*print("pre-first-ast: "); println(file_name)*/
var ast = ast_pass.first_pass(file_name, parse_tree, this)
/*print("post-first-ast: "); println(file_name)*/
return ast
}
// (Pre-commit version, removed by this commit.)
// Record the parse tree and translation unit for `file_name` in
// name_ast_map so later passes and the import cache can look them up.
fun register(file_name: string, parse_tree: *tree<symbol>, translation_unit: *ast_node) {
name_ast_map.set(file_name, make_pair(parse_tree, translation_unit))
/*print("Registered parse_tree+translation_unit for ")*/
/*println(file_name)*/
}
fun trim(parse_tree: *tree<symbol>) {
remove_node(symbol("$NULL$", false), parse_tree)

View File

@@ -656,7 +656,7 @@ obj interpreter (Object) {
return make_pair(value::variable(make_pair(get_real_value(dereference_val).pointer.first, dereference_val.pointer.second->clone_with_decreased_indirection())), control_flow::nor())
}
// check for built-in-ish externs (everything the standard library needs)
if (func_name == "printf" || func_name == "malloc" || func_name == "free" || func_name == "fflush" || func_name == "snprintf" || func_name == "fopen" || func_name == "fclose" || func_name == "ftell" || func_name == "fseek" || func_name == "fread" || func_name == "fwrite" || func_name == "atan" || func_name == "atan2" || func_name == "acos" || func_name == "asin" || func_name == "tan" || func_name == "cos" || func_name == "sin")
if (func_name == "printf" || func_name == "malloc" || func_name == "free" || func_name == "memmove" || func_name == "fflush" || func_name == "snprintf" || func_name == "fopen" || func_name == "fclose" || func_name == "ftell" || func_name == "fseek" || func_name == "fread" || func_name == "fwrite" || func_name == "atan" || func_name == "atan2" || func_name == "acos" || func_name == "asin" || func_name == "tan" || func_name == "cos" || func_name == "sin")
return make_pair(call_built_in_extern(func_name, parameters), control_flow::nor())
if (!func_call_func->function.body_statement)
error(string("trying to call unsupported extern function: ") + func_name)
@@ -764,6 +764,9 @@ obj interpreter (Object) {
} else if (func_name == "free") {
assert(parameters.size == 1 && is_pointer(parameters[0]), "Calling free with wrong params")
free(parameters[0].pointer.first)
} else if (func_name == "memmove") {
assert(parameters.size == 3 && is_pointer(parameters[0]) && is_pointer(parameters[1]) && is_ulong_int(parameters[2]), "Calling memmove with wrong params")
return value::pointer(make_pair(memmove((parameters[0].pointer.first) cast *void, (parameters[1].pointer.first) cast *void, parameters[2].ulong_int), type_ptr(base_type::void_return(), 1)))
} else if (func_name == "fflush") {
assert(parameters.size == 1 && is_integer(parameters[0]), "Calling fflush with wrong params")
fflush(parameters[0].integer)
@@ -884,7 +887,7 @@ obj interpreter (Object) {
}
error("bad branch type")
}
fun interpret_code_block(block: *ast_node, var_stack: *stack<map<*ast_node, value>>, enclosing_object: value, enclosing_func: *ast_node, defer_stack: *stack<*ast_node>): pair<value, control_flow> {
fun interpret_code_block(block: *ast_node, var_stack: *stack<map<*ast_node, value>>, enclosing_object: value, enclosing_func: *ast_node): pair<value, control_flow> {
var_stack->push(map<*ast_node,value>())
var defer_stack = stack<*ast_node>()
for (var i = 0; i < block->code_block.children.size; i++;) {
@@ -1025,7 +1028,7 @@ obj interpreter (Object) {
ast_node::while_loop(backing) return interpret_while_loop(node, var_stack, enclosing_object, enclosing_func, defer_stack)
ast_node::for_loop(backing) return interpret_for_loop(node, var_stack, enclosing_object, enclosing_func)
ast_node::branching_statement(backing) return interpret_branching_statement(node)
ast_node::code_block(backing) return interpret_code_block(node, var_stack, enclosing_object, enclosing_func, defer_stack)
ast_node::code_block(backing) return interpret_code_block(node, var_stack, enclosing_object, enclosing_func)
ast_node::return_statement(backing) return interpret_return_statement(node, var_stack, enclosing_object, enclosing_func, defer_stack)
ast_node::declaration_statement(backing) return interpret_declaration_statement(node, var_stack, enclosing_object, enclosing_func, defer_stack)
ast_node::assignment_statement(backing) return interpret_assignment_statement(node, var_stack, enclosing_object, enclosing_func, defer_stack)

View File

@@ -35,11 +35,6 @@ fun print(toPrint: *char) : void {
fun println()
print("\n")
fun print<T>(toPrint: *T) {
print("ptr:<")
print((toPrint) cast ulong)
print(">")
}
fun print(toPrint: char) : void
print(string::string(toPrint))

View File

@@ -13,7 +13,7 @@ import io:*
obj parser (Object) {
var input: vector<symbol>
var gram: grammer
var gram: *grammer
var gss: gss
var to_reduce: stack<reduction>
var to_shift: stack< pair<*tree<int>, int> >
@@ -21,9 +21,9 @@ obj parser (Object) {
var packed_map: map<*tree<symbol>, bool>
var reduces_to_null_map: map<vector<symbol>, bool>
fun construct(grammerIn: grammer): *parser {
fun construct(grammerIn: *grammer): *parser {
input.construct()
gram.copy_construct(&grammerIn)
gram = grammerIn
gss.construct()
to_reduce.construct()
to_shift.construct()
@@ -32,9 +32,12 @@ obj parser (Object) {
reduces_to_null_map.construct()
return this
}
fun construct(): *parser {
return construct(null<grammer>())
}
fun copy_construct(old: *parser) {
input.copy_construct(&old->input)
gram.copy_construct(&old->gram)
gram = old->gram
gss.copy_construct(&old->gss)
to_reduce.copy_construct(&old->to_reduce)
to_shift.copy_construct(&old->to_shift)
@@ -48,7 +51,6 @@ obj parser (Object) {
}
fun destruct() {
input.destruct()
gram.destruct()
gss.destruct()
to_reduce.destruct()
to_shift.destruct()
@@ -68,13 +70,13 @@ obj parser (Object) {
// if the zero state contains any reductions for state 0 and eof, then
// it must be reducing to the goal state
/*println("checking the bidness")*/
if (inputStr == "" && gram.parse_table.get(0, eof_symbol()).contains(action(action_type::reduce(), 0))) {
if (inputStr == "" && gram->parse_table.get(0, eof_symbol()).contains(action(action_type::reduce(), 0))) {
println("Accept on no input for ")
println(name)
return new<tree<symbol>>()->construct(null_symbol())
}
var lex = lexer(gram.terminals)
var lex = lexer(gram->terminals)
lex.set_input(inputStr)
var current_symbol.construct(): symbol
for (current_symbol = lex.next(); current_symbol != eof_symbol() && current_symbol != invalid_symbol(); current_symbol = lex.next();) {
@@ -97,7 +99,7 @@ obj parser (Object) {
var null_symbol_tree = null<tree<symbol>>()
gram.parse_table.get(0, input[0]).for_each(fun(act: action) {
gram->parse_table.get(0, input[0]).for_each(fun(act: action) {
/*println("for each action")
act.print()
*/
@@ -108,7 +110,7 @@ obj parser (Object) {
/*print("act == reduce() && == 0 Adding reduction from state: ")
println(v0->data)
*/
to_reduce.push(reduction(v0, gram.rules[act.state_or_rule].lhs, 0, null_symbol_tree, null_symbol_tree))
to_reduce.push(reduction(v0, gram->rules[act.state_or_rule].lhs, 0, null_symbol_tree, null_symbol_tree))
}
})
@@ -190,11 +192,11 @@ obj parser (Object) {
// the shift lookup will fail, and likely other things, and this is our accept
// criteria anyway
/*if (curr_reached->data == 0 && curr_reduction.sym == gram.rules[0].lhs)*/
if (curr_reduction.sym == gram.rules[0].lhs) {
if (curr_reduction.sym == gram->rules[0].lhs) {
/*println("would accept here")*/
return;
}
var shift_to = gram.parse_table.get_shift(curr_reached->data, curr_reduction.sym).state_or_rule
var shift_to = gram->parse_table.get_shift(curr_reached->data, curr_reduction.sym).state_or_rule
/*println("got shift to")*/
var new_label = null<tree<symbol>>()
if (curr_reduction.length == 0) {
@@ -219,10 +221,10 @@ obj parser (Object) {
gss.add_edge(shift_to_node, curr_reached, new_label)
// do non-null reductions
if (curr_reduction.length) {
gram.parse_table.get(shift_to, input[i]).for_each(fun(act: action) {
gram->parse_table.get(shift_to, input[i]).for_each(fun(act: action) {
/*if (act.act == reduce && !fully_reduces_to_null(reduce_rule)) {*/
if (act.act == action_type::reduce() && act.rule_position != 0) {
var reduce_rule = gram.rules[act.state_or_rule]
var reduce_rule = gram->rules[act.state_or_rule]
to_reduce.push(reduction(curr_reached, reduce_rule.lhs,
act.rule_position,
get_nullable_parts(reduce_rule),
@@ -238,11 +240,11 @@ obj parser (Object) {
shift_to_node = gss.new_node(shift_to)
gss.add_to_frontier(i, shift_to_node)
gss.add_edge(shift_to_node, curr_reached, new_label)
gram.parse_table.get(shift_to, input[i]).for_each(fun(act: action) {
gram->parse_table.get(shift_to, input[i]).for_each(fun(act: action) {
if (act.act == action_type::push()) {
to_shift.push(make_pair(shift_to_node, act.state_or_rule))
} else {
var action_rule = gram.rules[act.state_or_rule]
var action_rule = gram->rules[act.state_or_rule]
// tricky tricky tricky. Fully reduces to null is not the same as act.rule_position being 0
/*if (fully_reduces_to_null(action_rule)) {*/
if (act.rule_position == 0) {
@@ -287,8 +289,8 @@ obj parser (Object) {
println(i+1)
*/
gss.add_edge(shift_to_node, shift.first, new_label)
gram.parse_table.get_reduces(shift.second, input[i+1]).for_each(fun(action: action) {
var reduce_rule = gram.rules[action.state_or_rule]
gram->parse_table.get_reduces(shift.second, input[i+1]).for_each(fun(action: action) {
var reduce_rule = gram->rules[action.state_or_rule]
/*if (!fully_reduces_to_null(reduce_rule)) {*/
if (action.rule_position != 0) {
to_reduce.push(reduction(shift.first, reduce_rule.lhs, action.rule_position,
@@ -308,14 +310,14 @@ obj parser (Object) {
/*println("post add to frontier")*/
gss.add_edge(shift_to_node, shift.first, new_label)
/*println("post add edger")*/
gram.parse_table.get(shift.second, input[i+1]).for_each(fun(action: action) {
gram->parse_table.get(shift.second, input[i+1]).for_each(fun(action: action) {
/*println("looking at an action")*/
if (action.act == action_type::push()) {
/*println("is push")*/
next_shifts.push(make_pair(shift_to_node, action.state_or_rule))
} else {
/*println("is reduce")*/
var action_rule = gram.rules[action.state_or_rule]
var action_rule = gram->rules[action.state_or_rule]
/*if (!fully_reduces_to_null(action_rule)) {*/
if (action.rule_position != 0) {
/*println("does not reduce to null")*/
@@ -391,7 +393,7 @@ obj parser (Object) {
}
fun reduces_to_null(r: ref rule): bool {
if (!reduces_to_null_map.contains_key(r.rhs))
reduces_to_null_map[r.rhs] = gram.first_vector(r.rhs).contains(null_symbol())
reduces_to_null_map[r.rhs] = gram->first_vector(r.rhs).contains(null_symbol())
return reduces_to_null_map[r.rhs]
}
fun get_nullable_parts(r: ref rule): *tree<symbol> {

View File

@@ -18,6 +18,8 @@ fun to_string(in: long): string
return to_string_num(in)
fun to_string(in: ulong): string
return to_string_num(in)
// Generic fallback for pointer types: format any pointer as
// ptr:<numeric address> by casting it to ulong.
fun to_string<T>(in: *T): string
return string("ptr:<") + to_string_num((in) cast ulong) + ">"
fun string_to_int(it: string): int {
var is_negative = false

View File

@@ -20,7 +20,7 @@ fun run<T>(func :fun(T): void, data: T) : *ulong {
// to avoid extra copy etc
var func_copy = new<fun(T):void>()
var func_and_data = new<pair<*void, *T>>()->construct()
*data_copy = data
maybe_copy_construct(data_copy, &data)
memmove((func_copy) cast *void, (&func) cast *void, #sizeof<fun(T):void>)
*func_and_data = make_pair((func_copy) cast *void, data_copy)
var wrapper = fun(func_and_data: *void): *void {

View File

@@ -34,7 +34,7 @@ obj vector<T> (Object, Serializable) {
}
fun copy_construct(old: *vector<T>): void {
construct(old->size)
construct(old->available)
size = old->size
if (is_object<T>()) {
for (var i = 0; i < old->size; i++;)
@@ -213,12 +213,26 @@ obj vector<T> (Object, Serializable) {
}
fun add(dataIn: ref T): void { addEnd(dataIn); }
fun addEnd(dataIn: ref T): void {
if (size+1 >= available)
// if we resize, we need to be careful as the dataIn reference
// may come from this itself
if (size+1 > available) {
var temp = dataIn
resize((size+1)*2);
maybe_copy_construct(&data[size], &temp);
} else {
maybe_copy_construct(&data[size], &dataIn);
}
size++;
}
// Insert `dataIn` at `index`, shifting elements [index, size) up by one.
// NOTE(review): requires a non-empty vector (grows via last()); inserting
// into an empty vector would read an invalid element — TODO confirm no
// caller does that. Also assumes `dataIn` does not alias an element at or
// after `index`, since the shift happens before the final assignment.
fun add(dataIn: ref T, index: int) {
// append a copy of the last element: addEnd handles any reallocation and
// this also places the old last element into its post-insert slot
add(last())
// shift the rest of the tail up by one
for (var i = size-2; i > index; i--;) {
data[i] = data[i-1]
}
data[index] = dataIn
}
fun remove(index: int) {
maybe_destruct(&data[index])
for (var i = index+1; i < size; i++;) {