Inline dup and most of drop; add a relative column to the benchmarks; add a first basic pass of local type inference (without any of the complex parts implemented) and use it to decide word value equality where possible
@@ -102,25 +102,6 @@
true (recurse vs (+ i 2) (concat r (slice vs i (+ i 2))))
))) (vau de (vs b) (vapply let (array (destructure_helper vs 0 (array)) b) de)))

; and a destructuring-capable lambda!
only_symbols (rec-lambda recurse (a i) (cond (= i (len a)) true
(symbol? (idx a i)) (recurse a (+ i 1))
true false))

; Note that if macro_helper is inlined, the mapping lambdas will close over
; se, and then not be able to be taken in as values to the maps, and the vau
; will fail to partially evaluate away.
lambda (let (macro_helper (lambda (p b) (let (
sym_params (map (lambda (param) (if (symbol? param) param
(str-to-symbol (str param)))) p)
body (array let (flat_map_i (lambda (i x) (array (idx p i) x)) sym_params) b)
) (array vau sym_params body))))
(vau se (p b) (if (only_symbols p 0) (vapply lambda (array p b) se)
(wrap (eval (macro_helper p b) se)))))
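A loose Python analogy of the closure concern in the note above (illustrative only, not the Kraken evaluator): a mapper defined inside the vau would capture the environment se, while the free-standing macro_helper depends only on its arguments and stays a plain value the partial evaluator can consume.

# Illustrative sketch: a mapper that captures an enclosing variable versus one
# that takes everything it needs as explicit arguments.
def make_capturing_mapper(se):
    return lambda param: (param, se)   # closes over se; tied to this call's environment

def standalone_mapper(param, se):
    return (param, se)                 # no captured state; a plain value a specializer can inline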

; and rec-lambda - yes it's the same definition again
rec-lambda (vau se (n p b) (eval (array Y (array lambda (array n) (array lambda p b))) se))

nil (array)
not (lambda (x) (if x false true))
or (let (macro_helper (rec-lambda recurse (bs i) (cond (= i (len bs)) false
@@ -180,9 +161,20 @@
E empty
EE (array 'BB nil nil nil)

generic-foldl (rec-lambda recurse (f z t) (match t
generic-foldl (rec-lambda generic-foldl (f z t) (match t
,E z
(c a x b) (recurse f (f (recurse f z a) x) b)))

; We use intermediate vars so that partial eval knows
; that generic-foldl will be evaluated and the Y-combinator recursion
; is properly noticed and eliminated
(c a x b) (let (new_left_result (generic-foldl f z a)
folded (f new_left_result x)
) (generic-foldl f folded b))))
;(c a x b) (generic-foldl f (f (generic-foldl f z a) x) b)))

; this also works, but lapply isn't currently inlined, so you generate one env for the call to it
; should be equivalent and nicer in general; we should support inlining it in the future
;(c a x b) (generic-foldl f (lapply f (array (generic-foldl f z a) x) root_env) b)))
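For readers unfamiliar with the shape, generic-foldl is an in-order fold over (color left element right) tree nodes. A rough Python sketch follows; the tuple/None tree representation here is illustrative, not the Kraken one.

# Rough sketch of generic-foldl: an in-order fold over (color, left, element, right)
# nodes; an empty tree folds to the seed z.
def generic_foldl(f, z, t):
    if t is None:                               # like the ,E (empty) case
        return z
    _c, a, x, b = t                             # (color, left, element, right)
    new_left_result = generic_foldl(f, z, a)    # fold the left subtree first
    folded = f(new_left_result, x)              # then combine the element at this node
    return generic_foldl(f, folded, b)          # then fold the right subtree

# e.g. listing elements in order: generic_foldl(lambda acc, x: acc + [x], [], tree)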

blacken (lambda (t) (match t
('R a x b) (array 'B a x b)
@@ -198,11 +190,13 @@
('BB a x ('R ('R b y c) z d)) (array 'B (array 'B a x b) y (array 'B c z d))
; already balanced
t t))
map-insert (lambda (t k v) (blacken ((rec-lambda ins (t) (match t
,E (array 'R t (array k v) t)
(c a x b) (cond (< k (idx x 0)) (balance (array c (ins a) x b))
(= k (idx x 0)) (array c a (array k v) b)
true (balance (array c a x (ins b)))))) t)))

map-insert (let (ins (rec-lambda ins (t k v) (match t
,E (array 'R t (array k v) t)
(c a x b) (cond (< k (idx x 0)) (balance (array c (ins a k v) x b))
(= k (idx x 0)) (array c a (array k v) b)
true (balance (array c a x (ins b k v))))))
) (lambda (t k v) (blacken (ins t k v))))

map-empty empty

@@ -213,6 +207,9 @@
monad (array 'write 1 (str "running tree test") (vau (written code)
(array 'args (vau (args code)
(array 'exit (log (reduce-test-tree (make-test-tree (read-string (idx args 1)) map-empty))))
;(array 'exit (log (let (t (make-test-tree (read-string (idx args 1)) map-empty)
; _ (log "swapping to reduce")
; ) (reduce-test-tree t))))
))
))

14
koka_bench/relative.py
Executable file
@@ -0,0 +1,14 @@
#!/usr/bin/env python3
import sys
with open(sys.argv[1], "r") as f:
    csv = [ [ v.strip() for v in l.split(',') ] for l in f.readlines() ]
csv[0] = csv[0] + [ 'relative' ]
min_time = min( float(r[1]) for r in csv[1:] )  # fastest mean time (second column)
subset = csv[1:]
for i in range(len(subset)):
    subset[i] = subset[i] + [ float(subset[i][1]) / min_time ]
csv[1:] = sorted(subset, key=lambda x: x[8])  # sort rows by the appended relative value

out = "\n".join(",".join(str(x) for x in r) for r in csv)
with open(sys.argv[1] + "with_relative.csv", "w") as f:
    f.write(out)
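In short, the script appends a relative column by dividing each row's mean time by the fastest mean, so the quickest entry reads 1.0 and the rest read as slowdown factors. A minimal sketch of that computation, with hypothetical command names and timings:

# Minimal sketch of the relative column (hypothetical commands and means):
rows = [("kraken-nqueens 10", 2.0), ("koka-nqueens 10", 1.0), ("other-nqueens 10", 4.0)]
fastest = min(mean for _cmd, mean in rows)
relative = [(cmd, mean / fastest) for cmd, mean in rows]
print(relative)  # [('kraken-nqueens 10', 2.0), ('koka-nqueens 10', 1.0), ('other-nqueens 10', 4.0)]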
@@ -16,22 +16,26 @@ mkdir -p slow
find build -type f -name \*slow\* -exec mv {} slow \;
cp ./build/kraken/out/bench/kraken-* ./slow

#nix develop -i -c bash -c 'ulimit -s unlimited && find slow -type f -executable -name \*nqueens\* -printf "\"%p 7\"\n" | xargs hyperfine --ignore-failure --warmup 2 --export-markdown slow_nqueens_table.md --export-csv slow_nqueens_table.csv'
#nix develop -i -c bash -c 'ulimit -s unlimited && find slow -type f -executable -name \*cfold\* -printf "\"%p 5\"\n" | xargs hyperfine --ignore-failure --warmup 2 --export-markdown slow_cfold_table.md --export-csv slow_cfold_table.csv'
#nix develop -i -c bash -c 'ulimit -s unlimited && find slow -type f -executable -name \*deriv\* -printf "\"%p 3\"\n" | xargs hyperfine --ignore-failure --warmup 2 --export-markdown slow_deriv_table.md --export-csv slow_deriv_table.csv'
#nix develop -i -c bash -c 'ulimit -s unlimited && find slow -type f -executable -name \*rbtree\* -printf "\"%p 100\"\n" | xargs hyperfine --ignore-failure --warmup 2 --export-markdown slow_rbtree_table.md --export-csv slow_rbtree_table.csv'


nix develop -i -c bash -c 'ulimit -s unlimited && find build -type f -executable -name \*nqueens\* -printf "\"%p 10\"\n" | xargs hyperfine --ignore-failure --warmup 2 --export-markdown nqueens_table.md --export-csv nqueens_table.csv'
nix develop -i -c bash -c 'ulimit -s unlimited && find build -type f -executable -name \*rbtree\* -printf "\"%p 42000\"\n" | xargs hyperfine --ignore-failure --warmup 2 --export-markdown rbtree_table.md --export-csv rbtree_table.csv'
nix develop -i -c bash -c 'ulimit -s unlimited && find build -type f -executable -name \*cfold\* -printf "\"%p 5\"\n" | xargs hyperfine --ignore-failure --warmup 2 --export-markdown cfold_table.md --export-csv cfold_table.csv'
nix develop -i -c bash -c 'ulimit -s unlimited && find build -type f -executable -name \*deriv\* -printf "\"%p 8\"\n" | xargs hyperfine --ignore-failure --warmup 2 --export-markdown deriv_table.md --export-csv deriv_table.csv'

#nix develop -i -c bash -c 'ulimit -s unlimited && find slow -type f -executable -name \*nqueens\* -printf "\"%p 7\"\n" | xargs hyperfine --ignore-failure --warmup 2 --export-markdown slow_nqueens_table.md --export-csv slow_nqueens_table.csv'
#nix develop -i -c bash -c 'ulimit -s unlimited && find slow -type f -executable -name \*cfold\* -printf "\"%p 5\"\n" | xargs hyperfine --ignore-failure --warmup 2 --export-markdown slow_cfold_table.md --export-csv slow_cfold_table.csv'
#nix develop -i -c bash -c 'ulimit -s unlimited && find slow -type f -executable -name \*deriv\* -printf "\"%p 3\"\n" | xargs hyperfine --ignore-failure --warmup 2 --export-markdown slow_deriv_table.md --export-csv slow_deriv_table.csv'
#nix develop -i -c bash -c 'ulimit -s unlimited && find slow -type f -executable -name \*rbtree\* -printf "\"%p 100\"\n" | xargs hyperfine --ignore-failure --warmup 2 --export-markdown slow_rbtree_table.md --export-csv slow_rbtree_table.csv'

printf "# Benchmarks\n\n" > benchmarks.md
for x in *_table.csv
do
    ./relative.py $x
done

printf "# Benchmarks\n\n" > benchmarks_table.md
for x in *_table.md
do
    printf "## $x\n\n" >> benchmarks.md
    cat "$x" >> benchmarks.md
    printf "\n\n\n" >> benchmarks.md
    printf "## $x\n\n" >> benchmarks_table.md
    cat "$x" >> benchmarks_table.md
    printf "\n\n\n" >> benchmarks_table.md
done

830
partial_eval.scm
File diff suppressed because it is too large