From: adam Date: Mon, 30 Oct 2006 05:21:55 +0000 (-0500) Subject: break Node out of GSS X-Git-Url: http://git.megacz.com/?p=sbp.git;a=commitdiff_plain;h=a8478f5ddfbfbc8d910d09f27163cbd55752d3b6 break Node out of GSS darcs-hash:20061030052155-5007d-93d6573a107b44466c54168ab2a11749069e8eea.gz --- diff --git a/src/edu/berkeley/sbp/Forest.java b/src/edu/berkeley/sbp/Forest.java index 353e256..04e06be 100644 --- a/src/edu/berkeley/sbp/Forest.java +++ b/src/edu/berkeley/sbp/Forest.java @@ -129,7 +129,7 @@ public abstract class Forest implements GraphViz.ToGraphViz { /** An "ambiguity node"; this is immutable once it has been "looked at" */ static class Many extends Forest { - HashSet parents = new HashSet(); + HashSet parents = new HashSet(); private FastSet> hp = new FastSet>(); private boolean touched = false; diff --git a/src/edu/berkeley/sbp/GSS.java b/src/edu/berkeley/sbp/GSS.java index ac62e18..0358ef6 100644 --- a/src/edu/berkeley/sbp/GSS.java +++ b/src/edu/berkeley/sbp/GSS.java @@ -12,8 +12,16 @@ import java.lang.reflect.*; /** implements Tomita's Graph Structured Stack */ class GSS { - static int count = 0; + public static Queue removals = new LinkedList(); + + static String note = ""; + static int single_newnode = 0; + static int toplevel_reductions = 0; + static int multi_newnode = 0; + static int waiting_newnode = 0; static int shifts = 0; + + static int count = 0; static int reductions = 0; int resets = 0; int waits = 0; @@ -22,7 +30,7 @@ class GSS { public GSS(Input input) { this.input = input; } - private Phase.Node[] reducing_list = null; + private Node[] reducing_list = null; // FIXME: right now, these are the performance bottleneck HashMapBag waiting = new HashMapBag(); @@ -34,9 +42,13 @@ class GSS { Forest.Many finalResult; /** corresponds to a positions between tokens the input stream; same as Tomita's U_i's */ - class Phase implements Invokable.Node>, IntegerMappable, GraphViz.ToGraphViz, Iterable { + class Phase implements Invokable, IntegerMappable, GraphViz.ToGraphViz, Iterable { - public Iterator iterator() { return hash.iterator(); } + public int pos() { return pos; } + public boolean closed() { return closed; } + public Tok token() { return token; } + + public Iterator iterator() { return hash.iterator(); } public void invoke(State st, Forest result, Node n) { shifts++; good |= next.newNode(n, result, st, false); @@ -48,7 +60,7 @@ class GSS { private final int pos; boolean reducing; - private IntPairMap hash; /* ALLOC */ + public IntPairMap hash; /* ALLOC */ private boolean closed; private boolean good; private Phase next = null; @@ -81,7 +93,7 @@ class GSS { lastperformed.clear(); lastperformed.addAll(performed); performed.clear(); - hash = new IntPairMap(); + hash = new IntPairMap(); reset = false; good = false; closed = false; @@ -171,7 +183,7 @@ class GSS { private boolean newNode2(Node p, Node parent, Forest pending, State state, boolean fromEmptyReduction) { if (p.merge(parent, pending)) return true; - p.parents().add(parent, true); + p.addParent(parent, true); if (p!=parent && !fromEmptyReduction && reducing) p.performReductions(parent); return true; } @@ -188,7 +200,7 @@ class GSS { //return; } while(false); - Node n = new Node(parent, pending, state); // ALLOC + Node n = new Node(Phase.this, parent, pending, state); // ALLOC if (reducing) { n.performEmptyReductions(); if (!fromEmptyReduction) n.performReductions(parent); @@ -196,14 +208,14 @@ class GSS { return true; } - LinkedList reductionQueue = new LinkedList(); + public LinkedList reductionQueue = new 
LinkedList(); /** perform all reduction operations */ public void reduce() throws ParseFailed { try { reducing = true; if (reducing_list==null || reducing_list.length < hash.size()) - reducing_list = new Phase.Node[hash.size() * 4]; + reducing_list = new Node[hash.size() * 4]; hash.toArray(reducing_list); int num = hash.size(); for(int i=0; i, IntegerMappable, GraphViz.ToGraphViz { - public FastSet set = new FastSet(); - private boolean allqueued = false; - - /** what state this node is in */ - public final Parser.Table.State state; - - /** which Phase this Node belongs to (node that Node is also a non-static inner class of Phase) */ - public Phase phase() { return Phase.this; } - - private HashSet resultMap = new HashSet(); - public Iterable results() { return resultMap; } - public FastSet parents() { return set; } - public boolean merge(Node parent, Forest result) { - // FIXME: inefficient! - for(Forest.Many f : results()) { - if (f.parents.contains(parent) /* UGLY: */ && f.parents.size()==1) { - f.merge(result); - return true; - } - } - Forest.Many f = new Forest.Many(); - f.parents.add(parent); - f.merge(result); - resultMap.add(f); - set.add(parent, true); - return false; - } - - public void performReductions() { - if (allqueued) return; - allqueued = true; - state.invokeReductions(token, this, this, null); - } - - public void performReductions(Node n2) { - if (!allqueued) reductionQueue.add(this);//performReductions(); - else state.invokeReductions(token, this, this, n2); - } - - public void performEmptyReductions() { state.invokeReductions(token, this, null, null); } - public final void invoke(Position r, Node n, Node n2) { - reductions++; - if (n==null || n2==null || r.pos==0) { - if (r.pos==0) { - if (n==null) n = this; - else return; - } - if (n==null) return; - Forest[] holder = new Forest[r.pos]; - if (r.pos==0) n.finish(r, r.zero(n.phase().getLocation().createRegion(n.phase().getLocation())), n.phase()); - else n.reduce(r, r.pos-1, n.phase(), null); - } else { - if (r.pos<=0) throw new Error("called wrong form of reduce()"); - int pos = r.pos-1; - n.reduce(r, pos, n.phase(), n2); - } - } - - public void reduce(Position r, int pos, Phase target, Node only) { - Forest[] holder = r.holder; - Forest old = holder[pos]; - - HashSet rr = new HashSet(); - for(Forest result : results()) rr.add(result); - for(Forest result : rr) - for(Node child : ((Forest.Many)result).parents) { - if (only != null && child!=only) continue; - holder[pos] = result; - if (pos==0) child.finish(r, r.rewrite(child.phase().getLocation().createRegion(target.getLocation())), target); - else child.reduce(r, pos-1, target, null); - } - - holder[pos] = old; - } - - public void finish(Position r, Forest result, Phase target) { - Parser.Table.State state0 = state.gotoSetNonTerminals.get(r.owner()); - if (result==null) throw new Error(); - if (state0!=null) - target.newNode(this, result, state0, r.pos<=0, r); - } - - private Node(Node parent, Forest pending, State state) { - this.state = state; - this.merge(parent, pending); - Phase start = parent==null ? 
null : parent.phase(); - if (parent != null) parents().add(parent, true); - if (Phase.this.hash.get(state, start) != null) throw new Error("severe problem!"); - Phase.this.hash.put(state, start, this); - } - public int toInt() { return idx; } - private final int idx = node_idx++; - - // GraphViz ////////////////////////////////////////////////////////////////////////////// - - public GraphViz.Node toGraphViz(GraphViz gv) { - if (gv.hasNode(this)) return gv.createNode(this); - GraphViz.Node n = gv.createNode(this); - n.label = ""+state.toStringx(); - n.shape = "rectangle"; - n.fill = "green"; - for(Forest result : results()) n.edge(result, ""); - for(Node parent : parents()) n.edge(parent, ""); - ((GraphViz.Group)phase().toGraphViz(gv)).add(n); - return n; - } - public boolean isTransparent() { return false; } - public boolean isHidden() { return false; } - - } - private int node_idx = 0; public int toInt() { return pos+1; } public int size() { return hash==null ? 0 : hash.size(); } @@ -433,4 +343,5 @@ class GSS { public boolean isHidden() { return false; } } + } diff --git a/src/edu/berkeley/sbp/Node.java b/src/edu/berkeley/sbp/Node.java new file mode 100644 index 0000000..53782f7 --- /dev/null +++ b/src/edu/berkeley/sbp/Node.java @@ -0,0 +1,147 @@ +// Copyright 2006 all rights reserved; see LICENSE file for BSD-style license + +package edu.berkeley.sbp; +import edu.berkeley.sbp.*; +import edu.berkeley.sbp.util.*; +import edu.berkeley.sbp.Parser.Table.*; +import edu.berkeley.sbp.Sequence.Position; +import java.io.*; +import java.util.*; +import java.lang.reflect.*; + +/** a node in the GSS */ +final class Node implements Invokable, IntegerMappable, GraphViz.ToGraphViz { + + public static int node_idx = 0; + + private final GSS.Phase phase; + + public FastSet setx = new FastSet(); + public FastSet childs = new FastSet(); + + private boolean allqueued = false; + + /** what state this node is in */ + public final Parser.Table.State state; + + /** which GSS.Phase this Node belongs to (node that Node is also a non-static inner class of GSS.Phase) */ + public GSS.Phase phase() { return phase; } + + private HashSet resultMap = new HashSet(); + public Iterable results() { return resultMap; } + public Iterable parents() { return setx; } + public void addParent(Node n, boolean b) { + if (n==null) return; + setx.add(n, b); + n.childs.add(this, true); + //else + //System.out.println("************ evilstate: " + this); + } + public boolean merge(Node parent, Forest result) { + // FIXME: inefficient! 
+ for(Forest.Many f : results()) { + if (f.parents.contains(parent) /* UGLY: */ && f.parents.size()==1) { + f.merge(result); + return true; + } + } + Forest.Many f = new Forest.Many(); + f.parents.add(parent); + f.merge(result); + resultMap.add(f); + addParent(parent, true); + return false; + } + + public void performReductions() { + if (allqueued) return; + allqueued = true; + state.invokeReductions(phase().token(), this, this, null); + } + + public void performReductions(Node n2) { + if (!allqueued) phase().reductionQueue.add(this);//performReductions(); + else state.invokeReductions(phase().token(), this, this, n2); + } + + public Parser.Table.State state() { return state; } + + public void performEmptyReductions() { state.invokeReductions(phase().token, this, null, null); } + public final void invoke(Position r, Node n, Node n2) { + //reductions++; + //if (r.pos==1) single_newnode++; + //if (r.pos>1) multi_newnode++; + if (n==null || n2==null || r.pos==0) { + if (r.pos==0) { + if (n==null) n = this; + else return; + } + if (n==null) return; + Forest[] holder = new Forest[r.pos]; + if (r.pos==0) n.finish(r, r.zero(n.phase().getLocation().createRegion(n.phase().getLocation())), n.phase()); + else n.reduce(r, r.pos-1, n.phase(), null); + } else { + if (r.pos<=0) throw new Error("called wrong form of reduce()"); + int pos = r.pos-1; + n.reduce(r, pos, n.phase(), n2); + } + } + + public void reduce(Position r, int pos, GSS.Phase target, Node only) { + Forest[] holder = r.holder; + Forest old = holder[pos]; + + //toplevel_reductions++; + HashSet rr = new HashSet(); + for(Forest result : results()) { + rr.add(result); + } + //System.out.println(r); + for(Forest result : rr) { + for(Node child : ((Forest.Many)result).parents) { + if (only != null && child!=only) continue; + holder[pos] = result; + if (pos==0) child.finish(r, r.rewrite(child.phase().getLocation().createRegion(target.getLocation())), target); + else child.reduce(r, pos-1, target, null); + } + } + holder[pos] = old; + } + + public void finish(Position r, Forest result, GSS.Phase target) { + Parser.Table.State state0 = (Parser.Table.State)state.gotoSetNonTerminals.get(r.owner()); + if (result==null) throw new Error(); + if (state0!=null) + target.newNode(this, result, state0, r.pos<=0, r); + } + + Node(GSS.Phase phase, Node parent, Forest pending, State state) { + this.phase = phase; + this.state = state; + this.merge(parent, pending); + GSS.Phase start = parent==null ? null : parent.phase(); + if (parent != null) addParent(parent, true); + if (phase.hash.get(state, start) != null) throw new Error("severe problem!"); + phase.hash.put(state, start, this); + } + public int toInt() { return idx; } + private final int idx = node_idx++; + + // GraphViz ////////////////////////////////////////////////////////////////////////////// + + public GraphViz.Node toGraphViz(GraphViz gv) { + if (gv.hasNode(this)) return gv.createNode(this); + GraphViz.Node n = gv.createNode(this); + n.label = ""+state.toStringx(); + n.shape = "rectangle"; + boolean hasparents = false; + for(Node parent : parents()) { hasparents = true; n.edge(parent, ""); } + for(Forest result : results()) n.edge(result, ""); + n.color = !hasparents ? "blue" : /*state.evil ? 
"red" :*/ "green"; + ((GraphViz.Group)phase().toGraphViz(gv)).add(n); + return n; + } + public boolean isTransparent() { return false; } + public boolean isHidden() { return false; } + +} diff --git a/src/edu/berkeley/sbp/ParseFailed.java b/src/edu/berkeley/sbp/ParseFailed.java index 1fa0b08..31a3a56 100644 --- a/src/edu/berkeley/sbp/ParseFailed.java +++ b/src/edu/berkeley/sbp/ParseFailed.java @@ -4,7 +4,7 @@ package edu.berkeley.sbp; import edu.berkeley.sbp.*; import edu.berkeley.sbp.Sequence.Position; import edu.berkeley.sbp.GSS.Phase; -import edu.berkeley.sbp.GSS.Phase.Node; +import edu.berkeley.sbp.Node; import edu.berkeley.sbp.util.*; import java.io.*; import java.util.*; @@ -46,25 +46,25 @@ public class ParseFailed extends Exception { return (c >= 'A' && c <= 'Z'); } - static void barf(HashMap sb, GSS.Phase.Node n, int indent, boolean skip, int count, Input.Location loc) { + static void barf(HashMap sb, Node n, int indent, boolean skip, int count, Input.Location loc) { if (count <= 0) { barf(sb, n, indent, skip, loc); } else { - for(GSS.Phase.Node nn : n.parents()) + for(Node nn : (Iterable)n.parents()) barf(sb, nn, indent, skip, count-1, n.phase().getLocation()); } } - static void barf(HashMap sb, GSS.Phase.Node n, int indent, boolean skip, Input.Location loc) { + static void barf(HashMap sb, Node n, int indent, boolean skip, Input.Location loc) { if (touched.contains(n)) return; touched.add(n); String s = ""; for(int i=0; i< indent; i++) s += " "; - GSS.Phase.Node parent = n; + Node parent = n; boolean done = false; boolean alldone = false; boolean go = false; boolean force = false; - for(Position p : parent.state) { + for(Position p : (Iterable)parent.state()) { if (skip) p = p.next(); int raise = 0; done = false; @@ -97,11 +97,11 @@ public class ParseFailed extends Exception { // FIXME - private static HashSet touched = new HashSet(); - static void complain(GSS.Phase.Node n, HashMap> errors, boolean force, int indent) { + private static HashSet touched = new HashSet(); + static void complain(Node n, HashMap> errors, boolean force, int indent) { if (touched.contains(n)) return; touched.add(n); - for(Position p : n.state) { + for(Position p : (Iterable)n.state()) { //if (!p.isLast() && !p.next().isLast()) continue; if (((p.isFirst() || p.isLast()) && !force)/* || p.owner().name==null*/ || !important(p)) { diff --git a/src/edu/berkeley/sbp/Parser.java b/src/edu/berkeley/sbp/Parser.java index 34d7da9..fa2f7d1 100644 --- a/src/edu/berkeley/sbp/Parser.java +++ b/src/edu/berkeley/sbp/Parser.java @@ -43,7 +43,7 @@ public abstract class Parser { PrintWriter p = new PrintWriter(new OutputStreamWriter(fos)); GraphViz gv = new GraphViz(); for(Object n : next) - ((GSS.Phase.Node)n).toGraphViz(gv); + ((Node)n).toGraphViz(gv); gv.dump(p); p.flush(); p.close(); diff --git a/src/edu/berkeley/sbp/meta/MetaGrammarBindings.java b/src/edu/berkeley/sbp/meta/MetaGrammarBindings.java index b0b86da..1a60610 100644 --- a/src/edu/berkeley/sbp/meta/MetaGrammarBindings.java +++ b/src/edu/berkeley/sbp/meta/MetaGrammarBindings.java @@ -225,9 +225,9 @@ public class MetaGrammarBindings extends AnnotationGrammarBindings { } public Sequence build(Context cx, Union u, NonTerminalNode cnt) { Sequence ret = build0(cx, cnt); - for(Seq s : and) { Sequence dork = s.build(cx, u, cnt); ret = ret.and(dork); } - for(Seq s : not) { Sequence dork = s.build(cx, u, cnt); ret = ret.andnot(dork); } - u.add(ret); + for(Seq s : and) { Sequence dork = s.build(cx, null, cnt); ret = ret.and(dork); } + for(Seq s : not) { Sequence dork 
= s.build(cx, null, cnt); ret = ret.andnot(dork); } + if (u!=null) u.add(ret); return ret; } public Sequence build0(Context cx, NonTerminalNode cnt) { diff --git a/src/edu/berkeley/sbp/misc/HaskellHelper.java b/src/edu/berkeley/sbp/misc/HaskellHelper.java index 5b2e0d7..2a96356 100644 --- a/src/edu/berkeley/sbp/misc/HaskellHelper.java +++ b/src/edu/berkeley/sbp/misc/HaskellHelper.java @@ -10,6 +10,9 @@ import java.io.*; public class HaskellHelper { + public static void main(String[] argv) throws Throwable { + main(argv[0], argv[1]); + } public static Tree main(String grammarFile, String targetFile) throws Throwable { try { Tree res = new CharParser(MetaGrammar.newInstance()).parse(new FileInputStream(grammarFile)).expand1(); diff --git a/tests/regression.tc b/tests/regression.tc index 9312733..3351018 100644 --- a/tests/regression.tc +++ b/tests/regression.tc @@ -409,3 +409,33 @@ testcase { | WSB:: " "++ } +//testcase { +// input "aaaaaXaaaa"; +// output ""; +// s = ManyA &~ EndsWithZ +// EndsWithZ = Anything "Z" +// ManyA = () | "a" ManyA +// Anything = () | ("a" | "X" | "Z") Anything +//} + +testcase { + input "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"; + output ""; + s = ManyA + ManyA = () + | A ManyA! & ManyAB + A = "a" + ManyAB = () + | "a" ManyAB + | "b" ManyAB +} + +testcase { + input "aaaaaaaa"; + output ""; + s = As & AAs + As = () | As "a" + AAs = () | AAs "aa" +} + +
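The patch above converts Node from a non-static inner class of GSS.Phase into the top-level class Node.java: the enclosing instance that the inner class used implicitly (Phase.this) is now passed through the constructor and kept in a private phase field, Phase grows accessors (pos(), closed(), token()) and public fields (hash, reductionQueue) for the state Node used to reach directly, and p.parents().add(parent, true) becomes p.addParent(parent, true), which also records the reverse edge in the new childs set. Below is a minimal sketch of that inner-class-to-top-level pattern, using hypothetical classes Outer/Inner rather than the real GSS/Phase/Node code, just to show the shape of the change.

    // Before: non-static inner class; Inner holds an implicit reference to Outer.this.
    class Outer {
        private int token;
        class Inner {
            int read() { return token; }          // really Outer.this.token
        }
        Inner make() { return new Inner(); }      // new Inner() captures Outer.this
    }

    // After: top-level class; the enclosing instance is passed and stored explicitly.
    class Outer2 {
        private int token;
        public int token() { return token; }      // accessor replaces direct field access
        Inner2 make() { return new Inner2(this); }
    }

    final class Inner2 {
        private final Outer2 outer;               // was the implicit Outer.this
        Inner2(Outer2 outer) { this.outer = outer; }
        int read() { return outer.token(); }      // goes through the accessor
    }

Making the enclosing reference explicit is also what allows ParseFailed.java and Parser.java to import and use Node directly instead of the nested GSS.Phase.Node name.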