X-Git-Url: http://git.megacz.com/?a=blobdiff_plain;ds=sidebyside;f=src%2Fedu%2Fberkeley%2Fsbp%2FGSS.java;h=645fa8ff6f290d7750f382b7b04f0c7141158307;hb=c4431d19cc5ddaae29d22c8c56366b53b0bad352;hp=2d9f6ff0e7ccbe56066d09be042c66b2c622d144;hpb=96a2822a729e563a64173f22dc184bc972a200ef;p=sbp.git
diff --git a/src/edu/berkeley/sbp/GSS.java b/src/edu/berkeley/sbp/GSS.java
index 2d9f6ff..645fa8f 100644
--- a/src/edu/berkeley/sbp/GSS.java
+++ b/src/edu/berkeley/sbp/GSS.java
@@ -21,47 +21,41 @@ import java.lang.reflect.*;
// class is meant to remain simple and easy to understand;
// optimizations which obscure that do not belong here (they
// should go into the compiled version instead)
-//
-// - most of our time is now spent creating and storing Reduct instances
-// - we should be able to perform Reduct's immediately after creating them...
-//
/** implements Tomita's Graph Structured Stack */
class GSS {
public GSS() { }
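+    /** scratch array reused by Phase.reduce(); kept at the GSS level, presumably to avoid reallocating it for every phase */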
+ private Phase.Node[] reducing_list = null;
+
    /** corresponds to a position between tokens in the input stream; same as Tomita's U_i's */
public class Phase {
/** the token immediately after this phase */
public final Token token;
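+        /** true while this phase's reduce() pass is running; queueEmptyReductions only fires while this is set */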
+ boolean reducing = false;
+
/** currently this is necessary only for the code() hack -- it doesn't actually correspond to the input */
private final int pos;
/** FIXME */
public Forest.Ref finalResult = null;
- /** all reductions (pending and completed) */
- private HashSet reductions = new HashSet(); /* ALLOC */
-
/** all nodes, keyed by the value returned by code() */
private HashMap hash = new HashMap(); /* ALLOC */
- /** the number of pending reductions */
- private int pendingReductions = 0;
- private int totalReductions = 0;
- private HashSet pendingReduct = new HashSet();
-
/** the number of nodes in this phase */
private int numNodes = 0;
boolean closed = false;
- public Phase(Phase previous, Token token) {
+ private Token.Location location;
+ public Phase(Phase previous, Token token, Token.Location location) {
this.pos = previous==null ? 0 : previous.pos+1;
this.token = token;
+ this.location = location;
}
public boolean isDone() { return token == null; }
@@ -72,7 +66,7 @@ class GSS {
throw new Parser.Failed(error, getLocation());
}
- public Token.Location getLocation() { return token==null ? null : token.getLocation(); }
+ public Token.Location getLocation() { return location; }
/** add a new node (merging with existing nodes if possible)
* @param parent the parent of the new node
@@ -88,9 +82,8 @@ class GSS {
}
private void newNode2(Node p, Node parent, Forest pending, Parser.Table.State state, boolean fromEmptyReduction, Phase start) {
p.holder.merge(pending);
- if (p.parents.contains(parent)) return;
- p.parents.add(parent, true);
- if (p!=parent && !fromEmptyReduction) p.queueReductions(parent);
+ if (p.parents().contains(parent)) return;
+ p.addParent(parent, fromEmptyReduction);
}
private void newNode3(Node parent, Forest pending, Parser.Table.State state, boolean fromEmptyReduction, Phase start) {
do {
@@ -99,9 +92,8 @@ class GSS {
if (token==null) break;
int count = 0;
Parser.Table.Reduction r = null;
- for(Parser.Table.Reduction red : token==null ? state.getEofReductions() : state.getReductions(token)) { r = red; count++; }
- //if (count==0) return; -- BEWARE! this optimization is suspected to cause really nasty heisenbugs
- if (count > 1) break;
+ if (!state.hasReductions(token)) return;
+ //if (count > 1) break;
//if (r.numPop == 0) break;
//r.reduce(pending, parent, null, Phase.this, null);
//return;
@@ -109,17 +101,28 @@ class GSS {
Node n = new Node(parent, pending, state, start); // ALLOC
n.queueEmptyReductions();
- if (!fromEmptyReduction) n.queueReductions();
+ if (!fromEmptyReduction) n.queueReductions(parent);
}
+
/** perform all reduction operations */
public void reduce() {
- for(Phase.Node n : hash.values()) {
+ reducing = true;
+ if (reducing_list==null || reducing_list.length < hash.size())
+ reducing_list = new Phase.Node[hash.size() * 4];
+ Collection hv = hash.values();
+ hv.toArray(reducing_list);
+ int num = hv.size();
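+            // work from a snapshot of the node set: reductions performed below may add
+            // new nodes to this phase, so hash.values() cannot be iterated directly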
+            for(int i=0; i<num; i++) {
+                Phase.Node n = reducing_list[i];
                 n.queueEmptyReductions();
                 n.queueReductions();
             }
-            while(pendingReduct.size()>0)
-                pendingReduct.iterator().next().go();
}
/** perform all shift operations, adding promoted nodes to next */
@@ -128,6 +131,7 @@ class GSS {
Forest res = null;
boolean ok = false;
for(Phase.Node n : hash.values()) {
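+                // nodes that never accumulated a pending result have nothing to resolve or shift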
+ if (n.holder==null) continue;
n.holder.resolve();
if (token == null && n.state.isAccepting()) {
ok = true;
@@ -146,87 +150,79 @@ class GSS {
if (!ok && token != null) {
StringBuffer error = new StringBuffer();
error.append("error: unable to shift token \"" + token + "\"\n");
- error.append(" before: " +pendingReductions+ "\n");
- error.append(" before: " +totalReductions+ "\n");
- for(Phase.Node n : hash.values()) {
- n.queueReductions();
- n.queueEmptyReductions();
- }
- error.append(" after: " +pendingReductions+ "\n");
- error.append(" candidate states:\n");
- for(Phase.Node n : hash.values()) {
+ //error.append(" before: " +pendingReductions+ "\n");
+ //error.append(" before: " +totalReductions+ "\n");
+ //for(Phase.Node n : hash.values()) {
+ //n.queueReductions();
+ //n.queueEmptyReductions();
+ //}
+ //error.append(" after: " +pendingReductions+ "\n");
+ //error.append(" candidate states:\n");
+ //for(Phase.Node n : hash.values()) {
//for(Sequence.Position p : n.state) error.append(" " + p + "\n");
//error.append(" --\n");
- for(Parser.Table.Reduction r : n.state.getReductions(token)) error.append(" " + r + "\n");
+ //for(Parser.Table.Reduction r : n.state.getReductions(token)) error.append(" " + r + "\n");
//error.append(" ==\n");
- }
+ //}
next.error = error.toString();
}
// this massively improves GC performance
- reductions = null;
hash = null;
}
// GSS Nodes //////////////////////////////////////////////////////////////////////////////
- private HashMap pcache = new HashMap();
/** a node in the GSS */
- public class Node {
+ public final class Node extends FastSet {
- private Forest.Ref holder = null;
- private HashMap cache = null;
-
- public HashMap cache() { return cache==null ? (cache = new HashMap()) : cache; }
- public Forest.Ref holder() { return holder==null ? (holder = new Forest.Ref()) : holder; }
- public Forest pending() { return Phase.this.closed ? holder().resolve() : holder; }
- public FastSet parents() { return parents; }
+ public void addParent(Node parent, boolean fromEmptyReduction) {
+ parents().add(parent, true);
+ if (this!=parent && !fromEmptyReduction) queueReductions(parent);
+ }
- /** which Phase this Node belongs to (node that Node is also a non-static inner class of Phase) */
- public final Phase phase = Phase.this;
+ private Forest.Ref holder = null;
+ private boolean allqueued = false;
/** what state this node is in */
public final Parser.Table.State state;
- /** the set of nodes to which there is an edge starting at this node */
- public final FastSet parents = new FastSet(); /* ALLOC */
+        /** which Phase this Node belongs to (note that Node is also a non-static inner class of Phase) */
+ public Phase phase() { return Phase.this; }
+
+ public Forest.Ref holder() { return holder==null ? (holder = new Forest.Ref()) : holder; }
+ public Forest pending() { return Phase.this.closed ? holder().resolve() : holder; }
+ public FastSet parents() { return this; }
- /** FIXME */
public void queueReductions() {
- for(Node n2 : parents)
- queueReductions(n2);
+ if (allqueued) return;
+ allqueued = true;
+ int where = parents().size();
+ for(Parser.Table.Reduction r : state.getReductions(token))
+ if (r.numPop >= 1)
+ r.reduce(this);
}
- /** FIXME */
public void queueReductions(Node n2) {
- new Reduct(this, n2, null);
- for(Parser.Table.Reduction r : token==null ? state.getEofReductions() : state.getReductions(token)) {
-
- // currently we have this weird problem where we
- // have to do an individual reduct for each child
- // when the reduction length is one (ie the
- // children wind up being children of the newly
- // created node rather than part of the popped
- // sequence
-
- if (r.numPop == 1) new Reduct(this, n2, r);
- }
+ if (!allqueued) { queueReductions(); return; }
+ for(Parser.Table.Reduction r : state.getReductions(token))
+ if (r.numPop > 0)
+ r.reduce(this, n2);
}
- /** FIXME */
public void queueEmptyReductions() {
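+            // zero-pop reductions create nodes in this same phase; they are deferred until reduce() is running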
- for(Parser.Table.Reduction r : token==null ? state.getEofReductions() : state.getReductions(token)) {
- if (r.numPop==0)
- new Reduct(this, null, r); /* ALLOC */
- }
+ if (reducing)
+ for(Parser.Table.Reduction r : token==null ? state.getEofReductions() : state.getReductions(token))
+ if (r.numPop==0)
+ r.reduce(this);
}
private Node(Node parent, Forest pending, Parser.Table.State state, Phase start) {
this.state = state;
if (pending != null) this.holder().merge(pending);
- if (parent != null) parents.add(parent, true);
+ if (parent != null) parents().add(parent);
if (Phase.this.hash.get(code(state, start)) != null) throw new Error("severe problem!");
Phase.this.hash.put(code(state, start), this);
Phase.this.numNodes++;
@@ -234,83 +230,6 @@ class GSS {
}
}
-
- // Forest / Completed Reductions //////////////////////////////////////////////////////////////////////////////
-
- /** a pending or completed reduction */
- class Reduct {
-
- /** the node from which the reduction should begin */
- public Node n = null;
-
- /** the node on the other end of the edge to be reduced along (either: null, the second node of the reduction,
- * or the parent of the result of a length-one reduction)
- */
- public Node n2 = null;
-
- /** true iff the reduction has already been performed */
- private boolean done = false;
-
- /** the reduction to be applied */
- public Parser.Table.Reduction r;
-
- public Tree result = null;
-
- public Reduct(Node n, Node n2, Parser.Table.Reduction r) {
- this.n = n;
- this.n2 = n2;
- this.r = r;
- if (reductions.contains(this)) { done = true; return; }
- reductions.add(this);
- pendingReduct.add(this);
- pendingReductions++;
- }
-
- /** perform the reduction */
- public void go() {
- if (done) return;
- done = true;
- pendingReduct.remove(this);
- pendingReductions--;
-
- // FIXME: explain this
- if (r==null) {
- for(Parser.Table.Reduction r : token==null ? n.state.getEofReductions() : n.state.getReductions(token)) {
- if (r.numPop <= 1) continue;
- r.reduce(n, n2, Phase.this, null);
- }
- } else if (r.numPop<=1) {
- // UGLY HACK
- // The problem here is that a "reduction of length 0/1"
- // performed twice with different values of n2 needs
- // to only create a *single* new result, but must add
- // multiple parents to the node holding that result.
- // The current reducer doesn't differentiate between
- // the next node of an n-pop reduction and the
- // ultimate parent of the last pop, so we need to
- // cache instances here as a way of avoiding
- // recreating them.
-
- Forest ret = (r.numPop==0 ? pcache : n.cache()).get(r);
- if (ret != null) r.reduce(n, n2, n.phase, ret);
- else (r.numPop==0 ? pcache : n.cache()).put(r, r.reduce(n, n2, n.phase, null));
-
- } else {
- r.reduce(n, n2, Phase.this, null);
- }
- }
-
- // FIXME: this is a PITA
- public int hashCode() { return n.hashCode() ^ (r==null ? 0 : r.hashCode()) ^ (n2==null ? 0 : n2.hashCode()); }
- public boolean equals(Object o) {
- if (o==null) return false;
- if (o==this) return true;
- if (!(o instanceof Reduct)) return false;
- Reduct other = (Reduct)o;
- return equal(r, other.r) && equal(n, other.n) && equal(n2, other.n2);
- }
- }
-
}
/** helper method */
@@ -324,5 +243,4 @@ class GSS {
private static long code(Parser.Table.State state, Phase start) {
return (((long)state.idx) << 32) | (start==null ? 0 : start.pos);
}
-
}