/** all reductions (pending and completed) */
private HashSet<Phase.Reduct> reductions = new HashSet<Phase.Reduct>(); /* ALLOC */
-
+
/** all nodes, keyed by the value returned by code() */
private HashMap<Long,Phase.Node> hash = new HashMap<Long,Phase.Node>(); /* ALLOC */
/** the number of pending reductions */
private int pendingReductions = 0;
private int totalReductions = 0;
- private HashSet<Reduct> pendingReduct = new HashSet<Reduct>();
+ //private HashSet<Reduct> pendingReduct = new HashSet<Reduct>();
+ private LinkedList<Reduct> pendingReduct = new LinkedList<Reduct>();
/** the number of nodes in this phase */
private int numNodes = 0;
boolean closed = false;
- public Phase(Phase previous, Token token) {
+ private Token.Location location;
+ public Phase(Phase previous, Token token, Token.Location location) {
this.pos = previous==null ? 0 : previous.pos+1;
this.token = token;
+ this.location = location;
}
public boolean isDone() { return token == null; }
throw new Parser.Failed(error, getLocation());
}
- public Token.Location getLocation() { return token==null ? null : token.getLocation(); }
+ public Token.Location getLocation() { return location; }
/** add a new node (merging with existing nodes if possible)
* @param parent the parent of the new node
int count = 0;
Parser.Table.Reduction r = null;
for(Parser.Table.Reduction red : token==null ? state.getEofReductions() : state.getReductions(token)) { r = red; count++; }
- //if (count==0) return; -- BEWARE! this optimization is suspected to cause really nasty heisenbugs
- if (count > 1) break;
+ //if (count==0) return; // BEWARE! this optimization is suspected to cause really nasty heisenbugs
+ //if (count > 1) break;
//if (r.numPop == 0) break;
//r.reduce(pending, parent, null, Phase.this, null);
//return;
/** perform all reduction operations */
public void reduce() {
- for(Phase.Node n : hash.values()) {
+ HashSet<Phase.Node> s = new HashSet<Phase.Node>();
+ s.addAll(hash.values());
+ for(Phase.Node n : s) {
n.queueEmptyReductions();
n.queueReductions();
}
while(pendingReduct.size()>0)
- pendingReduct.iterator().next().go();
+ //pendingReduct.iterator().next().go();
+ pendingReduct.removeFirst().go();
}
/** perform all shift operations, adding promoted nodes to <tt>next</tt> */
// GSS Nodes //////////////////////////////////////////////////////////////////////////////
- private HashMap<Parser.Table.Reduction,Forest> pcache = new HashMap<Parser.Table.Reduction,Forest>();
+ //private HashMap<Parser.Table.Reduction,Forest> pcache = new HashMap<Parser.Table.Reduction,Forest>();
/** a node in the GSS */
- public class Node {
+ public final class Node {
private Forest.Ref holder = null;
- private HashMap<Parser.Table.Reduction,Forest> cache = null;
- public HashMap<Parser.Table.Reduction,Forest> cache() { return cache==null ? (cache = new HashMap<Parser.Table.Reduction,Forest>()) : cache; }
- public Forest.Ref holder() { return holder==null ? (holder = new Forest.Ref()) : holder; }
- public Forest pending() { return Phase.this.closed ? holder().resolve() : holder; }
- public FastSet<Node> parents() { return parents; }
+ private HashMap<Parser.Table.Reduction,Forest> cache = null;
- /** which Phase this Node belongs to (node that Node is also a non-static inner class of Phase) */
- public final Phase phase = Phase.this;
+ /** the set of nodes to which there is an edge starting at this node */
+ public final FastSet<Node> parents = new FastSet<Node>(); /* ALLOC */
/** what state this node is in */
public final Parser.Table.State state;
+    /** which Phase this Node belongs to (note that Node is also a non-static inner class of Phase) */
+ public final Phase phase = Phase.this;
- /** the set of nodes to which there is an edge starting at this node */
- public final FastSet<Node> parents = new FastSet<Node>(); /* ALLOC */
+ public HashMap<Parser.Table.Reduction,Forest> cache() {
+ return cache==null ? (cache = new HashMap<Parser.Table.Reduction,Forest>()) : cache; }
+ public Forest.Ref holder() { return holder==null ? (holder = new Forest.Ref()) : holder; }
+ public Forest pending() { return Phase.this.closed ? holder().resolve() : holder; }
+ public FastSet<Node> parents() { return parents; }
/** FIXME */
public void queueReductions() {
/** FIXME */
public void queueReductions(Node n2) {
- new Reduct(this, n2, null);
- for(Parser.Table.Reduction r : token==null ? state.getEofReductions() : state.getReductions(token)) {
-
- // currently we have this weird problem where we
- // have to do an individual reduct for each child
- // when the reduction length is one (ie the
- // children wind up being children of the newly
- // created node rather than part of the popped
- // sequence
-
- if (r.numPop == 1) new Reduct(this, n2, r);
- }
+ newReduct(this, n2, null);
}
public void queueEmptyReductions() {
for(Parser.Table.Reduction r : token==null ? state.getEofReductions() : state.getReductions(token)) {
if (r.numPop==0)
- new Reduct(this, null, r); /* ALLOC */
+ newReduct(this, null, r); /* ALLOC */
}
}
}
}
+ public void newReduct(Node n, Node n2, Parser.Table.Reduction r) {
+ new Reduct(n, n2, r)/*.go()*/;
+ }
// Forest / Completed Reductions //////////////////////////////////////////////////////////////////////////////
this.r = r;
if (reductions.contains(this)) { done = true; return; }
reductions.add(this);
- pendingReduct.add(this);
+ pendingReduct.addFirst(this);
pendingReductions++;
}
pendingReduct.remove(this);
pendingReductions--;
+ if (r==null)
+ for(Parser.Table.Reduction r : token==null ? n.state.getEofReductions() : n.state.getReductions(token)) {
+
+                // currently we have this weird problem where we
+                // have to do an individual reduct for each child
+                // when the reduction length is one (i.e., the
+                // children wind up being children of the newly
+                // created node rather than part of the popped
+                // sequence)
+
+ if (r.numPop == 1) new Reduct(n, n2, r).go();
+ }
+
+
// FIXME: explain this
if (r==null) {
for(Parser.Table.Reduction r : token==null ? n.state.getEofReductions() : n.state.getReductions(token)) {
if (r.numPop <= 1) continue;
r.reduce(n, n2, Phase.this, null);
}
- } else if (r.numPop<=1) {
+ } else if (r.numPop==0) { r.reduce(n, n2, n.phase, r.zero());
+ } else if (r.numPop==1) {
// UGLY HACK
// The problem here is that a "reduction of length 0/1"
// performed twice with different values of n2 needs
// cache instances here as a way of avoiding
// recreating them.
- Forest ret = (r.numPop==0 ? pcache : n.cache()).get(r);
+ Forest ret = n.cache().get(r);
if (ret != null) r.reduce(n, n2, n.phase, ret);
- else (r.numPop==0 ? pcache : n.cache()).put(r, r.reduce(n, n2, n.phase, null));
+ else n.cache().put(r, r.reduce(n, n2, n.phase, null));
} else {
r.reduce(n, n2, Phase.this, null);