-
- Node n = new Node(parent, pending, state, start); // ALLOC
- n.queueEmptyReductions();
- if (!fromEmptyReduction) n.queueReductions();
- }
-
-
- boolean reducing = false;
- /** perform all reduction operations */
- public void reduce() {
- reducing = true;
- HashSet<Phase.Node> s = new HashSet<Phase.Node>();
- s.addAll(hash.values());
- for(Phase.Node n : s) n.queueEmptyReductions();
- for(Phase.Node n : s) n.queueReductions();
- while(pendingReduct.size()>0)
- //pendingReduct.iterator().next().go();
- pendingReduct.removeFirst().go();
- }
-
- /** perform all shift operations, adding promoted nodes to <tt>next</tt> */
- public void shift(Phase next, Forest result) {
- closed = true;
- Forest res = null;
- boolean ok = false;
- for(Phase.Node n : hash.values()) {
- if (n.holder==null) continue;
- n.holder.resolve();
- if (token == null && n.state.isAccepting()) {
- ok = true;
- if (finalResult==null) finalResult = new Forest.Ref();
- finalResult.merge(n.holder);
- }
- if (!n.holder.valid()) continue;
- if (token == null) continue;
- for(Parser.Table.State st : n.state.getShifts(token)) {
- if (res == null) res = result;
- next.newNode(n, res, st, true, this);
- ok = true;
- }
- }
-
- if (!ok && token != null) {
- StringBuffer error = new StringBuffer();
- error.append("error: unable to shift token \"" + token + "\"\n");
- error.append(" before: " +pendingReductions+ "\n");
- error.append(" before: " +totalReductions+ "\n");
- //for(Phase.Node n : hash.values()) {
- //n.queueReductions();
- //n.queueEmptyReductions();
- //}
- error.append(" after: " +pendingReductions+ "\n");
- error.append(" candidate states:\n");
- for(Phase.Node n : hash.values()) {
- //for(Sequence.Position p : n.state) error.append(" " + p + "\n");
- //error.append(" --\n");
- for(Parser.Table.Reduction r : n.state.getReductions(token)) error.append(" " + r + "\n");
- //error.append(" ==\n");
- }
- next.error = error.toString();
- }
-
- // this massively improves GC performance
- reductions = null;
- hash = null;
- }
-
-
- // GSS Nodes //////////////////////////////////////////////////////////////////////////////
-
- //private HashMap<Parser.Table.Reduction,Forest> pcache = new HashMap<Parser.Table.Reduction,Forest>();
/** a node in the GSS */
public final class Node {

    // lazily created via holder(); holds the Forest(s) accumulated at this node
    private Forest.Ref holder = null;

    // lazily created via cache(); presumably memoizes per-Reduction Forests —
    // NOTE(review): not populated anywhere in this view, confirm against callers
    private HashMap<Parser.Table.Reduction,Forest> cache = null;

    /** the set of nodes to which there is an edge starting at this node */
    public final FastSet<Node> parents = new FastSet<Node>(); /* ALLOC */

    /** what state this node is in */
    public final Parser.Table.State state;

    /** which Phase this Node belongs to (note that Node is also a non-static inner class of Phase) */
    public final Phase phase = Phase.this;

    /** the reduction cache, created on first use */
    public HashMap<Parser.Table.Reduction,Forest> cache() {
        return cache==null ? (cache = new HashMap<Parser.Table.Reduction,Forest>()) : cache; }
    /** the Forest.Ref for this node, created on first use */
    public Forest.Ref holder() { return holder==null ? (holder = new Forest.Ref()) : holder; }
    /** the pending Forest; resolved once the enclosing Phase has closed */
    public Forest pending() { return Phase.this.closed ? holder().resolve() : holder; }
    /** the nodes reachable by one edge starting at this node */
    public FastSet<Node> parents() { return parents; }
-
- /** FIXME */
- public void queueReductions() {
- for(Node n2 : parents)
- queueReductions(n2);
- }
-
- /** FIXME */
- public void queueReductions(Node n2) {
- newReduct(this, n2, null);
- }
-
-
- /** FIXME */
- public void queueEmptyReductions() {
- if (reducing) {
- for(Parser.Table.Reduction r : token==null ? state.getEofReductions() : state.getReductions(token)) {
- if (r.numPop==0) {
- //r.reduce(this, null, this.phase, r.zero());
- Reduct red = new Reduct(this, null, r);
- red.go(); /* ALLOC */
- }
- }
- }
- }
-
- private Node(Node parent, Forest pending, Parser.Table.State state, Phase start) {
- this.state = state;
- if (pending != null) this.holder().merge(pending);
- if (parent != null) parents.add(parent, true);
- if (Phase.this.hash.get(code(state, start)) != null) throw new Error("severe problem!");
- Phase.this.hash.put(code(state, start), this);
- Phase.this.numNodes++;
- if (parent==null) holder().valid = true; // hack to make sure that the "base" node is always considered valid
- }