-
- /** set once reduction for this phase has begun; checked by queueEmptyReductions() */
- boolean reducing = false;
- /** perform all reduction operations */
- public void reduce() {
- reducing = true;
- // snapshot the current node set before queueing: reductions may create
- // new nodes in this phase's hash, and we must not iterate over those here
- HashSet<Phase.Node> snapshot = new HashSet<Phase.Node>(hash.values());
- for(Phase.Node node : snapshot) node.queueEmptyReductions();
- for(Phase.Node node : snapshot) node.queueReductions();
- }
-
- /** perform all shift operations, adding promoted nodes to <tt>next</tt> */
- public void shift(Phase next, Forest result) {
- closed = true;
- Forest res = null;
- boolean ok = false;
- for(Phase.Node n : hash.values()) {
- if (n.holder==null) continue;
- n.holder.resolve();
- if (token == null && n.state.isAccepting()) {
- ok = true;
- if (finalResult==null) finalResult = new Forest.Ref();
- finalResult.merge(n.holder);
- }
- if (!n.holder.valid()) continue;
- if (token == null) continue;
- for(Parser.Table.State st : n.state.getShifts(token)) {
- if (res == null) res = result;
- next.newNode(n, res, st, true, this);
- ok = true;
- }
- }
-
- if (!ok && token != null) {
- StringBuffer error = new StringBuffer();
- error.append("error: unable to shift token \"" + token + "\"\n");
- //error.append(" before: " +pendingReductions+ "\n");
- //error.append(" before: " +totalReductions+ "\n");
- //for(Phase.Node n : hash.values()) {
- //n.queueReductions();
- //n.queueEmptyReductions();
- //}
- //error.append(" after: " +pendingReductions+ "\n");
- error.append(" candidate states:\n");
- for(Phase.Node n : hash.values()) {
- //for(Sequence.Position p : n.state) error.append(" " + p + "\n");
- //error.append(" --\n");
- for(Parser.Table.Reduction r : n.state.getReductions(token)) error.append(" " + r + "\n");
- //error.append(" ==\n");
- }
- next.error = error.toString();
- }
-
- // this massively improves GC performance
- hash = null;
- }
-
-
- // GSS Nodes //////////////////////////////////////////////////////////////////////////////
-
- /** a node in the GSS */
- public final class Node {
-
- /** lazily-created holder for the Forest accumulating at this node; always access via holder() */
- private Forest.Ref holder = null;
-
- /** lazily-created cache of single-pop reduction results; always access via cache() */
- private HashMap<Parser.Table.Reduction,Forest> cache = null;
-
- /** the set of nodes to which there is an edge starting at this node */
- public final FastSet<Node> parents = new FastSet<Node>(); /* ALLOC */
-
- /** what state this node is in */
- public final Parser.Table.State state;
- /** which Phase this Node belongs to (note that Node is also a non-static inner class of Phase) */
- public final Phase phase = Phase.this;
-
- /** returns the reduction-result cache, creating it on first use */
- public HashMap<Parser.Table.Reduction,Forest> cache() {
- return cache==null ? (cache = new HashMap<Parser.Table.Reduction,Forest>()) : cache; }
- /** returns the Forest holder, creating it on first use */
- public Forest.Ref holder() { return holder==null ? (holder = new Forest.Ref()) : holder; }
- /** after the phase is closed, returns the resolved holder; before that, the raw (possibly null) holder */
- public Forest pending() { return Phase.this.closed ? holder().resolve() : holder; }
- /** the set of parent nodes (targets of edges leaving this node) */
- public FastSet<Node> parents() { return parents; }
-
- /** queue reductions of this node through every current parent; idempotent (guarded by allqueued) */
- public void queueReductions() {
- if (allqueued) return;
- allqueued = true;
- // iterate over a copy of parents — queueReductions(n) triggers reductions,
- // which presumably can add parents to this node mid-iteration (TODO confirm)
- FastSet<Node> h = new FastSet<Node>();
- for(Node n : parents) h.add(n);
- for(Node n : h) queueReductions(n);
- }
-
- /** true once queueReductions() has run for this node */
- private boolean allqueued = false;
- /** parents through which reductions have already been queued, to avoid duplicate work */
- private HashSet<Node> queued = new HashSet<Node>();
- /** queue all reductions of this node that apply to the current token, popping through parent n2 */
- public void queueReductions(Node n2) {
- if (!allqueued) { queueReductions(); return; }
- if (queued.contains(n2)) return;
- queued.add(n2);
- Node n = this;
- for(Parser.Table.Reduction r : token==null ? n.state.getEofReductions() : n.state.getReductions(token)) {
-
- // UGLY HACK
- // The problem here is that a "reduction of length 1"
- // performed twice with different values of n2 needs
- // to only create a *single* new result, but must add
- // multiple parents to the node holding that result.
- // The current reducer doesn't differentiate between
- // the next node of an n-pop reduction and the
- // ultimate parent of the last pop, so we need to
- // cache instances here as a way of avoiding
- // recreating them.
-
- // currently we have this weird problem where we
- // have to do an individual reduction for each child
- // when the reduction length is one (ie the
- // children wind up being children of the newly
- // created node rather than part of the popped
- // sequence)
-
- // zero-pop reductions are handled by queueEmptyReductions(), skip them here
- if (r.numPop <= 0) continue;
- if (r.numPop == 1) {
- // reuse the cached result Forest so repeated 1-pop reductions
- // through different parents share a single result node
- Forest ret = n.cache().get(r);
- if (ret != null) r.reduce(n, n2, n.phase, ret);
- else n.cache().put(r, r.reduce(n, n2, n.phase, null));
- } else {
- r.reduce(n, n2, Phase.this, null);
- }
- }
- }
-
-
- /** fire every zero-pop ("empty") reduction applicable to this node's state */
- public void queueEmptyReductions() {
- if (!reducing) return;
- for(Parser.Table.Reduction r : token==null ? state.getEofReductions() : state.getReductions(token)) {
- if (r.numPop==0) r.reduce(this, null, this.phase, r.zero());
- }
- }
-
- private Node(Node parent, Forest pending, Parser.Table.State state, Phase start) {
- this.state = state;
- if (pending != null) this.holder().merge(pending);
- if (parent != null) parents.add(parent, true);
- if (Phase.this.hash.get(code(state, start)) != null) throw new Error("severe problem!");
- Phase.this.hash.put(code(state, start), this);
- Phase.this.numNodes++;
- if (parent==null) holder().valid = true; // hack to make sure that the "base" node is always considered valid
- }