package edu.berkeley.sbp;
import edu.berkeley.sbp.*;
-import edu.berkeley.sbp.*;
-import edu.berkeley.sbp.*;
import edu.berkeley.sbp.util.*;
import java.io.*;
import java.util.*;
import java.lang.reflect.*;
-//////////////////////////////////////////////////////////////////////////////
-// TODO:
-//
-// - fix public/package/private status
-//
-
-//////////////////////////////////////////////////////////////////////////////
-// Optimizations to add
-//
-// ** NOTE: not all of these are appropriate for this class -- it is
-// simply a list of optimizations not implemented. This
-// class is meant to remain simple and easy to understand;
-// optimizations which obscure that do not belong here (they
-// should go into the compiled version instead)
-
/** implements Tomita's Graph Structured Stack */
class GSS {
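+    // A GSS lets many LR parse stacks proceed in parallel while sharing common
+    // suffixes: each Phase is one "column" of the structure (a position between
+    // tokens), and each Node within a phase is keyed by its parse state together
+    // with the phase its subtree started in (see code() below).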
public GSS() { }
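+    /** scratch array reused across calls to Phase.reduce(), so each phase does not allocate a fresh node collection */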
+ private Phase.Node[] reducing_list = null;
+
    /** corresponds to a position <i>between tokens</i> in the input stream; same as Tomita's U_i's */
- public class Phase {
+ public class Phase implements Invokable<Parser.Table.State, Forest, GSS.Phase.Node> {
/** the token immediately after this phase */
public final Token token;
+ boolean reducing = false;
+
/** currently this is necessary only for the code() hack -- it doesn't actually correspond to the input */
private final int pos;
* @param fromEmptyReduction true iff this node is being created as a result of a reduction of length zero (see GRMLR paper)
-         * @param start the earliest part of the input contributing to this node (used to make merging decisions)
*/
- public void newNode(Node parent, Forest pending, Parser.Table.State state, boolean fromEmptyReduction, Phase start) {
- Node p = hash.get(code(state, start));
- if (p != null) newNode2(p, parent, pending, state, fromEmptyReduction, start);
- else newNode3(parent, pending, state, fromEmptyReduction, start);
+ public void newNode(Node parent, Forest pending, Parser.Table.State state, boolean fromEmptyReduction) {
+ Node p = hash.get(code(state, parent==null?null:parent.phase()));
+ if (p != null) newNode2(p, parent, pending, state, fromEmptyReduction);
+ else newNode3(parent, pending, state, fromEmptyReduction);
}
- private void newNode2(Node p, Node parent, Forest pending, Parser.Table.State state, boolean fromEmptyReduction, Phase start) {
+ private void newNode2(Node p, Node parent, Forest pending, Parser.Table.State state, boolean fromEmptyReduction) {
p.holder.merge(pending);
if (p.parents().contains(parent)) return;
+            if (p.fe && p.phase() != parent.phase()) throw new Error("node from an empty reduction must not gain a parent in another phase");
+            if (!p.fe && p.phase() == parent.phase()) throw new Error("node not from an empty reduction must not gain a parent in its own phase");
p.parents().add(parent, true);
if (p!=parent && !fromEmptyReduction) p.queueReductions(parent);
}
- private void newNode3(Node parent, Forest pending, Parser.Table.State state, boolean fromEmptyReduction, Phase start) {
+ private void newNode3(Node parent, Forest pending, Parser.Table.State state, boolean fromEmptyReduction) {
do {
if (token != null && state.canShift(token)) break;
if (state.isAccepting()) break;
if (token==null) break;
- int count = 0;
- Parser.Table.Reduction r = null;
- for(Parser.Table.Reduction red : token==null ? state.getEofReductions() : state.getReductions(token)) { r = red; count++; }
- if (count==0) return; // BEWARE! this optimization is suspected to cause really nasty heisenbugs
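+                // same pruning as the removed count==0 check: a state that cannot
+                // shift, accept, or reduce on this token never gets a node (the old
+                // comment suspected this optimization of causing heisenbugs)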
+ if (!state.canReduce(token)) return;
//if (count > 1) break;
//if (r.numPop == 0) break;
//r.reduce(pending, parent, null, Phase.this, null);
//return;
} while(false);
- Node n = new Node(parent, pending, state, start); // ALLOC
+ Node n = new Node(parent, pending, state, fromEmptyReduction); // ALLOC
n.queueEmptyReductions();
if (!fromEmptyReduction) n.queueReductions(parent);
}
- boolean reducing = false;
/** perform all reduction operations */
public void reduce() {
reducing = true;
- HashSet<Phase.Node> s = new HashSet<Phase.Node>();
- s.addAll(hash.values());
- for(Phase.Node n : s) n.queueEmptyReductions();
- for(Phase.Node n : s) n.queueReductions();
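+            // snapshot the current node set into the reusable scratch array before queueing reductions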
+ if (reducing_list==null || reducing_list.length < hash.size())
+ reducing_list = new Phase.Node[hash.size() * 4];
+ Collection<Node> hv = hash.values();
+ hv.toArray(reducing_list);
+ int num = hv.size();
+ for(int i=0; i<num; i++) {
+ Node n = reducing_list[i];
+ n.queueEmptyReductions();
+ // INVARIANT: we never "see" a node until its parent-set is complete, modulo merges
+ }
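+            // second pass: queue the remaining (non-empty) reductions, clearing each
+            // slot as we go, presumably so the reusable array does not keep old nodes reachable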
+ for(int i=0; i<num; i++) {
+ Node n = reducing_list[i];
+ reducing_list[i] = null;
+ n.queueReductions();
+ }
+ }
+
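+        // shift callback: invoked by Parser.Table.State.invokeShifts() for each state
+        // reachable by shifting the current token; creates the shifted node in the next phase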
+ public void invoke(Parser.Table.State st, Forest result, Node n) {
+ next.newNode(n, result, st, false);
}
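+        /** the phase being shifted into; set by shift() so that invoke() above knows where to create nodes */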
+ private Phase next = null;
/** perform all shift operations, adding promoted nodes to <tt>next</tt> */
public void shift(Phase next, Forest result) {
+ this.next = next;
closed = true;
Forest res = null;
boolean ok = false;
}
if (!n.holder.valid()) continue;
if (token == null) continue;
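+                // shifts are now dispatched through the Invokable callback above;
+                // the old explicit loop is kept below for reference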
+ n.state.invokeShifts(token, this, result, n);
+ /*
for(Parser.Table.State st : n.state.getShifts(token)) {
if (res == null) res = result;
next.newNode(n, res, st, true, this);
ok = true;
}
+ */
}
if (!ok && token != null) {
//n.queueEmptyReductions();
//}
//error.append(" after: " +pendingReductions+ "\n");
- error.append(" candidate states:\n");
- for(Phase.Node n : hash.values()) {
+ //error.append(" candidate states:\n");
+ //for(Phase.Node n : hash.values()) {
//for(Sequence.Position p : n.state) error.append(" " + p + "\n");
//error.append(" --\n");
- for(Parser.Table.Reduction r : n.state.getReductions(token)) error.append(" " + r + "\n");
+ //for(Parser.Table.Reduction r : n.state.getReductions(token)) error.append(" " + r + "\n");
//error.append(" ==\n");
- }
+ //}
next.error = error.toString();
}
// GSS Nodes //////////////////////////////////////////////////////////////////////////////
/** a node in the GSS */
- public final class Node extends FastSet<Node> {
+ public final class Node extends FastSet<Node> implements Invokable<Parser.Table.Reduction, Node, Node> {
private Forest.Ref holder = null;
private boolean allqueued = false;
- private HashMap<Parser.Table.Reduction,Forest> cache = null;
-
- /** the set of nodes to which there is an edge starting at this node */
- //public final FastSet<Node> parents = new FastSet<Node>(); /* ALLOC */
-
/** what state this node is in */
public final Parser.Table.State state;
        /** which Phase this Node belongs to (note that Node is also a non-static inner class of Phase) */
- public final Phase phase = Phase.this;
+ public Phase phase() { return Phase.this; }
- public HashMap<Parser.Table.Reduction,Forest> cache() {
- return cache==null ? (cache = new HashMap<Parser.Table.Reduction,Forest>()) : cache; }
public Forest.Ref holder() { return holder==null ? (holder = new Forest.Ref()) : holder; }
public Forest pending() { return Phase.this.closed ? holder().resolve() : holder; }
public FastSet<Node> parents() { return this; }
- /** FIXME */
public void queueReductions() {
+ if (!reducing) return;
if (allqueued) return;
allqueued = true;
int where = parents().size();
- for(int i=0; i<where; i++) queueReductions(get(i));
+ state.invokeReductions(token, this, this, null);
}
- /** FIXME */
public void queueReductions(Node n2) {
if (!allqueued) { queueReductions(); return; }
- Node n = this;
- for(Parser.Table.Reduction r : token==null ? n.state.getEofReductions() : n.state.getReductions(token)) {
-
- // UGLY HACK
- // The problem here is that a "reduction of length 1"
- // performed twice with different values of n2 needs
- // to only create a *single* new result, but must add
- // multiple parents to the node holding that result.
- // The current reducer doesn't differentiate between
- // the next node of an n-pop reduction and the
- // ultimate parent of the last pop, so we need to
- // cache instances here as a way of avoiding
- // recreating them.
-
- // currently we have this weird problem where we
- // have to do an individual reduct for each child
- // when the reduction length is one (ie the
- // children wind up being children of the newly
- // created node rather than part of the popped
- // sequence
- if (r.numPop <= 0) continue;
- if (r.numPop == 1) {
- Forest ret = n.cache().get(r);
- if (ret != null) r.reduce(n, n2, n.phase, ret);
- else n.cache().put(r, r.reduce(n, n2, n.phase, null));
- } else {
- r.reduce(n, n2, Phase.this, null);
- }
- }
+ state.invokeReductions(token, this, this, n2);
}
-
- /** FIXME */
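+            // reduction callback, invoked by Parser.Table.State.invokeReductions():
+            //   n==null            => called from queueEmptyReductions(); only zero-pop reductions fire
+            //   n!=null, n2==null  => called from queueReductions(); perform each non-zero-pop reduction
+            //   n2!=null           => restrict the reduction to paths through the newly added parent n2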
+ public final void invoke(Parser.Table.Reduction r, Node n, Node n2) {
+ if (n==null) {
+ if (r.numPop==0) r.reduce(this);
+ return;
+ }
+ if (r.numPop==0) return;
+ if (n2==null) r.reduce(n);
+ else r.reduce(n, n2);
+ }
public void queueEmptyReductions() {
- if (reducing)
- for(Parser.Table.Reduction r : token==null ? state.getEofReductions() : state.getReductions(token))
- if (r.numPop==0)
- r.reduce(this, null, this.phase, r.zero());
+ if (!reducing) return;
+ state.invokeReductions(token, this, null, null);
}
- private Node(Node parent, Forest pending, Parser.Table.State state, Phase start) {
+ private boolean fe;
+ private Node(Node parent, Forest pending, Parser.Table.State state, boolean fe) {
+ this.fe = fe;
this.state = state;
+ Phase start = parent==null ? null : parent.phase();
if (pending != null) this.holder().merge(pending);
if (parent != null) parents().add(parent, true);
if (Phase.this.hash.get(code(state, start)) != null) throw new Error("severe problem!");
/** this is something of a hack right now */
private static long code(Parser.Table.State state, Phase start) {
- return (((long)state.idx) << 32) | (start==null ? 0 : start.pos);
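+            // the +1 keeps "no start phase" (0) distinct from a start phase at position 0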
+ return (((long)state.idx) << 32) | (start==null ? 0 : (start.pos+1));
}
- public boolean yak = false;
}