package edu.berkeley.sbp;
import edu.berkeley.sbp.*;
import edu.berkeley.sbp.*;
import edu.berkeley.sbp.*;
import edu.berkeley.sbp.util.*;
import java.io.*;
import java.util.*;
import java.lang.reflect.*;
//////////////////////////////////////////////////////////////////////////////
// TODO:
//
// - fix public/package/private status
//
//////////////////////////////////////////////////////////////////////////////
// Optimizations to add
//
// ** NOTE: not all of these are appropriate for this class -- it is
// simply a list of optimizations not implemented. This
// class is meant to remain simple and easy to understand;
// optimizations which obscure that do not belong here (they
// should go into the compiled version instead)
//
// - most of our time is now spent creating and storing Reduct instances
// - we should be able to perform Reduct's immediately after creating them...
//
/** implements Tomita's Graph Structured Stack; one Phase is created per position in the input token stream */
class GSS {

    public GSS() { }

    /** corresponds to a position between tokens in the input stream; same as Tomita's U_i's */
    public class Phase {

        /** the token immediately after this phase (null once end-of-input has been reached) */
        public final Token token;

        /** currently this is necessary only for the code() hack -- it doesn't actually correspond to the input */
        private final int pos;

        /** the merged SPPF of all accepting nodes; filled in by shift() when an accepting
         *  state is encountered at end-of-input (token==null), null until then */
        public Forest.Ref finalResult = null;

        /** all reductions (pending and completed); used to deduplicate Reducts via equals()/hashCode() */
        private HashSet reductions = new HashSet(); /* ALLOC */

        /** all nodes, keyed by the value returned by code() */
        private HashMap hash = new HashMap(); /* ALLOC */

        /** the number of pending reductions */
        private int pendingReductions = 0;
        private int totalReductions = 0;

        //private HashSet pendingReduct = new HashSet();
        /** queue of Reducts not yet performed; pushed at the front and drained LIFO by reduce() */
        private LinkedList pendingReduct = new LinkedList();

        /** the number of nodes in this phase */
        private int numNodes = 0;

        /** set by shift(); once true, Node.pending() resolves its holder eagerly */
        boolean closed = false;

        /** location in the input corresponding to this phase, for error reporting */
        private Token.Location location;

        public Phase(Phase previous, Token token, Token.Location location) {
            this.pos = previous==null ? 0 : previous.pos+1;
            this.token = token;
            this.location = location;
        }

        /** true iff this phase is at end-of-input (no token follows it) */
        public boolean isDone() { return token == null; }

        /** diagnostic reported when this phase has no live nodes; replaced with a detailed message by shift() */
        private String error = "generic syntax error";

        /** throws Parser.Failed if this phase has no nodes left, i.e. no parse can continue */
        public void checkFailure() throws Parser.Failed {
            if (numNodes <= 0)
                throw new Parser.Failed(error, getLocation());
        }

        public Token.Location getLocation() { return location; }

        /** add a new node (merging with existing nodes if possible)
         *  @param parent the parent of the new node
         *  @param pending the SPPF result corresponding to the new node
         *  @param state the state that the new node is in
         *  @param fromEmptyReduction true iff this node is being created as a result of a reduction of length zero (see GRMLR paper)
         *  @param start the earliest part of the input contributing to this node (used to make merging decisions)
         */
        public void newNode(Node parent, Forest pending, Parser.Table.State state, boolean fromEmptyReduction, Phase start) {
            Node p = hash.get(code(state, start));
            if (p != null) newNode2(p, parent, pending, state, fromEmptyReduction, start);
            else           newNode3(parent, pending, state, fromEmptyReduction, start);
        }

        /** a node with the same (state, start) key already exists: merge the SPPF and add the new parent edge */
        private void newNode2(Node p, Node parent, Forest pending, Parser.Table.State state, boolean fromEmptyReduction, Phase start) {
            p.holder.merge(pending);
            if (p.parents.contains(parent)) return;
            p.parents.add(parent, true);
            // the fresh edge p->parent may enable reductions that were not previously possible
            if (p!=parent && !fromEmptyReduction) p.queueReductions(parent);
        }

        /** no node with this (state, start) key exists yet: allocate a fresh one and queue its reductions */
        private void newNode3(Node parent, Forest pending, Parser.Table.State state, boolean fromEmptyReduction, Phase start) {
            // this do/while(false) is a mostly-disabled fast-path check (see the commented-out
            // lines below); as it stands it always falls through to the allocation
            do {
                if (token != null && state.canShift(token)) break;
                if (state.isAccepting()) break;
                if (token==null) break;
                int count = 0;
                Parser.Table.Reduction r = null;
                for(Parser.Table.Reduction red : token==null ? state.getEofReductions() : state.getReductions(token)) { r = red; count++; }
                //if (count==0) return;     // BEWARE! this optimization is suspected to cause really nasty heisenbugs
                //if (count > 1) break;
                //if (r.numPop == 0) break;
                //r.reduce(pending, parent, null, Phase.this, null);
                //return;
            } while(false);
            Node n = new Node(parent, pending, state, start); // ALLOC; registers itself in hash
            n.queueEmptyReductions();
            if (!fromEmptyReduction) n.queueReductions();
        }

        /** true while reduce() is running; queueEmptyReductions() only fires under it */
        boolean reducing = false;

        /** perform all reduction operations */
        public void reduce() {
            reducing = true;
            // snapshot the node set first: performing reductions can add new nodes to this phase
            HashSet s = new HashSet();
            s.addAll(hash.values());
            for(Phase.Node n : s) n.queueEmptyReductions();
            for(Phase.Node n : s) n.queueReductions();
            // drain the worklist; each go() may enqueue further Reducts at the front
            while(pendingReduct.size()>0)
                //pendingReduct.iterator().next().go();
                pendingReduct.removeFirst().go();
        }

        /** perform all shift operations, adding promoted nodes to next */
        public void shift(Phase next, Forest result) {
            closed = true;
            Forest res = null;
            boolean ok = false;
            for(Phase.Node n : hash.values()) {
                if (n.holder==null) continue;
                n.holder.resolve();
                // at end-of-input an accepting node contributes its forest to the final result
                if (token == null && n.state.isAccepting()) {
                    ok = true;
                    if (finalResult==null) finalResult = new Forest.Ref();
                    finalResult.merge(n.holder);
                }
                if (!n.holder.valid()) continue;
                if (token == null) continue;
                for(Parser.Table.State st : n.state.getShifts(token)) {
                    if (res == null) res = result;
                    next.newNode(n, res, st, true, this);
                    ok = true;
                }
            }
            // nothing could shift and we are not at a successful end-of-input: build a detailed
            // diagnostic on the next phase; next.checkFailure() will throw it if next stays empty
            if (!ok && token != null) {
                StringBuffer error = new StringBuffer();
                error.append("error: unable to shift token \"" + token + "\"\n");
                error.append("  before: " +pendingReductions+ "\n");
                error.append("  before: " +totalReductions+ "\n");
                //for(Phase.Node n : hash.values()) {
                //n.queueReductions();
                //n.queueEmptyReductions();
                //}
                error.append("  after: " +pendingReductions+ "\n");
                error.append("  candidate states:\n");
                for(Phase.Node n : hash.values()) {
                    //for(Sequence.Position p : n.state) error.append("        " + p + "\n");
                    //error.append("        --\n");
                    for(Parser.Table.Reduction r : n.state.getReductions(token)) error.append("        " + r + "\n");
                    //error.append("        ==\n");
                }
                next.error = error.toString();
            }

            // this massively improves GC performance
            reductions = null;
            hash = null;
        }

        // GSS Nodes //////////////////////////////////////////////////////////////////////////////

        //private HashMap pcache = new HashMap();

        /** a node in the GSS */
        public final class Node {

            /** lazily-allocated holder for this node's SPPF result; use holder() to read it */
            private Forest.Ref holder = null;

            /** caches results of length-one reductions, keyed by Reduction; see the "UGLY HACK" note in Reduct.go() */
            private HashMap cache = null;

            /** the set of nodes to which there is an edge starting at this node */
            public final FastSet parents = new FastSet();  /* ALLOC */

            /** what state this node is in */
            public final Parser.Table.State state;

            /** which Phase this Node belongs to (note that Node is also a non-static inner class of Phase) */
            public final Phase phase = Phase.this;

            public HashMap cache() {
                return cache==null ? (cache = new HashMap()) : cache; }
            public Forest.Ref holder() { return holder==null ? (holder = new Forest.Ref()) : holder; }
            public Forest pending() { return Phase.this.closed ? holder().resolve() : holder; }
            public FastSet parents() { return parents; }

            /** enqueue a Reduct along every parent edge of this node */
            public void queueReductions() {
                for(Node n2 : parents)
                    queueReductions(n2);
            }

            /** enqueue a Reduct along the edge to parent n2 (null Reduction means "all applicable reductions") */
            public void queueReductions(Node n2) {
                newReduct(this, n2, null);
            }

            /** immediately perform every zero-pop (epsilon) reduction applicable in this node's state */
            public void queueEmptyReductions() {
                if (reducing) {
                    for(Parser.Table.Reduction r : token==null ? state.getEofReductions() : state.getReductions(token)) {
                        if (r.numPop==0) {
                            //r.reduce(this, null, this.phase, r.zero());
                            Reduct red = new Reduct(this, null, r);
                            red.go(); /* ALLOC */
                        }
                    }
                }
            }

            private Node(Node parent, Forest pending, Parser.Table.State state, Phase start) {
                this.state = state;
                if (pending != null) this.holder().merge(pending);
                if (parent != null) parents.add(parent, true);
                // newNode() should have routed duplicates to newNode2(); reaching here with an
                // existing node for this key indicates an internal invariant violation
                if (Phase.this.hash.get(code(state, start)) != null) throw new Error("severe problem!");
                Phase.this.hash.put(code(state, start), this);
                Phase.this.numNodes++;
                if (parent==null) holder().valid = true; // hack to make sure that the "base" node is always considered valid
            }
        }

        /** enqueue (but do not yet perform) a reduction; duplicates are filtered in Reduct's constructor */
        public void newReduct(Node n, Node n2, Parser.Table.Reduction r) {
            new Reduct(n, n2, r)/*.go()*/;
        }

        // Forest / Completed Reductions //////////////////////////////////////////////////////////////////////////////

        /** a pending or completed reduction */
        class Reduct {

            /** the node from which the reduction should begin */
            public Node n = null;

            /** the node on the other end of the edge to be reduced along (either: null, the second node of the reduction,
             *  or the parent of the result of a length-one reduction)
             */
            public Node n2 = null;

            /** true iff the reduction has already been performed */
            private boolean done = false;

            /** the reduction to be applied; null means "perform all reductions applicable at n" */
            public Parser.Table.Reduction r;

            public Tree result = null;

            public Reduct(Node n, Node n2, Parser.Table.Reduction r) {
                this.n = n;
                this.n2 = n2;
                this.r = r;
                // deduplicate: an equal Reduct was already queued/performed, so mark this one done
                if (reductions.contains(this)) { done = true; return; }
                reductions.add(this);
                pendingReduct.addFirst(this);
                pendingReductions++;
            }

            /** perform the reduction */
            public void go() {
                if (done) return;
                done = true;
                pendingReduct.remove(this);
                pendingReductions--;

                // r==null means "all applicable reductions": length-one reductions are expanded
                // into one Reduct per edge (see the note below), longer ones are performed directly
                if (r==null)
                    for(Parser.Table.Reduction r : token==null ? n.state.getEofReductions() : n.state.getReductions(token)) {
                        // currently we have this weird problem where we
                        // have to do an individual reduct for each child
                        // when the reduction length is one (ie the
                        // children wind up being children of the newly
                        // created node rather than part of the popped
                        // sequence
                        if (r.numPop == 1) new Reduct(n, n2, r)/*.go()*/;
                    }

                // r==null: perform reductions of length >= 2 directly; length 0 is handled by
                // queueEmptyReductions() and length 1 was expanded into separate Reducts above
                if (r==null) {
                    for(Parser.Table.Reduction r : token==null ? n.state.getEofReductions() : n.state.getReductions(token)) {
                        if (r.numPop <= 1) continue;
                        r.reduce(n, n2, Phase.this, null);
                    }
                } else if (r.numPop==0) { r.reduce(n, n2, n.phase, r.zero());
                } else if (r.numPop==1) {
                    // UGLY HACK
                    // The problem here is that a "reduction of length 1"
                    // performed twice with different values of n2 needs
                    // to only create a *single* new result, but must add
                    // multiple parents to the node holding that result.
                    // The current reducer doesn't differentiate between
                    // the next node of an n-pop reduction and the
                    // ultimate parent of the last pop, so we need to
                    // cache instances here as a way of avoiding
                    // recreating them.
                    Forest ret = n.cache().get(r);
                    if (ret != null) r.reduce(n, n2, n.phase, ret);
                    else n.cache().put(r, r.reduce(n, n2, n.phase, null));
                } else {
                    r.reduce(n, n2, Phase.this, null);
                }
            }

            // identity of a Reduct is the triple (n, n2, r); needed so the 'reductions'
            // HashSet can deduplicate queued reductions (FIXME: this is a PITA)
            public int hashCode() { return n.hashCode() ^ (r==null ? 0 : r.hashCode()) ^ (n2==null ? 0 : n2.hashCode()); }
            public boolean equals(Object o) {
                if (o==null) return false;
                if (o==this) return true;
                if (!(o instanceof Reduct)) return false;
                Reduct other = (Reduct)o;
                return equal(r, other.r) && equal(n, other.n) && equal(n2, other.n2);
            }
        }
    }

    /** helper method: null-safe equals */
    private static boolean equal(Object a, Object b) {
        if (a==null && b==null) return true;
        if (a==null || b==null) return false;
        return a.equals(b);
    }

    /** this is something of a hack right now: packs a state index and a start-phase
     *  position into a single long, used as the key for Phase.hash */
    private static long code(Parser.Table.State state, Phase start) {
        return (((long)state.idx) << 32) | (start==null ? 0 : start.pos);
    }

    // NOTE(review): purpose unclear from this file -- looks like a debugging flag; confirm against callers
    public boolean yak = false;
}