package edu.berkeley.sbp;
import edu.berkeley.sbp.*;
import edu.berkeley.sbp.*;
import edu.berkeley.sbp.*;
import edu.berkeley.sbp.util.*;
import java.io.*;
import java.util.*;
import java.lang.reflect.*;
//////////////////////////////////////////////////////////////////////////////
// TODO:
//
// - fix public/package/private status
//
//////////////////////////////////////////////////////////////////////////////
// Optimizations to add
//
// ** NOTE: not all of these are appropriate for this class -- it is
// simply a list of optimizations not implemented. This
// class is meant to remain simple and easy to understand;
// optimizations which obscure that do not belong here (they
// should go into the compiled version instead)
/** implements Tomita's Graph Structured Stack */
class GSS {
/** no state to initialize; all bookkeeping lives in the per-token Phase instances */
public GSS() { }
/** corresponds to a position between tokens in the input stream; same as Tomita's U_i's */
public class Phase {
/** the token immediately after this phase */
public final Token token;
/** index of this phase; currently this is necessary only for the code() hack -- it doesn't actually correspond to the input */
private final int pos;
/** the complete parse result; shift() merges accepting-state forests into this at end-of-input; null until then */
public Forest.Ref finalResult = null;
/** all nodes, keyed by the value returned by code() -- NOTE(review): map appears raw here; generics were likely stripped in transit, verify against original */
private HashMap hash = new HashMap(); /* ALLOC */
/** the number of nodes in this phase */
private int numNodes = 0;
/** set by shift(); once closed, pending() returns fully-resolved forests (see Node.pending()) */
boolean closed = false;
/** location in the input corresponding to this phase */
private Token.Location location;
/** create the phase following <tt>previous</tt> (or the initial phase when previous is null) */
public Phase(Phase previous, Token token, Token.Location location) {
    if (previous == null) this.pos = 0;
    else                  this.pos = previous.pos + 1;
    this.token    = token;
    this.location = location;
}
/** true once the end of the input has been reached (no lookahead token remains) */
public boolean isDone() { return token == null; }
/** message thrown by checkFailure(); overwritten with a detailed report by the previous phase's shift() when no node can shift */
private String error = "generic syntax error";
/**
 * Abort the parse if this phase ended up empty.
 * @throws Parser.Failed when no GSS nodes survived into this phase
 */
public void checkFailure() throws Parser.Failed {
    if (numNodes > 0) return;
    throw new Parser.Failed(error, getLocation());
}
/** the location in the input corresponding to this phase */
public Token.Location getLocation() { return location; }
/** add a new node (merging with existing nodes if possible)
* @param parent the parent of the new node
* @param result the SPPF result corresponding to the new node
* @param state the state that the new node is in
* @param fromEmptyReduction true iff this node is being created as a result of a reduction of length zero (see GRMLR paper)
* @param start the earliest part of the input contributing to this node (used to make merging decisions)
*/
public void newNode(Node parent, Forest pending, Parser.Table.State state, boolean fromEmptyReduction, Phase start) {
    // The node table is keyed by code(state, start); as declared above the map is
    // raw, so the lookup result must be cast back to Node to compile.
    Node p = (Node)hash.get(code(state, start));
    // merge into an existing equivalent node when possible, otherwise create one
    if (p != null) newNode2(p, parent, pending, state, fromEmptyReduction, start);
    else           newNode3(parent, pending, state, fromEmptyReduction, start);
}
/** merge <tt>pending</tt> and the edge to <tt>parent</tt> into the pre-existing node <tt>p</tt> */
private void newNode2(Node p, Node parent, Forest pending, Parser.Table.State state, boolean fromEmptyReduction, Phase start) {
    // fold the new SPPF result into the existing node's forest
    p.holder.merge(pending);
    FastSet parentSet = p.parents();
    if (!parentSet.contains(parent)) {
        // a genuinely new edge: record it and (unless this came from an
        // empty reduction or is a self-edge) re-run reductions through it
        parentSet.add(parent, true);
        boolean selfEdge = (p == parent);
        if (!selfEdge && !fromEmptyReduction) p.queueReductions(parent);
    }
}
/** create a brand-new node for (state, start) -- no equivalent node existed.
 *  The do { ... } while(false) below is a structured "goto": any break skips
 *  the remaining filters and falls through to node creation. */
private void newNode3(Node parent, Forest pending, Parser.Table.State state, boolean fromEmptyReduction, Phase start) {
do {
// a node that can shift the lookahead is always worth creating
if (token != null && state.canShift(token)) break;
// so is an accepting state, and any state at end-of-input
if (state.isAccepting()) break;
if (token==null) break;
// otherwise the node is only useful if it has at least one applicable reduction
int count = 0;
Parser.Table.Reduction r = null;
// NOTE(review): token==null is impossible here (broken out above), so the
// EOF branch of this ternary is dead -- presumably kept for symmetry
for(Parser.Table.Reduction red : token==null ? state.getEofReductions() : state.getReductions(token)) { r = red; count++; }
if (count==0) return; // BEWARE! this optimization is suspected to cause really nasty heisenbugs
//if (count > 1) break;
//if (r.numPop == 0) break;
//r.reduce(pending, parent, null, Phase.this, null);
//return;
} while(false);
Node n = new Node(parent, pending, state, start); // ALLOC
// zero-length reductions must be queued for every new node (see GRMLR paper)
n.queueEmptyReductions();
if (!fromEmptyReduction) n.queueReductions(parent);
}
/** set once reduce() has begun for this phase; not read within this section of the file -- presumably consulted by Node/Reduction code elsewhere, verify */
boolean reducing = false;
/** perform all reduction operations */
public void reduce() {
    reducing = true;
    // Snapshot the current node set before queueing: reductions may add new
    // nodes to this phase while we iterate, and iterating hash.values()
    // directly would risk a ConcurrentModificationException.
    // (Typed set also fixes the raw-type for-each, which does not compile.)
    HashSet<Phase.Node> s = new HashSet<Phase.Node>();
    s.addAll(hash.values());
    // empty (zero-length) reductions first, then the general ones
    for(Phase.Node n : s) n.queueEmptyReductions();
    for(Phase.Node n : s) n.queueReductions();
}
/** perform all shift operations, adding promoted nodes to next */
public void shift(Phase next, Forest result) {
    closed = true;
    Forest res = null;
    boolean ok = false;
    // hash is declared raw above, so iterate as Object and cast; this compiles
    // whether or not the map is parameterized
    for(Object o : hash.values()) {
        Phase.Node n = (Phase.Node)o;
        if (n.holder==null) continue;
        n.holder.resolve();
        // at end-of-input, every accepting node contributes to the final parse result
        if (token == null && n.state.isAccepting()) {
            ok = true;
            if (finalResult==null) finalResult = new Forest.Ref();
            finalResult.merge(n.holder);
        }
        if (!n.holder.valid()) continue;
        if (token == null) continue;
        // promote this node into the next phase along every legal shift
        for(Parser.Table.State st : n.state.getShifts(token)) {
            if (res == null) res = result;
            next.newNode(n, res, st, true, this);
            ok = true;
        }
    }
    // nothing could shift and we are not at EOF: leave a detailed syntax-error
    // report on the NEXT phase (its checkFailure() will throw it)
    if (!ok && token != null) {
        // StringBuilder: this is a method-local, single-threaded buffer, so the
        // synchronized StringBuffer buys nothing
        StringBuilder error = new StringBuilder();
        error.append("error: unable to shift token \"" + token + "\"\n");
        error.append("  candidate states:\n");
        for(Object o : hash.values()) {
            Phase.Node n = (Phase.Node)o;
            for(Parser.Table.Reduction r : n.state.getReductions(token)) error.append("      " + r + "\n");
        }
        next.error = error.toString();
    }
    // this massively improves GC performance
    hash = null;
}
// GSS Nodes //////////////////////////////////////////////////////////////////////////////
/** a node in the GSS */
public final class Node extends FastSet {
// lazily-created SPPF forest for this node (see holder()); resolved during shift()
private Forest.Ref holder = null;
// guards queueReductions(): full queueing runs at most once per node
private boolean allqueued = false;
// lazily-created scratch map (see cache()); key/value semantics are defined by callers -- not visible here
private HashMap cache = null;
/** the set of nodes to which there is an edge starting at this node */
//public final FastSet parents = new FastSet(); /* ALLOC */
/** what state this node is in */
public final Parser.Table.State state;
/** which Phase this Node belongs to (note that Node is also a non-static inner class of Phase) */
public final Phase phase = Phase.this;
/** lazily allocate and return this node's scratch map */
public HashMap cache() {
    if (cache == null) cache = new HashMap();
    return cache;
}
/** lazily allocate and return this node's result forest */
public Forest.Ref holder() {
    if (holder == null) holder = new Forest.Ref();
    return holder;
}
/** this node's forest: fully resolved once the owning phase has been closed by shift() */
public Forest pending() {
    if (Phase.this.closed) return holder().resolve();
    return holder;
}
/** the parent-edge set; Node extends FastSet, so the node itself IS its parent set (saves an allocation) */
public FastSet parents() { return this; }
/** FIXME */
public void queueReductions() {
if (allqueued) return;
allqueued = true;
int where = parents().size();
for(int i=0; i