+// Copyright 2006-2007 all rights reserved; see LICENSE file for BSD-style license
+
package edu.berkeley.sbp;
-import edu.berkeley.sbp.*;
import edu.berkeley.sbp.util.*;
-import edu.berkeley.sbp.*;
-import edu.berkeley.sbp.Sequence.Position;
-import edu.berkeley.sbp.*;
+import edu.berkeley.sbp.Sequence.Pos;
import java.io.*;
import java.util.*;
-import java.lang.reflect.*;
-/** a parser which translates streams of Tokens of type T into a Forest<R> */
-public abstract class Parser<T extends Token, R> {
+/** a parser which translates an Input<Token> into a Forest<NodeType> */
+public abstract class Parser<Token, NodeType> implements Serializable {
- private final Table pt;
+ final Table pt;
    /** create a parser to parse the grammar with start symbol <tt>u</tt> */
    public Parser(Union u) { this.pt = new Table(u); }

    /** implement this method to create the output forest corresponding to a lone shifted input token */
    public abstract Forest<NodeType> shiftToken(Token t, Input.Region region);

    /** implement this to supply the empty Topology over this parser's token type
     *  (used as the seed/identity when token sets are built -- TODO confirm exact contract) */
    public abstract Topology<Token> emptyTopology();

    /** renders the parse table; useful only for debugging */
    public String toString() { return pt.toString(); }

    /** parse <tt>input</tt>, and return the shared packed parse forest (or throw an exception) */
    public Forest<NodeType> parse(Input<Token> input) throws IOException, ParseFailed {
        // progress/status output is enabled with -Dsbp.verbose
        verbose = System.getProperty("sbp.verbose", null) != null;
        spinpos = 0;
        GSS gss = new GSS(input, this);
        try {
            // one GSS.Phase per input token; the loop ends when a phase reports isDone()
            for(GSS.Phase current = gss.new Phase<Token>(pt.start); ;) {
                if (verbose) debug(current.token, gss, input);
                if (current.isDone()) return (Forest<NodeType>)current.finalResult;
                // the input region spanned by the token we are about to shift
                Input.Region region = current.getLocation().createRegion(current.getNextLocation());
                Forest forest = shiftToken((Token)current.token, region);
                current = gss.new Phase<Token>(current, forest);
            }
        } finally {
            if (verbose) {
                // erase the spinner/status line before giving the console back
                System.err.print("\r"+ANSI.clreol());
                debug(null, gss, input);
            }
        }
    }
- /** parse <tt>input</tt>, using the table <tt>pt</tt> to drive the parser */
- public Forest<R> parse(Token.Stream<T> input) throws IOException, Failed {
- GSS gss = new GSS();
- Token.Location loc = input.getLocation();
- GSS.Phase current = gss.new Phase(null, input.next(), loc);
- current.newNode(null, null, pt.start, true, null);
- for(;;) {
- loc = input.getLocation();
- GSS.Phase next = gss.new Phase(current, input.next(), loc);
- current.reduce();
- Forest forest = current.token==null ? null : shiftedToken((T)current.token, loc);
- current.shift(next, forest);
- if (current.isDone()) return (Forest<R>)current.finalResult;
- current.checkFailure();
- current = next;
- }
    // Spinner //////////////////////////////////////////////////////////////////////////////

    private boolean verbose = false;                                   // set from -Dsbp.verbose at the top of parse()
    private static final char[] spin = new char[] { '-', '\\', '|', '/' };
    private int spinpos = 0;                                           // next index into spin[]
    private long last = 0;                                             // time (ms) of the last spinner repaint

    /** advance the console spinner; repaints at most once every 70ms */
    void spin() {
        if (!verbose) return;
        long now = System.currentTimeMillis();
        if (now-last < 70) return;   // throttle redraws
        last = now;
        System.err.print("\r " + spin[spinpos++ % (spin.length)]+"\r");
    }
-
- // Exceptions //////////////////////////////////////////////////////////////////////////////
-
- public static class Failed extends Exception {
- private final Token.Location location;
- private final String message;
- public Failed() { this("", null); }
- public Failed(String message, Token.Location loc) { this.location = loc; this.message = message; }
- public Token.Location getLocation() { return location; }
- public String toString() { return message + (location==null ? "" : (" at " + location)); }
- }
-
- public static class Ambiguous extends RuntimeException {
- public final Forest ambiguity;
- public Ambiguous(Forest ambiguity) { this.ambiguity = ambiguity; }
- public String toString() {
- StringBuffer sb = new StringBuffer();
- sb.append("unresolved ambiguity "/*"at " + ambiguity.getLocation() + ":"*/);
- for(Object result : ambiguity.expand(false))
- sb.append("\n " + result);
- return sb.toString();
    private int _last = -1;     // first char of the previously-seen token (-1 = none/EOF)
    private String buf = "";    // echo of the input line currently being parsed

    /** paint the verbose status line: echo the input consumed so far plus GSS statistics */
    private void debug(Object t, GSS gss, Input input) {
        //FIXME
        // NOTE(review): only the first character of the token's string form is inspected;
        // assumes single-character tokens (CharAtom input) -- TODO confirm for other Input types
        int c = t==null ? -1 : ((t+"").charAt(0));
        int last = _last;
        _last = c;
        switch(c) {
            case edu.berkeley.sbp.chr.CharAtom.left:
                buf += "\033[31m>\033[0m";
                break;
            case edu.berkeley.sbp.chr.CharAtom.right:
                buf += "\033[31m<\033[0m";
                break;
            case -1: // FIXME
            case '\n':
                // end of line (or of input): flush the echo buffer to stderr
                if (verbose) {
                    if (last==' ') buf += ANSI.blue("\\n");
                    System.err.println("\r"+ANSI.clreol()+"\r"+buf);
                    buf = "";
                }
                break;
            default:
                buf += ANSI.cyan(""+((char)c));
                break;
        }
        if (t==null) return;

        // FIXME: clean this up
        // build the fixed-width status field: spinner, file:line, phase position, node/reduction counts
        String s;
        s = " " + spin[spinpos++ % (spin.length)]+" parsing ";
        s += input.getName();
        s += " "+input.getLocation();
        while(s.indexOf(':') != -1 && s.indexOf(':') < 8) s = " " + s;
        String y = "@"+gss.viewPos+" ";
        while(y.length() < 9) y = " " + y;
        s += y;
        s += " nodes="+gss.numOldNodes;
        while(s.length() < 50) s = s + " ";
        s += " shifted="+gss.numNewNodes;
        while(s.length() < 60) s = s + " ";
        s += " reductions="+gss.numReductions;
        while(s.length() < 78) s = s + " ";
        System.err.print("\r"+ANSI.invert(s+ANSI.clreol())+"\r");
    }
-
// Table //////////////////////////////////////////////////////////////////////////////
/** an SLR(1) parse table which may contain conflicts */
- static class Table extends Walk.Cache {
-
- public final Walk.Cache cache = this;
-
- private void walk(Element e, HashSet<Element> hs) {
- if (e==null) return;
- if (hs.contains(e)) return;
- hs.add(e);
- if (e instanceof Atom) return;
- for(Sequence s : (Union)e) {
- hs.add(s);
- for(Position p = s.firstp(); p != null; p = p.next())
- walk(p.element(), hs);
- }
- }
+ class Table implements Serializable {
/** the start state */
- public final State start;
+ final State<Token> start;
+
+ /** a dummy state from which no reductions can be performed */
+ private final State<Token> dead_state;
/** used to generate unique values for State.idx */
private int master_state_idx = 0;
+ /** all the states for this table */
+ private transient HashSet<State<Token>> all_states = new HashSet<State<Token>>();
+
+ /** all the doomed states in this table */
+ private transient HashMap<HashSet<Pos>,State<Token>> doomed_states = new HashMap<HashSet<Pos>,State<Token>>();
+
+ /** all the non-doomed states in this table */
+ private transient HashMap<HashSet<Pos>,State<Token>> normal_states = new HashMap<HashSet<Pos>,State<Token>>();
+
        /** construct a parse table for the given grammar */
        Table(Union ux) {
            // wrap the user's start symbol in a synthetic root Union named "0"
            Union rootUnion = new Union("0", Sequence.create(ux), true);
            Grammar<Token> grammar = new Grammar<Token>(rootUnion) {
                    public Topology<Token> emptyTopology() { return Parser.this.emptyTopology(); }
                };

            // create the "dead state"
            this.dead_state = new State<Token>(new HashSet<Pos>(), true, grammar);

            // construct the start state; this will recursively create *all* the states
            this.start = new State<Token>(reachable(rootUnion), false, grammar);

            buildReductions(grammar);
            sortReductions(grammar);
        }
+
        /** fill in the reductions table (one "row" per State) */
        private void buildReductions(Grammar<Token> grammar) {
            // for each state, fill in the corresponding "row" of the parse table
            for(State<Token> state : all_states)
                for(Pos p : state.hs) {

                    // if the element following this position is an atom, copy the corresponding
                    // set of rows out of the "master" goto table and into this state's shift table
                    if (p.element() != null && p.element() instanceof Atom)
                        state.shifts.addAll(state.gotoSetTerminals.subset(((Atom)p.element()).getTokenTopology()));

                    // RNGLR: we can potentially reduce from any "right-nullable" position -- that is,
                    // any position for which all Elements after it in the Sequence are capable of
                    // matching the empty string.
                    if (!grammar.isRightNullable(p)) continue;
                    Topology<Token> follow = grammar.follow(p.owner());
                    for(Pos p2 = p; p2 != null && p2.element() != null; p2 = p2.next()) {
                        if (!(p2.element() instanceof Union))
                            throw new Error("impossible -- only Unions can be nullable");

                        // interesting RNGLR-followRestriction interaction: we must intersect
                        // not just the follow-set of the last non-nullable element, but the
                        // follow-sets of the nulled elements as well.
                        for(Sequence s : ((Union)p2.element()))
                            follow = follow.intersect(grammar.follow(s));
                        Topology<Token> set = grammar.epsilonFollowSet((Union)p2.element());
                        if (set != null) follow = follow.intersect(set);
                    }

                    // indicate that when the next token is in the set "follow", nodes in this
                    // state should reduce according to Pos "p"
                    state.reductions.put(follow, p);
                    if (grammar.followEof.contains(p.owner())) state.eofReductions.add(p);
                }

            // optimize the reductions table: IntegerTopology admits a faster VisitableMap form
            if (emptyTopology() instanceof IntegerTopology)
                for(State<Token> state : all_states) {
                    // FIXME: this is pretty ugly
                    state.oreductions = state.reductions.optimize(((IntegerTopology)emptyTopology()).functor());
                    state.oshifts = state.shifts.optimize(((IntegerTopology)emptyTopology()).functor());
                }
        }
- /** a single state in the LR table and the transitions possible from it */
- public class State implements Comparable<Table.State>, Iterable<Position> {
-
- /*
- public boolean isResolvable(Token t) {
- boolean found = false;
- for(Reduction r : getReductions(t)) {
- Position p = r.position;
- if (!p.isRightNullable(cache)) continue;
- if (p.owner().firstp()==p) continue;
- if (found) {
- // found two items meeting criteria #1
- return false;
- } else {
- found = true;
- continue;
        // FIXME: this method needs to be cleaned up and documented
        /**
         * Assign an ordinal (<tt>Pos.ord</tt>) to every position appearing in the
         * table, consistent with <tt>Grammar.comparePositions()</tt>.
         */
        private void sortReductions(Grammar<Token> grammar) {
            // crude algorithm to assign an ordinal ordering to every position
            // al will be sorted in DECREASING order (al[0] >= al[1])
            ArrayList<Sequence.Pos> al = new ArrayList<Sequence.Pos>();
            for(State s : all_states) {
                for(Object po : s.positions()) {
                    Sequence.Pos p = (Sequence.Pos)po;
                    if (al.contains(p)) continue;
                    // insertion sort: place p before the first element it compares less than
                    int i=0;
                    for(; i<al.size(); i++) {
                        if (grammar.comparePositions(p, al.get(i)) < 0)
                            break;
                    }
                    al.add(i, p);
                }
            }
            // FIXME: this actually pollutes the "pure" objects (the ones that should not be modified by the Parser)
            // sort in increasing order...
            // NOTE(review): a bubble-sort pass repairing any inversions left by the partial order above
            OUTER: while(true) {
                for(int i=0; i<al.size(); i++)
                    for(int j=i+1; j<al.size(); j++)
                        if (grammar.comparePositions(al.get(i), al.get(j)) > 0) {
                            Sequence.Pos p = al.remove(j);
                            al.add(i, p);
                            continue OUTER;
                        }
                break;
            }

            // assign ordinals; the inner loop appears intended to let equal-comparing
            // positions share an ordinal, but "inc = true;" below unconditionally
            // discards its result, so as written every position gets a fresh ordinal
            // -- NOTE(review): looks like leftover debugging; confirm before changing
            int j = 1;
            int pk = 0;
            for(int i=0; i<al.size(); i++) {
                boolean inc = false;
                for(int k=pk; k<i; k++) {
                    if (grammar.comparePositions(al.get(k), al.get(i)) > 0)
                        { inc = true; break; }
                }
                inc = true;   // NOTE(review): makes the loop above dead code
                if (inc) {
                    j++;
                    pk = i;
                }
                al.get(i).ord = j;
            }
        }
+ /**
+ * A single state in the LR table and the transitions
+ * possible from it
+ *
+ * A state corresponds to a set of Sequence.Pos's. Each
+ * Node in the GSS has a State; the Node represents a set of
+ * possible parses, one for each Pos in the State.
+ *
+ * Every state is either "doomed" or "normal". If a Pos
+ * is part of a Sequence which is a conjunct (that is, it was
+ * passed to Sequence.{and(),andnot()}), then that Pos
+ * will appear only in doomed States. Furthermore, any set
+ * of Positions reachable from a doomed State also forms a
+ * doomed State. Note that in this latter case, a doomed
+ * state might have exactly the same set of Positions as a
+ * non-doomed state.
+ *
+ * Nodes with non-doomed states represent nodes which
+ * contribute to actual valid parses. Nodes with doomed
+ * States exist for no other purpose than to enable/disable
+ * some future reduction from a non-doomed Node. Because of
+ * this, we "garbage-collect" Nodes with doomed states if
+ * there are no more non-doomed Nodes which they could
+ * affect (see Result, Reduction, and Node for details).
+ *
+ * Without this optimization, many seemingly-innocuous uses
+ * of positive and negative conjuncts can trigger O(n^2)
+ * space+time complexity in otherwise simple grammars. There
+ * is an example of this in the regression suite.
+ */
+ class State<Token> implements IntegerMappable, Serializable {
+
public final int idx = master_state_idx++;
- private final HashSet<Position> hs;
+ private final transient HashSet<Pos> hs;
+ public HashSet<State<Token>> conjunctStates = new HashSet<State<Token>>();
- private transient HashMap<Element,State> gotoSetNonTerminals = new HashMap<Element,State>();
- private transient TopologicalBag<Token,State> gotoSetTerminals = new TopologicalBag<Token,State>();
+ HashMap<Pos,State<Token>> gotoSetNonTerminals = new HashMap<Pos,State<Token>>();
+ private transient TopologicalBag<Token,State<Token>> gotoSetTerminals = new TopologicalBag<Token,State<Token>>();
- private TopologicalBag<Token,Reduction> reductions = new TopologicalBag<Token,Reduction>();
- private HashSet<Reduction> eofReductions = new HashSet<Reduction>();
- private TopologicalBag<Token,State> shifts = new TopologicalBag<Token,State>();
- private boolean accept = false;
+ TopologicalBag<Token,Pos> reductions = new TopologicalBag<Token,Pos>();
+ HashSet<Pos> eofReductions = new HashSet<Pos>();
+ private TopologicalBag<Token,State<Token>> shifts = new TopologicalBag<Token,State<Token>>();
+ private boolean accept = false;
+
+ private VisitableMap<Token,State<Token>> oshifts = null;
+ private VisitableMap<Token,Pos> oreductions = null;
+ public final boolean doomed;
            // Interface Methods //////////////////////////////////////////////////////////////////////////////

            /** true iff this state exists only to support conjuncts (see class comment) */
            public boolean doomed() { return doomed; }
            /** true iff this state contains the last position of the root Union's sequence */
            boolean isAccepting() { return accept; }

            Iterable<Pos> positions() { return hs; }

            // in all of the following, t==null denotes end-of-input
            boolean canShift(Token t) { return oshifts!=null && oshifts.contains(t); }
            void invokeShifts(Token t, GSS.Phase phase, Node pred, Forest f) { oshifts.invoke(t, phase, pred, f); }
            boolean canReduce(Token t) {
                return oreductions != null && (t==null ? eofReductions.size()>0 : oreductions.contains(t)); }
            void invokeEpsilonReductions(Token t, Node node) {
                if (t==null) for(Pos r : eofReductions) node.invoke(r, null, null);
                else oreductions.invoke(t, node, null, null);
            }
            void invokeReductions(Token t, Node node, Result b) {
                if (t==null) for(Pos r : eofReductions) node.invoke(r, b, null);
                else oreductions.invoke(t, node, b, null);
            }
            // Constructor //////////////////////////////////////////////////////////////////////////////

            /**
             * create a new state consisting of all the <tt>Pos</tt>s in <tt>hs</tt>
             * @param hs      the set of <tt>Pos</tt>s comprising this <tt>State</tt>
             * @param doomed  true iff this state serves only conjuncts (see class comment)
             * @param grammar the grammar being compiled into this table
             *
             * In principle the steps below could be merged, but they are written
             * separately for clarity; goto-table entries are only needed
             * for non-Atom Elements.
             */
            public State(HashSet<Pos> hs, boolean doomed, Grammar<Token> grammar) {
                this.hs = hs;
                this.doomed = doomed;

                // register ourselves so that no two states are ever
                // created with an identical position set (termination depends on this)
                ((HashMap)(doomed ? doomed_states : normal_states)).put(hs, this);
                ((HashSet)all_states).add(this);

                for(Pos p : hs) {
                    // Step 1a: take note if we are an accepting state
                    //          (last position of the root Union's sequence)
                    if (p.next()==null && !doomed && grammar.rootUnion.contains(p.owner()))
                        accept = true;

                    // Step 1b: If any Pos in the set is the first position of its sequence, then this
                    //          state is responsible for spawning the "doomed" states for each of the
                    //          Sequence's conjuncts. This obligation is recorded by adding the
                    //          to-be-spawned states to conjunctStates.
                    if (!p.isFirst()) continue;
                    for(Sequence s : p.owner().needs())
                        if (!hs.contains(s.firstp()))
                            conjunctStates.add(mkstate(reachable(s.firstp()), true, grammar));
                    for(Sequence s : p.owner().hates())
                        if (!hs.contains(s.firstp()))
                            conjunctStates.add(mkstate(reachable(s.firstp()), true, grammar));
                }

                // Step 2a: examine all Pos's in this state and compute the mappings from
                //          sets of follow tokens (tokens which could follow this position) to sets
                //          of _new_ positions (positions after shifting). These mappings are
                //          collectively known as the _closure_
                TopologicalBag<Token,Pos> bag0 = new TopologicalBag<Token,Pos>();
                for(Pos position : hs) {
                    if (position.isLast() || !(position.element() instanceof Atom)) continue;
                    Atom a = (Atom)position.element();
                    HashSet<Pos> hp = new HashSet<Pos>();
                    reachable(position.next(), hp);
                    bag0.addAll(a.getTokenTopology(), hp);
                }

                // Step 2b: for each _minimal, contiguous_ set of characters having an identical next-position
                //          set, add that character set to the goto table (with the State corresponding to the
                //          computed next-position set).
                for(Topology<Token> r : bag0) {
                    HashSet<Pos> h = new HashSet<Pos>();
                    for(Pos p : bag0.getAll(r)) h.add(p);
                    ((TopologicalBag)gotoSetTerminals).put(r, mkstate(h, doomed, grammar));
                }

                // Step 3: for every Sequence, compute the closure over every position in this set which
                //         is followed by a symbol which could yield the Sequence.
                //
                //         "yields" [in one or more step] is used instead of "produces" [in exactly one step]
                //         to avoid having to iteratively construct our set of States as shown in most
                //         expositions of the algorithm (ie "keep doing XYZ until things stop changing").

                HashMapBag<Sequence,Pos> move = new HashMapBag<Sequence,Pos>();
                for(Pos p : hs)
                    if (!p.isLast() && p.element() instanceof Union)
                        for(Sequence s : ((Union)p.element())) {
                            HashSet<Pos> hp = new HashSet<Pos>();
                            reachable(p.next(), hp);
                            move.addAll(s, hp);
                        }
                OUTER: for(Sequence y : move) {
                    // if a reduction is "lame", it should wind up in the dead_state after reducing
                    HashSet<Pos> h = move.getAll(y);
                    State<Token> s = mkstate(h, doomed, grammar);
                    for(Pos p : hs)
                        if (p.element() != null && (p.element() instanceof Union))
                            for(Sequence seq : ((Union)p.element()))
                                if (seq.needs.contains(y) || seq.hates.contains(y)) {
                                    // FIXME: assumption that no sequence is ever both usefully (non-lamely) matched
                                    //        and also directly lamely matched
                                    for(Pos pp = y.firstp(); pp != null; pp = pp.next())
                                        ((HashMap)gotoSetNonTerminals).put(pp, dead_state);
                                    continue OUTER;
                                }
                    for(Pos pp = y.firstp(); pp != null; pp = pp.next())
                        gotoSetNonTerminals.put(pp, s);
                }
            }
- public String toString() { return "state["+idx+"]"; }
-
- public int compareTo(Table.State s) { return idx==s.idx ? 0 : idx < s.idx ? -1 : 1; }
- }
-
- /**
- * the information needed to perform a reduction; copied here to
- * avoid keeping references to <tt>Element</tt> objects in a Table
- */
- public class Reduction {
- // FIXME: cleanup; almost everything in here could go in either Sequence.Position.getRewrite() or else in GSS.Reduct
- public final int numPop;
- /*private*/ final Position position;
- private final Forest[] holder; // to avoid constant reallocation
- public int hashCode() { return position.hashCode(); }
- public boolean equals(Object o) {
- if (o==null) return false;
- if (o==this) return true;
- if (!(o instanceof Reduction)) return false;
- Reduction r = (Reduction)o;
- return r.position == position;
- }
- public Reduction(Position p) {
- this.position = p;
- this.numPop = p.pos;
- this.holder = new Forest[numPop];
- }
- public String toString() { return "[reduce " + position + "]"; }
-
- private Forest zero = null;
- public Forest zero() {
- if (zero != null) return zero;
- if (numPop > 0) throw new Error();
- return zero = position.rewrite(null);
- }
-
- public Forest reduce(GSS.Phase.Node parent) {
- if (numPop==0) return finish(parent, zero(), parent.phase());
- return reduce(parent, numPop-1, null, parent.phase());
- }
-
- public Forest reduce(GSS.Phase.Node parent, GSS.Phase.Node onlychild) {
- if (numPop<=0) throw new Error("called wrong form of reduce()");
- int pos = numPop-1;
- holder[pos] = parent.pending();
- if (pos==0) {
- System.arraycopy(holder, 0, position.holder, 0, holder.length);
- return finish(onlychild, position.rewrite(parent.phase().getLocation()), parent.phase());
- }
- return reduce(onlychild, pos-1, null, parent.phase());
            /** return the canonical State for position-set <tt>h</tt> (doomed iff <tt>b</tt>), creating it on first request */
            private State<Token> mkstate(HashSet<Pos> h, boolean b, Grammar<Token> grammar) {
                // raw type is deliberate: this inner class's <Token> shadows the outer
                // type parameter the maps were declared with, so the lookup cannot be
                // expressed without an unchecked conversion
                State ret = (b?doomed_states:normal_states).get(h);
                if (ret==null) ret = new State<Token>(h,b, grammar);
                return ret;
            }
- // FIXME: this could be more elegant and/or cleaner and/or somewhere else
- private Forest reduce(GSS.Phase.Node parent, int pos, Forest rex, GSS.Phase target) {
- if (pos<0) return finish(parent, rex, target);
- holder[pos] = parent.pending();
- if (pos==0 && rex==null) {
- System.arraycopy(holder, 0, position.holder, 0, holder.length);
- rex = position.rewrite(target.getLocation());
- }
- for(GSS.Phase.Node child : parent.parents())
- if (pos==0) finish(child, rex, target);
- else reduce(child, pos-1, rex, target);
- return rex;
- }
- private Forest finish(GSS.Phase.Node parent, Forest result, GSS.Phase target) {
- State state = parent.state.gotoSetNonTerminals.get(position.owner());
- if (state!=null)
- target.newNode(parent, result, state, numPop<=0, parent.phase());
- return result;
+ public int toInt() { return idx; }
+ public String toString() {
+ StringBuffer ret = new StringBuffer();
+ for(Pos p : hs)
+ ret.append(p+"\n");
+ return ret.toString();
}
}
- }
-
- private static final Forest[] emptyForestArray = new Forest[0];
+ }
// Helpers //////////////////////////////////////////////////////////////////////////////
-
- private static void reachable(Element e, HashSet<Position> h) {
+
+ private static HashSet<Pos> reachable(Element e) {
+ HashSet<Pos> h = new HashSet<Pos>();
+ reachable(e, h);
+ return h;
+ }
+ private static void reachable(Element e, HashSet<Pos> h) {
if (e instanceof Atom) return;
for(Sequence s : ((Union)e))
reachable(s.firstp(), h);
}
- private static void reachable(Position p, HashSet<Position> h) {
+ private static void reachable(Pos p, HashSet<Pos> h) {
if (h.contains(p)) return;
h.add(p);
if (p.element() != null) reachable(p.element(), h);
}
+ private static HashSet<Pos> reachable(Pos p) {
+ HashSet<Pos> ret = new HashSet<Pos>();
+ reachable(p, ret);
+ return ret;
+ }
}