cleanups, reorg, and commenting
[sbp.git] src/edu/berkeley/sbp/Parser.java
index 817aea6..64ad67a 100644
+// Copyright 2006 all rights reserved; see LICENSE file for BSD-style license
+
 package edu.berkeley.sbp;
-import edu.berkeley.sbp.*;
 import edu.berkeley.sbp.util.*;
-import edu.berkeley.sbp.*;
 import edu.berkeley.sbp.Sequence.Position;
-import edu.berkeley.sbp.*;
 import java.io.*;
 import java.util.*;
-import java.lang.reflect.*;
-
-/** a parser which translates streams of Tokens of type T into a Forest<R> */
-public abstract class Parser<T extends Token, R> {
-
-    private final Table pt;
 
-    /** create a parser to parse the grammar with start symbol <tt>u</tt> */
-    protected Parser(Union u)  { this.pt = new Table(u, top()); }
-    protected Parser(Table pt) { this.pt = pt; }
-
-    public abstract Forest<R> shiftedToken(T t, Token.Location loc);
-    public abstract Topology<T> top();
+// FEATURE: try harder to "fuse" states together along two dimensions:
+//   - identical (equivalent) states, or states that subsume each other
+//   - unnecessary intermediate states ("short cut" GLR)
 
+/** a parser which translates an Input&lt;Token&gt; into a Forest&lt;NodeType&gt; */
+public abstract class Parser<Token, NodeType> {
 
-    /** parse <tt>input</tt> for a exactly one unique result, throwing <tt>Ambiguous</tt> if not unique or <tt>Failed</tt> if none */
-    public Tree<R> parse1(Token.Stream<T> input) throws IOException, Failed, Ambiguous {
-        Forest<R> ret = parse(input);
-        try { return ret.expand1(); }
-        catch (Ambiguous a) {
-            System.out.println("while expanding:");
-            System.out.println(ret);
-            throw a;
-        }
-    }
-
-    /** parse <tt>input</tt>, using the table <tt>pt</tt> to drive the parser */
-    public Forest<R> parse(Token.Stream<T> input) throws IOException, Failed {
-        GSS gss = new GSS();
-        Token.Location loc = input.getLocation();
-        GSS.Phase current = gss.new Phase(null, input.next(), loc);
-        current.newNode(null, null, pt.start, true, null);
-        for(;;) {
-            loc = input.getLocation();
-            GSS.Phase next = gss.new Phase(current, input.next(), loc);
-            current.reduce();
-            Forest forest = current.token==null ? null : shiftedToken((T)current.token, loc);
-            current.shift(next, forest);
-            if (current.isDone()) return (Forest<R>)current.finalResult;
-            current.checkFailure();
-            current = next;
-        }
-    }
-    
+    final Table pt;
 
-    // Exceptions //////////////////////////////////////////////////////////////////////////////
-
-    public static class Failed extends Exception {
-        private final Token.Location location;
-        private final String         message;
-        public Failed() { this("", null); }
-        public Failed(String message, Token.Location loc) { this.location = loc; this.message = message; }
-        public Token.Location getLocation() { return location; }
-        public String toString() { return message + (location==null ? "" : (" at " + location)); }
+    /** create a parser to parse the grammar with start symbol <tt>u</tt> */
+    public Parser(Union u)  { this.pt = new Table(u); }
+
+    /** implement this method to create the output forest corresponding to a lone shifted input token */
+    public abstract Forest<NodeType> shiftToken(Token t, Input.Region region);
+
+    public abstract Topology<Token> emptyTopology();
+
+    public String toString() { return pt.toString(); }
+    Cache cache() { return pt; }
+
+    /** parse <tt>input</tt>, and return the shared packed parse forest (or throw an exception) */
+    public Forest<NodeType> parse(Input<Token> input) throws IOException, ParseFailed {
+        verbose = System.getProperty("sbp.verbose", null) != null;
+        spinpos = 0;
+        try {
+            GSS gss = new GSS(input, this);
+            for(GSS.Phase current = gss.new Phase<Token>(pt.start); ;) {
+                
+                if (verbose) {
+                    // FIXME: clean this up
+                    String s;
+                    s = "  " + spin[spinpos++ % (spin.length)]+" parsing ";
+                    s += input.getName();
+                    s += " "+input.getLocation();
+                    while(s.indexOf(':') != -1 && s.indexOf(':') < 8) s = " " + s;
+                    String y = "@"+gss.viewPos+" ";
+                    while(y.length() < 9) y = " " + y;
+                    s += y;
+                    s += "   nodes="+gss.numOldNodes;
+                    while(s.length() < 50) s = s + " ";
+                    s += " shifted="+gss.numNewNodes;
+                    while(s.length() < 60) s = s + " ";
+                    s += " reductions="+gss.numReductions;
+                    System.err.print("\r"+s+ANSI.clreol()+"\r");
+                }
+                
+                if (current.isDone()) return (Forest<NodeType>)current.finalResult;
+                Forest forest = shiftToken((Token)current.token, current.getRegion());
+                current = gss.new Phase<Token>(current, forest);
+            }
+        } finally { if (verbose) System.err.print("\r"+ANSI.clreol()); }
     }
 
-    public static class Ambiguous extends RuntimeException {
-        public final Forest ambiguity;
-        public Ambiguous(Forest ambiguity) { this.ambiguity = ambiguity; }
-        public String toString() {
-            StringBuffer sb = new StringBuffer();
-            sb.append("unresolved ambiguity "/*"at " + ambiguity.getLocation() + ":"*/);
-            for(Object result : ambiguity.expand(false))
-                sb.append("\n    " + result);
-            return sb.toString();
-        }
+    // Spinner //////////////////////////////////////////////////////////////////////////////
+
+    private boolean verbose = false;
+    private static final char[] spin = new char[] { '-', '\\', '|', '/' };
+    private int spinpos = 0;
+    private long last = 0;
+    void spin() {
+        if (!verbose) return;
+        long now = System.currentTimeMillis();
+        if (now-last < 70) return;
+        last = now;
+        System.err.print("\r  " + spin[spinpos++ % (spin.length)]+"\r");
     }
 
-
     // Table //////////////////////////////////////////////////////////////////////////////
 
     /** an SLR(1) parse table which may contain conflicts */
-    static class Table extends Walk.Cache {
-
-        public final Walk.Cache cache = this;
-        
-        private void walk(Element e, HashSet<Element> hs) {
-            if (e==null) return;
-            if (hs.contains(e)) return;
-            hs.add(e);
-            if (e instanceof Atom) return;
-            for(Sequence s : (Union)e) {
-                hs.add(s);
-                for(Position p = s.firstp(); p != null; p = p.next())
-                    walk(p.element(), hs);
-            }
-        }
+    class Table extends Cache<Token> {
 
         /** the start state */
-        public final State   start;
+        final State<Token>   start;
+
+        /** a dummy state from which no reductions can be performed */
+        private final State<Token>   dead_state;
 
         /** used to generate unique values for State.idx */
         private int master_state_idx = 0;
 
+        /** all the states for this table */
+        HashSet<State<Token>>                     all_states       = new HashSet<State<Token>>();
+
+        /** all the doomed states in this table */
+        HashMap<HashSet<Position>,State<Token>>   doomed_states    = new HashMap<HashSet<Position>,State<Token>>();
+
+        /** all the non-doomed states in this table */
+        HashMap<HashSet<Position>,State<Token>>   normal_states    = new HashMap<HashSet<Position>,State<Token>>();
+
+        Topology<Token> emptyTopology() { return Parser.this.emptyTopology(); }
+    
         /** construct a parse table for the given grammar */
-        public Table(Topology top) { this("s", top); }
-        public Table(String startSymbol, Topology top) { this(new Union(startSymbol), top); }
-        public Table(Union ux, Topology top) {
-            Union start0 = new Union("0");
-            start0.add(new Sequence.Singleton(ux, null, null));
-
-            for(Sequence s : start0) cache.eof.put(s, true);
-            cache.eof.put(start0, true);
-
-            // construct the set of states
-            HashMap<HashSet<Position>,State>   all_states    = new HashMap<HashSet<Position>,State>();
-            HashSet<Element>                   all_elements  = new HashSet<Element>();
-            walk(start0, all_elements);
-            for(Element e : all_elements)
-                cache.ys.put(e, new Walk.YieldSet(e, cache).walk());
-            HashSet<Position> hp = new HashSet<Position>();
-            reachable(start0, hp);
-            this.start = new State(hp, all_states, all_elements);
+        Table(Union ux) {
+            super(new Union("0", Sequence.create(ux), true));
 
+            // create the "dead state"
+            this.dead_state = new State<Token>(new HashSet<Position>(), true);
+
+            // construct the start state; this will recursively create *all* the states
+            this.start = new State<Token>(reachable(rootUnion), false);
+
+            buildReductions();
+            sortReductions();
+        }
+
+        /** fill in the reductions table */
+        private void buildReductions() {
             // for each state, fill in the corresponding "row" of the parse table
-            for(State state : all_states.values())
+            for(State<Token> state : all_states)
                 for(Position p : state.hs) {
 
-                    // the Grammar's designated "last position" is the only accepting state
-                    if (start0.contains(p.owner()) && p.next()==null)
-                        state.accept = true;
-
-                    // FIXME: how does right-nullability interact with follow restrictions?
-                    // all right-nullable rules get a reduction [Johnstone 2000]
-                    if (p.isRightNullable(cache)) {
-                        Walk.Follow wf = new Walk.Follow(top.empty(), p.owner(), all_elements, cache);
-                        Reduction red = new Reduction(p);
-                        state.reductions.put(wf.walk(p.owner()), red);
-                        if (wf.includesEof()) state.eofReductions.add(red);
-                    }
-
                     // if the element following this position is an atom, copy the corresponding
                     // set of rows out of the "master" goto table and into this state's shift table
                     if (p.element() != null && p.element() instanceof Atom)
-                        state.shifts.addAll(state.gotoSetTerminals.subset(((Atom)p.element())));
+                        state.shifts.addAll(state.gotoSetTerminals.subset(((Atom)p.element()).getTokenTopology()));
+
+                    // RNGLR: we can potentially reduce from any "right-nullable" position -- that is,
+                    // any position for which all Elements after it in the Sequence are capable of
+                    // matching the empty string.
+                    if (!isRightNullable(p)) continue;
+                    Topology<Token> follow = follow(p.owner());
+                    for(Position p2 = p; p2 != null && p2.element() != null; p2 = p2.next()) {
+                        if (!(p2.element() instanceof Union))
+                            throw new Error("impossible -- only Unions can be nullable");
+                        
+                        // interesting RNGLR-followRestriction interaction: we must intersect
+                        // not just the follow-set of the last non-nullable element, but the
+                        // follow-sets of the nulled elements as well.
+                        for(Sequence s : ((Union)p2.element()))
+                            follow = follow.intersect(follow(s));
+                        Topology<Token> set = epsilonFollowSet((Union)p2.element());
+                        if (set != null) follow = follow.intersect(set);
+                    }
+                    
+                    // indicate that when the next token is in the set "follow", nodes in this
+                    // state should reduce according to Position "p"
+                    state.reductions.put(follow, p);
+                    if (followEof.contains(p.owner())) state.eofReductions.add(p);
+                }
+
+            // optimize the reductions table
+            if (emptyTopology() instanceof IntegerTopology)
+                for(State<Token> state : all_states) {
+                    // FIXME: this is pretty ugly
+                    state.oreductions = state.reductions.optimize(((IntegerTopology)emptyTopology()).functor());
+                    state.oshifts     = state.shifts.optimize(((IntegerTopology)emptyTopology()).functor());
                 }
         }
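
As an illustration (not code from this change): the RNGLR comment above intersects the follow set of the reduction's owner with the follow set of every element in the right-nullable tail. A minimal, self-contained sketch of that intersection, with plain java.util sets standing in for sbp's Topology; all names below are invented for the example.

    import java.util.*;

    class FollowIntersectionSketch {
        /** intersect the owner's follow set with the follow set of every nulled element */
        static Set<Character> effectiveFollow(Set<Character> ownerFollow,
                                              List<Set<Character>> nulledTailFollows) {
            Set<Character> follow = new HashSet<Character>(ownerFollow);
            for (Set<Character> f : nulledTailFollows)
                follow.retainAll(f);              // plays the role of Topology.intersect()
            return follow;
        }

        public static void main(String[] args) {
            Set<Character> ownerFollow = new HashSet<Character>(Arrays.asList(')', ';', ','));
            List<Set<Character>> nulledTail = new ArrayList<Set<Character>>();
            nulledTail.add(new HashSet<Character>(Arrays.asList(')', ';')));
            nulledTail.add(new HashSet<Character>(Arrays.asList(')')));
            System.out.println(effectiveFollow(ownerFollow, nulledTail));   // prints [)]
        }
    }
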
 
-        /** a single state in the LR table and the transitions possible from it */
-        public class State implements Comparable<Table.State>, Iterable<Position> {
-        
-            /*
-            public boolean isResolvable(Token t) {
-                boolean found = false;
-                for(Reduction r : getReductions(t)) {
-                    Position p = r.position;
-                    if (!p.isRightNullable(cache)) continue;
-                    if (p.owner().firstp()==p) continue;
-                    if (found) {
-                        // found two items meeting criteria #1
-                        return false;
-                    } else {
-                        found = true;
-                        continue;
+        // FIXME: this method needs to be cleaned up and documented
+        private void sortReductions() {
+            // crude algorithm to assign an ordinal ordering to every position
+            // al will be sorted in DECREASING order (al[0] >= al[1])
+            ArrayList<Sequence.Position> al = new ArrayList<Sequence.Position>();
+            for(State s : all_states) {
+                for(Object po : s) {
+                    Sequence.Position p = (Sequence.Position)po;
+                    if (al.contains(p)) continue;
+                    int i=0;
+                    for(; i<al.size(); i++) {
+                        if (comparePositions(p, al.get(i)) < 0)
+                            break;
                     }
-                    if (p.element()==null) continue;
-                    Topology first = new Walk.First(top(), cache).walk(p.element());
-                    if (first.contains(t))
+                    al.add(i, p);
                 }
             }
-            */
+            // FIXME: this actually pollutes the "pure" objects (the ones that should not be modified by the Parser)
+            // sort in increasing order...
+            OUTER: while(true) {
+                for(int i=0; i<al.size(); i++)
+                    for(int j=i+1; j<al.size(); j++)
+                        if (comparePositions(al.get(i), al.get(j)) > 0) {
+                            Sequence.Position p = al.remove(j);
+                            al.add(i, p);
+                            continue OUTER;
+                        }
+                break;
+            }
 
+            int j = 1;
+            int pk = 0;
+            for(int i=0; i<al.size(); i++) {
+                boolean inc = false;
+                for(int k=pk; k<i; k++) {
+                    if (comparePositions(al.get(k), al.get(i)) > 0)
+                        { inc = true; break; }
+                }
+                inc = true;
+                if (inc) {
+                    j++;
+                    pk = i;
+                }
+                al.get(i).ord = j;
+            }
+        }
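
Not code from this change, but an illustration of what sortReductions() appears to be aiming at: give every Position an ordinal consistent with comparePositions(), with equal positions sharing an ordinal, so reductions can later be processed in a fixed order. A simplified, self-contained sketch under the assumption of a total-order comparator (which comparePositions() need not be, hence the more defensive loops above); all names are invented for the example.

    import java.util.*;

    class OrdinalSketch {
        /** assign 1-based ordinals so that items the comparator considers equal share an ordinal */
        static <T> Map<T,Integer> ordinals(List<T> items, Comparator<T> cmp) {
            List<T> sorted = new ArrayList<T>(items);
            Collections.sort(sorted, cmp);
            Map<T,Integer> ord = new HashMap<T,Integer>();
            int rank = 0;
            for (int i = 0; i < sorted.size(); i++) {
                if (i == 0 || cmp.compare(sorted.get(i-1), sorted.get(i)) < 0) rank++;
                ord.put(sorted.get(i), rank);
            }
            return ord;
        }

        public static void main(String[] args) {
            List<String> xs = Arrays.asList("bb", "a", "ccc", "dd");
            // ranked by length: a -> 1, bb -> 2, dd -> 2, ccc -> 3
            System.out.println(ordinals(xs, new Comparator<String>() {
                public int compare(String x, String y) { return x.length() - y.length(); }
            }));
        }
    }
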
+
+        /**
+         *  A single state in the LR table and the transitions
+         *  possible from it
+         *
+         *  A state corresponds to a set of Sequence.Position's.  Each
+         *  Node in the GSS has a State; the Node represents a set of
+         *  possible parses, one for each Position in the State.
+         *
+         *  Every state is either "doomed" or "normal".  If a Position
+         *  is part of a Sequence which is a conjunct (that is, it was
+         *  passed to Sequence.{and(),andnot()}), then that Position
+         *  will appear only in doomed States.  Furthermore, any set
+         *  of Positions reachable from a doomed State also forms a
+         *  doomed State.  Note that in this latter case, a doomed
+         *  state might have exactly the same set of Positions as a
+         *  non-doomed state.
+         *
+         *  Nodes with non-doomed states represent nodes which
+         *  contribute to actual valid parses.  Nodes with doomed
+         *  States exist for no other purpose than to enable/disable
+         *  some future reduction from a non-doomed Node.  Because of
+         *  this, we "garbage-collect" Nodes with doomed states if
+         *  there are no more non-doomed Nodes which they could
+         *  affect (see Result, Reduction, and Node for details).
+         *
+         *  Without this optimization, many seemingly-innocuous uses
+         *  of positive and negative conjuncts can trigger O(n^2)
+         *  space+time complexity in otherwise simple grammars.  There
+         *  is an example of this in the regression suite.
+         */
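
For illustration only, a miniature version of the garbage-collection idea described in the comment above; the real bookkeeping lives in Result, Reduction, and Node (not shown in this diff), and the class and member names below are invented for the sketch.

    /**
     * Sketch: a "doomed" helper node counts how many live normal nodes might still
     * consult it; once that count drops to zero it can be discarded, which is what
     * keeps conjunct bookkeeping from accumulating into the O(n^2) blowup noted above.
     */
    final class DoomedNodeSketch {
        private int interestedNormalNodes = 0;
        private boolean collected = false;

        void addInterestedNode()    { interestedNormalNodes++; }
        void removeInterestedNode() {
            if (--interestedNormalNodes == 0 && !collected) {
                collected = true;
                // here the node's edges and pending results could be dropped
            }
        }
        boolean isCollected() { return collected; }
    }
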
+        class State<Token> implements IntegerMappable, Iterable<Position> {
+        
             public  final     int               idx    = master_state_idx++;
             private final     HashSet<Position> hs;
+            public HashSet<State<Token>> conjunctStates = new HashSet<State<Token>>();
 
-            private transient HashMap<Element,State>          gotoSetNonTerminals = new HashMap<Element,State>();
-            private transient TopologicalBag<Token,State>     gotoSetTerminals    = new TopologicalBag<Token,State>();
+            HashMap<Sequence,State<Token>>      gotoSetNonTerminals = new HashMap<Sequence,State<Token>>();
+            private transient TopologicalBag<Token,State<Token>>  gotoSetTerminals    = new TopologicalBag<Token,State<Token>>();
 
-            private           TopologicalBag<Token,Reduction> reductions          = new TopologicalBag<Token,Reduction>();
-            private           HashSet<Reduction>              eofReductions       = new HashSet<Reduction>();
-            private           TopologicalBag<Token,State>     shifts              = new TopologicalBag<Token,State>();
-            private           boolean                         accept              = false;
+            private           TopologicalBag<Token,Position>      reductions          = new TopologicalBag<Token,Position>();
+            private           HashSet<Position>                   eofReductions       = new HashSet<Position>();
+            private           TopologicalBag<Token,State<Token>>  shifts              = new TopologicalBag<Token,State<Token>>();
+            private           boolean                             accept              = false;
+
+            private VisitableMap<Token,State<Token>> oshifts     = null;
+            private VisitableMap<Token,Position>     oreductions = null;
+            public  final boolean doomed;
 
             // Interface Methods //////////////////////////////////////////////////////////////////////////////
 
-            public boolean             canShift(Token t)           { return shifts.contains(t); }
-            public Iterable<State>     getShifts(Token t)          { return shifts.get(t); }
-            public boolean             isAccepting()               { return accept; }
-            public Iterable<Reduction> getReductions(Token t)      { return t==null ? eofReductions : reductions.get(t); }
-            public Iterable<Reduction> getEofReductions()          { return eofReductions; }
-            public Iterator<Position>  iterator()                  { return hs.iterator(); }
+            boolean                    isAccepting()           { return accept; }
+            public Iterator<Position>  iterator()              { return hs.iterator(); }
+            boolean                    canShift(Token t)       { return oshifts!=null && oshifts.contains(t); }
+            void                       invokeShifts(Token t, GSS.Phase phase, Result r) { oshifts.invoke(t, phase, r); }
+            boolean                    canReduce(Token t)        {
+                return oreductions != null && (t==null ? eofReductions.size()>0 : oreductions.contains(t)); }
+            void          invokeEpsilonReductions(Token t, Node node) {
+                if (t==null) for(Position r : eofReductions) node.invoke(r, null);
+                else         oreductions.invoke(t, node, null);
+            }
+            void          invokeReductions(Token t, Node node, Result b) {
+                if (t==null) for(Position r : eofReductions) node.invoke(r, b);
+                else         oreductions.invoke(t, node, b);
+            }
 
             // Constructor //////////////////////////////////////////////////////////////////////////////
 
             /**
              *  create a new state consisting of all the <tt>Position</tt>s in <tt>hs</tt>
              *  @param hs           the set of <tt>Position</tt>s comprising this <tt>State</tt>
-             *  @param all_states   the set of states already constructed (to avoid recreating states)
-             *  @param all_elements the set of all elements (Atom instances need not be included)
+             *  @param doomed       whether this is a "doomed" state (see the State class comment above)
              *  
              *   In principle these two steps could be merged, but they
              *   are written separately to highlight these two facts:
@@ -212,16 +291,35 @@ public abstract class Parser<T extends Token, R> {
              *      for non-Atom Elements.
              *  </ul>
              */
-            public State(HashSet<Position> hs,
-                         HashMap<HashSet<Position>,State> all_states,
-                         HashSet<Element> all_elements) {
+            public State(HashSet<Position> hs, boolean doomed) {
                 this.hs = hs;
+                this.doomed = doomed;
 
-                // register ourselves in the all_states hash so that no
-                // two states are ever created with an identical position set
-                all_states.put(hs, this);
+                // register ourselves so that no two states are ever
+                // created with an identical position set (termination depends on this)
+                ((HashMap)(doomed ? doomed_states : normal_states)).put(hs, this);
+                ((HashSet)all_states).add(this);
+
+                for(Position p : hs) {
+                    // Step 1a: take note if we are an accepting state
+                    //          (last position of the root Union's sequence)
+                    if (p.next()==null && !doomed && rootUnion.contains(p.owner()))
+                        accept = true;
+
+                    // Step 1b: If any Position in the set is the first position of its sequence, then this
+                    //          state is responsible for spawning the "doomed" states for each of the
+                    //          Sequence's conjuncts.  This obligation is recorded by adding the to-be-spawned
+                    //          states to conjunctStates.
+                    if (!p.isFirst()) continue;
+                    for(Sequence s : p.owner().needs())
+                        if (!hs.contains(s.firstp()))
+                            conjunctStates.add(mkstate(reachable(s.firstp()), true));
+                    for(Sequence s : p.owner().hates())
+                        if (!hs.contains(s.firstp()))
+                            conjunctStates.add(mkstate(reachable(s.firstp()), true));
+                }
 
-                // Step 1a: examine all Position's in this state and compute the mappings from
+                // Step 2a: examine all Position's in this state and compute the mappings from
                 //          sets of follow tokens (tokens which could follow this position) to sets
                 //          of _new_ positions (positions after shifting).  These mappings are
                 //          collectively known as the _closure_
@@ -232,145 +330,69 @@ public abstract class Parser<T extends Token, R> {
                     Atom a = (Atom)position.element();
                     HashSet<Position> hp = new HashSet<Position>();
                     reachable(position.next(), hp);
-                    bag0.addAll(a, hp);
+                    bag0.addAll(a.getTokenTopology(), hp);
                 }
 
-                // Step 1b: for each _minimal, contiguous_ set of characters having an identical next-position
+                // Step 2b: for each _minimal, contiguous_ set of characters having an identical next-position
                 //          set, add that character set to the goto table (with the State corresponding to the
                 //          computed next-position set).
 
                 for(Topology<Token> r : bag0) {
                     HashSet<Position> h = new HashSet<Position>();
                     for(Position p : bag0.getAll(r)) h.add(p);
-                    gotoSetTerminals.put(r, all_states.get(h) == null ? new State(h, all_states, all_elements) : all_states.get(h));
+                    ((TopologicalBag)gotoSetTerminals).put(r, mkstate(h, doomed));
                 }
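
For illustration (not sbp code): Steps 2a and 2b above amount to inverting a token-to-next-position-set map, so that every minimal group of tokens sharing an identical next-position set becomes a single goto entry; TopologicalBag does the same thing over character ranges rather than individual tokens. A self-contained sketch with plain java.util maps, using invented names.

    import java.util.*;

    class GotoGroupingSketch {
        /** group tokens whose next-position sets are identical */
        static Map<Set<Integer>, Set<Character>> group(Map<Character, Set<Integer>> nextPositions) {
            Map<Set<Integer>, Set<Character>> groups = new HashMap<Set<Integer>, Set<Character>>();
            for (Map.Entry<Character, Set<Integer>> e : nextPositions.entrySet()) {
                Set<Character> toks = groups.get(e.getValue());
                if (toks == null) groups.put(e.getValue(), toks = new HashSet<Character>());
                toks.add(e.getKey());
            }
            return groups;   // each distinct next-position set would get one target State
        }

        public static void main(String[] args) {
            Map<Character, Set<Integer>> m = new HashMap<Character, Set<Integer>>();
            m.put('a', new HashSet<Integer>(Arrays.asList(1, 2)));
            m.put('b', new HashSet<Integer>(Arrays.asList(1, 2)));
            m.put('c', new HashSet<Integer>(Arrays.asList(3)));
            System.out.println(group(m));   // 'a' and 'b' share one entry, 'c' gets its own
        }
    }
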
 
-                // Step 2: for every non-Atom element (ie every Element which has a corresponding reduction),
-                //         compute the closure over every position in this set which is followed by a symbol
-                //         which could yield the Element in question.
+                // Step 3: for every Sequence, compute the closure over every position in this set which
+                //         is followed by a symbol which could yield the Sequence.
                 //
                 //         "yields" [in one or more step] is used instead of "produces" [in exactly one step]
                 //         to avoid having to iteratively construct our set of States as shown in most
                 //         expositions of the algorithm (ie "keep doing XYZ until things stop changing").
-                /*
-                for(Element e : all_elements) {
-                    if (e instanceof Atom) continue;
-                    HashSet<Position> h = new Walk.Closure(null, g.cache).closure(e, hs);
-                    State s = all_states.get(h) == null ? new State(h, all_states, all_elements) : all_states.get(h);
-                    if (gotoSetNonTerminals.get(e) != null)
-                        throw new Error("this should not happen");
-                    gotoSetNonTerminals.put(e, s);
-                }
-                */
-                HashMapBag<Element,Position> move = new HashMapBag<Element,Position>();
-                for(Position p : hs) {
-                    Element e = p.element();
-                    if (e==null) continue;
-                    HashSet<Element> ys = cache.ys.get(e);
-                    if (ys != null) {
-                        for(Element y : ys) {
+
+                HashMapBag<Sequence,Position> move = new HashMapBag<Sequence,Position>();
+                for(Position p : hs)
+                    if (!p.isLast() && p.element() instanceof Union)
+                        for(Sequence s : ((Union)p.element())) {
                             HashSet<Position> hp = new HashSet<Position>();
                             reachable(p.next(), hp);
-                            move.addAll(y, hp);
+                            move.addAll(s, hp);
                         }
-                    }
-                }
-                for(Element y : move) {
+                OUTER: for(Sequence y : move) {
+                    // if a reduction is "lame", it should wind up in the dead_state after reducing
                     HashSet<Position> h = move.getAll(y);
-                    State s = all_states.get(h) == null ? new State(h, all_states, all_elements) : all_states.get(h);
+                    State<Token> s = mkstate(h, doomed);
+                    for(Position p : hs)
+                        if (p.element() != null && (p.element() instanceof Union))
+                            for(Sequence seq : ((Union)p.element()))
+                                if (seq.needs.contains(y) || seq.hates.contains(y)) {
+                                    // FIXME: assumption that no sequence is ever both usefully (non-lamely) matched
+                                    //        and also directly lamely matched
+                                    ((HashMap)gotoSetNonTerminals).put(y, dead_state);
+                                    continue OUTER;
+                                }
                     gotoSetNonTerminals.put(y, s);
                 }
             }
 
-            public String toString() { return "state["+idx+"]"; }
-
-            public int compareTo(Table.State s) { return idx==s.idx ? 0 : idx < s.idx ? -1 : 1; }
-        }
-
-        /**
-         *  the information needed to perform a reduction; copied here to
-         *  avoid keeping references to <tt>Element</tt> objects in a Table
-         */
-        public class Reduction {
-            // FIXME: cleanup; almost everything in here could go in either Sequence.Position.getRewrite() or else in GSS.Reduct
-            public final int numPop;
-            /*private*/ final Position position;
-            private final Forest[] holder;    // to avoid constant reallocation
-            public int hashCode() { return position.hashCode(); }
-            public boolean equals(Object o) {
-                if (o==null) return false;
-                if (o==this) return true;
-                if (!(o instanceof Reduction)) return false;
-                Reduction r = (Reduction)o;
-                return r.position == position;
-            }
-            public Reduction(Position p) {
-                this.position = p;
-                this.numPop = p.pos;
-                this.holder = new Forest[numPop];
-            }
-            public String toString() { return "[reduce " + position + "]"; }
-
-            public Forest reduce(GSS.Phase.Node parent) {
-                Forest rex = numPop==0 ? zero() : null;
-                Forest ret = reduce(parent, numPop-1, rex, null, parent.phase());
+            private State<Token> mkstate(HashSet<Position> h, boolean b) {
+                State ret = (b?doomed_states:normal_states).get(h);
+                if (ret==null) ret = new State<Token>(h,b);
                 return ret;
             }
 
-            public Forest reduce(GSS.Phase.Node parent, GSS.Phase.Node onlychild) {
-                int pos = numPop-1;
-                if (pos>=0) holder[pos] = parent.pending();
-                Forest rex = null;
-                if (pos==0) {
-                    if (rex==null) {
-                        System.arraycopy(holder, 0, position.holder, 0, holder.length);
-                        rex = position.rewrite(parent.phase().getLocation());
-                    }
-                }
-                return reduce(onlychild, pos-1, rex, null, parent.phase());
-            }
-
-            private Forest zero = null;
-            public Forest zero() {
-                if (zero != null) return zero;
-                if (numPop > 0) throw new Error();
-                return zero = position.rewrite(null);
-            }
-
-            // FIXME: this could be more elegant and/or cleaner and/or somewhere else
-            private Forest reduce(GSS.Phase.Node parent, int pos, Forest rex, GSS.Phase.Node onlychild, GSS.Phase target) {
-                if (pos>=0) holder[pos] = parent.pending();
-                if (pos==0) {
-                    if (rex==null) {
-                        System.arraycopy(holder, 0, position.holder, 0, holder.length);
-                        rex = position.rewrite(target.getLocation());
-                    }
-                    if (onlychild != null)
-                        reduce(onlychild, pos-1, rex, null, target);
-                    else 
-                        for(GSS.Phase.Node child : parent.parents())
-                            reduce(child, pos-1, rex, null, target);
-                } else if (pos>0) {
-                    if (onlychild != null)
-                        reduce(onlychild, pos-1, rex, null, target);
-                    else 
-                        for(GSS.Phase.Node child : parent.parents())
-                            reduce(child, pos-1, rex, null, target);
-                } else {
-                    State state = parent.state.gotoSetNonTerminals.get(position.owner());
-                    if (state!=null)
-                        target.newNode(parent, rex, state, numPop<=0, parent.phase());
-                }
-                return rex;
-            }
+            public int toInt() { return idx; }
         }
-    }
-
-    private static final Forest[] emptyForestArray = new Forest[0];
 
+    }
 
     // Helpers //////////////////////////////////////////////////////////////////////////////
-
+    
+    private static HashSet<Position> reachable(Element e) {
+        HashSet<Position> h = new HashSet<Position>();
+        reachable(e, h);
+        return h;
+    }
     private static void reachable(Element e, HashSet<Position> h) {
         if (e instanceof Atom) return;
         for(Sequence s : ((Union)e))
@@ -381,5 +403,10 @@ public abstract class Parser<T extends Token, R> {
         h.add(p);
         if (p.element() != null) reachable(p.element(), h);
     }
+    private static HashSet<Position> reachable(Position p) {
+        HashSet<Position> ret = new HashSet<Position>();
+        reachable(p, ret);
+        return ret;
+    }
 
 }
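
A closing illustration, not part of this change: the reachable() helpers above compute the set of Positions reachable from an Element without consuming input, using a visited set so that recursive (cyclic) grammars still terminate. The same closure pattern in a generic, self-contained form, with invented names.

    import java.util.*;

    class ClosureSketch {
        interface Successors<T> { Collection<T> of(T node); }

        /** depth-first transitive closure; the visited set doubles as the result */
        static <T> Set<T> reachable(T start, Successors<T> succ) {
            Set<T> seen = new HashSet<T>();
            walk(start, succ, seen);
            return seen;
        }
        private static <T> void walk(T node, Successors<T> succ, Set<T> seen) {
            if (!seen.add(node)) return;          // already visited: stop, even on a cycle
            for (T next : succ.of(node)) walk(next, succ, seen);
        }

        public static void main(String[] args) {
            final Map<String, List<String>> edges = new HashMap<String, List<String>>();
            edges.put("E", Arrays.asList("T"));
            edges.put("T", Arrays.asList("F"));
            edges.put("F", Arrays.asList("E"));   // cyclic, like a recursive grammar
            System.out.println(reachable("E", new Successors<String>() {
                public Collection<String> of(String n) {
                    return edges.containsKey(n) ? edges.get(n) : Collections.<String>emptyList();
                }
            }));
        }
    }
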