break Node out of GSS
[sbp.git] / src / edu / berkeley / sbp / GSS.java
index a5d6f88..0358ef6 100644
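This commit pulls Node out of Phase and into its own top-level class, handing it its
Phase explicitly (compare the removed inner-class block at the bottom of this diff with
the new "new Node(Phase.this, parent, pending, state)" call site). As orientation, the
surface the rest of GSS.java expects from the extracted Node is sketched below; only the
member names that appear at call sites in this diff (phase(), state(), results(),
merge(), addParent(), the performReductions variants, and the constructor) are taken from
the source -- bodies, field types, and comments are assumptions, not the real Node.java.

    // Sketch only: the extracted Node as seen from GSS.java in this diff.
    final class Node /* was: Phase.Node */ {
        private final GSS.Phase phase;     // owning Phase, now passed in explicitly
        private final State state;         // parser-table state this stack node is in
        private final Forest.Many results; // merged results pending at this node

        Node(GSS.Phase phase, Node parent, Forest pending, State state) {
            this.phase = phase;
            this.state = state;
            this.results = new Forest.Many();
            this.results.merge(pending);
            if (parent != null) addParent(parent, true);
            // presumably also registers itself in phase.hash, keyed on (state, parent's phase)
        }

        GSS.Phase phase()          { return phase; }
        State state()              { return state; }
        Iterable<Forest> results() { return results; }

        /** true if this parent/pending pair was already accounted for (caller then skips addParent) */
        boolean merge(Node parent, Forest pending) { results.merge(pending); return false; }
        void addParent(Node parent, boolean fromNewNode) { /* add one parent edge */ }

        void performReductions()            { /* run every reduction from this node */ }
        void performReductions(Node parent) { /* reductions along one newly added parent edge */ }
        void performEmptyReductions()       { /* zero-length reductions only */ }
    }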
@@ -1,3 +1,5 @@
+// Copyright 2006 all rights reserved; see LICENSE file for BSD-style license
+
 package edu.berkeley.sbp;
 import edu.berkeley.sbp.*;
 import edu.berkeley.sbp.util.*;
@@ -10,24 +12,45 @@ import java.lang.reflect.*;
 /** implements Tomita's Graph Structured Stack */
 class GSS {
 
-    public GSS() { }
+    public static Queue<Node> removals = new LinkedList<Node>();
+
+    static String note = "";
+    static int single_newnode = 0;
+    static int toplevel_reductions = 0;
+    static int multi_newnode = 0;
+    static int waiting_newnode = 0;
+    static int shifts = 0;
+
+    static int count = 0;
+    static int reductions = 0;
+    int resets = 0;
+    int waits = 0;
+    
+    Input input;
 
-    private Phase.Node[] reducing_list = null;
-    public int resets = 0;
-    public int waits = 0;
+    public GSS(Input input) { this.input = input; }
 
-    HashMapBag<Integer,Sequence>       inhibited       = new HashMapBag<Integer,Sequence>();
-    HashMapBag<Integer,Sequence>       expectedInhibit = new HashMapBag<Integer,Sequence>();
+    private Node[] reducing_list = null;
+
+    // FIXME: right now, these are the performance bottleneck
     HashMapBag<Sequence,Phase.Waiting> waiting         = new HashMapBag<Sequence,Phase.Waiting>();
     HashMapBag<Integer,Sequence>       performed       = new HashMapBag<Integer,Sequence>();
+    HashMapBag<Integer,Sequence>       lastperformed   = new HashMapBag<Integer,Sequence>();
+    HashMapBag<Integer,Sequence>       expected        = new HashMapBag<Integer,Sequence>();
     
     /** FIXME */
-    public  Forest.Ref finalResult;
+    Forest.Many finalResult;
 
     /** corresponds to a position <i>between tokens</i> in the input stream; same as Tomita's U_i's */
-    public class Phase<Tok> implements Invokable<State, Forest, Phase<Tok>.Node>, IntegerMappable {
+    class Phase<Tok> implements Invokable<State, Forest, Node>, IntegerMappable, GraphViz.ToGraphViz, Iterable<Node> {
+
+        public int pos() { return pos; }
+        public boolean closed() { return closed; }
+        public Tok token() { return token; }
 
+        public Iterator<Node> iterator() { return hash.iterator(); }
         public void invoke(State st, Forest result, Node n) {
+            shifts++;
             good |= next.newNode(n, result, st, false);
         }
 
@@ -37,35 +60,41 @@ class GSS {
         private final int pos;
 
         boolean reducing;
-        private IntPairMap<Phase.Node> hash;  /* ALLOC */
-        private IntPairMap<Forest> singularReductions;  /* ALLOC */
+        public IntPairMap<Node> hash;  /* ALLOC */
         private boolean closed;
         private boolean good;
         private Phase next = null;
         private Phase prev;
         private Input.Location location;
+        private Input.Location nextLocation;
+        private Input.Location prevLocation;
+        
         public final Parser parser;
 
         private Forest forest;
 
-        public Phase(Phase prev, Parser parser, Phase previous, Tok token, Input.Location location, Forest forest) throws ParseFailed {
+        public Phase(Phase prev, Parser parser, Phase previous, Tok token, Input.Location location,
+                     Input.Location nextLocation, Forest forest) throws ParseFailed {
+            this.prevLocation = prev==null ? location : prev.getLocation();
             this.prev = prev;
             this.forest = forest;
             this.parser = parser;
             this.pos = previous==null ? 0 : previous.pos+1;
             this.token = token;
             this.location = location;
-            inhibited.clear();
+            this.nextLocation = nextLocation;
+            performed.clear();
             reset();
         }
 
         public void reset() throws ParseFailed {
             waiting.clear();
+            expected.clear();
+            lastperformed.clear();
+            lastperformed.addAll(performed);
             performed.clear();
-            hash = new IntPairMap<Phase.Node>();
-            singularReductions = new IntPairMap<Forest>();
-            expectedInhibit.clear();
-            expectedInhibit.addAll(inhibited);
+            hash = new IntPairMap<Node>();
+            reset = false;
             good = false;
             closed = false;
             reducing = false;
@@ -77,11 +106,20 @@ class GSS {
         public boolean isDone() throws ParseFailed {
             if (token != null) return false;
             if (token==null && finalResult==null)
-                throw new ParseFailed(ParseFailed.error(ANSI.red("unexpected end of file\n"), token, hash.values()), getLocation());
+                ParseFailed.error("unexpected end of file",
+                                  getLocation(),
+                                  token,
+                                  hash.values(),
+                                  getLocation().createRegion(getLocation()),
+                                  input,
+                                  GSS.this);
             return true;
         }
 
+        public Input.Location getPrevLocation() { return prevLocation; }
         public Input.Location getLocation() { return location; }
+        public Input.Region   getRegion() { return getPrevLocation().createRegion(getLocation()); }
+        public Input.Location getNextLocation() { return nextLocation; }
 
         /** add a new node (merging with existing nodes if possible)
          *  @param parent             the parent of the new node
@@ -99,21 +137,34 @@ class GSS {
             int pos = parent==null?0:parent.phase()==null?0:parent.phase().pos;
             Sequence owner = reduction==null ? null : reduction.owner();
             if (reduction!=null) {
-                if (inhibited.contains(pos, owner)) return;
+                if (owner.hates!=null) {
+                    for (Sequence s : performed.getAll(pos))
+                        if (owner.hates.contains(s))
+                            return;
+                    for (Sequence s : lastperformed.getAll(pos))
+                        if (owner.hates.contains(s)) {
+                            //System.out.println("now expecting ["+pos+"] => " + s);
+                            expected.add(pos, s);
+                            return;
+                        }
+                }
                 if (owner.needs != null)
                     for(Sequence s : owner.needs)
                         if (!performed.contains(pos, s)) {
                             waiting.add(s, new Waiting(parent, pending, state, fromEmptyReduction, reduction));
                             return;
                         }
-                if ((owner.needed != null && owner.needed.size()>0) ||
-                    (owner.hated != null && owner.hated.size()>0) ||
-                    (owner.hates != null && owner.hates.size()>0))
+                if (!performed.contains(pos, owner)) {
                     performed.add(pos, owner);
+                    if (owner.hated != null)
+                        for(Sequence seq : owner.hated)
+                            if (performed.contains(pos, seq)) {
+                                performed.remove(pos, seq);
+                                reset = true;
+                            }
+                }
             }
-            if (!owner.lame)
-                newNode(parent, pending, state, fromEmptyReduction);
-            if (reduction!=null) inhibit(reduction, parent==null?0:parent.phase().pos);
+            newNode(parent, pending, state, fromEmptyReduction);
             if (reduction != null) {
                 boolean redo = true;
                 while(redo) {
@@ -129,13 +180,14 @@ class GSS {
                 }
             }
         }
+
         private boolean newNode2(Node p, Node parent, Forest pending, State state, boolean fromEmptyReduction) {
-            p.holder.merge(pending);
-            if (p.parents().contains(parent)) return true;
-            p.parents().add(parent, true);
+            if (p.merge(parent, pending)) return true;
+            p.addParent(parent, true);
             if (p!=parent && !fromEmptyReduction && reducing) p.performReductions(parent);
             return true;
         }
+
         private boolean newNode3(Node parent, Forest pending, State state, boolean fromEmptyReduction) {
             do {
                 if (token != null && state.canShift(token)) break;
@@ -148,7 +200,7 @@ class GSS {
                 //return;
             } while(false);
 
-            Node n = new Node(parent, pending, state);  // ALLOC
+            Node n = new Node(Phase.this, parent, pending, state);  // ALLOC
             if (reducing) {
                 n.performEmptyReductions();
                 if (!fromEmptyReduction) n.performReductions(parent);
@@ -156,38 +208,14 @@ class GSS {
             return true;
         }
 
-        public void uninhibit(int p, Sequence s) {
-            if (s.hated!=null)
-                for(Sequence s2 : s.hated)
-                    inhibited.remove(p, s2);
-        }
+        public LinkedList<Node> reductionQueue = new LinkedList<Node>();
 
-        public void inhibit(Position r, int p) {
-            if (r.owner().hated == null) return;
-            // remember that dead states are still allowed to shift -- just not allowed to reduce
-            boolean reset = false;
-            for(Sequence seq : r.owner().hated) {
-                if (performed.contains(p,seq)) {
-                    uninhibit(p, seq);
-                    //System.out.println("\nresetting due to " + r.owner() + " killing " + seq);
-                    //inhibited.clear();
-                    inhibited.add(p, seq);
-                    //inhibited = new HashMapBag<Integer,Sequence>();
-                    reset = true;
-                    resets++;
-                    throw new Reset();
-                }
-                inhibited.add(p, seq);
-                expectedInhibit.remove(p, seq);
-            }
-        }
-        
         /** perform all reduction operations */
-        public void reduce() throws ParseFailed{
+        public void reduce() throws ParseFailed {
             try {
                 reducing = true;
                 if (reducing_list==null || reducing_list.length < hash.size())
-                    reducing_list = new Phase.Node[hash.size() * 4];
+                    reducing_list = new Node[hash.size() * 4];
                 hash.toArray(reducing_list);
                 int num = hash.size();
                 for(int i=0; i<num; i++) {
@@ -196,56 +224,87 @@ class GSS {
                     // INVARIANT: we never "see" a node until its parent-set is complete, modulo merges
                 }
                 for(int i=0; i<num; i++) {
-                    Node n = reducing_list[i];
+                    reductionQueue.add(reducing_list[i]);
                     reducing_list[i] = null;
-                    n.performReductions();
                 }
-                if (expectedInhibit.size() > 0) {
-                    inhibited.removeAll(expectedInhibit);
-                    System.out.println("\n!!!!\n");
-                    throw new Reset();
+                while(!reductionQueue.isEmpty()) {
+                    reductionQueue.remove().performReductions();
                 }
+                if (reset) {
+                    reset = false;
+                    resets++;
+                    throw new Reset();
+                }                
+                for(int i : expected)
+                    for(Sequence s : expected.getAll(i))
+                        if (!performed.contains(i, s)) {
+                            //System.out.println("resetting due to pos="+i+": " + s + " " + System.identityHashCode(s));
+                            resets++;
+                            throw new Reset();
+                        }
             } catch (Reset r) {
                 reset();
                 reduce();
             }
+            count = 0;
         }
 
+        private boolean reset = false;
         class Reset extends RuntimeException { }
 
         /** perform all shift operations, adding promoted nodes to <tt>next</tt> */
         public void shift(Phase next, Forest result) throws ParseFailed {
             // this massively improves GC performance
-            if (prev!=null) {
-                prev.hash = null;
-                prev.singularReductions = null;
+            if (prev!=null && parser.helpgc) {
+                //prev.hash = null;
+                //System.out.println("\r" + /*shifts + " " + */ single_newnode /*+ "/"+multi_newnode + " " + waiting_newnode*/);
+                //System.out.println("\r" + shifts + " " + note);
+                //System.out.println("\r" + shifts);
+                //System.out.println("\r" + toplevel_reductions);
+                //System.out.println("\r" + multi_newnode);
+                single_newnode = 0;
+                note = "";
+                multi_newnode = 0;
+                toplevel_reductions = 0;
+                waiting_newnode = 0;
+                shifts = 0;
             }
             this.next = next;
             closed = true;
             Forest res = null;
             boolean ok = false;
-            for(Phase.Node n : hash.values()) {
-                if (token == null && n.state.isAccepting()) {
-                    if (finalResult==null) finalResult = new Forest.Ref();
-                    finalResult.merge(n.holder);
+            int count = 0;
+            for(Node n : hash.values()) {
+                if (token == null && n.state().isAccepting()) {
+                    if (finalResult==null) finalResult = new Forest.Many();
+                    for(Object f : n.results())
+                        finalResult.merge((Forest)f);
                 }
                 if (token == null) continue;
-                n.state.invokeShifts(token, this, result, n);
+                n.state().invokeShifts(token, this, result, n);
             }
-
+            //System.out.println(next.hash.size());
             if (!good && token!=null)
-                throw new ParseFailed(ParseFailed.error(ANSI.red("unexpected character ")+" \'"+
-                                                        ANSI.purple(StringUtil.escapify(token+"", "\\\'\r\n"))+
-                                                        "\' encountered at "+
-                                                        ANSI.green(getLocation())+"\n", token, hash.values()),
-                                        getLocation());
+                ParseFailed.error("unexpected character",
+                                  getLocation(),
+                                  token,
+                                  hash.values(),
+                                  getRegion(),
+                                  input,
+                                  GSS.this);
+
             if (token==null && finalResult==null)
-                throw new ParseFailed(ParseFailed.error(ANSI.red("unexpected end of file\n"), token, hash.values()),
-                                        getLocation());
+                ParseFailed.error("unexpected end of file",
+                                  getLocation(),
+                                  token,
+                                  hash.values(),
+                                  getLocation().createRegion(getLocation()),
+                                  input,
+                                  GSS.this);
         }
 
 
-        public class Waiting {
+        class Waiting {
             Node parent;
             Forest pending;
             State state;
@@ -261,112 +320,28 @@ class GSS {
             }
             public void perform() {
                 //System.out.println("performing: " + reduction.position);
+                waiting_newnode++;
                 newNode(parent, pending, state, fromEmptyReduction, reduction);
             }
         }
        
-        // Node /////////////////////////////////////////////////////////////////////////////////
-
-        /** a node in the GSS */
-        public final class Node extends FastSet<Node> implements Invokable<Position, Node, Node>, IntegerMappable {
-
-            private Forest.Ref holder = null;
-            private boolean allqueued = false;
-
-            /** what state this node is in */
-            public final Parser.Table<Tok>.State<Tok> state;
-
-            /** which Phase this Node belongs to (node that Node is also a non-static inner class of Phase) */
-            public  Phase phase() { return Phase.this; }
-            public  Forest.Ref holder() { return holder==null ? (holder = new Forest.Ref()) : holder; }
-            public  Forest pending() { return Phase.this.closed ? holder().resolve() : holder; }
-            public  FastSet<Node> parents() { return this; }
-
-            public void performReductions() {
-                if (allqueued) return;
-                allqueued = true;
-                state.invokeReductions(token, this, this, null);
-            }
-
-            public void performReductions(Node n2) {
-                if (!allqueued) performReductions();
-                else            state.invokeReductions(token, this, this, n2);
-            }
-
-            public void performEmptyReductions() { state.invokeReductions(token, this, null, null); }
-            public final void invoke(Position r, Node n, Node n2) {
-                if (n==null || n2==null || r.pos==0) {
-                    if (r.pos==0) {
-                        if (n==null) n = this;
-                        else return;
-                    }
-                    if (n==null) return;
-                    Forest[] holder = new Forest[r.pos];
-                    if (r.pos==0) n.finish(r, r.zero(), n.phase(), holder);
-                    else                   n.reduce(r, r.pos-1, n.phase(), holder);
-                } else {
-                    Forest[] holder = new Forest[r.pos];
-                    if (r.pos<=0) throw new Error("called wrong form of reduce()");
-                    int pos = r.pos-1;
-                    Forest old = holder[pos];
-                    holder[pos] = n.pending();
-                    if (pos==0) {
-                        System.arraycopy(holder, 0, r.holder, 0, holder.length);
-                        Forest rex = null;
-                        if (r.pos==1)  rex = singularReductions.get(this, r);
-                        if (rex==null) {
-                            rex = r.rewrite(n.phase().getLocation());
-                            if (r.pos==1) singularReductions.put(this, r, rex);
-                        }
-                        n2.finish(r, rex, n.phase(), holder);
-                    } else {
-                        n2.reduce(r, pos-1, n.phase(), holder);
-                    }
-                    holder[pos] = old;
-                }
-            }
 
-            public void reduce(Position r, int pos, Phase target, Forest[] holder) {
-                Forest old = holder[pos];
-                holder[pos] = this.pending();
-                if (pos==0) {
-                    System.arraycopy(holder, 0, r.holder, 0, holder.length);
-                    for(int i=0; i<r.pos; i++) if (r.holder[i]==null) throw new Error("realbad");
-                    Forest rex = null;
-                    if (r.pos==1)  rex = singularReductions.get(this, r);
-                    if (rex==null) {
-                        rex = r.rewrite(phase().getLocation());
-                        if (r.pos==1) singularReductions.put(this, r, rex);
-                    }
-                    for(Node child : this.parents()) child.finish(r, rex, target, holder);
-                } else {
-                    for(Node child : this.parents()) child.reduce(r, pos-1, target, holder);
-                }
-                holder[pos] = old;
-            }
+        public int toInt() { return pos+1; }
+        public int size() { return hash==null ? 0 : hash.size(); }
 
-            public void finish(Position r, Forest result, Phase<Tok> target, Forest[] holder) {
-                Parser.Table<Tok>.State<Tok> state0 = state.gotoSetNonTerminals.get(r.owner());
-                if (result==null) throw new Error();
-                if (state0!=null)
-                    target.newNode(this, result, state0, r.pos<=0, r);
-            }
+        // GraphViz //////////////////////////////////////////////////////////////////////////////
 
-            private Node(Node parent, Forest pending, State state) {
-                this.state = state;
-                this.holder().merge(pending);
-                Phase start = parent==null ? null : parent.phase();
-                if (parent != null) parents().add(parent, true);
-                if (Phase.this.hash.get(state, start) != null) throw new Error("severe problem!");
-                Phase.this.hash.put(state, start, this);
-            }
-            public int toInt() { return idx; }
-            private final int idx = node_idx++;
+        public GraphViz.Node toGraphViz(GraphViz gv) {
+            if (gv.hasNode(this)) return gv.createNode(this);
+            GraphViz.Group g = gv.createGroup(this);
+            g.label = "Phase " + pos;
+            g.color = "gray";
+            g.cluster = true;
+            return g;
         }
-        private int node_idx = 0;
+        public boolean isTransparent() { return false; }
+        public boolean isHidden() { return false; }
 
-        public int toInt() { return pos+1; }
-        public int size() { return hash==null ? 0 : hash.size(); }
     }
 
 }
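For readers new to the data structure: the class comment above calls GSS "Tomita's Graph
Structured Stack", and the Phase comment identifies each phase with Tomita's U_i, a
position between tokens. The self-contained toy below (made-up names, not sbp's API)
illustrates the two invariants the code in this diff leans on: within a phase, nodes are
keyed by (parser state, phase of the parent), so stacks that converge on the same state
share one node, and a shared node simply accumulates parent edges.

    import java.util.*;

    // Toy graph-structured stack; a stand-in for the per-phase IntPairMap<Node> hash.
    final class ToyGSS {
        static final class Node {
            final int phase, state, start;              // start = phase the parent lives in
            final Set<Node> parents = new LinkedHashSet<>();
            Node(int phase, int state, int start) { this.phase = phase; this.state = state; this.start = start; }
        }

        private final List<Map<Long, Node>> phases = new ArrayList<>();

        Node newNode(int phase, Node parent, int state) {
            while (phases.size() <= phase) phases.add(new HashMap<>());
            int start = parent == null ? -1 : parent.phase;
            long key = ((long) state << 32) ^ (start & 0xffffffffL);
            Node n = phases.get(phase).computeIfAbsent(key, k -> new Node(phase, state, start));
            if (parent != null) n.parents.add(parent);  // converging stacks share one node
            return n;
        }

        public static void main(String[] args) {
            ToyGSS gss = new ToyGSS();
            Node root = gss.newNode(0, null, 0);
            Node b = gss.newNode(1, root, 1);           // two different stacks at phase 1 ...
            Node c = gss.newNode(1, root, 2);
            Node d1 = gss.newNode(2, b, 7);             // ... both continue into state 7 at phase 2
            Node d2 = gss.newNode(2, c, 7);
            System.out.println(d1 == d2);               // true: merged on (state, start)
            System.out.println(d1.parents.size());      // 2: a graph of stacks, not a tree
        }
    }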