checkpoint harmony
[sbp.git] src/edu/berkeley/sbp/GSS.java
index 65c12e7..cff543d 100644
@@ -1,6 +1,9 @@
 package edu.berkeley.sbp;
 import edu.berkeley.sbp.*;
 import edu.berkeley.sbp.util.*;
+import edu.berkeley.sbp.Sequence.Position;
+import edu.berkeley.sbp.Parser.Table.State;
+import edu.berkeley.sbp.Parser.Table.Reduction;
 import java.io.*;
 import java.util.*;
 import java.lang.reflect.*;
@@ -11,42 +14,152 @@ class GSS {
     public GSS() { }
 
     private Phase.Node[] reducing_list = null;
-
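+    // counters exposed for debugging: how many times a phase had to be reset, and how many reductions were postponed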
+    public int resets = 0;
+    public int waits = 0;
+
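+    // bookkeeping for sequences related by needs/hates constraints:
+    //   inhibited -- sequences whose reductions are suppressed at a given input position
+    //   performed -- sequences that have already reduced at a given input position
+    //   waiting   -- reductions postponed until the sequence they need has been performed
+    // (assumed and tail are only cleared here; their other uses are commented out below)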
+    HashMapBag<Integer,Sequence> inhibited = new HashMapBag<Integer,Sequence>();
+    HashMapBag<Integer,Sequence> assumed = new HashMapBag<Integer,Sequence>();
+    HashMapBag<Sequence,Phase.Waiting> waiting = new HashMapBag<Sequence,Phase.Waiting>();
+    HashMapBag<Integer,Sequence> performed = new HashMapBag<Integer,Sequence>();
+    HashSet<Phase.Waiting> tail = new HashSet<Phase.Waiting>();
+    
     /** corresponds to a position <i>between tokens</i> in the input stream; same as Tomita's U_i's */
-    public class Phase implements Invokable<Parser.Table.State, Forest, GSS.Phase.Node> {
+    public class Phase implements Invokable<State, Forest, GSS.Phase.Node>, IntegerMappable {
+        public int toInt() { return pos+1; }
 
         /** the token immediately after this phase */
         public  final Token token;
 
-        boolean reducing = false;
+        boolean reducing;
 
         /** currently this is necessary only for the code() hack -- it doesn't actually correspond to the input */
         private final int pos;
 
         /** FIXME */
-        public  Forest.Ref finalResult = null;
+        public  Forest.Ref finalResult;
 
         /** all nodes, keyed by the value returned by code() */
-        private HashMap<Long,Phase.Node> hash    = new HashMap<Long,Phase.Node>();  /* ALLOC */
+        /*private*/ IntPairMap<Phase.Node> hash;  /* ALLOC */
 
         /** the number of nodes in this phase */
-        private int numNodes = 0;
+        private int numNodes;
+
+        boolean closed;
 
-        boolean closed = false;
+        private boolean good;
+        private Phase next = null;
 
         private Token.Location location;
-        public Phase(Phase previous, Token token, Token.Location location) {
+        public final Parser parser;
+
+        private Forest forest;
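+        // prev is the phase whose shift produced this one; reset() re-shifts prev into this phase using the saved forest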
+        Phase prev;
+        public Phase(Phase prev, Parser parser, Phase previous, Token token, Token.Location location, Forest forest) {
+            this.prev = prev;
+            this.forest = forest;
+            this.parser = parser;
             this.pos = previous==null ? 0 : previous.pos+1;
             this.token = token;
             this.location = location;
+            inhibited.clear();
+            assumed.clear();
+            reset();
         }
 
-        public boolean isDone() { return token == null; }
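+        // discard all per-phase state (node table, waiting/performed sets, flags) and rebuild this
+        // phase from scratch by re-shifting the previous phase into it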
+        public void reset() {
+            tail.clear();
+            waiting.clear();
+            performed.clear();
+            hash = new IntPairMap<Phase.Node>();
+            good = false;
+            closed = false;
+            numNodes = 0;
+            reducing = false;
+            finalResult = null;
+            if (prev != null) prev.shift(this, forest);
+        }
 
-        private String error = "generic syntax error";
-        public void checkFailure() throws Parser.Failed {
-            if (numNodes <= 0)
-                throw new Parser.Failed(error, getLocation());
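+        // walk up the GSS from n, recording for each named sequence in progress the element expected next;
+        // unnamed sequences (and first/last positions, unless forced) recurse into the node's parents instead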
+        public void complain(Node n, HashMap<String,HashSet<String>> errors, boolean force) {
+            if (n.touched) return;
+            n.touched = true;
+            for(Position p : n.state) {
+                //if (!p.isLast()) { 
+                if (((p.isFirst() || p.isLast()) && !force) || p.owner().name==null) {
+                    for(Node n2 : n.parents())
+                        complain(n2, errors, force | p.isFirst());
+                } else {
+                    String seqname = p.owner().name;
+                    HashSet<String> hs = errors.get(seqname);
+                    if (hs==null) errors.put(seqname, hs = new HashSet<String>());
+                    hs.add(p.element()+"");
+                    //String s = "  while parsing " + seqname + ": expected a " + p.element();
+                        //"\n";
+                    /*
+                    s +=       "      parsed: ";
+                    for(Position p2 = p.owner().firstp(); p2 != null && p2 != p && !p2.isLast(); p2 = p2.next()) s += (p2.element() + " ");
+                    s += "\n";
+                    s +=       "    expected: ";
+                    for(Position p2 = p; p2 != null && !p2.isLast(); p2 = p2.next()) s += (p2.element() + " ");
+                    */
+                    //s += "\n";
+                    //errors.add(s);
+                }
+            }
+        }
+
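+        // ANSI escape-code helpers used to colorize error messages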
+        public String black(Object o) { return "\033[30m"+o+"\033[0m"; }
+        public String red(Object o) { return "\033[31m"+o+"\033[0m"; }
+        public String green(Object o) { return "\033[32m"+o+"\033[0m"; }
+        public String yellow(Object o) { return "\033[33m"+o+"\033[0m"; }
+        public String blue(Object o) { return "\033[34m"+o+"\033[0m"; }
+        public String purple(Object o) { return "\033[35m"+o+"\033[0m"; }
+        public String cyan(Object o) { return "\033[36m"+o+"\033[0m"; }
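+        // pretty-print a grammar element: quoted literals are unescaped and shown in purple, everything else in yellow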
+        public String el(Object e) {
+            String s = e.toString();
+            if (s.length()==0 || s.charAt(0)!='\"' || s.charAt(s.length()-1)!='\"') return yellow(s);
+            s = s.substring(1);
+            s = s.substring(0, s.length()-1);
+            StringBuffer ret = new StringBuffer();
+            for(int i=0; i<s.length(); i++) {
+                if (s.charAt(i)=='\\' && i<s.length()-1) ret.append(s.charAt(++i));
+                else ret.append(s.charAt(i)); // append just this character, not the whole string
+            }
+            return purple(ret.toString());
+        }
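+        // build a human-readable syntax-error report by running complain() over every live node
+        // and listing, per named sequence, the elements that would have been acceptable here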
+        public String error(String message) {
+            String lookAhead = token==null ? "<EOF>" : token.toString();
+            StringBuffer ret = new StringBuffer();
+            ret.append("\n  ");
+            ret.append(message);
+            HashMap<String,HashSet<String>> errors = new HashMap<String,HashSet<String>>();
+            for(Node n : hash.values()) {
+                //System.out.println(n.state);
+                complain(n, errors, false);
+            }
+            for(String s : errors.keySet()) {
+                ret.append("    while parsing " + yellow(s));
+                HashSet<String> hs = errors.get(s);
+                if (hs.size()==1) ret.append(" expected " + yellow(el(hs.iterator().next())) + "\n");
+                else {
+                    ret.append(" expected ");
+                    boolean first = true;
+                    for(String s2 : hs) {
+                        if (!first) ret.append(" or ");
+                        first = false;
+                        ret.append(yellow(el(s2)));
+                    }
+                    ret.append("\n");
+                }
+            }
+            return ret.toString();
+        }
+        
+        public boolean isDone() throws Parser.Failed {
+            if (token != null) return false;
+            if (token==null && finalResult==null)
+                throw new Parser.Failed(error(red("unexpected end of file\n")),
+                                        getLocation());
+            return true;
         }
 
         public Token.Location getLocation() { return location; }
@@ -58,25 +171,64 @@ class GSS {
          *  @param fromEmptyReduction true iff this node is being created as a result of a reduction of length zero (see GRMLR paper)
          *  @param start              the earliest part of the input contributing to this node (used to make merging decisions)
          */
-        public void newNode(Node parent, Forest pending, Parser.Table.State state, boolean fromEmptyReduction) {
-            Node p = hash.get(code(state, parent==null?null:parent.phase()));
-            if (p != null)  newNode2(p, parent, pending, state, fromEmptyReduction);
-            else            newNode3(parent, pending, state, fromEmptyReduction);
+        public boolean newNode(Node parent, Forest pending, State state, boolean fromEmptyReduction) {
+            Node p = hash.get(state, parent==null?null:parent.phase());
+            if (p != null)  return newNode2(p, parent, pending, state, fromEmptyReduction);
+            else            return newNode3(parent, pending, state, fromEmptyReduction);
+        }
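+        // like newNode() above, but also enforces the needs/hates constraints of the reduction's owning Sequence:
+        //   - reductions inhibited at this position are dropped
+        //   - reductions whose needed sequences have not yet been performed are parked in "waiting"
+        //   - otherwise the node is created (unless the owner is lame), the owner's hated sequences are
+        //     inhibited (possibly forcing a phase reset; see inhibit()), and any reductions that were
+        //     waiting on this owner in the same phase are re-fired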
+        public void newNode(Node parent, Forest pending, State state, boolean fromEmptyReduction, Reduction reduction) {
+            int pos = parent==null?0:parent.phase()==null?0:parent.phase().pos;
+            Sequence owner = reduction==null ? null : reduction.position.owner();
+            if (reduction!=null) {
+                if (inhibited.contains(pos, owner)) return;
+                /*
+                if (assumed.contains(pos, owner)) {
+                    tail.add(new Waiting(parent, pending, state, fromEmptyReduction, reduction));
+                    return;
+                }
+                */
+                if (owner.needs != null)
+                    for(Sequence s : owner.needs)
+                        if (!performed.contains(pos, s)) {
+                            waiting.add(s, new Waiting(parent, pending, state, fromEmptyReduction, reduction));
+                            return;
+                        }
+                if ((owner.needed != null && owner.needed.size()>0) ||
+                    (owner.hated != null && owner.hated.size()>0) ||
+                    (owner.hates != null && owner.hates.size()>0))
+                    performed.add(pos, owner);
+            }
+            if (owner==null || !owner.lame)   // owner is null when there is no reduction
+                newNode(parent, pending, state, fromEmptyReduction);
+            if (reduction!=null) inhibit(reduction, parent==null?0:parent.phase().pos);
+            if (reduction != null) {
+                boolean redo = true;
+                while(redo) {
+                    redo = false;
+                    for(Waiting w : waiting.getAll(owner)) {
+                        if (w.parent==parent || (parent!=null&&w.parent!=null&&w.parent.phase()==parent.phase())) {
+                            waiting.remove(owner, w);
+                            w.perform();
+                            redo = true;
+                            break;
+                        }
+                    }
+                }
+            }
         }
-        private void newNode2(Node p, Node parent, Forest pending, Parser.Table.State state, boolean fromEmptyReduction) {
+        private boolean newNode2(Node p, Node parent, Forest pending, State state, boolean fromEmptyReduction) {
             p.holder.merge(pending);
-            if (p.parents().contains(parent)) return;
-            if (p.fe && p.phase() != parent.phase()) throw new Error("yep yep");
-            if (!p.fe && p.phase() == parent.phase()) throw new Error("yep yep2");
+            if (p.parents().contains(parent)) return true;
             p.parents().add(parent, true);
             if (p!=parent && !fromEmptyReduction) p.queueReductions(parent);
+            return true;
         }
-        private void newNode3(Node parent, Forest pending, Parser.Table.State state, boolean fromEmptyReduction) {
+        private boolean newNode3(Node parent, Forest pending, State state, boolean fromEmptyReduction) {
             do {
                 if (token != null && state.canShift(token)) break;
                 if (state.isAccepting()) break;
                 if (token==null) break;
-                if (!state.canReduce(token)) return;
+                //if (!state.canReduce(token)) return false;
                 //if (count > 1) break;
                 //if (r.numPop == 0) break;
                 //r.reduce(pending, parent, null, Phase.this, null);
@@ -86,95 +238,130 @@ class GSS {
             Node n = new Node(parent, pending, state, fromEmptyReduction);  // ALLOC
             n.queueEmptyReductions();
             if (!fromEmptyReduction) n.queueReductions(parent);
+            return true;
         }
 
+        public void uninhibit(int p, Sequence s) {
+            if (s.hated!=null)
+                for(Sequence s2 : s.hated)
+                    inhibited.remove(p, s2);
+        }
+
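+        // if a sequence hated by this reduction's owner has already been performed at position p, the phase
+        // must be rebuilt without it, so a Reset is thrown; otherwise the hated sequences are marked inhibited at p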
+        public void inhibit(Reduction r, int p) {
+            if (r.position.owner().hated == null) return;
+            // remember that dead states are still allowed to shift -- just not allowed to reduce
+            boolean reset = false;
+            for(Sequence seq : r.position.owner().hated) {
+                if (performed.contains(p,seq)) {
+                    uninhibit(p, seq);
+                    //System.out.println("\nresetting due to " + r.position.owner() + " killing " + seq);
+                    //inhibited.clear();
+                    inhibited.add(p, seq);
+                    //assumed = inhibited;
+                    //inhibited = new HashMapBag<Integer,Sequence>();
+                    reset = true;
+                    resets++;
+                    throw new Reset();
+                }
+                inhibited.add(p, seq);
+            }
+        }
         
         /** perform all reduction operations */
         public void reduce() {
-            reducing = true;
-            if (reducing_list==null || reducing_list.length < hash.size())
-                reducing_list = new Phase.Node[hash.size() * 4];
-            Collection<Node> hv = hash.values();
-            hv.toArray(reducing_list);
-            int num = hv.size();
-            for(int i=0; i<num; i++) {
-                Node n = reducing_list[i];
-                n.queueEmptyReductions();
-                // INVARIANT: we never "see" a node until its parent-set is complete, modulo merges
-            }
-            for(int i=0; i<num; i++) {
-                Node n = reducing_list[i];
-                reducing_list[i] = null;
-                n.queueReductions();
+            try {
+                reducing = true;
+                if (reducing_list==null || reducing_list.length < hash.size())
+                    reducing_list = new Phase.Node[hash.size() * 4];
+                hash.toArray(reducing_list);
+                int num = hash.size();
+                for(int i=0; i<num; i++) {
+                    Node n = reducing_list[i];
+                    n.queueEmptyReductions();
+                    // INVARIANT: we never "see" a node until its parent-set is complete, modulo merges
+                }
+                for(int i=0; i<num; i++) {
+                    Node n = reducing_list[i];
+                    reducing_list[i] = null;
+                    n.queueReductions();
+                }
+                //for(Waiting w : tail)
+                //w.perform();
+            } catch (Reset r) {
+                reset();
+                reduce();
             }
         }
 
-        public void invoke(Parser.Table.State st, Forest result, Node n) {
-            next.newNode(n, result, st, false);
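+        // thrown by inhibit() to abandon the current reduction pass; reduce() catches it, resets the phase, and retries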
+        class Reset extends RuntimeException { }
+
+        public void invoke(State st, Forest result, Node n) {
+            boolean ok = next.newNode(n, result, st, false);
+            if (ok && !good) {
+                good = !st.lame();
+                //if (good) System.out.println(st);
+            }
         }
-        private Phase next = null;
 
         /** perform all shift operations, adding promoted nodes to <tt>next</tt> */
-        public void shift(Phase next, Forest result) {
+        public void shift(Phase next, Forest result) throws Parser.Failed {
+            if (prev!=null) prev.hash = null;
             this.next = next;
             closed = true;
             Forest res = null;
             boolean ok = false;
             for(Phase.Node n : hash.values()) {
-                if (n.holder==null) continue;
-                n.holder.resolve();
                 if (token == null && n.state.isAccepting()) {
-                    ok = true;
                     if (finalResult==null) finalResult = new Forest.Ref();
                     finalResult.merge(n.holder);
                 }
-                if (!n.holder.valid()) continue;
                 if (token == null) continue;
                 n.state.invokeShifts(token, this, result, n);
-                /*
-                for(Parser.Table.State st : n.state.getShifts(token)) {
-                    if (res == null) res = result;
-                    next.newNode(n, res, st, true, this);
-                    ok = true;
-                }
-                */
             }
 
-            if (!ok && token != null) {
-                StringBuffer error = new StringBuffer();
-                error.append("error: unable to shift token \"" + token + "\"\n");
-                //error.append("  before: " +pendingReductions+ "\n");
-                //error.append("  before: " +totalReductions+ "\n");
-                //for(Phase.Node n : hash.values()) {
-                //n.queueReductions();
-                //n.queueEmptyReductions();
-                //}
-                //error.append("  after: " +pendingReductions+ "\n");
-                //error.append("  candidate states:\n");
-                //for(Phase.Node n : hash.values()) {
-                    //for(Sequence.Position p : n.state) error.append("        " + p + "\n");
-                    //error.append("        --\n");
-                //for(Parser.Table.Reduction r : n.state.getReductions(token)) error.append("        " + r + "\n");
-                    //error.append("        ==\n");
-                //}
-                next.error = error.toString();
-            }
+            if (!good && token!=null)
+                throw new Parser.Failed(error(red("unexpected character")+" "+purple(token)+" encountered at "+green(getLocation())+"\n"),
+                                        getLocation());
+            if (token==null && finalResult==null)
+                throw new Parser.Failed(error(red("unexpected end of file\n")),
+                                        getLocation());
 
             // this massively improves GC performance
-            hash = null;
+            //hash = null;
         }
 
+
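+        // a reduction postponed because a sequence it needs has not yet been performed at this position;
+        // perform() replays it through newNode() once the needed sequence reduces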
+        public class Waiting {
+            Node parent;
+            Forest pending;
+            State state;
+            boolean fromEmptyReduction;
+            Reduction reduction;
+            public Waiting(Node parent, Forest pending, State state, boolean fromEmptyReduction, Reduction reduction) {
+                waits++;
+                this.parent = parent;
+                this.pending = pending;
+                this.state = state;
+                this.fromEmptyReduction = fromEmptyReduction;
+                this.reduction = reduction;
+            }
+            public void perform() {
+                //System.out.println("performing: " + reduction.position);
+                newNode(parent, pending, state, fromEmptyReduction, reduction);
+            }
+        }
        
         // GSS Nodes //////////////////////////////////////////////////////////////////////////////
 
         /** a node in the GSS */
-        public final class Node extends FastSet<Node> implements Invokable<Parser.Table.Reduction, Node, Node> {
+        public final class Node extends FastSet<Node> implements Invokable<Reduction, Node, Node> {
 
 
+            public boolean touched = false;
             private Forest.Ref holder = null;
             private boolean allqueued = false;
 
             /** what state this node is in */
-            public final Parser.Table.State state;
+            public final State state;
 
             /** which Phase this Node belongs to (note that Node is also a non-static inner class of Phase) */
             public Phase phase() { return Phase.this; }
@@ -196,7 +383,7 @@ class GSS {
                 state.invokeReductions(token, this, this, n2);
             }
 
-            public final void invoke(Parser.Table.Reduction r, Node n, Node n2) {
+            public final void invoke(Reduction r, Node n, Node n2) {
                 if (n==null) {
                     if (r.numPop==0) r.reduce(this);
                     return;
@@ -211,16 +398,17 @@ class GSS {
             }
 
             private boolean fe;
-            private Node(Node parent, Forest pending, Parser.Table.State state, boolean fe) {
+            public boolean dead = false;
+            public boolean redo = false;
+            private Node(Node parent, Forest pending, State state, boolean fe) {
                 this.fe = fe;
                 this.state = state;
+                this.holder().merge(pending);
                 Phase start = parent==null ? null : parent.phase();
-                if (pending != null) this.holder().merge(pending);
                 if (parent != null) parents().add(parent, true);
-                if (Phase.this.hash.get(code(state, start)) != null) throw new Error("severe problem!");
-                Phase.this.hash.put(code(state, start), this);
+                if (Phase.this.hash.get(state, start) != null) throw new Error("severe problem!");
+                Phase.this.hash.put(state, start, this);
                 Phase.this.numNodes++;
-                if (parent==null) holder().valid = true; // hack to make sure that the "base" node is always considered valid
             }
         }
 
@@ -232,9 +420,4 @@ class GSS {
         if (a==null || b==null) return false;
         return a.equals(b);
     }
-
-    /** this is something of a hack right now */
-    private static long code(Parser.Table.State state, Phase start) {
-        return (((long)state.idx) << 32) | (start==null ? 0 : (start.pos+1));
-    }
 }