package edu.berkeley.sbp;
import edu.berkeley.sbp.*;
import edu.berkeley.sbp.util.*;
import edu.berkeley.sbp.Parser.Table.*;
import edu.berkeley.sbp.Sequence.Position;
import java.io.*;
import java.util.*;
import java.lang.reflect.*;

/** implements Tomita's Graph Structured Stack */
class GSS {

    public GSS() { }

    private Phase.Node[] reducing_list = null;
    public int resets = 0;
    public int waits = 0;

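    // Bookkeeping for the needs/hates relations between Sequences (see newNode() and inhibit() below):
    //   inhibited  -- per input position, Sequences that are currently not allowed to reduce
    //   waiting    -- reductions parked until the Sequences their owner "needs" have been performed
    //   performed  -- per input position, Sequences whose reductions have already fired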
    HashMapBag<Integer,Sequence>       inhibited = new HashMapBag<Integer,Sequence>();
    HashMapBag<Sequence,Phase.Waiting> waiting   = new HashMapBag<Sequence,Phase.Waiting>();
    HashMapBag<Integer,Sequence>       performed = new HashMapBag<Integer,Sequence>();

    /** FIXME */
    public  Forest.Ref finalResult;

    /** corresponds to a position <i>between tokens</i> in the input stream; same as Tomita's U_i's */
    public class Phase<Tok> implements Invokable<State, Forest, Phase<Tok>.Node>, IntegerMappable {

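        // Shift callback: invoked (via State.invokeShifts) once per node that can shift the
        // current token; the shifted node is created in the next phase, and 'good' records
        // whether at least one shift succeeded (otherwise shift() reports a parse error).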
        public void invoke(State st, Forest result, Node n) {
            good |= next.newNode(n, result, st, false);
        }

        /** the token immediately after this phase */
        final Tok token;

        private final int pos;

        boolean reducing;
        private IntPairMap<Phase.Node> hash;  /* ALLOC */
        private boolean closed;
        private boolean good;
        private Phase next = null;
        private Phase prev;
        private Token.Location location;
        public final Parser parser;

        private Forest forest;

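        // A Phase is created for each input position; constructing it clears the per-phase
        // bookkeeping and (via reset()) asks the previous phase to shift its nodes into this one.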
        public Phase(Phase prev, Parser parser, Phase previous, Tok token, Token.Location location, Forest forest) {
            this.prev = prev;
            this.forest = forest;
            this.parser = parser;
            this.pos = previous==null ? 0 : previous.pos+1;
            this.token = token;
            this.location = location;
            inhibited.clear();
            reset();
        }

        public void reset() {
            waiting.clear();
            performed.clear();
            hash = new IntPairMap<Phase.Node>();
            good = false;
            closed = false;
            reducing = false;
            finalResult = null;
            if (prev != null) prev.shift(this, forest);
        }

        public boolean isDone() throws ParseFailed {
            if (token != null) return false;
            if (finalResult==null)
                throw new ParseFailed(ParseFailed.error(ANSI.red("unexpected end of file\n"), token, hash.values()), getLocation());
            return true;
        }

        public Token.Location getLocation() { return location; }

        /** add a new node (merging with existing nodes if possible)
         *  @param parent             the parent of the new node
         *  @param pending            the SPPF result (pending forest) corresponding to the new node
         *  @param state              the state that the new node is in
         *  @param fromEmptyReduction true iff this node is being created as a result of a reduction of length zero (see GRMLR paper)
         */
        public boolean newNode(Node parent, Forest pending, State state, boolean fromEmptyReduction) {
            Node p = hash.get(state, parent==null?null:parent.phase());
            if (p != null)  return newNode2(p, parent, pending, state, fromEmptyReduction);
            else            return newNode3(parent, pending, state, fromEmptyReduction);
        }
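        /** add a new node, but first enforce the owning Sequence's conjunct constraints
         *  (its "needs" and "hates" sets): a reduction whose prerequisites have not yet fired
         *  is parked in <tt>waiting</tt> and replayed once they do, and a reduction that has
         *  been inhibited at this position is dropped entirely. */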
        public void newNode(Node parent, Forest pending, State state, boolean fromEmptyReduction, Position reduction) {
            int pos = parent==null?0:parent.phase()==null?0:parent.phase().pos;
            Sequence owner = reduction==null ? null : reduction.owner();
            if (reduction!=null) {
                if (inhibited.contains(pos, owner)) return;
                if (owner.needs != null)
                    for(Sequence s : owner.needs)
                        if (!performed.contains(pos, s)) {
                            waiting.add(s, new Waiting(parent, pending, state, fromEmptyReduction, reduction));
                            return;
                        }
                if ((owner.needed != null && owner.needed.size()>0) ||
                    (owner.hated != null && owner.hated.size()>0) ||
                    (owner.hates != null && owner.hates.size()>0))
                    performed.add(pos, owner);
            }
            if (owner==null || !owner.lame)   // owner is null when there is no reduction
                newNode(parent, pending, state, fromEmptyReduction);
            if (reduction != null) {
                inhibit(reduction, pos);
                boolean redo = true;
                while(redo) {
                    redo = false;
                    for(Waiting w : waiting.getAll(owner)) {
                        if (w.parent==parent || (parent!=null&&w.parent!=null&&w.parent.phase()==parent.phase())) {
                            waiting.remove(owner, w);
                            w.perform();
                            redo = true;
                            break;
                        }
                    }
                }
            }
        }
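        // Merge path: a node for (state, parent phase) already exists, so fold the pending
        // forest into it and add the new parent edge; if we are mid-reduction, replay
        // reductions along the newly added edge only.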
        private boolean newNode2(Node p, Node parent, Forest pending, State state, boolean fromEmptyReduction) {
            p.holder.merge(pending);
            if (p.parents().contains(parent)) return true;
            p.parents().add(parent, true);
            if (p!=parent && !fromEmptyReduction && reducing) p.performReductions(parent);
            return true;
        }
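        // Fresh-node path: only allocate a node if it can do something useful with the
        // upcoming token (shift it, accept, or reduce on it); otherwise report failure.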
        private boolean newNode3(Node parent, Forest pending, State state, boolean fromEmptyReduction) {
            do {
                if (token != null && state.canShift(token)) break;
                if (state.isAccepting()) break;
                if (token==null) break;
                if (!state.canReduce(token)) return false;
                //if (count > 1) break;
                //if (r.numPop == 0) break;
                //r.reduce(pending, parent, null, Phase.this, null);
                //return;
            } while(false);

            Node n = new Node(parent, pending, state);  // ALLOC
            if (reducing) {
                n.performEmptyReductions();
                if (!fromEmptyReduction) n.performReductions(parent);
            }
            return true;
        }

        public void uninhibit(int p, Sequence s) {
            if (s.hated!=null)
                for(Sequence s2 : s.hated)
                    inhibited.remove(p, s2);
        }

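        // Record that everything the owner of this reduction "hates" is inhibited at position p.
        // If a hated Sequence has already performed a reduction there, the phase is inconsistent:
        // count a reset and throw Reset so reduce() can rebuild the phase from scratch.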
        public void inhibit(Position r, int p) {
            if (r.owner().hated == null) return;
            // remember that dead states are still allowed to shift -- just not allowed to reduce
            for(Sequence seq : r.owner().hated) {
                if (performed.contains(p,seq)) {
                    uninhibit(p, seq);
                    //System.out.println("\nresetting due to " + r.owner() + " killing " + seq);
                    //inhibited.clear();
                    inhibited.add(p, seq);
                    //inhibited = new HashMapBag<Integer,Sequence>();
                    resets++;
                    throw new Reset();
                }
                inhibited.add(p, seq);
            }
        }

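        // Reductions are done in two passes over a snapshot of the node set: empty (length-zero)
        // reductions first, then the rest, so that a node's parent set is complete before it is
        // reduced through (see the INVARIANT note below). A Reset thrown by inhibit() restarts
        // the whole phase via reset() + reduce().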
        /** perform all reduction operations */
        public void reduce() {
            try {
                reducing = true;
                if (reducing_list==null || reducing_list.length < hash.size())
                    reducing_list = new Phase.Node[hash.size() * 4];
                hash.toArray(reducing_list);
                int num = hash.size();
                for(int i=0; i<num; i++) {
                    Node n = reducing_list[i];
                    n.performEmptyReductions();
                    // INVARIANT: we never "see" a node until its parent-set is complete, modulo merges
                }
                for(int i=0; i<num; i++) {
                    Node n = reducing_list[i];
                    reducing_list[i] = null;
                    n.performReductions();
                }
            } catch (Reset r) {
                reset();
                reduce();
            }
        }

        class Reset extends RuntimeException { }

        /** perform all shift operations, adding promoted nodes to <tt>next</tt> */
        public void shift(Phase next, Forest result) throws ParseFailed {
            // this massively improves GC performance
            if (prev!=null) prev.hash = null;
            this.next = next;
            closed = true;
            for(Phase.Node n : hash.values()) {
                if (token == null && n.state.isAccepting()) {
                    if (finalResult==null) finalResult = new Forest.Ref();
                    finalResult.merge(n.holder);
                }
                if (token == null) continue;
                n.state.invokeShifts(token, this, result, n);
            }

            if (!good && token!=null)
                throw new ParseFailed(ParseFailed.error(ANSI.red("unexpected character")+" "+ANSI.purple(token)+" encountered at "+ANSI.green(getLocation())+"\n", token, hash.values()),
                                      getLocation());
            if (token==null && finalResult==null)
                throw new ParseFailed(ParseFailed.error(ANSI.red("unexpected end of file\n"), token, hash.values()),
                                      getLocation());
        }

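        // A Waiting is a suspended reduction: everything needed to call
        // newNode(parent, pending, state, fromEmptyReduction, reduction) later, once the
        // Sequences its owner "needs" have been performed at the relevant position.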
        public class Waiting {
            Node parent;
            Forest pending;
            State state;
            boolean fromEmptyReduction;
            Position reduction;
            public Waiting(Node parent, Forest pending, State state, boolean fromEmptyReduction, Position reduction) {
                waits++;
                this.parent = parent;
                this.pending = pending;
                this.state = state;
                this.fromEmptyReduction = fromEmptyReduction;
                this.reduction = reduction;
            }
            public void perform() {
                //System.out.println("performing: " + reduction.position);
                newNode(parent, pending, state, fromEmptyReduction, reduction);
            }
        }

        // Node /////////////////////////////////////////////////////////////////////////////////

        /** a node in the GSS */
        public final class Node extends FastSet<Node> implements Invokable<Position, Node, Node> {
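            // Note: Node extends FastSet<Node>, so a node *is* its own parent set (see
            // parents() below) -- presumably to avoid allocating a separate set per node.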

            private Forest.Ref holder = null;
            private boolean allqueued = false;

            /** what state this node is in */
            public final Parser.Table<Tok>.State<Tok> state;

            /** which Phase this Node belongs to (note that Node is also a non-static inner class of Phase) */
            public  Phase phase() { return Phase.this; }
            public  Forest.Ref holder() { return holder==null ? (holder = new Forest.Ref()) : holder; }
            public  Forest pending() { return Phase.this.closed ? holder().resolve() : holder; }
            public  FastSet<Node> parents() { return this; }

            public void performReductions() {
                if (allqueued) return;
                allqueued = true;
                state.invokeReductions(token, this, this, null);
            }

            public void performReductions(Node n2) {
                if (!allqueued) performReductions();
                else            state.invokeReductions(token, this, this, n2);
            }

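            // Reduction callback (via State.invokeReductions). When n2 is null the reduction is
            // performed over all parents; when n2 is non-null it is replayed only along the newly
            // added edge to n2 (see performReductions(Node)). holder accumulates the r.pos child
            // forests popped while walking back through the parent chain.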
            public final void invoke(Position r, Node n, Node n2) {
                if (n==null || n2==null || r.pos==0) {
                    if (r.pos==0) {
                        if (n==null) n = this;
                        else return;
                    }
                    if (n==null) return;
                    Forest[] holder = new Forest[r.pos];
                    if (r.pos==0) n.finish(r, r.zero(), n.phase(), holder);
                    else          n.reduce(r, r.pos-1, n.phase(), holder);
                } else {
                    Forest[] holder = new Forest[r.pos];
                    if (r.pos<=0) throw new Error("called wrong form of reduce()");
                    int pos = r.pos-1;
                    Forest old = holder[pos];
                    holder[pos] = n.pending();
                    if (pos==0) {
                        System.arraycopy(holder, 0, r.holder, 0, holder.length);
                        Forest rex = r.rewrite(n.phase().getLocation());
                        n2.finish(r, rex, n.phase(), holder);
                    } else {
                        n2.reduce(r, pos-1, n.phase(), holder);
                    }
                    holder[pos] = old;
                }
            }

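            // Walk back through the parent chain, filling holder[pos..0] with each node's pending
            // forest; when pos reaches 0 the reduction's right-hand side is complete, so rewrite it
            // into a Forest at the target phase's location and finish() at every parent.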
            public void reduce(Position r, int pos, Phase target, Forest[] holder) {
                Forest old = holder[pos];
                holder[pos] = this.pending();
                if (pos==0) {
                    System.arraycopy(holder, 0, r.holder, 0, holder.length);
                    for(int i=0; i<r.pos; i++) if (r.holder[i]==null) throw new Error("realbad");
                    Forest rex = r.rewrite(target.getLocation());
                    for(Node child : this.parents()) child.finish(r, rex, target, holder);
                } else {
                    for(Node child : this.parents()) child.reduce(r, pos-1, target, holder);
                }
                holder[pos] = old;
            }

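            // Complete a reduction: take the GOTO transition on the reduction's owner and create
            // (or merge into) the corresponding node in the target phase.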
            public void finish(Position r, Forest result, Phase<Tok> target, Forest[] holder) {
                Parser.Table<Tok>.State<Tok> state0 = state.gotoSetNonTerminals.get(r.owner());
                if (result==null) throw new Error();
                if (state0!=null)
                    target.newNode(this, result, state0, r.pos<=0, r);
            }

            public void performEmptyReductions() { state.invokeReductions(token, this, null, null); }

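            // Each node registers itself in the phase's hash, keyed by (state, parent phase), so
            // newNode() can find and merge into an existing node rather than creating a duplicate.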
            private Node(Node parent, Forest pending, State state) {
                this.state = state;
                this.holder().merge(pending);
                Phase start = parent==null ? null : parent.phase();
                if (parent != null) parents().add(parent, true);
                if (Phase.this.hash.get(state, start) != null) throw new Error("severe problem!");
                Phase.this.hash.put(state, start, this);
            }
        }

        public int toInt() { return pos+1; }
        public int size() { return hash==null ? 0 : hash.size(); }
    }

}