src/edu/berkeley/sbp/GSS.java (commit 8124590dc465f326e7e8e90c33d92c7fed4c4752)
package edu.berkeley.sbp;
import edu.berkeley.sbp.*;
import edu.berkeley.sbp.util.*;
import edu.berkeley.sbp.Parser.Table.*;
import edu.berkeley.sbp.Sequence.Position;
import java.io.*;
import java.util.*;
import java.lang.reflect.*;

/** implements Tomita's Graph Structured Stack */
class GSS {

    public GSS() { }

    private Phase.Node[] reducing_list = null;
    public int resets = 0;
    public int waits = 0;
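    // Bookkeeping for conjunctive/negative ("needs"/"hates") clauses, keyed by input position
    // (roles as inferred from their use below):
    //   inhibited:       sequences that may not reduce at a given position
    //   expectedInhibit: inhibitions seeded from the current inhibition set at reset(); if any
    //                    remain unconfirmed after reduction, the phase is redone
    //   waiting:         reductions deferred until a sequence they "need" has been performed
    //   performed:       sequences that have already reduced at a given position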
    HashMapBag<Integer,Sequence>       inhibited       = new HashMapBag<Integer,Sequence>();
    HashMapBag<Integer,Sequence>       expectedInhibit = new HashMapBag<Integer,Sequence>();
    HashMapBag<Sequence,Phase.Waiting> waiting         = new HashMapBag<Sequence,Phase.Waiting>();
    HashMapBag<Integer,Sequence>       performed       = new HashMapBag<Integer,Sequence>();

    /** FIXME */
    public  Forest.Ref finalResult;
    /** corresponds to a position <i>between tokens</i> in the input stream; same as Tomita's U_i's */
    public class Phase<Tok> implements Invokable<State, Forest, Phase<Tok>.Node>, IntegerMappable {
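        /** callback for State.invokeShifts: create a node for state <tt>st</tt> in the next phase,
         *  with parent <tt>n</tt> and forest <tt>result</tt> */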
        public void invoke(State st, Forest result, Node n) {
            good |= next.newNode(n, result, st, false);
        }
        /** the token immediately after this phase */
        final Tok token;

        private final int pos;

        boolean reducing;
        private IntPairMap<Phase.Node> hash;  /* ALLOC */
        private IntPairMap<Forest> singularReductions;  /* ALLOC */
        private boolean closed;
        private boolean good;
        private Phase next = null;
        private Phase prev;
        private Input.Location location;
        public final Parser parser;

        private Forest forest;
        public Phase(Phase prev, Parser parser, Phase previous, Tok token, Input.Location location, Forest forest) {
            this.prev = prev;
            this.forest = forest;
            this.parser = parser;
            this.pos = previous==null ? 0 : previous.pos+1;
            this.token = token;
            this.location = location;
            inhibited.clear();
            reset();
        }
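        /** clear all per-phase tables and redo the shift from the previous phase;
         *  reduce() also calls this to restart the phase after a Reset */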
        public void reset() {
            waiting.clear();
            performed.clear();
            hash = new IntPairMap<Phase.Node>();
            singularReductions = new IntPairMap<Forest>();
            expectedInhibit.clear();
            expectedInhibit.addAll(inhibited);
            good = false;
            closed = false;
            reducing = false;
            finalResult = null;
            if (prev != null) prev.shift(this, forest);
        }

        public boolean isDone() throws ParseFailed {
            if (token != null) return false;
            if (finalResult==null)
                throw new ParseFailed(ParseFailed.error(ANSI.red("unexpected end of file\n"), token, hash.values()), getLocation());
            return true;
        }
        public Input.Location getLocation() { return location; }
        /** add a new node (merging with existing nodes if possible)
         *  @param parent             the parent of the new node
         *  @param pending            the SPPF result corresponding to the new node
         *  @param state              the state that the new node is in
         *  @param fromEmptyReduction true iff this node is being created as a result of a reduction of length zero (see GRMLR paper)
         */
        public boolean newNode(Node parent, Forest pending, State state, boolean fromEmptyReduction) {
            Node p = hash.get(state, parent==null?null:parent.phase());
            if (p != null)  return newNode2(p, parent, pending, state, fromEmptyReduction);
            else            return newNode3(parent, pending, state, fromEmptyReduction);
        }
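        /** as above, but invoked on behalf of <tt>reduction</tt>; enforces the owning Sequence's
         *  needs/hates constraints before (or instead of) creating the node, and re-fires any
         *  reductions that were waiting on this Sequence */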
        public void newNode(Node parent, Forest pending, State state, boolean fromEmptyReduction, Position reduction) {
            int pos = parent==null?0:parent.phase()==null?0:parent.phase().pos;
            Sequence owner = reduction==null ? null : reduction.owner();
            if (reduction!=null) {
                if (inhibited.contains(pos, owner)) return;
                if (owner.needs != null)
                    for(Sequence s : owner.needs)
                        if (!performed.contains(pos, s)) {
                            waiting.add(s, new Waiting(parent, pending, state, fromEmptyReduction, reduction));
                            return;
                        }
                if ((owner.needed != null && owner.needed.size()>0) ||
                    (owner.hated != null && owner.hated.size()>0) ||
                    (owner.hates != null && owner.hates.size()>0))
                    performed.add(pos, owner);
            }
            // owner is null only when this call does not correspond to a reduction
            if (owner==null || !owner.lame)
                newNode(parent, pending, state, fromEmptyReduction);
            if (reduction!=null) inhibit(reduction, parent==null?0:parent.phase().pos);
            if (reduction != null) {
                boolean redo = true;
                while(redo) {
                    redo = false;
                    for(Waiting w : waiting.getAll(owner)) {
                        if (w.parent==parent || (parent!=null&&w.parent!=null&&w.parent.phase()==parent.phase())) {
                            waiting.remove(owner, w);
                            w.perform();
                            redo = true;
                            break;
                        }
                    }
                }
            }
        }
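        /** merge <tt>pending</tt> into an existing node <tt>p</tt> for the same (state, parent-phase) pair */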
        private boolean newNode2(Node p, Node parent, Forest pending, State state, boolean fromEmptyReduction) {
            p.holder.merge(pending);
            if (p.parents().contains(parent)) return true;
            p.parents().add(parent, true);
            if (p!=parent && !fromEmptyReduction && reducing) p.performReductions(parent);
            return true;
        }
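        /** allocate a fresh node, unless the state is provably useless here (it can neither shift
         *  the lookahead, accept, nor reduce on it) */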
        private boolean newNode3(Node parent, Forest pending, State state, boolean fromEmptyReduction) {
            do {
                if (token != null && state.canShift(token)) break;
                if (state.isAccepting()) break;
                if (token==null) break;
                if (!state.canReduce(token)) return false;
                //if (count > 1) break;
                //if (r.numPop == 0) break;
                //r.reduce(pending, parent, null, Phase.this, null);
                //return;
            } while(false);

            Node n = new Node(parent, pending, state);  // ALLOC
            if (reducing) {
                n.performEmptyReductions();
                if (!fromEmptyReduction) n.performReductions(parent);
            }
            return true;
        }

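        /** withdraw the inhibitions that sequence <tt>s</tt> imposes on the sequences it hates, at position <tt>p</tt> */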
        public void uninhibit(int p, Sequence s) {
            if (s.hated!=null)
                for(Sequence s2 : s.hated)
                    inhibited.remove(p, s2);
        }

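        /** mark every sequence hated by <tt>r</tt>'s owner as inhibited at position <tt>p</tt>;
         *  if such a sequence has already been performed there, the whole phase must be redone (Reset) */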
        public void inhibit(Position r, int p) {
            if (r.owner().hated == null) return;
            // remember that dead states are still allowed to shift -- just not allowed to reduce
            for(Sequence seq : r.owner().hated) {
                if (performed.contains(p,seq)) {
                    uninhibit(p, seq);
                    //System.out.println("\nresetting due to " + r.owner() + " killing " + seq);
                    //inhibited.clear();
                    inhibited.add(p, seq);
                    //inhibited = new HashMapBag<Integer,Sequence>();
                    resets++;
                    throw new Reset();
                }
                inhibited.add(p, seq);
                expectedInhibit.remove(p, seq);
            }
        }

        /** perform all reduction operations */
        public void reduce() {
            try {
                reducing = true;
                if (reducing_list==null || reducing_list.length < hash.size())
                    reducing_list = new Phase.Node[hash.size() * 4];
                hash.toArray(reducing_list);
                int num = hash.size();
                for(int i=0; i<num; i++) {
                    Node n = reducing_list[i];
                    n.performEmptyReductions();
                    // INVARIANT: we never "see" a node until its parent-set is complete, modulo merges
                }
                for(int i=0; i<num; i++) {
                    Node n = reducing_list[i];
                    reducing_list[i] = null;
                    n.performReductions();
                }
                if (expectedInhibit.size() > 0) {
                    inhibited.removeAll(expectedInhibit);
                    System.out.println("\n!!!!\n");
                    throw new Reset();
                }
            } catch (Reset r) {
                reset();
                reduce();
            }
        }

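        /** thrown to abandon the current phase and re-run it (via reset()) when an inhibition conflict is discovered */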
        class Reset extends RuntimeException { }

        /** perform all shift operations, adding promoted nodes to <tt>next</tt> */
        public void shift(Phase next, Forest result) throws ParseFailed {
            // dropping the previous phase's tables massively improves GC performance
            if (prev!=null) {
                prev.hash = null;
                prev.singularReductions = null;
            }
            this.next = next;
            closed = true;
            for(Phase.Node n : hash.values()) {
                if (token == null && n.state.isAccepting()) {
                    if (finalResult==null) finalResult = new Forest.Ref();
                    finalResult.merge(n.holder);
                }
                if (token == null) continue;
                n.state.invokeShifts(token, this, result, n);
            }

            if (!good && token!=null)
                throw new ParseFailed(ParseFailed.error(ANSI.red("unexpected character ")+" \'"+
                                                        ANSI.purple(StringUtil.escapify(token+"", "\\\'\r\n"))+
                                                        "\' encountered at "+
                                                        ANSI.green(getLocation())+"\n", token, hash.values()),
                                        getLocation());
            if (token==null && finalResult==null)
                throw new ParseFailed(ParseFailed.error(ANSI.red("unexpected end of file\n"), token, hash.values()),
                                        getLocation());
        }

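        /** a reduction that has been deferred because a sequence its owner "needs" has not yet been
         *  performed at the relevant position; perform() retries it once that sequence completes */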
        public class Waiting {
            Node parent;
            Forest pending;
            State state;
            boolean fromEmptyReduction;
            Position reduction;
            public Waiting(Node parent, Forest pending, State state, boolean fromEmptyReduction, Position reduction) {
                waits++;
                this.parent = parent;
                this.pending = pending;
                this.state = state;
                this.fromEmptyReduction = fromEmptyReduction;
                this.reduction = reduction;
            }
            public void perform() {
                //System.out.println("performing: " + reduction.position);
                newNode(parent, pending, state, fromEmptyReduction, reduction);
            }
        }

        // Node /////////////////////////////////////////////////////////////////////////////////

        /** a node in the GSS */
        public final class Node extends FastSet<Node> implements Invokable<Position, Node, Node>, IntegerMappable {

            private Forest.Ref holder = null;
            private boolean allqueued = false;

            /** what state this node is in */
            public final Parser.Table<Tok>.State<Tok> state;

            /** which Phase this Node belongs to (note that Node is also a non-static inner class of Phase) */
            public  Phase phase() { return Phase.this; }
            public  Forest.Ref holder() { return holder==null ? (holder = new Forest.Ref()) : holder; }
            public  Forest pending() { return Phase.this.closed ? holder().resolve() : holder; }
            public  FastSet<Node> parents() { return this; }

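            /** queue every reduction out of this node (guarded by <tt>allqueued</tt> so the full set is
             *  only queued once); the overload below handles a single newly-added parent */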
            public void performReductions() {
                if (allqueued) return;
                allqueued = true;
                state.invokeReductions(token, this, this, null);
            }

            public void performReductions(Node n2) {
                if (!allqueued) performReductions();
                else            state.invokeReductions(token, this, this, n2);
            }

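            /** callback for State.invokeReductions: pop <tt>r.pos</tt> stack frames, gathering each
             *  node's pending forest into <tt>holder</tt>, then rewrite the reduction and finish it in the
             *  parent's phase; when <tt>n2</tt> is non-null only the path continuing through <tt>n2</tt> is followed */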
            public final void invoke(Position r, Node n, Node n2) {
                if (n==null || n2==null || r.pos==0) {
                    if (r.pos==0) {
                        if (n==null) n = this;
                        else return;
                    }
                    if (n==null) return;
                    Forest[] holder = new Forest[r.pos];
                    if (r.pos==0) n.finish(r, r.zero(), n.phase(), holder);
                    else          n.reduce(r, r.pos-1, n.phase(), holder);
                } else {
                    Forest[] holder = new Forest[r.pos];
                    if (r.pos<=0) throw new Error("called wrong form of reduce()");
                    int pos = r.pos-1;
                    Forest old = holder[pos];
                    holder[pos] = n.pending();
                    if (pos==0) {
                        System.arraycopy(holder, 0, r.holder, 0, holder.length);
                        Forest rex = null;
                        if (r.pos==1)  rex = singularReductions.get(this, r);
                        if (rex==null) {
                            rex = r.rewrite(n.phase().getLocation());
                            if (r.pos==1) singularReductions.put(this, r, rex);
                        }
                        n2.finish(r, rex, n.phase(), holder);
                    } else {
                        n2.reduce(r, pos-1, n.phase(), holder);
                    }
                    holder[pos] = old;
                }
            }

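            /** recursive step of a reduction: record this node's pending forest in slot <tt>pos</tt> and
             *  recurse into each parent; at slot 0 the reduction is rewritten and the parents complete it
             *  in <tt>target</tt> */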
            public void reduce(Position r, int pos, Phase target, Forest[] holder) {
                Forest old = holder[pos];
                holder[pos] = this.pending();
                if (pos==0) {
                    System.arraycopy(holder, 0, r.holder, 0, holder.length);
                    for(int i=0; i<r.pos; i++) if (r.holder[i]==null) throw new Error("realbad");
                    Forest rex = null;
                    if (r.pos==1)  rex = singularReductions.get(this, r);
                    if (rex==null) {
                        rex = r.rewrite(phase().getLocation());
                        if (r.pos==1) singularReductions.put(this, r, rex);
                    }
                    for(Node child : this.parents()) child.finish(r, rex, target, holder);
                } else {
                    for(Node child : this.parents()) child.reduce(r, pos-1, target, holder);
                }
                holder[pos] = old;
            }

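            /** complete a reduction: take the goto transition on the reduced non-terminal and create
             *  the resulting node (carrying forest <tt>result</tt>) in phase <tt>target</tt> */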
            public void finish(Position r, Forest result, Phase<Tok> target, Forest[] holder) {
                Parser.Table<Tok>.State<Tok> state0 = state.gotoSetNonTerminals.get(r.owner());
                if (result==null) throw new Error();
                if (state0!=null)
                    target.newNode(this, result, state0, r.pos<=0, r);
            }

            public void performEmptyReductions() { state.invokeReductions(token, this, null, null); }

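            /** register the new node in the phase's (state, parent-phase) table; a duplicate entry
             *  indicates a bug upstream in newNode() */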
            private Node(Node parent, Forest pending, State state) {
                this.state = state;
                this.holder().merge(pending);
                Phase start = parent==null ? null : parent.phase();
                if (parent != null) parents().add(parent, true);
                if (Phase.this.hash.get(state, start) != null) throw new Error("severe problem!");
                Phase.this.hash.put(state, start, this);
            }
            public int toInt() { return idx; }
            private final int idx = node_idx++;
        }
        private int node_idx = 0;

        public int toInt() { return pos+1; }
        public int size() { return hash==null ? 0 : hash.size(); }
    }

}