package edu.berkeley.sbp;
import edu.berkeley.sbp.*;
import edu.berkeley.sbp.util.*;
import edu.berkeley.sbp.Parser.Table.*;
import edu.berkeley.sbp.Sequence.Position;
import java.io.*;
import java.util.*;
import java.lang.reflect.*;

/** implements Tomita's Graph Structured Stack */
class GSS {
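
    // Overview: the GSS is a chain of Phases, one per position between input tokens.
    // Each Phase keeps its Nodes hashed by (parser state, parent phase); a Node's
    // parent set points into earlier phases, and its Forest.Ref accumulates the SPPF
    // fragments for the input consumed so far.  Each phase is reduce()d and then
    // shift()ed into its successor (shift() is triggered from the successor's reset()).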

    public static int count = 0;
    public GSS() { }

    private Phase.Node[] reducing_list = null;
    public int resets = 0;
    public int waits = 0;

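    // These bags track the per-position bookkeeping for Sequence needs/hates
    // constraints (see newNode() below for how they are consulted):
    //   waiting       -- reductions deferred until a Sequence they "need" has fired
    //   performed     -- Sequences already reduced at a given position in this pass
    //   lastperformed -- the performed set from the previous pass, preserved by reset()
    //   expected      -- Sequences that must fire again in this pass, else reduce() resets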
    HashMapBag<Sequence,Phase.Waiting> waiting         = new HashMapBag<Sequence,Phase.Waiting>();
    HashMapBag<Integer,Sequence>       performed       = new HashMapBag<Integer,Sequence>();
    HashMapBag<Integer,Sequence>       lastperformed   = new HashMapBag<Integer,Sequence>();
    HashMapBag<Integer,Sequence>       expected        = new HashMapBag<Integer,Sequence>();

    /** FIXME */
    public  Forest.Ref finalResult;

    /** corresponds to a position <i>between tokens</i> in the input stream; same as Tomita's U_i's */
    class Phase<Tok> implements Invokable<State, Forest, Phase<Tok>.Node>, IntegerMappable {

        public void invoke(State st, Forest result, Node n) {
            good |= next.newNode(n, result, st, false);
        }

        /** the token immediately after this phase */
        final Tok token;

        private final int pos;

        boolean reducing;
        private IntPairMap<Phase.Node> hash;  /* ALLOC */
        private IntPairMap<Forest> singularReductions;  /* ALLOC */
        private boolean closed;
        private boolean good;
        private Phase next = null;
        private Phase prev;
        private Input.Location location;
        public final Parser parser;

        private Forest forest;

        public Phase(Phase prev, Parser parser, Phase previous, Tok token, Input.Location location, Forest forest) throws ParseFailed {
            this.prev = prev;
            this.forest = forest;
            this.parser = parser;
            this.pos = previous==null ? 0 : previous.pos+1;
            this.token = token;
            this.location = location;
            performed.clear();
            reset();
        }

        public void reset() throws ParseFailed {
            waiting.clear();
            expected.clear();
            lastperformed.clear();
            lastperformed.addAll(performed);
            performed.clear();
            hash = new IntPairMap<Phase.Node>();
            singularReductions = new IntPairMap<Forest>();
            reset = false;
            good = false;
            closed = false;
            reducing = false;
            finalResult = null;
            if (prev != null) prev.shift(this, forest);
        }

        public boolean isDone() throws ParseFailed {
            if (token != null) return false;
            if (token==null && finalResult==null)
                throw new ParseFailed(ParseFailed.error(ANSI.red("unexpected end of file\n"), token, hash.values()), getLocation());
            return true;
        }

        public Input.Location getLocation() { return location; }

        /** add a new node (merging with existing nodes if possible)
         *  @param parent             the parent of the new node
         *  @param pending            the SPPF result corresponding to the new node
         *  @param state              the state that the new node is in
         *  @param fromEmptyReduction true iff this node is being created as a result of a reduction of length zero (see GRMLR paper)
         */
        public boolean newNode(Node parent, Forest pending, State state, boolean fromEmptyReduction) {
            Node p = hash.get(state, parent==null?null:parent.phase());
            if (p != null)  return newNode2(p, parent, pending, state, fromEmptyReduction);
            else            return newNode3(parent, pending, state, fromEmptyReduction);
        }
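
        // Reduction-driven node creation: before the node is added, the reduction's
        // owning Sequence is checked against its hates set (drop the node if a hated
        // Sequence already fired at this position) and its needs set (park the work
        // in a Waiting entry until the needed Sequence fires).  Once the owner is
        // recorded as performed, any Waiting reductions keyed on it are replayed.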
        public void newNode(Node parent, Forest pending, State state, boolean fromEmptyReduction, Position reduction) {
            int pos = parent==null?0:parent.phase()==null?0:parent.phase().pos;
            Sequence owner = reduction==null ? null : reduction.owner();
            if (reduction!=null) {
                if (owner.hates!=null) {
                    for (Sequence s : performed.getAll(pos))
                        if (owner.hates.contains(s))
                            return;
                    for (Sequence s : lastperformed.getAll(pos))
                        if (owner.hates.contains(s)) {
                            //System.out.println("now expecting ["+pos+"] => " + s);
                            expected.add(pos, s);
                            return;
                        }
                }
                if (owner.needs != null)
                    for(Sequence s : owner.needs)
                        if (!performed.contains(pos, s)) {
                            waiting.add(s, new Waiting(parent, pending, state, fromEmptyReduction, reduction));
                            return;
                        }
                if (!performed.contains(pos, owner)) {
                    performed.add(pos, owner);
                    if (owner.hated != null)
                        for(Sequence seq : owner.hated)
                            if (performed.contains(pos, seq)) {
                                performed.remove(pos, seq);
                                reset = true;
                            }
                }
            }
            if (owner==null || !owner.lame)   // owner may be null when reduction==null
                newNode(parent, pending, state, fromEmptyReduction);
            if (reduction != null) {
                boolean redo = true;
                while(redo) {
                    redo = false;
                    for(Waiting w : waiting.getAll(owner)) {
                        if (w.parent==parent || (parent!=null&&w.parent!=null&&w.parent.phase()==parent.phase())) {
                            waiting.remove(owner, w);
                            w.perform();
                            redo = true;
                            break;
                        }
                    }
                }
            }
        }
        private boolean newNode2(Node p, Node parent, Forest pending, State state, boolean fromEmptyReduction) {
            p.holder.merge(pending);
            if (p.parents().contains(parent)) return true;
            p.parents().add(parent, true);
            if (p!=parent && !fromEmptyReduction && reducing) p.performReductions(parent);
            return true;
        }
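
        // A genuinely new node is only allocated if its state could do something
        // useful here: shift the lookahead token, accept, sit at end-of-input, or at
        // least reduce on the token.  Otherwise the node would be a dead end.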
        private boolean newNode3(Node parent, Forest pending, State state, boolean fromEmptyReduction) {
            do {
                if (token != null && state.canShift(token)) break;
                if (state.isAccepting()) break;
                if (token==null) break;
                if (!state.canReduce(token)) return false;
                //if (count > 1) break;
                //if (r.numPop == 0) break;
                //r.reduce(pending, parent, null, Phase.this, null);
                //return;
            } while(false);

            Node n = new Node(parent, pending, state);  // ALLOC
            if (reducing) {
                n.performEmptyReductions();
                if (!fromEmptyReduction) n.performReductions(parent);
            }
            return true;
        }

        /** perform all reduction operations for this phase; if a needs/hates constraint forces a reset, the phase is reset() and reduced again from scratch */
        public void reduce() throws ParseFailed {
            try {
                reducing = true;
                if (reducing_list==null || reducing_list.length < hash.size())
                    reducing_list = new Phase.Node[hash.size() * 4];
                hash.toArray(reducing_list);
                int num = hash.size();
                for(int i=0; i<num; i++) {
                    Node n = reducing_list[i];
                    n.performEmptyReductions();
                    // INVARIANT: we never "see" a node until its parent-set is complete, modulo merges
                }
                for(int i=0; i<num; i++) {
                    Node n = reducing_list[i];
                    reducing_list[i] = null;
                    n.performReductions();
                }
                if (reset) {
                    reset = false;
                    resets++;
                    throw new Reset();
                }
                for(int i : expected)
                    for(Sequence s : expected.getAll(i))
                        if (!performed.contains(i, s)) {
                            //System.out.println("resetting due to pos="+i+": " + s + " " + System.identityHashCode(s));
                            resets++;
                            throw new Reset();
                        }
            } catch (Reset r) {
                reset();
                reduce();
            }
            count = 0;
        }

        private boolean reset = false;
        class Reset extends RuntimeException { }

        /** perform all shift operations, adding promoted nodes to <tt>next</tt>; at end-of-input this instead merges accepting nodes into <tt>finalResult</tt>, and a ParseFailed is thrown if no node could be shifted (or, at end-of-input, if there is no accepting node) */
        public void shift(Phase next, Forest result) throws ParseFailed {
            // this massively improves GC performance
            if (prev!=null) {
                prev.hash = null;
                prev.singularReductions = null;
            }
            this.next = next;
            closed = true;
            Forest res = null;
            boolean ok = false;
            for(Phase.Node n : hash.values()) {
                if (token == null && n.state.isAccepting()) {
                    if (finalResult==null) finalResult = new Forest.Ref();
                    finalResult.merge(n.holder);
                }
                if (token == null) continue;
                n.state.invokeShifts(token, this, result, n);
            }

            if (!good && token!=null)
                throw new ParseFailed(ParseFailed.error(ANSI.red("unexpected character ")+" \'"+
                                                        ANSI.purple(StringUtil.escapify(token+"", "\\\'\r\n"))+
                                                        "\' encountered at "+
                                                        ANSI.green(getLocation())+"\n", token, hash.values()),
                                        getLocation());
            if (token==null && finalResult==null)
                throw new ParseFailed(ParseFailed.error(ANSI.red("unexpected end of file\n"), token, hash.values()),
                                        getLocation());
        }


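        /** a reduction that was deferred because its owning Sequence still "needs"
         *  another Sequence to fire at this position; perform() replays it once the
         *  prerequisite has been satisfied */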
        class Waiting {
            Node parent;
            Forest pending;
            State state;
            boolean fromEmptyReduction;
            Position reduction;
            public Waiting(Node parent, Forest pending, State state, boolean fromEmptyReduction, Position reduction) {
                waits++;
                this.parent = parent;
                this.pending = pending;
                this.state = state;
                this.fromEmptyReduction = fromEmptyReduction;
                this.reduction = reduction;
            }
            public void perform() {
                //System.out.println("performing: " + reduction.position);
                newNode(parent, pending, state, fromEmptyReduction, reduction);
            }
        }

        // Node /////////////////////////////////////////////////////////////////////////////////

        /** a node in the GSS */
        final class Node extends FastSet<Node> implements Invokable<Position, Node, Node>, IntegerMappable {

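            // A Node is its own parent set: it extends FastSet<Node>, and the set's
            // contents are this node's parents in earlier phases.  holder is the
            // Forest.Ref into which every derivation reaching this node is merged;
            // allqueued records that this node's reductions have already been queued.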
            private Forest.Ref holder = null;
            private boolean allqueued = false;

            /** what state this node is in */
            public final Parser.Table<Tok>.State<Tok> state;

            /** which Phase this Node belongs to (note that Node is also a non-static inner class of Phase) */
            public  Phase phase() { return Phase.this; }
            public  Forest.Ref holder() { return holder==null ? (holder = new Forest.Ref()) : holder; }
            public  Forest pending() { return Phase.this.closed ? holder().resolve() : holder; }
            public  FastSet<Node> parents() { return this; }

            public void performReductions() {
                if (allqueued) return;
                allqueued = true;
                state.invokeReductions(token, this, this, null);
            }

            public void performReductions(Node n2) {
                if (!allqueued) performReductions();
                else            state.invokeReductions(token, this, this, n2);
            }

            public void performEmptyReductions() { state.invokeReductions(token, this, null, null); }
            public final void invoke(Position r, Node n, Node n2) {
                if (n==null || n2==null || r.pos==0) {
                    if (r.pos==0) {
                        if (n==null) n = this;
                        else return;
                    }
                    if (n==null) return;
                    Forest[] holder = new Forest[r.pos];
                    if (r.pos==0) n.finish(r, r.zero(), n.phase(), holder);
                    else          n.reduce(r, r.pos-1, n.phase(), holder);
                } else {
                    Forest[] holder = new Forest[r.pos];
                    if (r.pos<=0) throw new Error("called wrong form of reduce()");
                    int pos = r.pos-1;
                    n.reduce(r, pos, n.phase(), holder, n2);
                }
            }

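            // reduce() walks pos+1 stack frames toward the parents, recording one
            // pending Forest per popped frame in holder[]; at pos==0 the collected
            // children are handed to Position.rewrite() (cached in singularReductions
            // when the reduction pops a single frame) and the resulting Forest is
            // finish()ed on each parent of the deepest node (or on `only`, if given).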
            public void reduce(Position r, int pos, Phase target, Forest[] holder) { reduce(r, pos, target, holder, null); }
            public void reduce(Position r, int pos, Phase target, Forest[] holder, Node only) {
                Forest old = holder[pos];
                holder[pos] = this.pending();
                if (pos==0) {
                    System.arraycopy(holder, 0, r.holder, 0, holder.length);
                    for(int i=0; i<r.pos; i++) if (r.holder[i]==null) throw new Error("realbad");
                    Forest rex = null;

                    // FIXME: I'm unsure about this -- basically we want to deal with the case where
                    //        there are two nodes, each of whose Ref points to the same Forest instance.
                    //        Some node in the next phase has both of these as parents.  This might happen
                    //        since the same reduction can appear in more than one state.
                    if (r.pos==1)  rex = singularReductions.get(this.pending(), r);
                    if (rex==null) {
                        rex = r.rewrite(phase().getLocation());
                        if (r.pos==1) singularReductions.put(this.pending(), r, rex);
                    }
                    if (only != null)  only.finish(r, rex, target, holder);
                    else               for(Node child : this.parents()) child.finish(r, rex, target, holder);
                } else {
                    if (only != null)  only.reduce(r, pos-1, target, holder);
                    else               for(Node child : this.parents()) child.reduce(r, pos-1, target, holder);
                }
                holder[pos] = old;
            }

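            /** completes a reduction: looks up the goto state for r.owner() in this
             *  node's gotoSetNonTerminals and, if one exists, adds a new node for it
             *  in the target phase */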
            public void finish(Position r, Forest result, Phase<Tok> target, Forest[] holder) {
                Parser.Table<Tok>.State<Tok> state0 = state.gotoSetNonTerminals.get(r.owner());
                if (result==null) throw new Error();
                if (state0!=null)
                    target.newNode(this, result, state0, r.pos<=0, r);
            }

            private Node(Node parent, Forest pending, State state) {
                this.state = state;
                this.holder().merge(pending);
                Phase start = parent==null ? null : parent.phase();
                if (parent != null) parents().add(parent, true);
                if (Phase.this.hash.get(state, start) != null) throw new Error("severe problem!");
                Phase.this.hash.put(state, start, this);
            }
            public int toInt() { return idx; }
            private final int idx = node_idx++;
        }
        private int node_idx = 0;

        public int toInt() { return pos+1; }
        public int size() { return hash==null ? 0 : hash.size(); }
    }

}