break Node out of GSS
[sbp.git] / src / edu / berkeley / sbp / GSS.java
1 // Copyright 2006 all rights reserved; see LICENSE file for BSD-style license
2
3 package edu.berkeley.sbp;
4 import edu.berkeley.sbp.*;
5 import edu.berkeley.sbp.util.*;
6 import edu.berkeley.sbp.Parser.Table.*;
7 import edu.berkeley.sbp.Sequence.Position;
8 import java.io.*;
9 import java.util.*;
10 import java.lang.reflect.*;
11
12 /** implements Tomita's Graph Structured Stack */
/** implements Tomita's Graph Structured Stack */
class GSS {

    // NOTE(review): static mutable state — this (and the static counters below) means at most
    // one parse can safely run per JVM at a time; confirm that is intended.
    public static Queue<Node> removals = new LinkedList<Node>();

    // Instrumentation counters; zeroed in Phase.shift() when parser.helpgc is set.
    static String note = "";
    static int single_newnode = 0;
    static int toplevel_reductions = 0;
    static int multi_newnode = 0;
    static int waiting_newnode = 0;
    static int shifts = 0;

    static int count = 0;
    static int reductions = 0;
    int resets = 0;   // how many times a phase was re-run because of a Reset
    int waits = 0;    // how many Waiting records have ever been created
    
    Input input;

    public GSS(Input input) { this.input = input; }

    // Scratch array reused across calls to Phase.reduce(); regrown when the node hash outgrows it.
    private Node[] reducing_list = null;

    // FIXME: right now, these are the performance bottleneck
    //
    // Bookkeeping for conjunctive/exclusion constraints between Sequences, keyed by the
    // input position (phase pos) at which the reduction's parent sits:
    //   waiting       - reductions parked because some Sequence in owner.needs has not yet
    //                   been performed at that position (drained in newNode(..., reduction))
    //   performed     - Sequences already reduced at a position during the CURRENT attempt
    //                   at the current phase (cleared by the Phase constructor and reset())
    //   lastperformed - snapshot of 'performed' from the PREVIOUS attempt at this phase;
    //                   reset() moves performed into it before clearing
    //   expected      - Sequences seen last attempt that a hater suppressed this attempt;
    //                   if any of them fails to be performed again, reduce() throws Reset
    HashMapBag<Sequence,Phase.Waiting> waiting         = new HashMapBag<Sequence,Phase.Waiting>();
    HashMapBag<Integer,Sequence>       performed       = new HashMapBag<Integer,Sequence>();
    HashMapBag<Integer,Sequence>       lastperformed   = new HashMapBag<Integer,Sequence>();
    HashMapBag<Integer,Sequence>       expected        = new HashMapBag<Integer,Sequence>();
    
    /** FIXME */
    // Accumulated parse result; merged from accepting nodes when the token stream ends
    // (see Phase.shift()), nulled on every Phase.reset().
    Forest.Many finalResult;

    /** corresponds to a position <i>between tokens</i> in the input stream; same as Tomita's U_i's */
    class Phase<Tok> implements Invokable<State, Forest, Node>, IntegerMappable, GraphViz.ToGraphViz, Iterable<Node> {

        public int pos() { return pos; }
        public boolean closed() { return closed; }
        public Tok token() { return token; }

        public Iterator<Node> iterator() { return hash.iterator(); }
        // Invokable callback used by State.invokeShifts(): pushes node n into the next phase.
        public void invoke(State st, Forest result, Node n) {
            shifts++;
            good |= next.newNode(n, result, st, false);
        }

        /** the token immediately after this phase */
        final Tok token;

        private final int pos;

        boolean reducing;              // true while reduce() is in progress (affects newNode2/newNode3)
        public IntPairMap<Node> hash;  /* ALLOC */  // all nodes of this phase, keyed by (state, parent phase)
        private boolean closed;        // set once shift() has run for this phase
        private boolean good;          // did at least one shift out of this phase succeed?
        private Phase next = null;     // phase being shifted into (set in shift())
        private Phase prev;
        private Input.Location location;
        private Input.Location nextLocation;
        private Input.Location prevLocation;
        
        public final Parser parser;

        private Forest forest;         // forest carried by the shift from prev into this phase

        /**
         * Creates the phase for position <tt>pos</tt> and immediately (via reset()) replays
         * the shift from <tt>prev</tt> into it.
         * NOTE(review): both 'prev' and 'previous' are passed by the caller — they look like
         * they should be the same phase, but only 'previous' determines pos; confirm.
         */
        public Phase(Phase prev, Parser parser, Phase previous, Tok token, Input.Location location,
                     Input.Location nextLocation, Forest forest) throws ParseFailed {
            this.prevLocation = prev==null ? location : prev.getLocation();
            this.prev = prev;
            this.forest = forest;
            this.parser = parser;
            this.pos = previous==null ? 0 : previous.pos+1;
            this.token = token;
            this.location = location;
            this.nextLocation = nextLocation;
            performed.clear();
            reset();
        }

        /**
         * Restarts this phase from scratch: rolls 'performed' into 'lastperformed', clears all
         * per-phase state, and re-runs the previous phase's shift so the node set is rebuilt.
         * Called from the constructor and whenever reduce() catches a Reset.
         */
        public void reset() throws ParseFailed {
            waiting.clear();
            expected.clear();
            lastperformed.clear();
            lastperformed.addAll(performed);
            performed.clear();
            hash = new IntPairMap<Node>();
            reset = false;
            good = false;
            closed = false;
            reducing = false;
            finalResult = null;
            if (prev != null) prev.shift(this, forest);
        }

      
        /**
         * Returns true iff the input is exhausted (token==null); raises "unexpected end of
         * file" if the input ended without producing any accepting result.
         */
        public boolean isDone() throws ParseFailed {
            if (token != null) return false;
            if (token==null && finalResult==null)
                ParseFailed.error("unexpected end of file",
                                  getLocation(),
                                  token,
                                  hash.values(),
                                  getLocation().createRegion(getLocation()),
                                  input,
                                  GSS.this);
            return true;
        }

        public Input.Location getPrevLocation() { return prevLocation; }
        public Input.Location getLocation() { return location; }
        public Input.Region   getRegion() { return getPrevLocation().createRegion(getLocation()); }
        public Input.Location getNextLocation() { return nextLocation; }

        /** add a new node (merging with existing nodes if possible)
         *  @param parent             the parent of the new node
         *  @param result             the SPPF result corresponding to the new node
         *  @param state              the state that the new node is in
         *  @param fromEmptyReduction true iff this node is being created as a result of a reduction of length zero (see GRMLR paper)
         *  @param start              the earliest part of the input contributing to this node (used to make merging decisions)
         */
        public boolean newNode(Node parent, Forest pending, State state, boolean fromEmptyReduction) {
            // Merge into an existing node with the same (state, parent-phase) if one exists.
            Node p = hash.get(state, parent==null?null:parent.phase());
            if (p != null)  return newNode2(p, parent, pending, state, fromEmptyReduction);
            else            return newNode3(parent, pending, state, fromEmptyReduction);
        }
        /**
         * Like the four-argument newNode(), but additionally enforces the conjunctive
         * constraints (hates / needs / hated) of the Sequence owning <tt>reduction</tt>,
         * and afterwards drains any Waiting reductions unblocked by performing it.
         */
        public void newNode(Node parent, Forest pending, State state, boolean fromEmptyReduction, Position reduction) {
            int pos = parent==null?0:parent.phase()==null?0:parent.phase().pos;
            Sequence owner = reduction==null ? null : reduction.owner();
            if (reduction!=null) {
                if (owner.hates!=null) {
                    // Suppressed outright if a hated sequence already ran this attempt...
                    for (Sequence s : performed.getAll(pos))
                        if (owner.hates.contains(s))
                            return;
                    // ...or provisionally if it ran last attempt; then we must see that
                    // sequence again this attempt, or reduce() will Reset the phase.
                    for (Sequence s : lastperformed.getAll(pos))
                        if (owner.hates.contains(s)) {
                            //System.out.println("now expecting ["+pos+"] => " + s);
                            expected.add(pos, s);
                            return;
                        }
                }
                // Park this reduction until every needed sequence has been performed here.
                if (owner.needs != null)
                    for(Sequence s : owner.needs)
                        if (!performed.contains(pos, s)) {
                            waiting.add(s, new Waiting(parent, pending, state, fromEmptyReduction, reduction));
                            return;
                        }
                if (!performed.contains(pos, owner)) {
                    performed.add(pos, owner);
                    // Performing this sequence retroactively invalidates any sequence it
                    // hates that already ran: flag a Reset so the phase is replayed.
                    if (owner.hated != null)
                        for(Sequence seq : owner.hated)
                            if (performed.contains(pos, seq)) {
                                performed.remove(pos, seq);
                                reset = true;
                            }
                }
            }
            newNode(parent, pending, state, fromEmptyReduction);
            // Drain Waiting entries that needed 'owner' at this parent phase; restart the
            // scan after each perform() since it may mutate 'waiting'.
            if (reduction != null) {
                boolean redo = true;
                while(redo) {
                    redo = false;
                    for(Waiting w : waiting.getAll(owner)) {
                        if (w.parent==parent || (parent!=null&&w.parent!=null&&w.parent.phase()==parent.phase())) {
                            waiting.remove(owner, w);
                            w.perform();
                            redo = true;
                            break;
                        }
                    }
                }
            }
        }

        // Case: a node p with the same (state, parent-phase) already exists — merge into it,
        // and if we are mid-reduce, replay reductions through the newly added parent edge.
        private boolean newNode2(Node p, Node parent, Forest pending, State state, boolean fromEmptyReduction) {
            if (p.merge(parent, pending)) return true;
            p.addParent(parent, true);
            if (p!=parent && !fromEmptyReduction && reducing) p.performReductions(parent);
            return true;
        }

        // Case: no existing node — prune if the state is useless (can neither shift the
        // upcoming token, accept, nor reduce on it), otherwise allocate a fresh Node.
        private boolean newNode3(Node parent, Forest pending, State state, boolean fromEmptyReduction) {
            do {
                if (token != null && state.canShift(token)) break;
                if (state.isAccepting()) break;
                if (token==null) break;
                if (!state.canReduce(token)) return false;
                //if (count > 1) break;
                //if (r.numPop == 0) break;
                //r.reduce(pending, parent, null, Phase.this, null);
                //return;
            } while(false);

            Node n = new Node(Phase.this, parent, pending, state);  // ALLOC
            if (reducing) {
                n.performEmptyReductions();
                if (!fromEmptyReduction) n.performReductions(parent);
            }
            return true;
        }

        public LinkedList<Node> reductionQueue = new LinkedList<Node>();

        /** perform all reduction operations */
        public void reduce() throws ParseFailed {
            try {
                reducing = true;
                if (reducing_list==null || reducing_list.length < hash.size())
                    reducing_list = new Node[hash.size() * 4];
                hash.toArray(reducing_list);
                int num = hash.size();
                // Empty (length-zero) reductions first, over the snapshot of current nodes.
                for(int i=0; i<num; i++) {
                    Node n = reducing_list[i];
                    n.performEmptyReductions();
                    // INVARIANT: we never "see" a node until its parent-set is complete, modulo merges
                }
                for(int i=0; i<num; i++) {
                    reductionQueue.add(reducing_list[i]);
                    reducing_list[i] = null;   // drop reference so the Node can be GC'd
                }
                while(!reductionQueue.isEmpty()) {
                    reductionQueue.remove().performReductions();
                }
                // A performed sequence was retroactively killed by a hater: replay the phase.
                if (reset) {
                    reset = false;
                    resets++;
                    throw new Reset();
                }                
                // A sequence suppressed on the strength of last attempt's results never
                // re-ran this attempt — the suppression was unjustified; replay the phase.
                for(int i : expected)
                    for(Sequence s : expected.getAll(i))
                        if (!performed.contains(i, s)) {
                            //System.out.println("resetting due to pos="+i+": " + s + " " + System.identityHashCode(s));
                            resets++;
                            throw new Reset();
                        }
            } catch (Reset r) {
                // NOTE(review): recursive retry — unbounded only if the hate/need constraints
                // never reach a fixpoint; presumably the grammar guarantees termination.
                reset();
                reduce();
            }
            count = 0;
        }

        private boolean reset = false;
        // Control-flow exception used to abort and replay the current phase.
        class Reset extends RuntimeException { }

        /** perform all shift operations, adding promoted nodes to <tt>next</tt> */
        public void shift(Phase next, Forest result) throws ParseFailed {
            // this massively improves GC performance
            if (prev!=null && parser.helpgc) {
                //prev.hash = null;
                //System.out.println("\r" + /*shifts + " " + */ single_newnode /*+ "/"+multi_newnode + " " + waiting_newnode*/);
                //System.out.println("\r" + shifts + " " + note);
                //System.out.println("\r" + shifts);
                //System.out.println("\r" + toplevel_reductions);
                //System.out.println("\r" + multi_newnode);
                single_newnode = 0;
                note = "";
                multi_newnode = 0;
                toplevel_reductions = 0;
                waiting_newnode = 0;
                shifts = 0;
            }
            this.next = next;
            closed = true;
            Forest res = null;
            boolean ok = false;
            int count = 0;
            for(Node n : hash.values()) {
                // At end-of-input, fold every accepting node's results into finalResult.
                if (token == null && n.state().isAccepting()) {
                    if (finalResult==null) finalResult = new Forest.Many();
                    for(Object f : n.results())
                        finalResult.merge((Forest)f);
                }
                if (token == null) continue;
                // Shifts land in 'next' via invoke(), which also sets 'good' on success.
                n.state().invokeShifts(token, this, result, n);
            }
            //System.out.println(next.hash.size());
            if (!good && token!=null)
                ParseFailed.error("unexpected character",
                                  getLocation(),
                                  token,
                                  hash.values(),
                                  getRegion(),
                                  input,
                                  GSS.this);

            if (token==null && finalResult==null)
                ParseFailed.error("unexpected end of file",
                                  getLocation(),
                                  token,
                                  hash.values(),
                                  getLocation().createRegion(getLocation()),
                                  input,
                                  GSS.this);
        }


        /**
         * A reduction deferred because its owning Sequence still 'needs' another Sequence
         * that has not yet been performed at the parent's position; perform() replays it.
         */
        class Waiting {
            Node parent;
            Forest pending;
            State state;
            boolean fromEmptyReduction;
            Position reduction;
            public Waiting(Node parent, Forest pending, State state, boolean fromEmptyReduction, Position reduction) {
                waits++;
                this.parent = parent;
                this.pending = pending;
                this.state = state;
                this.fromEmptyReduction = fromEmptyReduction;
                this.reduction = reduction;
            }
            public void perform() {
                //System.out.println("performing: " + reduction.position);
                waiting_newnode++;
                newNode(parent, pending, state, fromEmptyReduction, reduction);
            }
        }
       

        // IntegerMappable key; pos+1 so that phase 0 does not map to 0.
        public int toInt() { return pos+1; }
        public int size() { return hash==null ? 0 : hash.size(); }

        // GraphViz //////////////////////////////////////////////////////////////////////////////

        public GraphViz.Node toGraphViz(GraphViz gv) {
            if (gv.hasNode(this)) return gv.createNode(this);
            GraphViz.Group g = gv.createGroup(this);
            g.label = "Phase " + pos;
            g.color = "gray";
            g.cluster = true;
            return g;
        }
        public boolean isTransparent() { return false; }
        public boolean isHidden() { return false; }

    }

}