1 package edu.berkeley.sbp;
2 import edu.berkeley.sbp.*;
3 import edu.berkeley.sbp.*;
4 import edu.berkeley.sbp.*;
5 import edu.berkeley.sbp.util.*;
8 import java.lang.reflect.*;
10 //////////////////////////////////////////////////////////////////////////////
13 // - fix public/package/private status
16 //////////////////////////////////////////////////////////////////////////////
17 // Optimizations to add
19 // ** NOTE: not all of these are appropriate for this class -- it is
20 // simply a list of optimizations not implemented. This
21 // class is meant to remain simple and easy to understand;
22 // optimizations which obscure that do not belong here (they
23 // should go into the compiled version instead)
25 /** implements Tomita's Graph Structured Stack */
    /** scratch array reused across reduce() calls so we don't reallocate it every phase */
    private Phase.Node[] reducing_list = null;

    /** corresponds to a position <i>between tokens</i> in the input stream; same as Tomita's U_i's */

    /** the token immediately after this phase */
    public final Token token;

    // presumably set while reduce() is in progress; the assignment is not visible in this fragment -- confirm
    boolean reducing = false;

    /** currently this is necessary only for the code() hack -- it doesn't actually correspond to the input */
    private final int pos;

    /** the merged result of all accepting nodes reached at end of input (filled in by shift()) */
    public Forest.Ref finalResult = null;

    /** all nodes, keyed by the value returned by code() */
    private HashMap<Long,Phase.Node> hash = new HashMap<Long,Phase.Node>(); /* ALLOC */

    /** the number of nodes in this phase */
    private int numNodes = 0;

    // once true, Node.pending() resolves holders instead of returning them raw;
    // NOTE(review): the line that sets this is not visible in this fragment -- confirm
    boolean closed = false;

    /** the location in the input that this phase corresponds to */
    private Token.Location location;
    /**
     * Creates the phase that follows <tt>previous</tt> (pass null for the initial phase).
     * @param previous the phase immediately before this one, or null at the start of input
     * @param token the token immediately after this phase
     * @param location the location in the input corresponding to this phase
     */
    public Phase(Phase previous, Token token, Token.Location location) {
        // zero-based position; only used by the code() hack
        this.pos = previous==null ? 0 : previous.pos+1;
        // NOTE(review): the assignment of this.token appears to have been elided
        // from this fragment -- confirm against the original source
        this.location = location;
61 public boolean isDone() { return token == null; }
    /** human-readable description of the parse failure; filled in by the previous phase's shift() */
    private String error = "generic syntax error";

    /**
     * Reports a parse failure for this phase.
     * NOTE(review): as written this always throws -- the guard that detects whether the
     * phase actually failed appears to have been elided from this fragment; confirm
     * against the original source.
     * @throws Parser.Failed carrying the error message and this phase's location
     */
    public void checkFailure() throws Parser.Failed {
        throw new Parser.Failed(error, getLocation());
69 public Token.Location getLocation() { return location; }
    /** add a new node (merging with existing nodes if possible)
     *  @param parent the parent of the new node
     *  @param pending the SPPF result corresponding to the new node
     *  @param state the state that the new node is in
     *  @param fromEmptyReduction true iff this node is being created as a result of a reduction of length zero (see GRMLR paper)
     *  @param start the earliest part of the input contributing to this node (used to make merging decisions)
     */
    public void newNode(Node parent, Forest pending, Parser.Table.State state, boolean fromEmptyReduction, Phase start) {
        // nodes are keyed by (state, start) -- see code(); merge into an existing node when one matches
        Node p = hash.get(code(state, start));
        if (p != null) newNode2(p, parent, pending, state, fromEmptyReduction, start);
        else newNode3(parent, pending, state, fromEmptyReduction, start);
    /** newNode(), case where a node with the same (state, start) key already exists: merge into it */
    private void newNode2(Node p, Node parent, Forest pending, Parser.Table.State state, boolean fromEmptyReduction, Phase start) {
        p.holder.merge(pending);
        if (p.parents().contains(parent)) return;   // edge already present; nothing further to queue
        p.addParent(parent, fromEmptyReduction);
    /** newNode(), case where no node with this (state, start) key exists yet: allocate a fresh one */
    private void newNode3(Node parent, Forest pending, Parser.Table.State state, boolean fromEmptyReduction, Phase start) {
        // NOTE(review): the enclosing block (probably a do{...}while(false) used as a
        // breakable scope) and the declaration of <tt>count</tt> appear to have been
        // elided from this fragment -- confirm against the original source
        if (token != null && state.canShift(token)) break;   // state is useful: it can shift the lookahead
        if (state.isAccepting()) break;                      // state is useful: it accepts
        if (token==null) break;                              // at end of input, keep every state
        Parser.Table.Reduction r = null;
        for(Parser.Table.Reduction red : token==null ? state.getEofReductions() : state.getReductions(token)) { r = red; count++; }
        if (count==0) return; // BEWARE! this optimization is suspected to cause really nasty heisenbugs
        //if (count > 1) break;
        //if (r.numPop == 0) break;
        //r.reduce(pending, parent, null, Phase.this, null);
        Node n = new Node(parent, pending, state, start); // ALLOC
        n.queueEmptyReductions();
        if (!fromEmptyReduction) n.queueReductions(parent);
    /** perform all reduction operations */
    public void reduce() {
        // reuse reducing_list across calls; grow it only when the node count outstrips it
        if (reducing_list==null || reducing_list.length < hash.size())
            reducing_list = new Phase.Node[hash.size() * 4];
        Collection<Node> hv = hash.values();
        hv.toArray(reducing_list);
        // NOTE(review): the assignment of <tt>num</tt> (the live node count) and the
        // closing braces of these loops appear to have been elided from this fragment --
        // confirm against the original source
        for(int i=0; i<num; i++) {
            Node n = reducing_list[i];
            n.queueEmptyReductions();
        // INVARIANT: we never "see" a node until its parent-set is complete, modulo merges
        for(int i=0; i<num; i++) {
            Node n = reducing_list[i];
            reducing_list[i] = null;   // drop our reference so the node can be collected
            n.queueEmptyReductions();
    /** perform all shift operations, adding promoted nodes to <tt>next</tt> */
    public void shift(Phase next, Forest result) {
        // NOTE(review): several lines (the declarations of <tt>ok</tt> and <tt>res</tt>,
        // the setting of <tt>closed</tt>, and various closing braces) appear to have
        // been elided from this fragment -- confirm against the original source
        for(Phase.Node n : hash.values()) {
            if (n.holder==null) continue;
            if (token == null && n.state.isAccepting()) {
                // end of input in an accepting state: fold this node's result into the final answer
                if (finalResult==null) finalResult = new Forest.Ref();
                finalResult.merge(n.holder);
            if (!n.holder.valid()) continue;
            if (token == null) continue;   // nothing to shift at end of input
            for(Parser.Table.State st : n.state.getShifts(token)) {
                if (res == null) res = result;
                next.newNode(n, res, st, true, this);
        // no node managed to shift the lookahead token: record an error on the next phase
        if (!ok && token != null) {
            StringBuffer error = new StringBuffer();
            error.append("error: unable to shift token \"" + token + "\"\n");
            //error.append(" before: " +pendingReductions+ "\n");
            //error.append(" before: " +totalReductions+ "\n");
            //for(Phase.Node n : hash.values()) {
            //n.queueReductions();
            //n.queueEmptyReductions();
            //error.append(" after: " +pendingReductions+ "\n");
            //error.append(" candidate states:\n");
            //for(Phase.Node n : hash.values()) {
            //for(Sequence.Position p : n.state) error.append(" " + p + "\n");
            //error.append(" --\n");
            //for(Parser.Table.Reduction r : n.state.getReductions(token)) error.append(" " + r + "\n");
            //error.append(" ==\n");
            next.error = error.toString();
172 // this massively improves GC performance
177 // GSS Nodes //////////////////////////////////////////////////////////////////////////////
    /** a node in the GSS */
    public final class Node extends FastSet<Node> {

        /**
         * Adds <tt>parent</tt> as a parent of this node and queues the reductions the
         * new edge makes possible (skipped for edges created by empty reductions, whose
         * reductions are queued separately).
         */
        public void addParent(Node parent, boolean fromEmptyReduction) {
            parents().add(parent, true);
            if (this!=parent && !fromEmptyReduction) queueReductions(parent);
        /** lazily-allocated holder for this node's partial parse results; see holder() */
        private Forest.Ref holder = null;
        /** guards queueReductions() so the full scan happens at most once per node */
        private boolean allqueued = false;

        /** caches per-reduction results so repeated parents share a single result; see queueReductions(Node,boolean) */
        private HashMap<Parser.Table.Reduction,Forest> cache = null;

        /** the set of nodes to which there is an edge starting at this node */
        //public final FastSet<Node> parents = new FastSet<Node>(); /* ALLOC */

        /** what state this node is in */
        public final Parser.Table.State state;
        /** which Phase this Node belongs to (note that Node is also a non-static inner class of Phase) */
        public Phase phase() { return Phase.this; }

        /** lazily creates and returns the per-reduction result cache */
        public HashMap<Parser.Table.Reduction,Forest> cache() {
            return cache==null ? (cache = new HashMap<Parser.Table.Reduction,Forest>()) : cache;
204 public Forest.Ref holder() { return holder==null ? (holder = new Forest.Ref()) : holder; }
205 public Forest pending() { return Phase.this.closed ? holder().resolve() : holder; }
        /** the parent set of this node -- the node <i>is</i> its own FastSet of parents (saves an allocation) */
        public FastSet<Node> parents() { return this; }
        /** queue all reductions applicable to this node; runs at most once (see allqueued) */
        public void queueReductions() {
            if (allqueued) return;
            // NOTE(review): the assignment allqueued=true and part of the loop body
            // (likely a guard on r.numPop and a use of <tt>where</tt>) appear to have
            // been elided from this fragment -- confirm against the original source
            int where = parents().size();
            for(Parser.Table.Reduction r : token==null ? state.getEofReductions() : state.getReductions(token))
                r.reduce(this, null, null);
219 public void queueReductions(Node n2) { queueReductions(n2, true); }
        /** queue the reductions made possible by the edge from this node to <tt>n2</tt> */
        public void queueReductions(Node n2, boolean includeLongs) {
            // if the full scan never ran, run it instead -- it subsumes this edge
            if (!allqueued) { queueReductions(); return; }
            // NOTE(review): the declaration of <tt>n</tt>, the branch structure around
            // the cache (probably keyed on r.numPop==1), and several closing braces
            // appear to have been elided from this fragment -- confirm against the original
            for(Parser.Table.Reduction r : token==null ? n.state.getEofReductions() : n.state.getReductions(token)) {

                // The problem here is that a "reduction of length 1"
                // performed twice with different values of n2 needs
                // to only create a *single* new result, but must add
                // multiple parents to the node holding that result.
                // The current reducer doesn't differentiate between
                // the next node of an n-pop reduction and the
                // ultimate parent of the last pop, so we need to
                // cache instances here as a way of avoiding
                // re-creating them.

                if (r.numPop <= 0) continue;   // empty reductions are handled by queueEmptyReductions()
                // reuse the cached result for this reduction so merged parents share one instance
                Forest ret = n.cache().get(r);
                if (ret != null) r.reduce(this, n2, ret);
                else n.cache().put(r, r.reduce(this, n2, null));
                r.reduce(this, n2, null);
        /** queue the zero-pop (empty) reductions applicable to this node */
        public void queueEmptyReductions() {
            // NOTE(review): a guard (likely on Phase.this.reducing and r.numPop==0)
            // appears to have been elided here -- confirm against the original source
            for(Parser.Table.Reduction r : token==null ? state.getEofReductions() : state.getReductions(token))
                r.reduce(this, null, r.zero());
        /**
         * Creates a node and registers it in the phase's (state, start)-keyed hash.
         * NOTE(review): the assignment of this.state appears to have been elided from
         * this fragment -- confirm against the original source.
         */
        private Node(Node parent, Forest pending, Parser.Table.State state, Phase start) {
            if (pending != null) this.holder().merge(pending);
            if (parent != null) parents().add(parent, true);
            // newNode() should have routed duplicates to newNode2(); a collision here is a logic error
            if (Phase.this.hash.get(code(state, start)) != null) throw new Error("severe problem!");
            Phase.this.hash.put(code(state, start), this);
            Phase.this.numNodes++;
            if (parent==null) holder().valid = true; // hack to make sure that the "base" node is always considered valid
    /** null-tolerant equality helper: two nulls are equal, one null is not */
    private static boolean equal(Object a, Object b) {
        if (a==null && b==null) return true;
        if (a==null || b==null) return false;
        // NOTE(review): the final statement (presumably "return a.equals(b);") appears
        // to have been elided from this fragment -- confirm against the original source
    /** this is something of a hack right now */
    private static long code(Parser.Table.State state, Phase start) {
        // pack (state index, start-phase position) into one long: state in the high
        // 32 bits, position in the low 32 (a null start maps to position 0)
        return (((long)state.idx) << 32) | (start==null ? 0 : start.pos);