public int waits = 0;
HashMapBag<Integer,Sequence> inhibited = new HashMapBag<Integer,Sequence>();
+ HashMapBag<Integer,Sequence> assumed = new HashMapBag<Integer,Sequence>();
HashMapBag<Sequence,Phase.Waiting> waiting = new HashMapBag<Sequence,Phase.Waiting>();
HashMapBag<Integer,Sequence> performed = new HashMapBag<Integer,Sequence>();
+ HashSet<Phase.Waiting> tail = new HashSet<Phase.Waiting>();
/** corresponds to a position <i>between tokens</i> in the input stream; same as Tomita's U_i's */
- public class Phase implements Invokable<State, Forest, GSS.Phase.Node> {
+ public class Phase implements Invokable<State, Forest, GSS.Phase.Node>, IntegerMappable {
+ public int toInt() { return pos+1; }
/** the token immediately after this phase */
public final Token token;
public Forest.Ref finalResult;
- /** all nodes, keyed by the value returned by code() */
+ /** all nodes, keyed by the (state, start-phase) pair */
- /*private*/ HashMap<Long,Phase.Node> hash; /* ALLOC */
+ /*private*/ IntPairMap<Phase.Node> hash; /* ALLOC */
/** the number of nodes in this phase */
private int numNodes;
this.token = token;
this.location = location;
inhibited.clear();
+ assumed.clear();
reset();
}
public void reset() {
+ tail.clear();
waiting.clear();
performed.clear();
- hash = new HashMap<Long,Phase.Node>();
+ hash = new IntPairMap<Phase.Node>();
good = false;
closed = false;
numNodes = 0;
ret.append("\n ");
ret.append(message);
HashMap<String,HashSet<String>> errors = new HashMap<String,HashSet<String>>();
- for(Node n : hash.values()) complain(n, errors, false);
+ for(Node n : hash.values()) {
+ //System.out.println(n.state);
+ complain(n, errors, false);
+ }
for(String s : errors.keySet()) {
ret.append(" while parsing " + yellow(s));
HashSet<String> hs = errors.get(s);
return ret.toString();
}
- public boolean isDone() throws Parser.Failed {
+ public boolean isDone() throws ParseFailed {
if (token != null) return false;
if (token==null && finalResult==null)
- throw new Parser.Failed(error(red("unexpected end of file\n")),
+ throw new ParseFailed(error(red("unexpected end of file\n")),
getLocation());
return true;
}
* @param start the earliest part of the input contributing to this node (used to make merging decisions)
*/
public boolean newNode(Node parent, Forest pending, State state, boolean fromEmptyReduction) {
- return newNode(parent, pending, state, fromEmptyReduction, null); }
- public boolean newNode(Node parent, Forest pending, State state, boolean fromEmptyReduction, Reduction reduction) {
+ Node p = hash.get(state, parent==null?null:parent.phase());
+ if (p != null) return newNode2(p, parent, pending, state, fromEmptyReduction);
+ else return newNode3(parent, pending, state, fromEmptyReduction);
+ }
+ public void newNode(Node parent, Forest pending, State state, boolean fromEmptyReduction, Reduction reduction) {
int pos = parent==null?0:parent.phase()==null?0:parent.phase().pos;
+ Sequence owner = reduction==null ? null : reduction.position.owner();
if (reduction!=null) {
- if (inhibited.contains(pos, reduction.position.owner())) return false;
- if (reduction.position.owner().needs != null) {
- for(Sequence s : reduction.position.owner().needs) {
+ if (inhibited.contains(pos, owner)) return;
+ /*
+ if (assumed.contains(pos, owner)) {
+ tail.add(new Waiting(parent, pending, state, fromEmptyReduction, reduction));
+ return;
+ }
+ */
+ if (owner.needs != null)
+ for(Sequence s : owner.needs)
if (!performed.contains(pos, s)) {
waiting.add(s, new Waiting(parent, pending, state, fromEmptyReduction, reduction));
- return false;
+ return;
}
- }
- }
- if ((reduction.position.owner().needed != null && reduction.position.owner().needed.size()>0) ||
- (reduction.position.owner().hated != null && reduction.position.owner().hated.size()>0) ||
- (reduction.position.owner().hates != null && reduction.position.owner().hates.size()>0))
- performed.add(pos, reduction.position.owner());
+ if ((owner.needed != null && owner.needed.size()>0) ||
+ (owner.hated != null && owner.hated.size()>0) ||
+ (owner.hates != null && owner.hates.size()>0))
+ performed.add(pos, owner);
}
- Node p = hash.get(code(state, parent==null?null:parent.phase()));
- boolean ret;
+ if (!owner.lame)
+ newNode(parent, pending, state, fromEmptyReduction);
if (reduction!=null) inhibit(reduction, parent==null?0:parent.phase().pos);
- if (p != null) ret = newNode2(p, parent, pending, state, fromEmptyReduction, reduction);
- else ret = newNode3(parent, pending, state, fromEmptyReduction, reduction);
if (reduction != null) {
boolean redo = true;
while(redo) {
redo = false;
- for(Waiting w : waiting.getAll(reduction.position.owner())) {
+ for(Waiting w : waiting.getAll(owner)) {
if (w.parent==parent || (parent!=null&&w.parent!=null&&w.parent.phase()==parent.phase())) {
- waiting.remove(reduction.position.owner(), w);
+ waiting.remove(owner, w);
w.perform();
redo = true;
break;
}
}
}
- return ret;
}
- private boolean newNode2(Node p, Node parent, Forest pending, State state, boolean fromEmptyReduction, Reduction reduction) {
+ private boolean newNode2(Node p, Node parent, Forest pending, State state, boolean fromEmptyReduction) {
p.holder.merge(pending);
if (p.parents().contains(parent)) return true;
- //if (p.fe && p.phase() != parent.phase()) throw new Error("yep yep");
- //if (!p.fe && p.phase() == parent.phase()) throw new Error("yep yep2");
p.parents().add(parent, true);
if (p!=parent && !fromEmptyReduction) p.queueReductions(parent);
return true;
}
- private boolean newNode3(Node parent, Forest pending, State state, boolean fromEmptyReduction, Reduction reduction) {
+ private boolean newNode3(Node parent, Forest pending, State state, boolean fromEmptyReduction) {
do {
if (token != null && state.canShift(token)) break;
if (state.isAccepting()) break;
if (performed.contains(p,seq)) {
uninhibit(p, seq);
//System.out.println("\nresetting due to " + r.position.owner() + " killing " + seq);
- if (!reset) inhibited.add(p, seq);
+ //inhibited.clear();
+ inhibited.add(p, seq);
+ //assumed = inhibited;
+ //inhibited = new HashMapBag<Integer,Sequence>();
reset = true;
+ resets++;
+ throw new Reset();
}
- if (!reset) inhibited.add(p, seq);
- }
- if (reset) {
- resets++;
- throw new Reset();
+ inhibited.add(p, seq);
}
}
reducing = true;
if (reducing_list==null || reducing_list.length < hash.size())
reducing_list = new Phase.Node[hash.size() * 4];
- Collection<Node> hv = hash.values();
- hv.toArray(reducing_list);
- int num = hv.size();
+ hash.toArray(reducing_list);
+ int num = hash.size();
for(int i=0; i<num; i++) {
Node n = reducing_list[i];
n.queueEmptyReductions();
reducing_list[i] = null;
n.queueReductions();
}
+ //for(Waiting w : tail)
+ //w.perform();
} catch (Reset r) {
reset();
reduce();
}
/** perform all shift operations, adding promoted nodes to <tt>next</tt> */
- public void shift(Phase next, Forest result) throws Parser.Failed {
+ public void shift(Phase next, Forest result) throws ParseFailed {
if (prev!=null) prev.hash = null;
this.next = next;
closed = true;
Forest res = null;
boolean ok = false;
for(Phase.Node n : hash.values()) {
- if (n.holder==null) continue;
- n.holder.resolve();
if (token == null && n.state.isAccepting()) {
if (finalResult==null) finalResult = new Forest.Ref();
finalResult.merge(n.holder);
}
- if (!n.holder.valid()) continue;
if (token == null) continue;
n.state.invokeShifts(token, this, result, n);
}
if (!good && token!=null)
- throw new Parser.Failed(error(red("unexpected character")+" "+purple(token)+" encountered at "+green(getLocation())+"\n"),
+ throw new ParseFailed(error(red("unexpected character")+" "+purple(token)+" encountered at "+green(getLocation())+"\n"),
getLocation());
if (token==null && finalResult==null)
- throw new Parser.Failed(error(red("unexpected end of file\n")),
+ throw new ParseFailed(error(red("unexpected end of file\n")),
getLocation());
// this massively improves GC performance
private Node(Node parent, Forest pending, State state, boolean fe) {
this.fe = fe;
this.state = state;
- for(Position p : state) {
- if (p.owner().needs!=null)
- for(Sequence s : p.owner().needs) {
- //dead = true;
- //redo = false;
- }
- }
+ this.holder().merge(pending);
Phase start = parent==null ? null : parent.phase();
- if (pending != null) this.holder().merge(pending);
if (parent != null) parents().add(parent, true);
- if (Phase.this.hash.get(code(state, start)) != null) throw new Error("severe problem!");
- Phase.this.hash.put(code(state, start), this);
+ if (Phase.this.hash.get(state, start) != null) throw new Error("severe problem!");
+ Phase.this.hash.put(state, start, this);
Phase.this.numNodes++;
- if (parent==null) holder().valid = true; // hack to make sure that the "base" node is always considered valid
}
}
if (a==null || b==null) return false;
return a.equals(b);
}
-
- /** this is something of a hack right now */
- private static long code(State state, Phase start) {
- return (((long)state.idx) << 32) | (start==null ? 0 : (start.pos+1));
- }
}