public int resets = 0;
public int waits = 0;
- HashMapBag<Integer,Sequence> inhibited = new HashMapBag<Integer,Sequence>();
- HashMapBag<Sequence,Phase.Waiting> waiting = new HashMapBag<Sequence,Phase.Waiting>();
- HashMapBag<Integer,Sequence> performed = new HashMapBag<Integer,Sequence>();
+ HashMapBag<Integer,Sequence> inhibited = new HashMapBag<Integer,Sequence>();
+ HashMapBag<Integer,Sequence> expectedInhibit = new HashMapBag<Integer,Sequence>();
+ HashMapBag<Sequence,Phase.Waiting> waiting = new HashMapBag<Sequence,Phase.Waiting>();
+ HashMapBag<Integer,Sequence> performed = new HashMapBag<Integer,Sequence>();
/** FIXME */
public Forest.Ref finalResult;
private boolean good;
private Phase next = null;
private Phase prev;
- private Token.Location location;
+ private Input.Location location;
public final Parser parser;
private Forest forest;
- public Phase(Phase prev, Parser parser, Phase previous, Tok token, Token.Location location, Forest forest) {
+ public Phase(Phase prev, Parser parser, Phase previous, Tok token, Input.Location location, Forest forest) {
this.prev = prev;
this.forest = forest;
this.parser = parser;
performed.clear();
hash = new IntPairMap<Phase.Node>();
singularReductions = new IntPairMap<Forest>();
+ expectedInhibit.clear();
+ expectedInhibit.addAll(inhibited);
good = false;
closed = false;
reducing = false;
return true;
}
- public Token.Location getLocation() { return location; }
+ public Input.Location getLocation() { return location; }
/** add a new node (merging with existing nodes if possible)
* @param parent the parent of the new node
throw new Reset();
}
inhibited.add(p, seq);
+ expectedInhibit.remove(p, seq);
}
}
reducing_list[i] = null;
n.performReductions();
}
+ if (expectedInhibit.size() > 0) {
+ inhibited.removeAll(expectedInhibit);
+ // NOTE(review): inhibition sets out of sync; Reset below triggers a reparse (stray debug println removed)
+ throw new Reset();
+ }
} catch (Reset r) {
reset();
reduce();