package edu.berkeley.sbp;
import edu.berkeley.sbp.util.*;
import edu.berkeley.sbp.Sequence.Position;
+import edu.berkeley.sbp.Sequence.Pos;
import java.io.*;
import java.util.*;
-// FEATURE: try harder to "fuse" states together along two dimensions:
-// - identical (equivalent) states, or states that subsume each other
-// - unnecessary intermediate states ("short cut" GLR)
-
/** a parser which translates an Input<Token> into a Forest<NodeType> */
public abstract class Parser<Token, NodeType> {
_last = c;
switch(c) {
case edu.berkeley.sbp.chr.CharAtom.left:
- buf += "\033[31m{\033[0m";
+ buf += "\033[31m>\033[0m";
break;
case edu.berkeley.sbp.chr.CharAtom.right:
- buf += "\033[31m}\033[0m";
+ buf += "\033[31m<\033[0m";
break;
case -1: // FIXME
case '\n':
// Table //////////////////////////////////////////////////////////////////////////////
/** an SLR(1) parse table which may contain conflicts */
- class Table extends Grammar<Token> {
+ class Table extends Grammar<Token> implements Serializable {
/** the start state */
final State<Token> start;
private int master_state_idx = 0;
/** all the states for this table */
- HashSet<State<Token>> all_states = new HashSet<State<Token>>();
+ private transient HashSet<State<Token>> all_states = new HashSet<State<Token>>();
/** all the doomed states in this table */
- HashMap<HashSet<Position>,State<Token>> doomed_states = new HashMap<HashSet<Position>,State<Token>>();
+ private transient HashMap<HashSet<Position>,State<Token>> doomed_states = new HashMap<HashSet<Position>,State<Token>>();
/** all the non-doomed states in this table */
- HashMap<HashSet<Position>,State<Token>> normal_states = new HashMap<HashSet<Position>,State<Token>>();
+ private transient HashMap<HashSet<Position>,State<Token>> normal_states = new HashMap<HashSet<Position>,State<Token>>();
/** delegates to the enclosing Parser's emptyTopology() so the Table shares its parser's token topology */
Topology<Token> emptyTopology() { return Parser.this.emptyTopology(); }
// al will be sorted in DECREASING order (al[0] >= al[1])
ArrayList<Sequence.Position> al = new ArrayList<Sequence.Position>();
for(State s : all_states) {
- for(Object po : s) {
+ for(Object po : s.positions()) {
Sequence.Position p = (Sequence.Position)po;
if (al.contains(p)) continue;
int i=0;
* space+time complexity in otherwise simple grammars. There
* is an example of this in the regression suite.
*/
- class State<Token> implements IntegerMappable, Iterable<Position> {
+ class State<Token> implements IntegerMappable, Serializable {
public final int idx = master_state_idx++;
- private final HashSet<Position> hs;
+ private final transient HashSet<Position> hs;
public HashSet<State<Token>> conjunctStates = new HashSet<State<Token>>();
- HashMap<Sequence,State<Token>> gotoSetNonTerminals = new HashMap<Sequence,State<Token>>();
+ HashMap<Pos,State<Token>> gotoSetNonTerminals = new HashMap<Pos,State<Token>>();
private transient TopologicalBag<Token,State<Token>> gotoSetTerminals = new TopologicalBag<Token,State<Token>>();
- private TopologicalBag<Token,Position> reductions = new TopologicalBag<Token,Position>();
- private HashSet<Position> eofReductions = new HashSet<Position>();
+ private TopologicalBag<Token,Pos> reductions = new TopologicalBag<Token,Pos>();
+ private HashSet<Pos> eofReductions = new HashSet<Pos>();
private TopologicalBag<Token,State<Token>> shifts = new TopologicalBag<Token,State<Token>>();
private boolean accept = false;
private VisitableMap<Token,State<Token>> oshifts = null;
- private VisitableMap<Token,Position> oreductions = null;
+ private VisitableMap<Token,Pos> oreductions = null;
public final boolean doomed;
// Interface Methods //////////////////////////////////////////////////////////////////////////////
/** accessor for the final {@code doomed} flag set at construction; true if this state exists only to support conjuncts/rejects */
public boolean doomed() { return doomed; }
/** true if this state accepts the input (the {@code accept} flag is set elsewhere during table construction) */
boolean isAccepting() { return accept; }
- public Iterator<Position> iterator() { return hs.iterator(); }
+
+ Iterable<Position> positions() { return hs; }
+
/** true if this state has a shift transition on token {@code t}; null-guards {@code oshifts} since it may never have been initialized */
boolean canShift(Token t) { return oshifts!=null && oshifts.contains(t); }
/** perform all shift transitions on {@code t} for the given phase/result; NOTE(review): no null check on oshifts — presumably callers test canShift(t) first, verify */
void invokeShifts(Token t, GSS.Phase phase, Result r) { oshifts.invoke(t, phase, r); }
/** true if this state has a reduction on token {@code t}; {@code t==null} means end-of-input, checked against eofReductions */
boolean canReduce(Token t) {
    return oreductions != null && (t==null ? eofReductions.size()>0 : oreductions.contains(t)); }
void invokeEpsilonReductions(Token t, Node node) {
- if (t==null) for(Position r : eofReductions) node.invoke(r, null);
+ if (t==null) for(Pos r : eofReductions) node.invoke(r, null);
else oreductions.invoke(t, node, null);
}
void invokeReductions(Token t, Node node, Result b) {
- if (t==null) for(Position r : eofReductions) node.invoke(r, b);
+ if (t==null) for(Pos r : eofReductions) node.invoke(r, b);
else oreductions.invoke(t, node, b);
}
if (seq.needs.contains(y) || seq.hates.contains(y)) {
// FIXME: assumption that no sequence is ever both usefully (non-lamely) matched
// and also directly lamely matched
- ((HashMap)gotoSetNonTerminals).put(y, dead_state);
+ for(Position pp = y.firstp(); pp != null; pp = pp.next())
+ ((HashMap)gotoSetNonTerminals).put(pp, dead_state);
continue OUTER;
}
- gotoSetNonTerminals.put(y, s);
+ for(Position pp = y.firstp(); pp != null; pp = pp.next())
+ gotoSetNonTerminals.put(pp, s);
}
}