import java.io.*;
import java.util.*;
-/** a parser which translates streams of Tokens of type T into a Forest<R> */
-public abstract class Parser<Tok, Result> {
+/** a parser which translates an Input<Token> into a Forest<NodeType> */
+public abstract class Parser<Token, NodeType> {
- protected final Table<Tok> pt;
+ protected final Table<Token> pt;
/** create a parser to parse the grammar with start symbol <tt>u</tt> */
- protected Parser(Union u, Topology<Tok> top) { this.pt = new Table<Tok>(u, top); }
- protected Parser(Table<Tok> pt) { this.pt = pt; }
+ protected Parser(Union u, Topology<Token> top) { this.pt = new Table<Token>(u, top); }
+ protected Parser(Table<Token> pt) { this.pt = pt; }
/** implement this method to create the output forest corresponding to a lone shifted input token */
- protected abstract Forest<Result> shiftToken(Tok t, Input.Location newloc);
+ protected abstract Forest<NodeType> shiftToken(Token t, Input.Location newloc);
    // When false, parse() writes an "out-<idx>.dot" file per phase (empty as
    // written here — presumably a GraphViz debugging hook; see parse()).
    boolean helpgc = true;

    /** delegates to the parse table's string rendering */
    public String toString() { return pt.toString(); }
/** parse <tt>input</tt>, and return the shared packed parse forest (or throw an exception) */
- public Forest<Result> parse(Input<Tok> input) throws IOException, ParseFailed {
+ public Forest<NodeType> parse(Input<Token> input) throws IOException, ParseFailed {
GSS gss = new GSS();
Input.Location loc = input.getLocation();
- GSS.Phase current = gss.new Phase<Tok>(null, this, null, input.next(), loc, null);
+ GSS.Phase current = gss.new Phase<Token>(null, this, null, input.next(), loc, null);
current.newNode(null, Forest.create(null, null, null, false), pt.start, true);
int count = 1;
for(int idx=0;;idx++) {
Input.Location oldloc = loc;
loc = input.getLocation();
current.reduce();
- Forest forest = current.token==null ? null : shiftToken((Tok)current.token, loc);
- GSS.Phase next = gss.new Phase<Tok>(current, this, current, input.next(), loc, forest);
+ Forest forest = current.token==null ? null : shiftToken((Token)current.token, loc);
+ GSS.Phase next = gss.new Phase<Token>(current, this, current, input.next(), loc, forest);
if (!helpgc) {
FileOutputStream fos = new FileOutputStream("out-"+idx+".dot");
PrintWriter p = new PrintWriter(new OutputStreamWriter(fos));
p.close();
}
count = next.size();
- if (current.isDone()) return (Forest<Result>)gss.finalResult;
+ if (current.isDone()) return (Forest<NodeType>)gss.finalResult;
current = next;
}
}
// Table //////////////////////////////////////////////////////////////////////////////
/** an SLR(1) parse table which may contain conflicts */
- static class Table<Tok> extends Walk.Cache {
+ static class Table<Token> extends Walk.Cache {
public String toString() {
StringBuffer sb = new StringBuffer();
sb.append("parse table");
- for(State<Tok> state : all_states.values()) {
+ for(State<Token> state : all_states.values()) {
sb.append(" " + state + "\n");
- for(Topology<Tok> t : state.shifts) {
+ for(Topology<Token> t : state.shifts) {
sb.append(" shift \""+
new edu.berkeley.sbp.chr.CharTopology((IntegerTopology<Character>)t)+"\" => ");
for(State st : state.shifts.getAll(t))
sb.append(st.idx+" ");
sb.append("\n");
}
- for(Topology<Tok> t : state.reductions)
+ for(Topology<Token> t : state.reductions)
sb.append(" reduce \""+
new edu.berkeley.sbp.chr.CharTopology((IntegerTopology<Character>)t)+"\" => " +
state.reductions.getAll(t) + "\n");
}
/** the start state */
- public final State<Tok> start;
+ public final State<Token> start;
/** the state from which no reductions can be done */
- private final State<Tok> dead_state;
+ private final State<Token> dead_state;
/** used to generate unique values for State.idx */
private int master_state_idx = 0;
- HashMap<HashSet<Position>,State<Tok>> all_states = new HashMap<HashSet<Position>,State<Tok>>();
+ HashMap<HashSet<Position>,State<Token>> all_states = new HashMap<HashSet<Position>,State<Token>>();
/** construct a parse table for the given grammar */
public Table(Topology top) { this("s", top); }
HashSet<Position> hp = new HashSet<Position>();
reachable(start0, hp);
- this.dead_state = new State<Tok>(new HashSet<Position>(), all_states, all_elements);
- this.start = new State<Tok>(hp, all_states, all_elements);
+ this.dead_state = new State<Token>(new HashSet<Position>(), all_states, all_elements);
+ this.start = new State<Token>(hp, all_states, all_elements);
// for each state, fill in the corresponding "row" of the parse table
- for(State<Tok> state : all_states.values())
+ for(State<Token> state : all_states.values())
for(Position p : state.hs) {
// the Grammar's designated "last position" is the only accepting state
state.shifts.addAll(state.gotoSetTerminals.subset(((Atom)p.element()).getTokenTopology()));
}
if (top instanceof IntegerTopology)
- for(State<Tok> state : all_states.values()) {
+ for(State<Token> state : all_states.values()) {
state.oreductions = state.reductions.optimize(((IntegerTopology)top).functor());
state.oshifts = state.shifts.optimize(((IntegerTopology)top).functor());
}
/** a single state in the LR table and the transitions possible from it */
- class State<Tok> implements Comparable<State<Tok>>, IntegerMappable, Iterable<Position> {
+ class State<Token> implements Comparable<State<Token>>, IntegerMappable, Iterable<Position> {
public final int idx = master_state_idx++;
private final HashSet<Position> hs;
- public transient HashMap<Sequence,State<Tok>> gotoSetNonTerminals = new HashMap<Sequence,State<Tok>>();
- private transient TopologicalBag<Tok,State<Tok>> gotoSetTerminals = new TopologicalBag<Tok,State<Tok>>();
+ public transient HashMap<Sequence,State<Token>> gotoSetNonTerminals = new HashMap<Sequence,State<Token>>();
+ private transient TopologicalBag<Token,State<Token>> gotoSetTerminals = new TopologicalBag<Token,State<Token>>();
- private TopologicalBag<Tok,Position> reductions = new TopologicalBag<Tok,Position>();
+ private TopologicalBag<Token,Position> reductions = new TopologicalBag<Token,Position>();
private HashSet<Position> eofReductions = new HashSet<Position>();
- private TopologicalBag<Tok,State<Tok>> shifts = new TopologicalBag<Tok,State<Tok>>();
+ private TopologicalBag<Token,State<Token>> shifts = new TopologicalBag<Token,State<Token>>();
private boolean accept = false;
- private VisitableMap<Tok,State<Tok>> oshifts = null;
- private VisitableMap<Tok,Position> oreductions = null;
+ private VisitableMap<Token,State<Token>> oshifts = null;
+ private VisitableMap<Token,Position> oreductions = null;
// Interface Methods //////////////////////////////////////////////////////////////////////////////
boolean isAccepting() { return accept; }
public Iterator<Position> iterator() { return hs.iterator(); }
- boolean canShift(Tok t) { return oshifts!=null && oshifts.contains(t); }
- <B,C> void invokeShifts(Tok t, Invokable<State<Tok>,B,C> irbc, B b, C c) {
+ boolean canShift(Token t) { return oshifts!=null && oshifts.contains(t); }
+ <B,C> void invokeShifts(Token t, Invokable<State<Token>,B,C> irbc, B b, C c) {
oshifts.invoke(t, irbc, b, c);
}
- boolean canReduce(Tok t) { return oreductions != null && (t==null ? eofReductions.size()>0 : oreductions.contains(t)); }
- <B,C> void invokeReductions(Tok t, Invokable<Position,B,C> irbc, B b, C c) {
+ boolean canReduce(Token t) { return oreductions != null && (t==null ? eofReductions.size()>0 : oreductions.contains(t)); }
+ <B,C> void invokeReductions(Token t, Invokable<Position,B,C> irbc, B b, C c) {
if (t==null) for(Position r : eofReductions) irbc.invoke(r, b, c);
else oreductions.invoke(t, irbc, b, c);
}
* </ul>
*/
public State(HashSet<Position> hs,
- HashMap<HashSet<Position>,State<Tok>> all_states,
+ HashMap<HashSet<Position>,State<Token>> all_states,
HashSet<SequenceOrElement> all_elements) {
this.hs = hs;
// of _new_ positions (positions after shifting). These mappings are
// collectively known as the _closure_
- TopologicalBag<Tok,Position> bag0 = new TopologicalBag<Tok,Position>();
+ TopologicalBag<Token,Position> bag0 = new TopologicalBag<Token,Position>();
for(Position position : hs) {
if (position.isLast() || !(position.element() instanceof Atom)) continue;
Atom a = (Atom)position.element();
// set, add that character set to the goto table (with the State corresponding to the
// computed next-position set).
- for(Topology<Tok> r : bag0) {
+ for(Topology<Token> r : bag0) {
HashSet<Position> h = new HashSet<Position>();
for(Position p : bag0.getAll(r)) h.add(p);
- gotoSetTerminals.put(r, all_states.get(h) == null ? new State<Tok>(h, all_states, all_elements) : all_states.get(h));
+ gotoSetTerminals.put(r, all_states.get(h) == null ? new State<Token>(h, all_states, all_elements) : all_states.get(h));
}
// Step 2: for every non-Atom element (ie every Element which has a corresponding reduction),
}
OUTER: for(SequenceOrElement y : move) {
HashSet<Position> h = move.getAll(y);
- State<Tok> s = all_states.get(h) == null ? new State<Tok>(h, all_states, all_elements) : all_states.get(h);
+ State<Token> s = all_states.get(h) == null ? new State<Token>(h, all_states, all_elements) : all_states.get(h);
// if a reduction is "lame", it should wind up in the dead_state after reducing
if (y instanceof Sequence) {
for(Position p : hs) {
return ret.toString();
}
- public int compareTo(State<Tok> s) { return idx==s.idx ? 0 : idx < s.idx ? -1 : 1; }
+ public int compareTo(State<Token> s) { return idx==s.idx ? 0 : idx < s.idx ? -1 : 1; }
public int toInt() { return idx; }
}
}