X-Git-Url: http://git.megacz.com/?p=sbp.git;a=blobdiff_plain;f=src%2Fedu%2Fberkeley%2Fsbp%2FParser.java;h=bfc022dd43bb68e86fcaba97d1f4a3bd01c56de2;hp=c2e7023c42a5ff31a22a4453d1fef9c2a4ed3a83;hb=HEAD;hpb=9d727bd14c659cdc6c34153b988e8d3fdb8067f5

diff --git a/src/edu/berkeley/sbp/Parser.java b/src/edu/berkeley/sbp/Parser.java
index c2e7023..bfc022d 100644
--- a/src/edu/berkeley/sbp/Parser.java
+++ b/src/edu/berkeley/sbp/Parser.java
@@ -1,157 +1,261 @@
-// Copyright 2006 all rights reserved; see LICENSE file for BSD-style license
+// Copyright 2006-2007 all rights reserved; see LICENSE file for BSD-style license
 package edu.berkeley.sbp;
-import edu.berkeley.sbp.*;
 import edu.berkeley.sbp.util.*;
-import edu.berkeley.sbp.Sequence.Position;
+import edu.berkeley.sbp.Sequence.Pos;
+import edu.berkeley.sbp.Sequence.Pos;
 import java.io.*;
 import java.util.*;
 /** a parser which translates an Input<Token> into a Forest<NodeType> */
-public abstract class Parser {
+public abstract class Parser implements Serializable {
-    protected final Table pt;
+    final Table pt;
     /** create a parser to parse the grammar with start symbol u */
-    public Parser(Union u, Topology top) { this.pt = new Table(u, top); }
+    public Parser(Union u) { this.pt = new Table(u); }
     /** implement this method to create the output forest corresponding to a lone shifted input token */
-    public abstract Forest shiftToken(Token t, Input.Location newloc);
+    public abstract Forest shiftToken(Token t, Input.Region region);
-    public String toString() { return pt.toString(); }
+    public abstract Topology emptyTopology();
-    private boolean verbose = false;;
-    private static final char[] spin = new char[] { '-', '\\', '|', '/' };
-    private int spinpos = 0;
-    private long last = 0;
-    void spin() {
-        if (verbose) {
-            long now = System.currentTimeMillis();
-            if (now-last < 100) return;
-            last = now;
-            System.err.print("\r " + spin[spinpos++ % (spin.length)]+ANSI.clreol()+"\r");
-        }
-    }
+    public String toString() { return pt.toString(); }
     /** parse input, and return the shared packed parse forest (or throw an exception) */
     public Forest parse(Input input) throws IOException, ParseFailed {
+        long start = System.currentTimeMillis();
         verbose = System.getProperty("sbp.verbose", null) != null;
         spinpos = 0;
+        GSS gss = new GSS(input, this);
+        int idmax = 0;
+        int[][] count = new int[1024*1024][];
+        HashMap ids = new HashMap();
         try {
-            GSS gss = new GSS(input, this);
             for(GSS.Phase current = gss.new Phase(pt.start); ;) {
-
-                if (verbose) {
-                    String s;
-                    s = " " + spin[spinpos++ % (spin.length)]+" parsing ";
-                    s += input.getName();
-                    s += " "+input.getLocation();
-                    while(s.indexOf(':') != -1 && s.indexOf(':') < 8) s = " " + s;
-                    String y = "@"+gss.viewPos+" ";
-                    while(y.length() < 9) y = " " + y;
-                    s += y;
-                    //s += " doom="+Node.doomedNodes;
-                    //while(s.length() < 40) s = s + " ";
-                    s += " nodes="+gss.numOldNodes;
-                    while(s.length() < 50) s = s + " ";
-                    s += " shifted="+gss.numNewNodes;
-                    while(s.length() < 60) s = s + " ";
-                    s += " reductions="+gss.numReductions;
-                    System.err.print("\r"+s+ANSI.clreol()+"\r");
-                }
-
-                // FIXME: make sure all the locations line up properly in here
+                if (verbose) debug(current.token, gss, input);
                 if (current.isDone()) return (Forest)current.finalResult;
-                Forest forest = shiftToken((Token)current.token, input.getLocation());
+                Input.Region region = current.getLocation().createRegion(current.getNextLocation());
+                Forest forest = shiftToken((Token)current.token, region);
+                /*
+                int maxid = 0;
+                for(Reduction r : gss.finishedReductions)
+                    if (ids.get(r.reduction())==null)
+                        ids.put(r.reduction(), idmax++);
+                count[current.pos] = new int[idmax];
+                for(Reduction r : gss.finishedReductions)
+                    count[current.pos][ids.get(r.reduction())]++;
+                */
                 current = gss.new Phase(current, forest);
             }
         } finally {
-            if (verbose)
-                System.err.print("\r \r");
+            if (verbose) {
+                long time = System.currentTimeMillis() - start;
+                System.err.println("\r parse time: " + time +"ms "+ ANSI.clreol());
+                debug(null, gss, input);
+            }
+            /*
+            PrintWriter pw = new PrintWriter(new OutputStreamWriter(new FileOutputStream("out.plot")));
+            boolean[] use = new boolean[idmax];
+            for(int i=0; i20)
+                use[j] = true;
+            for(int i=0; i=count[i].length ? 0 : count[i][j]));
+                }
+                pw.println();
+            }
+            pw.close();
+            pw = new PrintWriter(new OutputStreamWriter(new FileOutputStream("test.plot")));
+            pw.println("set terminal postscript enhanced color");
+            pw.println("set output \"out.ps\"");
+            pw.println("set pm3d map");
+            pw.println("set autoscale");
+            pw.println("set view 0,0");
+            pw.println("set ytics (\\");
+            int q = -1;
+            for(int j=0; j
-    class Table extends Cache {
+    class Table implements Serializable {
         /** the start state */
-        public final State start;
+        final State start;
-        /** the state from which no reductions can be done */
+        /** a dummy state from which no reductions can be performed */
         private final State dead_state;
         /** used to generate unique values for State.idx */
         private int master_state_idx = 0;
-        HashSet> all_states = new HashSet>();
-        HashMap,State> doomed_states = new HashMap,State>();
-        HashMap,State> normal_states = new HashMap,State>();
+
+        /** all the states for this table */
+        private transient HashSet> all_states = new HashSet>();
+
+        /** all the doomed states in this table */
+        private transient HashMap,State> doomed_states = new HashMap,State>();
+
+        /** all the non-doomed states in this table */
+        private transient HashMap,State> normal_states = new HashMap,State>();
         /** construct a parse table for the given grammar */
-        public Table(Topology top) { this("s", top); }
-        public Table(String startSymbol, Topology top) { this(new Union(startSymbol), top); }
-        public Table(Union ux, Topology top) {
-            super(ux, top);
-            Union start0 = new Union("0");
-            Sequence seq0 = new Sequence.Singleton(ux);
-            start0.add(seq0);
-            buildFollowSet(seq0, top, true);
-
-            // construct the set of states
-            HashSet hp = new HashSet();
-            reachable(start0, hp);
-
-            this.dead_state = new State(new HashSet(), true);
-            this.start = new State(hp);
+        Table(Union ux) {
+            Union rootUnion = new Union("0", Sequence.create(ux), true);
+            Grammar grammar = new Grammar(rootUnion) {
+                public Topology emptyTopology() { return Parser.this.emptyTopology(); }
+            };
+
+            // create the "dead state"
+            this.dead_state = new State(new HashSet(), true, grammar);
+            // construct the start state; this will recursively create *all* the states
+            this.start = new State(reachable(rootUnion), false, grammar);
+
+            buildReductions(grammar);
+            sortReductions(grammar);
+        }
+
+        /** fill in the reductions table */
+        private void buildReductions(Grammar grammar) {
             // for each state, fill in the corresponding "row" of the parse table
             for(State state : all_states)
-                for(Position p : state.hs) {
-
-                    // the Grammar's designated "last position" is the only accepting state
-                    if (start0.contains(p.owner()) && p.next()==null && !state.doomed)
-                        state.accept = true;
-
-                    if (isRightNullable(p)) {
-                        Topology follow = (Topology)follow(p.owner());
-                        for(Position p2 = p; p2 != null && p2.element() != null; p2 = p2.next()) {
-                            if (!(p2.element() instanceof Union)) throw new Error("impossible");
-                            Union u = (Union)p2.element();
-                            Atom set = new edu.berkeley.sbp.chr.CharAtom(new edu.berkeley.sbp.chr.CharTopology((Topology)epsilonFollowSet(u)));
-                            Element p2e = p2.element();
-                            if (p2e instanceof Union)
-                                for(Sequence p2es : ((Union)p2e))
-                                    follow = follow.intersect(follow(p2es));
-                            if (set != null) follow = follow.intersect(set.getTokenTopology());
-                        }
-                        state.reductions.put(follow, p);
-                        if (followEof.contains(p.owner())) state.eofReductions.add(p);
-                    }
+                for(Pos p : state.hs) {
                     // if the element following this position is an atom, copy the corresponding
                     // set of rows out of the "master" goto table and into this state's shift table
                     if (p.element() != null && p.element() instanceof Atom)
                         state.shifts.addAll(state.gotoSetTerminals.subset(((Atom)p.element()).getTokenTopology()));
+
+                    // RNGLR: we can potentially reduce from any "right-nullable" position -- that is,
+                    // any position for which all Elements after it in the Sequence are capable of
+                    // matching the empty string.
+                    if (!grammar.isRightNullable(p)) continue;
+                    Topology follow = grammar.follow(p.owner());
+                    for(Pos p2 = p; p2 != null && p2.element() != null; p2 = p2.next()) {
+                        if (!(p2.element() instanceof Union))
+                            throw new Error("impossible -- only Unions can be nullable");
+
+                        // interesting RNGLR-followRestriction interaction: we must intersect
+                        // not just the follow-set of the last non-nullable element, but the
+                        // follow-sets of the nulled elements as well.
+                        for(Sequence s : ((Union)p2.element()))
+                            follow = follow.intersect(grammar.follow(s));
+                        Topology set = grammar.epsilonFollowSet((Union)p2.element());
+                        if (set != null) follow = follow.intersect(set);
+                    }
+
+                    // indicate that when the next token is in the set "follow", nodes in this
+                    // state should reduce according to Pos "p"
+                    state.reductions.put(follow, p);
+                    if (grammar.followEof.contains(p.owner())) state.eofReductions.add(p);
                 }
-            if (top instanceof IntegerTopology)
+            // optimize the reductions table
+            if (emptyTopology() instanceof IntegerTopology)
                 for(State state : all_states) {
-                    state.oreductions = state.reductions.optimize(((IntegerTopology)top).functor());
-                    state.oshifts = state.shifts.optimize(((IntegerTopology)top).functor());
+                    // FIXME: this is pretty ugly
+                    state.oreductions = state.reductions.optimize(((IntegerTopology)emptyTopology()).functor());
+                    state.oshifts = state.shifts.optimize(((IntegerTopology)emptyTopology()).functor());
                 }
+        }
+
+        // FIXME: this method needs to be cleaned up and documented
+        private void sortReductions(Grammar grammar) {
             // crude algorithm to assing an ordinal ordering to every position
             // al will be sorted in DECREASING order (al[0] >= al[1])
-            ArrayList al = new ArrayList();
+            ArrayList al = new ArrayList();
            for(State s : all_states) {
-                for(Object po : s) {
-                    Sequence.Position p = (Sequence.Position)po;
+                for(Object po : s.positions()) {
+                    Sequence.Pos p = (Sequence.Pos)po;
                     if (al.contains(p)) continue;
                     int i=0;
                     for(; i
 {
             OUTER: while(true) {
                 for(int i=0; i
-                        if (comparePositions(al.get(i), al.get(j)) > 0) {
-                            Sequence.Position p = al.remove(j);
+                        if (grammar.comparePositions(al.get(i), al.get(j)) > 0) {
+                            Sequence.Pos p = al.remove(j);
                             al.add(i, p);
                             continue OUTER;
                         }
@@ -175,7 +279,7 @@ public abstract class Parser {
             for(int i=0; i
-                    if (comparePositions(al.get(k), al.get(i)) > 0)
+                    if (grammar.comparePositions(al.get(k), al.get(i)) > 0)
                         { inc = true; break; }
                 }
                 inc = true;
@@ -185,56 +289,81 @@
             }
             al.get(i).ord = j;
         }
-
-        /*
-        for(int i=0; i
-        class State implements IntegerMappable, Iterable {
+        /**
+         *  A single state in the LR table and the transitions
+         *  possible from it
+         *
+         *  A state corresponds to a set of Sequence.Pos's.  Each
+         *  StateNode in the GSS has a State; the StateNode represents a set of
+         *  possible parses, one for each Pos in the State.
+         *
+         *  Every state is either "doomed" or "normal".  If a Pos
+         *  is part of a Sequence which is a conjunct (that is, it was
+         *  passed to Sequence.{and(),andnot()}), then that Pos
+         *  will appear only in doomed States.  Furthermore, any set
+         *  of Positions reachable from a doomed State also forms a
+         *  doomed State.  Note that in this latter case, a doomed
+         *  state might have exactly the same set of Positions as a
+         *  non-doomed state.
+         *
+         *  Nodes with non-doomed states represent nodes which
+         *  contribute to actual valid parses.  Nodes with doomed
+         *  States exist for no other purpose than to enable/disable
+         *  some future reduction from a non-doomed StateNode.  Because of
+         *  this, we "garbage-collect" Nodes with doomed states if
+         *  there are no more non-doomed Nodes which they could
+         *  affect (see ResultNode, Reduction, and StateNode for details).
+         *
+         *  Without this optimization, many seemingly-innocuous uses
+         *  of positive and negative conjuncts can trigger O(n^2)
+         *  space+time complexity in otherwise simple grammars.  There
+         *  is an example of this in the regression suite.
+         */
+        class State implements IntegerMappable, Serializable {
             public final int idx = master_state_idx++;
-            private final HashSet hs;
-            public HashSet> also = new HashSet>();
+            private final transient HashSet hs;
+            public HashSet> conjunctStates = new HashSet>();
-            public transient HashMap> gotoSetNonTerminals = new HashMap>();
-            private transient TopologicalBag> gotoSetTerminals = new TopologicalBag>();
+            HashMap> gotoSetNonTerminals = new HashMap>();
+            private transient TopologicalBag> gotoSetTerminals = new TopologicalBag>();
-            private TopologicalBag reductions = new TopologicalBag();
-            private HashSet eofReductions = new HashSet();
+            TopologicalBag reductions = new TopologicalBag();
+            HashSet eofReductions = new HashSet();
             private TopologicalBag> shifts = new TopologicalBag>();
             private boolean accept = false;
             private VisitableMap> oshifts = null;
-            private VisitableMap oreductions = null;
+            private VisitableMap oreductions = null;
+            public final boolean doomed;
             // Interface Methods //////////////////////////////////////////////////////////////////////////////
+            public boolean doomed() { return doomed; }
             boolean isAccepting() { return accept; }
-            public Iterator iterator() { return hs.iterator(); }
+
+            Iterable positions() { return hs; }
+
             boolean canShift(Token t) { return oshifts!=null && oshifts.contains(t); }
-            void invokeShifts(Token t, GSS.Phase phase, Result r) { oshifts.invoke(t, phase, r); }
+            void invokeShifts(Token t, GSS.Phase phase, StateNode pred, Forest f) { oshifts.invoke(t, phase, pred, f); }
             boolean canReduce(Token t) { return oreductions != null && (t==null ? eofReductions.size()>0 : oreductions.contains(t)); }
-            void invokeEpsilonReductions(Token t, Node node) {
-                if (t==null) for(Position r : eofReductions) node.invoke(r, null);
-                else oreductions.invoke(t, node, null);
+            void invokeEpsilonReductions(Token t, StateNode node) {
+                if (t==null) for(Pos r : eofReductions) node.invoke(r, null, null);
+                else oreductions.invoke(t, node, null, null);
             }
-            void invokeReductions(Token t, Node node, Result b) {
-                //System.err.println("\rinvokage: " + this);
-                if (t==null) for(Position r : eofReductions) node.invoke(r, b);
-                else oreductions.invoke(t, node, b);
+            void invokeReductions(Token t, StateNode node, ResultNode only) {
+                if (t==null) for(Pos r : eofReductions) node.invoke(r, only, null);
+                else oreductions.invoke(t, node, only, null);
             }
             // Constructor //////////////////////////////////////////////////////////////////////////////
             /**
-             * create a new state consisting of all the Positions in hs
-             * @param hs the set of Positions comprising this State
+             * create a new state consisting of all the Poss in hs
+             * @param hs the set of Poss comprising this State
              * @param all the set of all elements (Atom instances need not be included)
             *
             * In principle these two steps could be merged, but they
@@ -253,152 +382,130 @@
             * for non-Atom Elements.
             *
             */
-            public State(HashSet hs) { this(hs, false); }
-            public boolean doomed;
-            public State(HashSet hs, boolean doomed) {
+            public State(HashSet hs, boolean doomed, Grammar grammar) {
                 this.hs = hs;
                 this.doomed = doomed;
-                // register ourselves in the all_states hash so that no
-                // two states are ever created with an identical position set
+                // register ourselves so that no two states are ever
+                // created with an identical position set (termination depends on this)
                 ((HashMap)(doomed ? doomed_states : normal_states)).put(hs, this);
                 ((HashSet)all_states).add(this);
-
-                for(Position p : hs) {
+
+                for(Pos p : hs) {
+                    // Step 1a: take note if we are an accepting state
+                    //          (last position of the root Union's sequence)
+                    if (p.next()==null && !doomed && grammar.rootUnion.contains(p.owner()))
+                        accept = true;
+
+                    // Step 1b: If any Pos in the set is the first position of its sequence, then this
+                    //          state is responsible for spawning the "doomed" states for each of the
+                    //          Sequence's conjuncts.  This obligation is recorded by adding the to-be-spawned
+                    //          states to conjunctStates.
                     if (!p.isFirst()) continue;
-                    for(Sequence s : p.owner().needs()) {
-                        if (hs.contains(s.firstp())) continue;
-                        HashSet h2 = new HashSet();
-                        reachable(s, h2);
-                        also.add(mkstate(h2, true));
-                    }
-                    for(Sequence s : p.owner().hates()) {
-                        if (hs.contains(s.firstp())) continue;
-                        HashSet h2 = new HashSet();
-                        reachable(s, h2);
-                        also.add(mkstate(h2, true));
-                    }
+                    for(Sequence s : p.owner().needs())
+                        if (!hs.contains(s.firstp()))
+                            conjunctStates.add(mkstate(reachable(s.firstp()), true, grammar));
+                    for(Sequence s : p.owner().hates())
+                        if (!hs.contains(s.firstp()))
+                            conjunctStates.add(mkstate(reachable(s.firstp()), true, grammar));
                 }
-                // Step 1a: examine all Position's in this state and compute the mappings from
+                // Step 2a: examine all Pos's in this state and compute the mappings from
                 //          sets of follow tokens (tokens which could follow this position) to sets
                 //          of _new_ positions (positions after shifting).  These mappings are
                 //          collectively known as the _closure_
-                TopologicalBag bag0 = new TopologicalBag();
-                for(Position position : hs) {
+                TopologicalBag bag0 = new TopologicalBag();
+                for(Pos position : hs) {
                     if (position.isLast() || !(position.element() instanceof Atom)) continue;
                     Atom a = (Atom)position.element();
-                    HashSet hp = new HashSet();
+                    HashSet hp = new HashSet();
                     reachable(position.next(), hp);
                     bag0.addAll(a.getTokenTopology(), hp);
                 }
-                // Step 1b: for each _minimal, contiguous_ set of characters having an identical next-position
+                // Step 2b: for each _minimal, contiguous_ set of characters having an identical next-position
                 //          set, add that character set to the goto table (with the State corresponding to the
                 //          computed next-position set).
                 for(Topology r : bag0) {
-                    HashSet h = new HashSet();
-                    for(Position p : bag0.getAll(r)) h.add(p);
-                    ((TopologicalBag)gotoSetTerminals).put(r, mkstate(h, doomed));
+                    HashSet h = new HashSet();
+                    for(Pos p : bag0.getAll(r)) h.add(p);
+                    ((TopologicalBag)gotoSetTerminals).put(r, mkstate(h, doomed, grammar));
                 }
-                // Step 2: for every Sequence, compute the closure over every position in this set which
+                // Step 3: for every Sequence, compute the closure over every position in this set which
                 //         is followed by a symbol which could yield the Sequence.
                 //
                 //         "yields" [in one or more step] is used instead of "produces" [in exactly one step]
                 //         to avoid having to iteratively construct our set of States as shown in most
                 //         expositions of the algorithm (ie "keep doing XYZ until things stop changing").
-                HashMapBag move = new HashMapBag();
-                for(Position p : hs)
+                HashMapBag move = new HashMapBag();
+                for(Pos p : hs)
                     if (!p.isLast() && p.element() instanceof Union)
                         for(Sequence s : ((Union)p.element())) {
-                            HashSet hp = new HashSet();
+                            HashSet hp = new HashSet();
                             reachable(p.next(), hp);
                             move.addAll(s, hp);
                         }
                 OUTER: for(Sequence y : move) {
                     // if a reduction is "lame", it should wind up in the dead_state after reducing
-                    HashSet h = move.getAll(y);
-                    State s = mkstate(h, doomed);
-                    for(Position p : hs)
+                    HashSet h = move.getAll(y);
+                    State s = mkstate(h, doomed, grammar);
+                    for(Pos p : hs)
                         if (p.element() != null && (p.element() instanceof Union))
                             for(Sequence seq : ((Union)p.element()))
                                 if (seq.needs.contains(y) || seq.hates.contains(y)) {
                                     // FIXME: assumption that no sequence is ever both usefully (non-lamely) matched
                                     //        and also directly lamely matched
-                                    ((HashMap)gotoSetNonTerminals).put(y, dead_state);
+                                    for(Pos pp = y.firstp(); pp != null; pp = pp.next())
+                                        ((HashMap)gotoSetNonTerminals).put(pp, dead_state);
                                     continue OUTER;
                                 }
-                    gotoSetNonTerminals.put(y, s);
+                    for(Pos pp = y.firstp(); pp != null; pp = pp.next())
+                        gotoSetNonTerminals.put(pp, s);
                 }
             }
-            private State mkstate(HashSet h, boolean b) {
-                if (b) return doomed_states.get(h) == null ? (State)new State(h,b) : (State)doomed_states.get(h);
-                else return normal_states.get(h) == null ? (State)new State(h,b) : (State)normal_states.get(h);
-            }
-
-            public String toStringx() {
-                StringBuffer st = new StringBuffer();
-                for(Position p : this) {
-                    if (st.length() > 0) st.append("\n");
-                    st.append(p);
-                }
-                return st.toString();
+            private State mkstate(HashSet h, boolean b, Grammar grammar) {
+                State ret = (b?doomed_states:normal_states).get(h);
+                if (ret==null) ret = new State(h,b, grammar);
+                return ret;
             }
+            public int toInt() { return idx; }
             public String toString() {
                 StringBuffer ret = new StringBuffer();
-                ret.append("state["+idx+"]: ");
-                for(Position p : this) ret.append("{"+p+"} ");
+                for(Pos p : hs)
+                    ret.append(p+"\n");
                 return ret.toString();
             }
-
-            public int toInt() { return idx; }
         }
-        public String toString() {
-            StringBuffer sb = new StringBuffer();
-            sb.append("parse table");
-            for(State state : all_states) {
-                sb.append(" " + state + "\n");
-                for(Topology t : state.shifts) {
-                    sb.append(" shift \""+
-                              new edu.berkeley.sbp.chr.CharTopology((IntegerTopology)t)+"\" => ");
-                    for(State st : state.shifts.getAll(t))
-                        sb.append(st.idx+" ");
-                    sb.append("\n");
-                }
-                for(Topology t : state.reductions)
-                    sb.append(" reduce \""+
-                              new edu.berkeley.sbp.chr.CharTopology((IntegerTopology)t)+"\" => " +
-                              state.reductions.getAll(t) + "\n");
-                for(Sequence s : state.gotoSetNonTerminals.keySet())
-                    sb.append(" goto "+state.gotoSetNonTerminals.get(s)+" from " + s + "\n");
-            }
-            return sb.toString();
-        }
     }
     // Helpers //////////////////////////////////////////////////////////////////////////////
-    private static void reachable(Sequence s, HashSet h) {
-        reachable(s.firstp(), h);
-        //for(Sequence ss : s.needs()) reachable(ss, h);
-        //for(Sequence ss : s.hates()) reachable(ss, h);
+    private static HashSet reachable(Element e) {
+        HashSet h = new HashSet();
+        reachable(e, h);
+        return h;
     }
-    private static void reachable(Element e, HashSet h) {
+    private static void reachable(Element e, HashSet h) {
         if (e instanceof Atom) return;
         for(Sequence s : ((Union)e))
-            reachable(s, h);
+            reachable(s.firstp(), h);
     }
-    private static void reachable(Position p, HashSet h) {
+    private static void reachable(Pos p, HashSet h) {
         if (h.contains(p)) return;
         h.add(p);
         if (p.element() != null) reachable(p.element(), h);
     }
-    //public static Cache mastercache = null;
+    private static HashSet reachable(Pos p) {
+        HashSet ret = new HashSet();
+        reachable(p, ret);
+        return ret;
+    }
+}
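
The RNGLR comment added in buildReductions() above relies on the notion of a "right-nullable" position: a position in a Sequence from which every remaining element can match the empty string, so the reduction may be performed before those elements are consumed, and the follow sets of the nulled elements must be intersected into the reduction's follow set.  The standalone sketch below illustrates the usual nullable-set fixed point and the right-nullability test that the comment assumes; it is plain Java, independent of the sbp API, and the toy grammar, class name, and rule encoding are invented purely for illustration.

import java.util.*;

public class RightNullableDemo {
    public static void main(String[] args) {
        // toy grammar:  S -> A B "x"    A -> "a" | (empty)    B -> A A
        Map<String, List<List<String>>> rules = new HashMap<>();
        rules.put("S", List.of(List.of("A", "B", "x")));
        rules.put("A", List.of(List.of("a"), Collections.<String>emptyList()));
        rules.put("B", List.of(List.of("A", "A")));

        // fixed point: a nonterminal is nullable if some right-hand side consists
        // entirely of nullable symbols (the empty right-hand side trivially qualifies)
        Set<String> nullable = new HashSet<>();
        boolean changed = true;
        while (changed) {
            changed = false;
            for (Map.Entry<String, List<List<String>>> e : rules.entrySet())
                for (List<String> rhs : e.getValue())
                    if (nullable.containsAll(rhs) && nullable.add(e.getKey()))
                        changed = true;
        }
        System.out.println("nullable = " + nullable);   // A and B, but not S

        // a "position" is a rule plus a dot index; it is right-nullable when every
        // symbol after the dot is nullable, so  B -> A . A  may already reduce here
        List<String> rhs = rules.get("B").get(0);
        int dot = 1;
        boolean rightNullable = nullable.containsAll(rhs.subList(dot, rhs.size()));
        System.out.println("B -> A . A right-nullable? " + rightNullable);   // true
    }
}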