X-Git-Url: http://git.megacz.com/?p=sbp.git;a=blobdiff_plain;f=src%2Fedu%2Fberkeley%2Fsbp%2FParser.java;h=64ad67ac9abc46b3790a890dfd9398c4694f9121;hp=ab0688966495d0ee149c14e02306e2eee7c5ce38;hb=dc9bb3a45ed306e2e35549076842b3e74efecb48;hpb=6b53048f4413f3c618acc3581d0b4f60a236a9bc diff --git a/src/edu/berkeley/sbp/Parser.java b/src/edu/berkeley/sbp/Parser.java index ab06889..64ad67a 100644 --- a/src/edu/berkeley/sbp/Parser.java +++ b/src/edu/berkeley/sbp/Parser.java @@ -1,203 +1,279 @@ +// Copyright 2006 all rights reserved; see LICENSE file for BSD-style license + package edu.berkeley.sbp; -import edu.berkeley.sbp.*; import edu.berkeley.sbp.util.*; -import edu.berkeley.sbp.*; import edu.berkeley.sbp.Sequence.Position; -import edu.berkeley.sbp.*; import java.io.*; import java.util.*; -import java.lang.reflect.*; - -/** a parser which translates streams of Tokens of type T into a Forest */ -public class Parser { - - private final Table pt; - - private static void reachable(Element e, HashSet h) { - if (e instanceof Atom) return; - for(Sequence s : ((Union)e)) - reachable(s.firstp(), h); - } - private static void reachable(Position p, HashSet h) { - if (h.contains(p)) return; - h.add(p); - if (p.element() != null) reachable(p.element(), h); - } - - //public Parser( Topology top) { this(new Table( top)); } - //public Parser(String s, Topology top) { this(new Table(s, top)); } - - /** - * create a parser to parse the grammar with start symbol u - * @param top a "sample" Topology that can be cloned (FIXME, demanding this is lame) - */ - public Parser(Union u, Topology top) { this(new Table(u, top)); } - - Parser(Table pt) { this.pt = pt; } - - /** parse input for a exactly one unique result, throwing Ambiguous if not unique or Failed if none */ - public Tree parse1(Token.Stream input) throws IOException, Failed, Ambiguous { return parse(input).expand1(); } - - /** parse input, using the table pt to drive the parser */ - public Forest parse(Token.Stream input) throws IOException, Failed { - GSS gss = new GSS(); - GSS.Phase current = gss.new Phase(null, input.next()); - current.newNode(null, null, pt.start, true, null); - for(;;) { - GSS.Phase next = gss.new Phase(current, input.next()); - current.reduce(); - current.shift(next); - if (current.isDone()) return (Forest)current.finalResult; - current.checkFailure(); - current = next; - } - } - - // Exceptions ////////////////////////////////////////////////////////////////////////////// - - public static class Failed extends Exception { - private final Token.Location location; - private final String message; - public Failed() { this("", null); } - public Failed(String message, Token.Location loc) { this.location = loc; this.message = message; } - public Token.Location getLocation() { return location; } - public String toString() { return message + (location==null ? 
"" : (" at " + location + "\n" + location.getContext())); } +// FEATURE: try harder to "fuse" states together along two dimensions: +// - identical (equivalent) states, or states that subsume each other +// - unnecessary intermediate states ("short cut" GLR) + +/** a parser which translates an Input<Token> into a Forest<NodeType> */ +public abstract class Parser { + + final Table pt; + + /** create a parser to parse the grammar with start symbol u */ + public Parser(Union u) { this.pt = new Table(u); } + + /** implement this method to create the output forest corresponding to a lone shifted input token */ + public abstract Forest shiftToken(Token t, Input.Region region); + + public abstract Topology emptyTopology(); + + public String toString() { return pt.toString(); } + Cache cache() { return pt; } + + /** parse input, and return the shared packed parse forest (or throw an exception) */ + public Forest parse(Input input) throws IOException, ParseFailed { + verbose = System.getProperty("sbp.verbose", null) != null; + spinpos = 0; + try { + GSS gss = new GSS(input, this); + for(GSS.Phase current = gss.new Phase(pt.start); ;) { + + if (verbose) { + // FIXME: clean this up + String s; + s = " " + spin[spinpos++ % (spin.length)]+" parsing "; + s += input.getName(); + s += " "+input.getLocation(); + while(s.indexOf(':') != -1 && s.indexOf(':') < 8) s = " " + s; + String y = "@"+gss.viewPos+" "; + while(y.length() < 9) y = " " + y; + s += y; + s += " nodes="+gss.numOldNodes; + while(s.length() < 50) s = s + " "; + s += " shifted="+gss.numNewNodes; + while(s.length() < 60) s = s + " "; + s += " reductions="+gss.numReductions; + System.err.print("\r"+s+ANSI.clreol()+"\r"); + } + + if (current.isDone()) return (Forest)current.finalResult; + Forest forest = shiftToken((Token)current.token, current.getRegion()); + current = gss.new Phase(current, forest); + } + } finally { if (verbose) System.err.print("\r"+ANSI.clreol()); } } - public static class Ambiguous extends RuntimeException { - public final Forest ambiguity; - public Ambiguous(Forest ambiguity) { this.ambiguity = ambiguity; } - public String toString() { - StringBuffer sb = new StringBuffer(); - sb.append("unresolved ambiguity "/*"at " + ambiguity.getLocation() + ":"*/); - for(Object result : ambiguity.expand(false)) - sb.append("\n " + result); - return sb.toString(); - } + // Spinner ////////////////////////////////////////////////////////////////////////////// + + private boolean verbose = false; + private static final char[] spin = new char[] { '-', '\\', '|', '/' }; + private int spinpos = 0; + private long last = 0; + void spin() { + if (!verbose) return; + long now = System.currentTimeMillis(); + if (now-last < 70) return; + last = now; + System.err.print("\r " + spin[spinpos++ % (spin.length)]+"\r"); } - // Table ////////////////////////////////////////////////////////////////////////////// /** an SLR(1) parse table which may contain conflicts */ - static class Table { - - private final Union start0 = new Top(); - private final Sequence start0seq; - static class Top extends Union { public Top() { super("0"); } } - - public final Walk.Cache cache = new Walk.Cache(); - - public HashSet closure() { - HashSet hp = new HashSet(); - reachable(start0, hp); - return hp; - } - public Position firstPosition() { return start0seq.firstp(); } - public Position lastPosition() { Position ret = start0seq.firstp(); while(!ret.isLast()) ret = ret.next(); return ret; } - - private void walk(Element e, HashSet hs) { - if (e==null) return; - if 
(hs.contains(e)) return; - hs.add(e); - if (e instanceof Atom) return; - for(Sequence s : (Union)e) { - hs.add(s); - for(Position p = s.firstp(); p != null; p = p.next()) - walk(p.element(), hs); - } - } - public HashSet walk() { - HashSet ret = new HashSet(); - walk(start0, ret); - return ret; - } - - /* - public String toString() { - StringBuffer sb = new StringBuffer(); - for(Element e : walk()) - if (e instanceof Union) - ((Union)e).toString(sb); - return sb.toString(); - } - */ + class Table extends Cache { /** the start state */ - public final State start; + final State start; + + /** a dummy state from which no reductions can be performed */ + private final State dead_state; /** used to generate unique values for State.idx */ private int master_state_idx = 0; + /** all the states for this table */ + HashSet> all_states = new HashSet>(); + + /** all the doomed states in this table */ + HashMap,State> doomed_states = new HashMap,State>(); + + /** all the non-doomed states in this table */ + HashMap,State> normal_states = new HashMap,State>(); + + Topology emptyTopology() { return Parser.this.emptyTopology(); } + /** construct a parse table for the given grammar */ - public Table(Topology top) { this("s", top); } - public Table(String startSymbol, Topology top) { this(new Union(startSymbol), top); } - public Table(Union u, Topology top) { - start0seq = new Sequence.Singleton(u, null, null); - start0.add(start0seq); - - // construct the set of states - HashMap,State> all_states = new HashMap,State>(); - HashSet all_elements = walk(); - for(Element e : all_elements) - cache.ys.put(e, new Walk.YieldSet(e, cache).walk()); - this.start = new State(closure(), all_states, all_elements); + Table(Union ux) { + super(new Union("0", Sequence.create(ux), true)); + + // create the "dead state" + this.dead_state = new State(new HashSet(), true); + + // construct the start state; this will recursively create *all* the states + this.start = new State(reachable(rootUnion), false); + + buildReductions(); + sortReductions(); + } + /** fill in the reductions table */ + private void buildReductions() { // for each state, fill in the corresponding "row" of the parse table - for(State state : all_states.values()) + for(State state : all_states) for(Position p : state.hs) { - // the Grammar's designated "last position" is the only accepting state - if (p==lastPosition()) - state.accept = true; - - // FIXME: how does right-nullability interact with follow restrictions? - // all right-nullable rules get a reduction [Johnstone 2000] - if (p.isRightNullable(cache)) { - Walk.Follow wf = new Walk.Follow(top.empty(), p.owner(), all_elements, cache); - Reduction red = new Reduction(p); - state.reductions.put(wf.walk(p.owner()), red); - if (wf.includesEof()) state.eofReductions.add(red, true); - } - // if the element following this position is an atom, copy the corresponding // set of rows out of the "master" goto table and into this state's shift table if (p.element() != null && p.element() instanceof Atom) - state.shifts.addAll(state.gotoSetTerminals.subset(((Atom)p.element()))); + state.shifts.addAll(state.gotoSetTerminals.subset(((Atom)p.element()).getTokenTopology())); + + // RNGLR: we can potentially reduce from any "right-nullable" position -- that is, + // any position for which all Elements after it in the Sequence are capable of + // matching the empty string. 
+ if (!isRightNullable(p)) continue; + Topology follow = follow(p.owner()); + for(Position p2 = p; p2 != null && p2.element() != null; p2 = p2.next()) { + if (!(p2.element() instanceof Union)) + throw new Error("impossible -- only Unions can be nullable"); + + // interesting RNGLR-followRestriction interaction: we must intersect + // not just the follow-set of the last non-nullable element, but the + // follow-sets of the nulled elements as well. + for(Sequence s : ((Union)p2.element())) + follow = follow.intersect(follow(s)); + Topology set = epsilonFollowSet((Union)p2.element()); + if (set != null) follow = follow.intersect(set); + } + + // indicate that when the next token is in the set "follow", nodes in this + // state should reduce according to Position "p" + state.reductions.put(follow, p); + if (followEof.contains(p.owner())) state.eofReductions.add(p); + } + + // optimize the reductions table + if (emptyTopology() instanceof IntegerTopology) + for(State state : all_states) { + // FIXME: this is pretty ugly + state.oreductions = state.reductions.optimize(((IntegerTopology)emptyTopology()).functor()); + state.oshifts = state.shifts.optimize(((IntegerTopology)emptyTopology()).functor()); } } - /** a single state in the LR table and the transitions possible from it */ - public class State implements Comparable, Iterable { + // FIXME: this method needs to be cleaned up and documented + private void sortReductions() { + // crude algorithm to assing an ordinal ordering to every position + // al will be sorted in DECREASING order (al[0] >= al[1]) + ArrayList al = new ArrayList(); + for(State s : all_states) { + for(Object po : s) { + Sequence.Position p = (Sequence.Position)po; + if (al.contains(p)) continue; + int i=0; + for(; i 0) { + Sequence.Position p = al.remove(j); + al.add(i, p); + continue OUTER; + } + break; + } + + int j = 1; + int pk = 0; + for(int i=0; i 0) + { inc = true; break; } + } + inc = true; + if (inc) { + j++; + pk = i; + } + al.get(i).ord = j; + } + } + + /** + * A single state in the LR table and the transitions + * possible from it + * + * A state corresponds to a set of Sequence.Position's. Each + * Node in the GSS has a State; the Node represents a set of + * possible parses, one for each Position in the State. + * + * Every state is either "doomed" or "normal". If a Position + * is part of a Sequence which is a conjunct (that is, it was + * passed to Sequence.{and(),andnot()}), then that Position + * will appear only in doomed States. Furthermore, any set + * of Positions reachable from a doomed State also forms a + * doomed State. Note that in this latter case, a doomed + * state might have exactly the same set of Positions as a + * non-doomed state. + * + * Nodes with non-doomed states represent nodes which + * contribute to actual valid parses. Nodes with doomed + * States exist for no other purpose than to enable/disable + * some future reduction from a non-doomed Node. Because of + * this, we "garbage-collect" Nodes with doomed states if + * there are no more non-doomed Nodes which they could + * affect (see Result, Reduction, and Node for details). + * + * Without this optimization, many seemingly-innocuous uses + * of positive and negative conjuncts can trigger O(n^2) + * space+time complexity in otherwise simple grammars. There + * is an example of this in the regression suite. 
+ */ + class State implements IntegerMappable, Iterable { - public final int idx = master_state_idx++; + public final int idx = master_state_idx++; private final HashSet hs; + public HashSet> conjunctStates = new HashSet>(); - private transient HashMap gotoSetNonTerminals = new HashMap(); - private transient TopologicalBag gotoSetTerminals = new TopologicalBag(); + HashMap> gotoSetNonTerminals = new HashMap>(); + private transient TopologicalBag> gotoSetTerminals = new TopologicalBag>(); - private TopologicalBag reductions = new TopologicalBag(); - private FastSet eofReductions = new FastSet(); - private TopologicalBag shifts = new TopologicalBag(); - private boolean accept = false; + private TopologicalBag reductions = new TopologicalBag(); + private HashSet eofReductions = new HashSet(); + private TopologicalBag> shifts = new TopologicalBag>(); + private boolean accept = false; + + private VisitableMap> oshifts = null; + private VisitableMap oreductions = null; + public final boolean doomed; // Interface Methods ////////////////////////////////////////////////////////////////////////////// - public boolean canShift(Token t) { return shifts.contains(t); } - public Iterable getShifts(Token t) { return shifts.get(t); } - public boolean isAccepting() { return accept; } - public Iterable getReductions(Token t) { return reductions.get(t); } - public Iterable getEofReductions() { return eofReductions; } - public Iterator iterator() { return hs.iterator(); } + boolean isAccepting() { return accept; } + public Iterator iterator() { return hs.iterator(); } + boolean canShift(Token t) { return oshifts!=null && oshifts.contains(t); } + void invokeShifts(Token t, GSS.Phase phase, Result r) { oshifts.invoke(t, phase, r); } + boolean canReduce(Token t) { + return oreductions != null && (t==null ? eofReductions.size()>0 : oreductions.contains(t)); } + void invokeEpsilonReductions(Token t, Node node) { + if (t==null) for(Position r : eofReductions) node.invoke(r, null); + else oreductions.invoke(t, node, null); + } + void invokeReductions(Token t, Node node, Result b) { + if (t==null) for(Position r : eofReductions) node.invoke(r, b); + else oreductions.invoke(t, node, b); + } // Constructor ////////////////////////////////////////////////////////////////////////////// /** * create a new state consisting of all the Positions in hs * @param hs the set of Positions comprising this State - * @param all_states the set of states already constructed (to avoid recreating states) - * @param all_elements the set of all elements (Atom instances need not be included) + * @param all the set of all elements (Atom instances need not be included) * * In principle these two steps could be merged, but they * are written separately to highlight these two facts: @@ -215,16 +291,35 @@ public class Parser { * for non-Atom Elements. * */ - public State(HashSet hs, - HashMap,State> all_states, - HashSet all_elements) { + public State(HashSet hs, boolean doomed) { this.hs = hs; + this.doomed = doomed; - // register ourselves in the all_states hash so that no - // two states are ever created with an identical position set - all_states.put(hs, this); + // register ourselves so that no two states are ever + // created with an identical position set (termination depends on this) + ((HashMap)(doomed ? 
doomed_states : normal_states)).put(hs, this); + ((HashSet)all_states).add(this); + + for(Position p : hs) { + // Step 1a: take note if we are an accepting state + // (last position of the root Union's sequence) + if (p.next()==null && !doomed && rootUnion.contains(p.owner())) + accept = true; + + // Step 1b: If any Position in the set is the first position of its sequence, then this + // state is responsible for spawning the "doomed" states for each of the + // Sequence's conjuncts. This obligation is recorded by adding the to-be-spawned + // states to conjunctStates. + if (!p.isFirst()) continue; + for(Sequence s : p.owner().needs()) + if (!hs.contains(s.firstp())) + conjunctStates.add(mkstate(reachable(s.firstp()), true)); + for(Sequence s : p.owner().hates()) + if (!hs.contains(s.firstp())) + conjunctStates.add(mkstate(reachable(s.firstp()), true)); + } - // Step 1a: examine all Position's in this state and compute the mappings from + // Step 2a: examine all Position's in this state and compute the mappings from // sets of follow tokens (tokens which could follow this position) to sets // of _new_ positions (positions after shifting). These mappings are // collectively known as the _closure_ @@ -235,114 +330,83 @@ public class Parser { Atom a = (Atom)position.element(); HashSet hp = new HashSet(); reachable(position.next(), hp); - bag0.addAll(a, /*clo.walk()*/hp); + bag0.addAll(a.getTokenTopology(), hp); } - // Step 1b: for each _minimal, contiguous_ set of characters having an identical next-position + // Step 2b: for each _minimal, contiguous_ set of characters having an identical next-position // set, add that character set to the goto table (with the State corresponding to the // computed next-position set). for(Topology r : bag0) { HashSet h = new HashSet(); for(Position p : bag0.getAll(r)) h.add(p); - gotoSetTerminals.put(r, all_states.get(h) == null ? new State(h, all_states, all_elements) : all_states.get(h)); + ((TopologicalBag)gotoSetTerminals).put(r, mkstate(h, doomed)); } - // Step 2: for every non-Atom element (ie every Element which has a corresponding reduction), - // compute the closure over every position in this set which is followed by a symbol - // which could yield the Element in question. + // Step 3: for every Sequence, compute the closure over every position in this set which + // is followed by a symbol which could yield the Sequence. // // "yields" [in one or more step] is used instead of "produces" [in exactly one step] // to avoid having to iteratively construct our set of States as shown in most // expositions of the algorithm (ie "keep doing XYZ until things stop changing"). - /* - for(Element e : all_elements) { - if (e instanceof Atom) continue; - HashSet h = new Walk.Closure(null, g.cache).closure(e, hs); - State s = all_states.get(h) == null ? 
new State(h, all_states, all_elements) : all_states.get(h); - if (gotoSetNonTerminals.get(e) != null) - throw new Error("this should not happen"); - gotoSetNonTerminals.put(e, s); - } - */ - HashMapBag move = new HashMapBag(); - for(Position p : hs) { - Element e = p.element(); - if (e==null) continue; - HashSet ys = cache.ys.get(e); - if (ys != null) { - for(Element y : ys) { + + HashMapBag move = new HashMapBag(); + for(Position p : hs) + if (!p.isLast() && p.element() instanceof Union) + for(Sequence s : ((Union)p.element())) { HashSet hp = new HashSet(); reachable(p.next(), hp); - move.addAll(y, hp); + move.addAll(s, hp); } - } - } - for(Element y : move) { + OUTER: for(Sequence y : move) { + // if a reduction is "lame", it should wind up in the dead_state after reducing HashSet h = move.getAll(y); - State s = all_states.get(h) == null ? new State(h, all_states, all_elements) : all_states.get(h); + State s = mkstate(h, doomed); + for(Position p : hs) + if (p.element() != null && (p.element() instanceof Union)) + for(Sequence seq : ((Union)p.element())) + if (seq.needs.contains(y) || seq.hates.contains(y)) { + // FIXME: assumption that no sequence is ever both usefully (non-lamely) matched + // and also directly lamely matched + ((HashMap)gotoSetNonTerminals).put(y, dead_state); + continue OUTER; + } gotoSetNonTerminals.put(y, s); } } - public String toString() { return "state["+idx+"]"; } + private State mkstate(HashSet h, boolean b) { + State ret = (b?doomed_states:normal_states).get(h); + if (ret==null) ret = new State(h,b); + return ret; + } - public int compareTo(Table.State s) { return idx==s.idx ? 0 : idx < s.idx ? -1 : 1; } + public int toInt() { return idx; } } - /** - * the information needed to perform a reduction; copied here to - * avoid keeping references to Element objects in a Table - */ - public class Reduction { - // FIXME: cleanup; almost everything in here could go in either Sequence.Position.getRewrite() or else in GSS.Reduct - public final int numPop; - private final Position position; - private final Forest[] holder; // to avoid constant reallocation - public int hashCode() { return position.hashCode(); } - public boolean equals(Object o) { - if (o==null) return false; - if (o==this) return true; - if (!(o instanceof Reduction)) return false; - Reduction r = (Reduction)o; - return r.position == position; - } - public Reduction(Position p) { - this.position = p; - this.numPop = p.pos; - this.holder = new Forest[numPop]; - } - public String toString() { return "[reduce " + position + "]"; } - public Forest reduce(Forest f, GSS.Phase.Node parent, GSS.Phase.Node onlychild, GSS.Phase target, Forest rex) { - holder[numPop-1] = f; - return reduce(parent, numPop-2, rex, onlychild, target); - } - public Forest reduce(GSS.Phase.Node parent, GSS.Phase.Node onlychild, GSS.Phase target, Forest rex) { - return reduce(parent, numPop-1, rex, onlychild, target); - } + } - // FIXME: this could be more elegant and/or cleaner and/or somewhere else - private Forest reduce(GSS.Phase.Node parent, int pos, Forest rex, GSS.Phase.Node onlychild, GSS.Phase target) { - if (pos>=0) holder[pos] = parent.pending(); - if (pos<=0 && rex==null) { - System.arraycopy(holder, 0, position.holder, 0, holder.length); - rex = position.rewrite(target.getLocation()); - } - if (pos >=0) { - if (onlychild != null) - reduce(onlychild, pos-1, rex, null, target); - else - for(GSS.Phase.Node child : parent.parents()) - reduce(child, pos-1, rex, null, target); - } else { - State state = 
parent.state.gotoSetNonTerminals.get(position.owner());
-                if (state!=null)
-                    target.newNode(parent, rex, state, numPop<=0, parent.phase);
-            }
-            return rex;
-        }
-    }
+    // Helpers //////////////////////////////////////////////////////////////////////////////
+
+    private static HashSet<Position> reachable(Element e) {
+        HashSet<Position> h = new HashSet<Position>();
+        reachable(e, h);
+        return h;
+    }
+    private static void reachable(Element e, HashSet<Position> h) {
+        if (e instanceof Atom) return;
+        for(Sequence s : ((Union)e))
+            reachable(s.firstp(), h);
+    }
+    private static void reachable(Position p, HashSet<Position> h) {
+        if (h.contains(p)) return;
+        h.add(p);
+        if (p.element() != null) reachable(p.element(), h);
+    }
+    private static HashSet<Position> reachable(Position p) {
+        HashSet<Position> ret = new HashSet<Position>();
+        reachable(p, ret);
+        return ret;
     }
-    private static final Forest[] emptyForestArray = new Forest[0];
 }
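
A note on the RNGLR reduction rule used in buildReductions() above: a reduction may be performed from any "right-nullable" position, that is, any position whose remaining elements can all match the empty string. The toy sketch below illustrates only that test; it is independent of the sbp API, and the names RightNullableDemo, Sym and rightNullablePositions are invented for this example (the nullable flags stand in for the emptiness computation that the real table derives from the grammar).

import java.util.*;

/** Illustrative only -- not part of edu.berkeley.sbp. */
public class RightNullableDemo {

    /** a hypothetical grammar symbol: a name plus a "can match the empty string" flag */
    static final class Sym {
        final String name;
        final boolean nullable;
        Sym(String name, boolean nullable) { this.name = name; this.nullable = nullable; }
    }

    /**
     * rn[i] is true when position i is right-nullable: every symbol at
     * index >= i can derive the empty string.  The position after the
     * last symbol (i == seq.size()) is trivially right-nullable.
     */
    static boolean[] rightNullablePositions(List<Sym> seq) {
        boolean[] rn = new boolean[seq.size() + 1];
        rn[seq.size()] = true;                         // "dot at the very end"
        for (int i = seq.size() - 1; i >= 0; i--)
            rn[i] = seq.get(i).nullable && rn[i + 1];
        return rn;
    }

    public static void main(String[] args) {
        // S ::= A B C   where B and C are nullable but A is not
        List<Sym> s = Arrays.asList(new Sym("A", false), new Sym("B", true), new Sym("C", true));
        boolean[] rn = rightNullablePositions(s);
        // positions 1, 2 and 3 (everything after A) are right-nullable, so an
        // RNGLR parser may reduce by S there before B and C have matched anything
        for (int i = 0; i <= s.size(); i++)
            System.out.println("position " + i + " right-nullable: " + rn[i]);
    }
}

This is also why the reduction-table code in the patch walks forward over the nulled elements when computing lookahead: as its comment notes, the follow-set of the reduction must be intersected not only with the follow-set of the position's owner but with the follow-sets (and epsilon-follow restrictions) of every element that is being skipped as empty.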