X-Git-Url: http://git.megacz.com/?p=sbp.git;a=blobdiff_plain;f=src%2Fedu%2Fberkeley%2Fsbp%2FGSS.java;h=2154731274229e22643d1775550864f295eab26a;hp=d2b040d42fa475eae6bba5d4a3d1e32ca5c9790f;hb=474037fe8463b96dfaf0209be157cbf5223a0910;hpb=ddf7ffb485e6b3dd0fe6b1b39b47a21a1e4b8973

diff --git a/src/edu/berkeley/sbp/GSS.java b/src/edu/berkeley/sbp/GSS.java
index d2b040d..2154731 100644
--- a/src/edu/berkeley/sbp/GSS.java
+++ b/src/edu/berkeley/sbp/GSS.java
@@ -1,9 +1,8 @@
 package edu.berkeley.sbp;
 import edu.berkeley.sbp.*;
 import edu.berkeley.sbp.util.*;
+import edu.berkeley.sbp.Parser.Table.*;
 import edu.berkeley.sbp.Sequence.Position;
-import edu.berkeley.sbp.Parser.Table.State;
-import edu.berkeley.sbp.Parser.Table.Reduction;
 import java.io.*;
 import java.util.*;
 import java.lang.reflect.*;
@@ -11,39 +10,46 @@ import java.lang.reflect.*;
 /** implements Tomita's Graph Structured Stack */
 class GSS {
+    public static int count = 0;
 
     public GSS() { }
 
     private Phase.Node[] reducing_list = null;
     public int resets = 0;
     public int waits = 0;
 
-    HashMapBag<Integer,Sequence>       inhibited = new HashMapBag<Integer,Sequence>();
-    HashMapBag<Sequence,Phase.Waiting> waiting   = new HashMapBag<Sequence,Phase.Waiting>();
-    HashMapBag<Integer,Sequence>       performed = new HashMapBag<Integer,Sequence>();
+    HashMapBag<Integer,Sequence>       inhibited       = new HashMapBag<Integer,Sequence>();
+    HashMapBag<Integer,Sequence>       expectedInhibit = new HashMapBag<Integer,Sequence>();
+    HashMapBag<Sequence,Phase.Waiting> waiting         = new HashMapBag<Sequence,Phase.Waiting>();
+    HashMapBag<Integer,Sequence>       performed       = new HashMapBag<Integer,Sequence>();
 
     /** FIXME */
     public Forest.Ref finalResult;
 
     /** corresponds to a positions between tokens the input stream; same as Tomita's U_i's */
-    public class Phase implements Invokable<State, Forest, GSS.Phase.Node>, IntegerMappable {
+    class Phase<Tok> implements Invokable<State, Forest, GSS.Phase<Tok>.Node>, IntegerMappable {
+
+        public void invoke(State st, Forest result, Node n) {
+            good |= next.newNode(n, result, st, false);
+        }
 
         /** the token immediately after this phase */
-        final Token token;
+        final Tok token;
 
         private final int pos;
 
         boolean reducing;
         private IntPairMap<Node> hash;  /* ALLOC */
+        private IntPairMap<Forest> singularReductions;  /* ALLOC */
         private boolean closed;
         private boolean good;
         private Phase next = null;
         private Phase prev;
-        private Token.Location location;
+        private Input.Location location;
         public final Parser parser;
 
         private Forest forest;
 
-        public Phase(Phase prev, Parser parser, Phase previous, Token token, Token.Location location, Forest forest) {
+        public Phase(Phase prev, Parser parser, Phase previous, Tok token, Input.Location location, Forest forest) throws ParseFailed {
             this.prev = prev;
             this.forest = forest;
             this.parser = parser;
@@ -54,10 +60,14 @@ class GSS {
             reset();
         }
 
-        public void reset() {
+        public void reset() throws ParseFailed {
             waiting.clear();
             performed.clear();
             hash = new IntPairMap();
+            singularReductions = new IntPairMap();
+            expectedInhibit.clear();
+            expectedInhibit.addAll(inhibited);
+            reset = false;
             good = false;
             closed = false;
             reducing = false;
@@ -73,7 +83,7 @@ class GSS {
             return true;
         }
 
-        public Token.Location getLocation() { return location; }
+        public Input.Location getLocation() { return location; }
 
         /** add a new node (merging with existing nodes if possible)
          *  @param parent the parent of the new node
@@ -87,9 +97,9 @@ class GSS {
             if (p != null)  return newNode2(p, parent, pending, state, fromEmptyReduction);
             else            return newNode3(parent, pending, state, fromEmptyReduction);
         }
-        public void newNode(Node parent, Forest pending, State state, boolean fromEmptyReduction, Reduction reduction) {
+        public void newNode(Node parent, Forest pending, State state, boolean fromEmptyReduction, Position reduction) {
             int pos = parent==null?0:parent.phase()==null?0:parent.phase().pos;
-            Sequence owner = reduction==null ? null : reduction.position.owner();
+            Sequence owner = reduction==null ? null : reduction.owner();
             if (reduction!=null) {
                 if (inhibited.contains(pos, owner)) return;
                 if (owner.needs != null)
@@ -105,7 +115,7 @@ class GSS {
             }
             if (!owner.lame)
                 newNode(parent, pending, state, fromEmptyReduction);
-            if (reduction!=null) inhibit(reduction, parent==null?0:parent.phase().pos);
+            if (reduction!=null) uninhibit(reduction, parent==null?0:parent.phase().pos);
             if (reduction != null) {
                 boolean redo = true;
                 while(redo) {
@@ -125,7 +135,7 @@ class GSS {
             p.holder.merge(pending);
             if (p.parents().contains(parent)) return true;
             p.parents().add(parent, true);
-            if (p!=parent && !fromEmptyReduction) p.queueReductions(parent);
+            if (p!=parent && !fromEmptyReduction && reducing) p.performReductions(parent);
             return true;
         }
         private boolean newNode3(Node parent, Forest pending, State state, boolean fromEmptyReduction) {
@@ -140,26 +150,28 @@ class GSS {
                 //return;
             } while(false);
 
-            Node n = new Node(parent, pending, state, fromEmptyReduction);  // ALLOC
-            n.queueEmptyReductions();
-            if (!fromEmptyReduction) n.queueReductions(parent);
+            Node n = new Node(parent, pending, state);  // ALLOC
+            if (reducing) {
+                n.performEmptyReductions();
+                if (!fromEmptyReduction) n.performReductions(parent);
+            }
             return true;
         }
 
-        public void uninhibit(int p, Sequence s) {
+        public void inhibit(int p, Sequence s) {
             if (s.hated!=null)
                 for(Sequence s2 : s.hated)
                     inhibited.remove(p, s2);
         }
 
-        public void inhibit(Reduction r, int p) {
-            if (r.position.owner().hated == null) return;
+        public void uninhibit(Position r, int p) {
+            if (r.owner().hated == null) return;
             // remember that dead states are still allowed to shift -- just not allowed to reduce
             boolean reset = false;
-            for(Sequence seq : r.position.owner().hated) {
+            for(Sequence seq : r.owner().hated) {
                 if (performed.contains(p,seq)) {
-                    uninhibit(p, seq);
-                    //System.out.println("\nresetting due to " + r.position.owner() + " killing " + seq);
+                    inhibit(p, seq);
+                    //System.out.println("\nresetting due to " + r.owner() + " killing " + seq);
                     //inhibited.clear();
                     inhibited.add(p, seq);
                     //inhibited = new HashMapBag();
@@ -168,11 +180,12 @@ class GSS {
                     throw new Reset();
                 }
                 inhibited.add(p, seq);
+                expectedInhibit.remove(p, seq);
             }
         }
 
         /** perform all reduction operations */
-        public void reduce() {
+        public void reduce() throws ParseFailed{
             try {
                 reducing = true;
                 if (reducing_list==null || reducing_list.length < hash.size())
@@ -181,30 +194,36 @@ class GSS {
                 int num = hash.size();
                 for(int i=0; i<num; i++) {
                     Node n = reducing_list[i];
-                    n.queueEmptyReductions();
+                    n.performEmptyReductions();
                 }
                 for(int i=0; i<num; i++) {
                     Node n = reducing_list[i];
                     reducing_list[i] = null;
-                    n.queueReductions();
+                    n.performReductions();
+                }
+                if (expectedInhibit.size() > 0) {
+                    inhibited.removeAll(expectedInhibit);
+                    System.out.println("\n!!!!\n");
+                    throw new Reset();
                 }
             } catch (Reset r) { reset(); reduce(); }
+            count = 0;
         }
+        private boolean reset = false;
         class Reset extends RuntimeException { }
 
-        public void invoke(State st, Forest result, Node n) {
-            good |= next.newNode(n, result, st, false);
-        }
-
         /** perform all shift operations, adding promoted nodes to next */
         public void shift(Phase next, Forest result) throws ParseFailed {
             // this massively improves GC performance
-            if (prev!=null) prev.hash = null;
+            if (prev!=null) {
+                prev.hash = null;
+                prev.singularReductions = null;
+            }
             this.next = next;
             closed = true;
             Forest res = null;
@@ -219,7 +238,10 @@ class GSS {
             }
 
             if (!good && token!=null)
-                throw new ParseFailed(ParseFailed.error(ANSI.red("unexpected character")+" "+ANSI.purple(token)+" encountered at "+ANSI.green(getLocation())+"\n", token, hash.values()),
+                throw new ParseFailed(ParseFailed.error(ANSI.red("unexpected character ")+" \'"+
+                                                        ANSI.purple(StringUtil.escapify(token+"", "\\\'\r\n"))+
+                                                        "\' encountered at "+
+                                                        ANSI.green(getLocation())+"\n", token, hash.values()),
                                       getLocation());
             if (token==null && finalResult==null)
                 throw new ParseFailed(ParseFailed.error(ANSI.red("unexpected end of file\n"), token, hash.values()),
@@ -227,13 +249,13 @@ class GSS {
                                       getLocation());
         }
 
-        public class Waiting {
+        class Waiting {
             Node parent;
             Forest pending;
             State state;
             boolean fromEmptyReduction;
-            Reduction reduction;
-            public Waiting(Node parent, Forest pending, State state, boolean fromEmptyReduction, Reduction reduction) {
+            Position reduction;
+            public Waiting(Node parent, Forest pending, State state, boolean fromEmptyReduction, Position reduction) {
                 waits++;
                 this.parent = parent;
                 this.pending = pending;
@@ -247,57 +269,88 @@ class GSS {
             }
         }
 
-        // GSS Nodes //////////////////////////////////////////////////////////////////////////////
+        // Node /////////////////////////////////////////////////////////////////////////////////
 
         /** a node in the GSS */
-        public final class Node extends FastSet<Node> implements Invokable<Reduction, Node, Node> {
+        final class Node extends FastSet<Node> implements Invokable<Position, Node, Node>, IntegerMappable {
 
-            public boolean touched = false;
             private Forest.Ref holder = null;
             private boolean allqueued = false;
 
             /** what state this node is in */
-            public final State state;
+            public final Parser.Table.State state;
 
             /** which Phase this Node belongs to (node that Node is also a non-static inner class of Phase) */
-            public Phase phase() { return Phase.this; }
-
+            public Phase phase() { return Phase.this; }
             public Forest.Ref holder() { return holder==null ? (holder = new Forest.Ref()) : holder; }
             public Forest pending() { return Phase.this.closed ? holder().resolve() : holder; }
             public FastSet<Node> parents() { return this; }
 
-            public void queueReductions() {
-                if (!reducing) return;
+            public void performReductions() {
                 if (allqueued) return;
                 allqueued = true;
-                int where = parents().size();
                 state.invokeReductions(token, this, this, null);
             }
 
-            public void queueReductions(Node n2) {
-                if (!allqueued) { queueReductions(); return; }
-                state.invokeReductions(token, this, this, n2);
+            public void performReductions(Node n2) {
+                if (!allqueued) performReductions();
+                else state.invokeReductions(token, this, this, n2);
             }
 
-            public final void invoke(Reduction r, Node n, Node n2) {
-                if (n==null) {
-                    if (r.position.pos==0) r.reduce(this);
-                    return;
+            public void performEmptyReductions() { state.invokeReductions(token, this, null, null); }
+            public final void invoke(Position r, Node n, Node n2) {
+                if (n==null || n2==null || r.pos==0) {
+                    if (r.pos==0) {
+                        if (n==null) n = this;
+                        else return;
+                    }
+                    if (n==null) return;
+                    Forest[] holder = new Forest[r.pos];
+                    if (r.pos==0) n.finish(r, r.zero(), n.phase(), holder);
+                    else n.reduce(r, r.pos-1, n.phase(), holder);
+                } else {
+                    Forest[] holder = new Forest[r.pos];
+                    if (r.pos<=0) throw new Error("called wrong form of reduce()");
+                    int pos = r.pos-1;
+                    n.reduce(r, pos, n.phase(), holder, n2);
                 }
-                if (r.position.pos==0) return;
-                if (n2==null) r.reduce(n);
-                else r.reduce(n, n2);
             }
-            public void queueEmptyReductions() {
-                if (!reducing) return;
-                state.invokeReductions(token, this, null, null);
+
+            public void reduce(Position r, int pos, Phase<Tok> target, Forest[] holder) { reduce(r, pos, target, holder, null); }
+            public void reduce(Position r, int pos, Phase<Tok> target, Forest[] holder, Node only) {
+                Forest old = holder[pos];
+                holder[pos] = this.pending();
+                if (pos==0) {
+                    System.arraycopy(holder, 0, r.holder, 0, holder.length);
+                    for(int i=0; i<r.pos; i++) if (r.holder[i]==null) throw new Error();
+                    Forest rex = null;
+                    if (r.pos==1)  rex = singularReductions.get(this, r);
+                    if (rex==null) {
+                        rex = r.rewrite(phase().getLocation());
+                        if (r.pos==1)  singularReductions.put(this, r, rex);
+                    }
+                    if (only != null)  only.finish(r, rex, target, holder);
+                    else               for(Node child : this.parents()) child.finish(r, rex, target, holder);
+                } else {
+                    if (only != null)  only.reduce(r, pos-1, target, holder);
+                    else               for(Node child : this.parents()) child.reduce(r, pos-1, target, holder);
+                }
+                holder[pos] = old;
+            }
+
+            private void finish(Position r, Forest result, Phase<Tok> target, Forest[] holder) {
+                Parser.Table.State state0 = state.gotoSetNonTerminals.get(r.owner());
+                if (result==null) throw new Error();
+                if (state0!=null)
+                    target.newNode(this, result, state0, r.pos<=0, r);
             }
 
-            private boolean fe;
-            public boolean dead = false;
-            public boolean redo = false;
-            private Node(Node parent, Forest pending, State state, boolean fe) {
-                this.fe = fe;
+            private Node(Node parent, Forest pending, State state) {
                 this.state = state;
                 this.holder().merge(pending);
                 Phase start = parent==null ? null : parent.phase();
@@ -305,7 +358,10 @@ class GSS {
                 if (Phase.this.hash.get(state, start) != null) throw new Error("severe problem!");
                 Phase.this.hash.put(state, start, this);
             }
+            public int toInt() { return idx; }
+            private final int idx = node_idx++;
         }
+        private int node_idx = 0;
 
         public int toInt() { return pos+1; }
         public int size() { return hash==null ? 0 : hash.size(); }
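
The heart of this change is the move from queueReductions()/queueEmptyReductions() to performReductions()/reduce()/finish(): a reduction of length N now walks N parent links back through the graph-structured stack, filling one slot of holder[] per frame, and only at position 0 does it build the result Forest and create the reduced node via newNode(). The following self-contained sketch is not sbp code -- MiniNode, reduce and finish are hypothetical stand-ins for GSS.Phase.Node, Node.reduce and Node.finish -- but it illustrates that walk on a toy stack under those assumptions.

// Standalone sketch (not from the sbp sources): a stripped-down model of a GSS
// reduction walk.  MiniNode stands in for GSS.Phase.Node, and the String field
// "pending" stands in for the Forest attached to each node.
import java.util.*;

class MiniGssDemo {
    static final class MiniNode {
        final String state;                         // parser-state label
        final String pending;                       // stand-in for this node's Forest
        final List<MiniNode> parents = new ArrayList<>();
        MiniNode(String state, String pending) { this.state = state; this.pending = pending; }
    }

    /** walk back `pos` more parent links, recording one pending value per frame */
    static void reduce(MiniNode n, int pos, String[] holder, List<String> out) {
        holder[pos] = n.pending;
        if (pos == 0) {                             // whole right-hand side collected
            String forest = String.join(" ", holder);
            for (MiniNode below : n.parents)        // node(s) underneath the reduction
                finish(below, forest, out);
            return;
        }
        for (MiniNode parent : n.parents)           // fan out over every GSS parent
            reduce(parent, pos - 1, holder, out);
    }

    /** in the real GSS this step would call target.newNode(...) to add the reduced node */
    static void finish(MiniNode below, String forest, List<String> out) {
        out.add("new node above " + below.state + " carrying [" + forest + "]");
    }

    public static void main(String[] args) {
        // tiny stack: root <- a <- b, then a length-2 reduction starting at b
        MiniNode root = new MiniNode("s0", "<bottom>");
        MiniNode a    = new MiniNode("s1", "x");
        MiniNode b    = new MiniNode("s2", "y");
        a.parents.add(root);
        b.parents.add(a);

        List<String> out = new ArrayList<>();
        reduce(b, 1, new String[2], out);           // pos = length-1, like r.pos-1 in invoke()
        out.forEach(System.out::println);           // prints: new node above s0 carrying [x y]
    }
}

Because a GSS node can have several parents, both the pos>0 step and the pos==0 step fan out over the whole parent set rather than following a single link; that is what lets one reduction apply across every stack suffix that can accept it.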