tests/testcase.g \
tests/java.tc
+loop: edu.berkeley.sbp.jar
+ $(java) -cp $< edu.berkeley.sbp.misc.RegressionTests \
+ -graph \
+ tests/meta.g \
+ tests/testcase.g \
+ tests/loop.tc
+
boot: edu.berkeley.sbp.jar
cd src; \
$(java) -cp ../$< \
darcs dist
echo '<html><head><meta HTTP-EQUIV="Refresh" CONTENT="0;URL=doc/sbp.html"></head></html>' > index.html
rsync -are ssh --progress --verbose --delete ./ argus.cs.berkeley.edu:public_html/sbp/
+
create( $c:{...} ) =
- - clean up the visualization (?)
-
- - I still don't like Atom.Infer and Atom.Invert...
-
- - better ambiguity debugging tools
+ - better ambiguity debugging tools / visualization
- ParseFailed, GSS, Walk, Parser, Sequence, Forest
class GSS {
public static int count = 0;
+
public GSS() { }
private Phase.Node[] reducing_list = null;
public Forest.Ref finalResult;
/** corresponds to a position <i>between tokens</i> in the input stream; same as Tomita's U_i's */
- class Phase<Tok> implements Invokable<State, Forest, Phase<Tok>.Node>, IntegerMappable {
+ class Phase<Tok> implements Invokable<State, Forest, Phase<Tok>.Node>, IntegerMappable, GraphViz.ToGraphViz, Iterable<Phase.Node> {
+ public Iterator<Phase.Node> iterator() { return hash.iterator(); }
public void invoke(State st, Forest result, Node n) {
good |= next.newNode(n, result, st, false);
}
}
}
private boolean newNode2(Node p, Node parent, Forest pending, State state, boolean fromEmptyReduction) {
- p.holder.merge(pending);
- if (p.parents().contains(parent)) return true;
+ //if (p.parents().contains(parent)) return true;
+ p.merge(parent, pending);
p.parents().add(parent, true);
if (p!=parent && !fromEmptyReduction && reducing) p.performReductions(parent);
return true;
/** perform all shift operations, adding promoted nodes to <tt>next</tt> */
public void shift(Phase next, Forest result) throws ParseFailed {
// this massively improves GC performance
- if (prev!=null) {
+ if (prev!=null && parser.helpgc) {
prev.hash = null;
prev.singularReductions = null;
}
for(Phase.Node n : hash.values()) {
if (token == null && n.state.isAccepting()) {
if (finalResult==null) finalResult = new Forest.Ref();
- finalResult.merge(n.holder);
+ for(Object f : n.results())
+ finalResult.merge((Forest)f);
}
if (token == null) continue;
n.state.invokeShifts(token, this, result, n);
// Node /////////////////////////////////////////////////////////////////////////////////
/** a node in the GSS */
- final class Node extends FastSet<Node> implements Invokable<Position, Node, Node>, IntegerMappable {
+ final class Node implements Invokable<Position, Node, Node>, IntegerMappable, GraphViz.ToGraphViz {
+ public FastSet<Node> set = new FastSet<Node>();
+
+ public GraphViz.Node toGraphViz(GraphViz gv) {
+ if (gv.hasNode(this)) return gv.createNode(this);
+ GraphViz.Node n = gv.createNode(this);
+ n.label = ""+state.toStringx();
+ n.shape = "rectangle";
+ n.fill = "green";
+ //GraphViz.Node f = pending().toGraphViz(gv);
+ //n.add(f);
+ for(Forest result : results()) n.edge(result, "");
+ for(Node parent : parents()) n.edge(parent, "");
+ ((GraphViz.Group)phase().toGraphViz(gv)).add(n);
+ return n;
+ }
+ public boolean isTransparent() { return false; }
+ public boolean isHidden() { return false; }
+
- private Forest.Ref holder = null;
+ //private Forest.Ref holder = new Forest.Ref();
private boolean allqueued = false;
/** what state this node is in */
/** which Phase this Node belongs to (note that Node is also a non-static inner class of Phase) */
public Phase phase() { return Phase.this; }
- public Forest.Ref holder() { return holder==null ? (holder = new Forest.Ref()) : holder; }
- public Forest pending() { return Phase.this.closed ? holder().resolve() : holder; }
- public FastSet<Node> parents() { return this; }
+ //private Forest pending() { return !Phase.this.closed ? holder : holder.resolve(); }
+ private HashMap<Node,Forest> resultMap = new HashMap<Node,Forest>();
+ public void merge(Node parent, Forest result) {
+ //holder.merge(result);
+ Forest.Ref f = (Forest.Ref)resultMap.get(parent);
+ if (f==null) { f = new Forest.Ref(); resultMap.put(parent, f); }
+ f.merge(result);
+ set.add(parent, true);
+ }
+ //public Iterable<Node> childrenFor(Forest result) { return resultMap.getAll(result); }
+ public Iterable<Forest> results() { return resultMap.values(); }
+ private Forest pending(Node n) {
+ //return !Phase.this.closed ? holder : holder.resolve();
+ /*
+ for(Forest f : resultMap)
+ if (resultMap.contains(f, n))
+ return f;
+ return null;
+ */
+ return resultMap.get(n);
+ }
+ public FastSet<Node> parents() { return set; }
public void performReductions() {
if (allqueued) return;
public void performEmptyReductions() { state.invokeReductions(token, this, null, null); }
public final void invoke(Position r, Node n, Node n2) {
+ //if (r.owner().lame) return;
if (n==null || n2==null || r.pos==0) {
if (r.pos==0) {
if (n==null) n = this;
if (n==null) return;
Forest[] holder = new Forest[r.pos];
if (r.pos==0) n.finish(r, r.zero(), n.phase(), holder);
- else n.reduce(r, r.pos-1, n.phase(), holder);
+ else n.reduce(r, r.pos-1, n.phase(), holder, null, n.pending(n));
} else {
Forest[] holder = new Forest[r.pos];
if (r.pos<=0) throw new Error("called wrong form of reduce()");
int pos = r.pos-1;
- n.reduce(r, pos, n.phase(), holder, n2);
+ n.reduce(r, pos, n.phase(), holder, n2, n.pending(n));
}
}
- public void reduce(Position r, int pos, Phase target, Forest[] holder) { reduce(r, pos, target, holder, null); }
+ /*
+ public void reduce(Position r, int pos, Phase target, Forest[] holder) {
+ reduce(r, pos, target, holder, null); }
public void reduce(Position r, int pos, Phase target, Forest[] holder, Node only) {
+ reduce(r, pos, target, holder, only, this.pending());
+ }
+ */
+ public void reduce(Position r, int pos, Phase target, Forest[] holder, Node only, Forest pending) {
Forest old = holder[pos];
- holder[pos] = this.pending();
+ holder[pos] = pending;
if (pos==0) {
- System.arraycopy(holder, 0, r.holder, 0, holder.length);
- for(int i=0; i<r.pos; i++) if (r.holder[i]==null) throw new Error("realbad");
- Forest rex = null;
// FIXME: I'm unsure about this -- basically we want to deal with the case where
// there are two nodes, each of whose Ref points to the same Forest instance.
// Some node in the next phase has both of these as parents. This might happen
// since the same reduction can appear in more than one state.
- if (r.pos==1) rex = singularReductions.get(this.pending(), r);
- if (rex==null) {
- rex = r.rewrite(phase().getLocation());
- if (r.pos==1) singularReductions.put(this.pending(), r, rex);
+
+ if (only != null) {
+ holder[pos] = pending(only);
+ System.arraycopy(holder, 0, r.holder, 0, holder.length);
+ for(int i=0; i<r.pos; i++) if (r.holder[i]==null) throw new Error("realbad");
+ Forest rex = null;
+ if (r.pos==1) rex = singularReductions.get(pending, r);
+ if (rex==null) {
+ rex = r.rewrite(phase().getLocation());
+ if (r.pos==1) singularReductions.put(pending, r, rex);
+ }
+ only.finish(r, rex, target, holder);
+ } else {
+ for(Forest result : results()) {
+ pending = holder[pos] = result;
+ System.arraycopy(holder, 0, r.holder, 0, holder.length);
+ for(int i=0; i<r.pos; i++) if (r.holder[i]==null) throw new Error("realbad");
+ Forest rex = null;
+ if (rex==null && r.pos==1) rex = singularReductions.get(pending, r);
+ if (rex==null) {
+ rex = r.rewrite(phase().getLocation());
+ if (r.pos==1) singularReductions.put(pending, r, rex);
+ }
+ for(Node child : parents()) {
+ if (pending(child)==result)
+ child.finish(r, rex, target, holder);
+ }
+ }
}
- if (only != null) only.finish(r, rex, target, holder);
- else for(Node child : this.parents()) child.finish(r, rex, target, holder);
} else {
- if (only != null) only.reduce(r, pos-1, target, holder);
- else for(Node child : this.parents()) child.reduce(r, pos-1, target, holder);
+ if (only != null) {
+ holder[pos] = pending(only);
+ only.reduce(r, pos-1, target, holder, null, only.pending(only));
+ } else {
+ for(Node child : this.parents()) {
+ holder[pos] = pending(child);
+ child.reduce(r, pos-1, target, holder, null, child.pending(child));
+ }
+ }
}
holder[pos] = old;
}
private Node(Node parent, Forest pending, State state) {
this.state = state;
- this.holder().merge(pending);
+ this.merge(parent, pending);
Phase start = parent==null ? null : parent.phase();
if (parent != null) parents().add(parent, true);
if (Phase.this.hash.get(state, start) != null) throw new Error("severe problem!");
public int toInt() { return pos+1; }
public int size() { return hash==null ? 0 : hash.size(); }
- }
+ // GraphViz //////////////////////////////////////////////////////////////////////////////
+
+ public GraphViz.Node toGraphViz(GraphViz gv) {
+ if (gv.hasNode(this)) return gv.createNode(this);
+ GraphViz.Group g = gv.createGroup(this);
+ g.label = "Phase " + pos;
+ g.color = "gray";
+ g.cluster = true;
+ return g;
+ }
+ public boolean isTransparent() { return false; }
+ public boolean isHidden() { return false; }
+
+ }
}
/** implement this method to create the output forest corresponding to a lone shifted input token */
public abstract Forest<Result> shiftToken(Tok t, Input.Location loc);
+ public boolean helpgc = true;
+
+ public String toString() { return pt.toString(); }
+
/** parse <tt>input</tt>, using the table <tt>pt</tt> to drive the parser */
public Forest<Result> parse(Input<Tok> input) throws IOException, ParseFailed {
GSS gss = new GSS();
GSS.Phase current = gss.new Phase<Tok>(null, this, null, input.next(), loc, null);
current.newNode(null, Forest.leaf(null, null, null), pt.start, true);
int count = 1;
- for(;;) {
+ for(int idx=0;;idx++) {
loc = input.getLocation();
current.reduce();
Forest forest = current.token==null ? null : shiftToken((Tok)current.token, loc);
GSS.Phase next = gss.new Phase<Tok>(current, this, current, input.next(), loc, forest);
+ if (!helpgc) {
+ FileOutputStream fos = new FileOutputStream("out-"+idx+".dot");
+ PrintWriter p = new PrintWriter(new OutputStreamWriter(fos));
+ GraphViz gv = new GraphViz();
+ for(Object n : next)
+ ((GSS.Phase.Node)n).toGraphViz(gv);
+ gv.dump(p);
+ p.flush();
+ p.close();
+ }
count = next.size();
if (current.isDone()) return (Forest<Result>)gss.finalResult;
current = next;
/** an SLR(1) parse table which may contain conflicts */
static class Table<Tok> extends Walk.Cache {
+ public String toString() {
+ StringBuffer sb = new StringBuffer();
+ sb.append("parse table");
+ for(State<Tok> state : all_states.values()) {
+ sb.append(" " + state + "\n");
+ for(Topology<Tok> t : state.shifts) {
+ sb.append(" shift \""+
+ new edu.berkeley.sbp.chr.CharTopology((IntegerTopology<Character>)t)+"\" => ");
+ for(State st : state.shifts.getAll(t))
+ sb.append(st.idx+" ");
+ sb.append("\n");
+ }
+ for(Topology<Tok> t : state.reductions)
+ sb.append(" reduce \""+
+ new edu.berkeley.sbp.chr.CharTopology((IntegerTopology<Character>)t)+"\" => " +
+ state.reductions.getAll(t) + "\n");
+ }
+ return sb.toString();
+ }
+
public final Walk.Cache cache = this;
private void walk(Element e, HashSet<Element> hs) {
}
}
+ public String toStringx() {
+ StringBuffer st = new StringBuffer();
+ for(Position p : this) {
+ if (st.length() > 0) st.append("\n");
+ st.append(p);
+ }
+ return st.toString();
+ }
public String toString() {
StringBuffer ret = new StringBuffer();
ret.append("state["+idx+"]: ");
for(Sequence s : needs) { ret.needs.add(s); s.needed.add(ret); }
for(Sequence s : hates) { ret.hates.add(s); s.hated.add(ret); }
ret.follow = follow;
+ ret.lame = lame;
return ret;
}
* after matching the sequence, create the specified output tree
* @param tag the tag for the output tree
* @param e the elements to match
- * @param drops only elements of <tt>e</tt> whose corresponding <tt>boolean</tt> in <tt>drops</tt> is <i>false</i> will be included in the output tree
+ * @param drops only elements of <tt>e</tt> whose corresponding <tt>boolean</tt> in <tt>drops</tt>
+ * is <i>false</i> will be included in the output tree
**/
public static Sequence rewritingSequence(Object tag, Element[] e, Object[] labs, boolean[] drops) {
return new RewritingSequence(tag, e, labs, drops); }
public final Topology follow() { return follow==null ? null : Atom.toAtom(follow); }
Topology toAtom() {
- if (elements.length!=1) throw new RuntimeException("cannot invoke toAtom() on a Sequence with " + elements.length + " elements: " + this);
+ if (elements.length!=1)
+ throw new RuntimeException("cannot invoke toAtom() on a Sequence with " + elements.length + " elements: " + this);
return Atom.toAtom(elements[0]);
}
protected final Element[] elements;
final HashSet<Sequence> needed = new HashSet<Sequence>();
- final HashSet<Sequence> hated = new HashSet<Sequence>();
- final HashSet<Sequence> needs = new HashSet<Sequence>();
- final HashSet<Sequence> hates = new HashSet<Sequence>();
+ final HashSet<Sequence> hated = new HashSet<Sequence>();
+ final HashSet<Sequence> needs = new HashSet<Sequence>();
+ final HashSet<Sequence> hates = new HashSet<Sequence>();
public boolean lame = false;
final Position firstp;
public CharRange(char a) { this(a,a); }
public CharRange(char a, char b) { this(new CharTopology(a, b)); }
- public CharRange(Topology<Character> t) { this.t = t; }
+ public CharRange(CharTopology t) { this.t = t; }
- private Topology<Character> t;
+ private CharTopology t;
public Topology<Character> top() { return t; }
public static final char left = (char)9998;
public CharTopology() { super(null); }
public CharTopology(Range.Set r) { super(null, r); }
+ public CharTopology(Topology<Character> it) { this(((IntegerTopology<Character>)it.unwrap()).getRanges()); }
public CharTopology(char a, char b) { super(null, a, b); }
public Integer invoke(Character c) { return (int)c.charValue(); }
private static Element set(Range.Set r) { return CharRange.set(r); }
private static Element string(String s) { return CharRange.string(s); }
- private static Atom infer(Element e) { return infer(Atom.toAtom(e)); }
- private static Atom infer(Topology t) { return new CharRange((Topology<Character>)t); }
+ private static Atom infer(Element e) { return infer((Topology<Character>)Atom.toAtom(e)); }
+ private static Atom infer(Topology<Character> t) { return new CharRange(new CharTopology(t)); }
private MetaGrammar() { }
public MetaClause element;
public MetaInvert(Tree<String> t, MetaConjunct c) { this.element = make(t, c); }
public String toString() { return "~"+element; }
- public Element build(BuildContext bc) { return infer(Atom.toAtom(element.build(bc)).complement()); }
+ public Element build(BuildContext bc) { return infer((Topology<Character>)Atom.toAtom(element.build(bc)).complement()); }
}
}
+
+
+
+
+
+
+
+
+
+
+
+
// DO NOT EDIT STUFF BELOW: IT IS AUTOMATICALLY GENERATED
new edu.berkeley.sbp.Tree(null, "grammar", new edu.berkeley.sbp.Tree[] { new edu.berkeley.sbp.Tree(null, null, new edu.berkeley.sbp.Tree[] { new edu.berkeley.sbp.Tree(null, "=", new edu.berkeley.sbp.Tree[] { new edu.berkeley.sbp.Tree(null, null, new edu.berkeley.sbp.Tree[] { new edu.berkeley.sbp.Tree(null, "G", new edu.berkeley.sbp.Tree[] { }),
new edu.berkeley.sbp.Tree(null, "r", new edu.berkeley.sbp.Tree[] { }),
+
+
+
+
+
+
+
+
+
+
+
+
public class RegressionTests {
public static boolean yes = false;
+ public static boolean graph = false;
public static void main(String[] s) throws Exception {
try {
boolean profile = false;
+ if (s[0].equals("-graph")) {
+ graph = true;
+ String[] s2 = new String[s.length-1];
+ System.arraycopy(s, 1, s2, 0, s2.length);
+ s = s2;
+ }
if (s[0].equals("-profile")) {
profile = true;
String[] s2 = new String[s.length-1];
System.err.println("parsing " + s[0]);
Tree<String> res = new CharParser(MetaGrammar.make()).parse(new FileInputStream(s[0])).expand1();
//System.out.println(mg);
- Union meta = MetaGrammar.make();
+ Union meta = MetaGrammar.make(res, "s");
System.err.println("parsing " + s[1]);
SequenceInputStream sis = new SequenceInputStream(new FileInputStream(s[0]), new FileInputStream(s[1]));
res = new CharParser(meta).parse(sis).expand1();
System.exit(0);
}
System.err.println("expanding...");
- GraphViz gv = new GraphViz();
- r2.toGraphViz(gv);
- FileOutputStream fox = new FileOutputStream("out.dot");
- gv.dump(fox);
- fox.close();
TestCase[] expanded = (TestCase[])new TestCaseBuilder().walk(r2.expand1());
System.err.println("executing...");
return ret;
}
public boolean execute() throws Exception {
- if (jav) {
- Forest<String> tree = new CharParser(grammar).parse(new StringReader(input));
- FileOutputStream fos = new FileOutputStream("/Users/megacz/Desktop/out.dot");
- PrintWriter p = new PrintWriter(new OutputStreamWriter(fos));
- GraphViz gv = new GraphViz();
- tree.toGraphViz(gv);
- gv.dump(p);
- p.flush();
- p.close();
- return true;
- }
Forest<String> res = null;
ParseFailed pfe = null;
+ CharParser parser = new CharParser(grammar);
+ parser.helpgc = false;
try {
res = tib
? /*new CharParser(grammar).parse(new Tib(input))*/ null
- : new CharParser(grammar).parse(new StringReader(input));
+ : parser.parse(new StringReader(input));
} catch (ParseFailed pf) {
pfe = pf;
}
//ystem.out.println("res=="+res);
+ if (graph) {
+ FileOutputStream fos = new FileOutputStream("out.dot");
+ PrintWriter p = new PrintWriter(new OutputStreamWriter(fos));
+ GraphViz gv = new GraphViz();
+ res.toGraphViz(gv);
+ gv.dump(p);
+ p.flush();
+ p.close();
+ System.out.println(parser);
+ }
Collection<Tree<String>> results = res==null ? new HashSet<Tree<String>>() : res.expand(false);
System.out.print("\r");
if (results == null || (results.size() == 0 && (output!=null && output.length > 0))) {
IdentityHashMap<ToGraphViz,Node> ihm = new IdentityHashMap<ToGraphViz,Node>();
HashMap<Node,Group> groups = new HashMap<Node,Group>();
- public class Group {
+ public class Group extends Node {
+ private final int idx = master_idx++;
+ public boolean cluster = false;
+ public boolean primary = true;
public Group() { }
public void add(Node n) { groups.put(n, this); }
+ public String name() { return cluster?("cluster_"+idx):("subgraph_"+idx); }
+ public boolean simple() { return false; }
+ public void dump(PrintWriter pw, IdentityHashMap<Node,Node> done) {
+ Group g = this;
+ if (done.get(g)!=null) return;
+ done.put(g,g);
+ pw.println(" subgraph "+name()+" { rank=same;\n");
+ pw.println(" label=\""+StringUtil.escapify(label.toString(), "\\\"\r\n")+"\";\n");
+ pw.println(" color="+g.color+";\n");
+ pw.println(" shape="+g.shape+";\n");
+ for(Node n : groups.keySet())
+ if (groups.get(n)==g)
+ n.dump(pw, done);
+ pw.println(" }\n");
+ }
}
private static int master_idx=0;
+
public class Node {
private final int idx = master_idx++;
public String label;
public String comment;
public boolean directed = false;
public String color="black";
+ public String fill="white";
+ public String shape="ellipse";
public ArrayList<Node> edges = new ArrayList<Node>();
public ArrayList<Object> labels = new ArrayList<Object>();
public ArrayList<Node> inbound = new ArrayList<Node>();
if (n.inbound.size() > 1) { simple = false; break; }
return simple;
}
- public void dump(PrintWriter pw) {
+ public void dump(PrintWriter pw, IdentityHashMap<Node,Node> done) {
+ if (done.get(this)!=null) return;
+ done.put(this, this);
if (inbound.size() > 0) {
boolean good = false;
for(Node n : inbound)
return n;
}
+ public Group createGroup(ToGraphViz o) {
+ Group n = (Group)ihm.get(o);
+ if (n!=null) return n;
+ n = new Group();
+ ihm.put(o, n);
+ return n;
+ }
+
public static interface ToGraphViz {
public Node toGraphViz(GraphViz gv);
public boolean isTransparent();
public void dump(OutputStream os) { dump(new PrintWriter(new OutputStreamWriter(os))); }
public void dump(PrintWriter pw) {
IdentityHashMap<Node,Node> done = new IdentityHashMap<Node,Node>();
- pw.println("digraph G { rankdir=LR; ordering=out; \n");
- for(Group g : groups.values()) {
- pw.println(" { rank=same;\n");
- for(Node n : groups.keySet())
- if (groups.get(n)==g) {
- done.put(n,n);
- n.dump(pw);
- }
- pw.println(" }\n");
- }
+ pw.println("digraph G { rankdir=LR; ordering=out; compound=true; \n");
+ for(Group g : groups.values())
+ if (g.primary)
+ g.dump(pw, done);
for(Node n : ihm.values()) {
if (done.get(n)!=null) continue;
- n.dump(pw);
+ if (n instanceof Group) continue;
+ n.dump(pw, done);
}
for(Node n : ihm.values()) n.edges(pw);
pw.println("}\n");
// FEATURE: make this faster (plenty of ways: quadratic probing hash table is one)
/** a sparse mapping from pairs of <tt>int</tt>'s to <tt>V</tt>'s */
-public final class IntPairMap<V> {
+public final class IntPairMap<V> implements Iterable<V> {
private final HashMap<Long, V> hm = new HashMap<Long, V>();
public Iterable<V> values() { return hm.values(); }
public int size() { return hm.size(); }
public void toArray(V[] v) { hm.values().toArray(v); }
+ public Iterator<V> iterator() { return hm.values().iterator(); }
}
--- /dev/null
+testcase {
+ input "if (bar!) baz!;";
+ output "IfThen:{id:{x:{b x:{a r}}} id:{x:{b x:{a z}}}}";
+
+ s = Expr ";"
+ Expr = IfThen
+ | IfThenElse
+ | id:: id "!"
+ id = [a-z] | x:: [a-z] id
+ IfThen = IfThen::
+ "if" "(" Expr ")"
+ Expr
+ /ws
+ IfThenElse = IfThenElse::
+ "if" "(" Expr ")"
+ NotIfThenExpr
+ "else"
+ Expr
+ /ws
+ NotIfThenExpr = (Expr & [a-z]+)
+ SpaceIfThen = (~[])*// !IfThen
+ ws = [\n ]**
+}
\ No newline at end of file
c = ("c":: x "2" x "1")*
x = [123]
}
+
+testcase {
+ input "if (bar!) baz!;";
+ output "IfThen:{id:{x:{b x:{a r}}} id:{x:{b x:{a z}}}}";
+
+ s = Expr ";"
+ Expr = IfThen
+ | IfThenElse
+ | id:: id "!"
+ id = [a-z] | x:: [a-z] id
+ IfThen = IfThen::
+ "if" "(" Expr ")"
+ Expr
+ /ws
+ IfThenElse = IfThenElse::
+ "if" "(" Expr ")"
+ NotIfThenExpr
+ "else"
+ Expr
+ /ws
+ NotIfThenExpr = (Expr & [a-z]+)
+ SpaceIfThen = (~[])*// !IfThen
+ ws = [\n ]**
+}
\ No newline at end of file