// Copyright 2006 all rights reserved; see LICENSE file for BSD-style license
package edu.berkeley.sbp;
-import edu.berkeley.sbp.*;
import edu.berkeley.sbp.util.*;
import edu.berkeley.sbp.Sequence.Position;
import java.io.*;
import java.util.*;
-import java.lang.reflect.*;
-import java.lang.ref.*;
-abstract class Cache<Token> {
+/**
+ * A collection of Elements and Sequences which reference only each other.
+ *
+ * All analyses are performed at the Grammar level, because a given
+ * Element/Sequence can appear in multiple Grammars.  In particular,
+ * some analyses depend on which elements *reference* a given
+ * element, rather than on which elements it *references*.
+ *
+ * This class is package-private because it is likely to change often.
+ */
+abstract class Grammar<Token> {
protected Union rootUnion;
public HashMap<Sequence, Topology> follow = new HashMap<Sequence, Topology>();
public HashSet<SequenceOrElement> all = new HashSet<SequenceOrElement>();
abstract Topology<Token> emptyTopology();
- public Cache(Union root) {
+ public Grammar(Union root) {
this.rootUnion = root;
if (root != null)
for(Sequence s : root)
public abstract Topology<Token> emptyTopology();
public String toString() { return pt.toString(); }
- Cache cache() { return pt; }
+ Grammar cache() { return pt; }
    /** parse <tt>input</tt> and return the shared packed parse forest (or throw an exception) */
public Forest<NodeType> parse(Input<Token> input) throws IOException, ParseFailed {
// Table //////////////////////////////////////////////////////////////////////////////
/** an SLR(1) parse table which may contain conflicts */
- class Table extends Cache<Token> {
+ class Table extends Grammar<Token> {
/** the start state */
final State<Token> start;
final HashSet<Sequence> needs = new HashSet<Sequence>();
final HashSet<Sequence> hates = new HashSet<Sequence>();
- // FIXME: these are ugly -- migrate into Cache
+ // FIXME: these are ugly -- migrate into Grammar
HashMap<Sequence,Boolean> canNeed = new HashMap<Sequence,Boolean>();
HashMap<Sequence,Boolean> canKill = new HashMap<Sequence,Boolean>();
this.firstp = new Position(0, null);
}
- abstract Forest epsilonForm(Input.Region loc, Cache cache);
+ abstract Forest epsilonForm(Input.Region loc, Grammar cache);
protected abstract <T> Forest<T> postReduce(Input.Region loc, Forest<T>[] args, Position p);
// Position /////////////////////////////////////////////////////////////////////////////////
- final <T> Forest<T> rewrite(Input.Region loc, Cache cache) {
+ final <T> Forest<T> rewrite(Input.Region loc, Grammar cache) {
if (this==firstp()) epsilonForm(loc, cache);
for(int i=0; i<pos; i++) if (holder[i]==null) throw new Error("realbad " + i);
for(int i=pos; i<elements.length; i++) {
public <T> Forest<T> postReduce(Input.Region loc, Forest<T>[] args, Position p) {
return (Forest<T>)Forest.create(loc, result, null, false);
}
- Forest epsilonForm(Input.Region loc, Cache cache) {
+ Forest epsilonForm(Input.Region loc, Grammar cache) {
return Forest.create(loc, result, null, false);
}
}
public Singleton(Element[] e, int idx) { super(e); this.idx = idx; }
public <T> Forest<T> postReduce(Input.Region loc, Forest<T>[] args, Position p) { return args[idx]; }
Sequence _clone() { return new Singleton(elements,idx); }
- Forest epsilonForm(Input.Region loc, Cache cache) {
+ Forest epsilonForm(Input.Region loc, Grammar cache) {
return ((Union)elements[idx]).epsilonForm(loc, cache);
}
}
for(int i=0; i<args.length; i++) if (!drops[i]) args2[j++] = args[i];
return Forest.create(loc, (T)tag, args2, true);
}
- Forest epsilonForm(Input.Region loc, Cache cache) {
+ Forest epsilonForm(Input.Region loc, Grammar cache) {
return Forest.create(loc, tag, new Forest[0], false);
}
}
if (spacing) for(int i=0; i<50-len; i++) sb.append(' ');
return sb;
}
- Forest epsilonForm(Input.Region loc, Cache cache) {
+ Forest epsilonForm(Input.Region loc, Grammar cache) {
return Forest.create(loc, tag, new Forest[0], false);
}
}