{
+    AbstractStackNode[] getExpects(String nonTerminal);
+}
diff --git a/src/org/rascalmpl/parser/gtd/IGTD.java b/src/org/rascalmpl/parser/gtd/IGTD.java
index bb4e9b5c656..0e61d06b213 100644
--- a/src/org/rascalmpl/parser/gtd/IGTD.java
+++ b/src/org/rascalmpl/parser/gtd/IGTD.java
@@ -22,7 +22,7 @@
 /**
  * Parser interface.
  */
-public interface IGTD{
+public interface IGTD extends ExpectsProvider {
 	/**
 	 * Parse the input string, using the given non-terminal as start node. If
 	 * the parse process successfully completes a result will be constructed
diff --git a/src/org/rascalmpl/parser/gtd/SGTDBF.java b/src/org/rascalmpl/parser/gtd/SGTDBF.java
index b0232bacf3e..2abcfd836f1 100755
--- a/src/org/rascalmpl/parser/gtd/SGTDBF.java
+++ b/src/org/rascalmpl/parser/gtd/SGTDBF.java
@@ -1,13 +1,10 @@
 /*******************************************************************************
- * Copyright (c) 2009-2013 CWI
- * All rights reserved. This program and the accompanying materials
- * are made available under the terms of the Eclipse Public License v1.0
- * which accompanies this distribution, and is available at
- * http://www.eclipse.org/legal/epl-v10.html
+ * Copyright (c) 2009-2013 CWI All rights reserved. This program and the accompanying materials are
+ * made available under the terms of the Eclipse Public License v1.0 which accompanies this
+ * distribution, and is available at http://www.eclipse.org/legal/epl-v10.html
  *
- * Contributors:
- * * Arnold Lankamp - Arnold.Lankamp@cwi.nl
-*******************************************************************************/
+ * Contributors: * Arnold Lankamp - Arnold.Lankamp@cwi.nl
+ *******************************************************************************/
 package org.rascalmpl.parser.gtd;
 
 import java.lang.reflect.InvocationTargetException;
@@ -45,586 +42,716 @@
 import org.rascalmpl.parser.gtd.util.IntegerList;
 import org.rascalmpl.parser.gtd.util.IntegerObjectList;
 import org.rascalmpl.parser.gtd.util.Stack;
+import org.rascalmpl.parser.uptr.debug.NopDebugListener;
+import org.rascalmpl.parser.util.DebugUtil;
+import org.rascalmpl.parser.util.ParseStateVisualizer;
+import org.rascalmpl.util.visualize.dot.NodeId;
+import org.rascalmpl.values.RascalValueFactory;
+import org.rascalmpl.values.parsetrees.ITree;
+import org.rascalmpl.values.parsetrees.TreeAdapter;
+
+import io.usethesource.vallang.IConstructor;
+import io.usethesource.vallang.IList;
+import io.usethesource.vallang.ISet;
+import io.usethesource.vallang.IValue;
+import io.usethesource.vallang.type.Type;
 
 /**
  * This is the core of the parser; it drives the parse process.
  */
-public abstract class SGTDBF implements IGTD{
+public abstract class SGTDBF implements IGTD {
private final static int DEFAULT_TODOLIST_CAPACITY = 16;
-
+
private URI inputURI;
private int[] input;
-
+ private int location;
+ protected int lookAheadChar;
+
+ // A mapping between character location and line/column.
private final PositionStore positionStore;
-
+
+ // Terminals that matched. This is a circular buffer indexed by the length of the terminal;
+ // queueIndex determines the start of the buffer. Each entry contains the node to reduce and
+ // its result node. At each position, a stack is maintained of all terminals to reduce of a
+ // certain length. So at queueIndex+3, all terminals of length 3 that need reducing are stored.
private DoubleStack > stacksWithNonTerminalsToReduce;
+
+ // The current stack of terminals to reduce: it contains the matchable node with the smallest
+ // length from todoLists.
+ // - Nodes are removed in `reduceTerminals` where all productions are advanced one dot past the
+ // matchable node
+ // - Variable is assigned in:
+ // - findFirstStacksToReduce: the first non-empty `todoList` is assigned to this variable
+ // - findStacksToReduce: again the first non-empty `todoList` is assigned to this variable
+ // - parse: variable is used in main reduce/expand loop to determine when it is time to look for
+ // more `stacksToReduce`.
private DoubleStack > unmatchableMidProductionNodes;
private final DoubleStack recoverer;
-
+
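For orientation, here is a minimal, standalone sketch of the length-indexed circular queue that the comments above describe; the class and method names are illustrative and are not the parser's actual fields.

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.List;

// Illustrative only: models the todoLists/queueIndex scheme, not the parser's real data structures.
final class LengthIndexedQueue<T> {
    private final List<Deque<T>> slots;
    private int queueIndex = 0; // slot belonging to the current input position

    LengthIndexedQueue(int capacity) {
        slots = new ArrayList<>();
        for (int i = 0; i < capacity; i++) {
            slots.add(new ArrayDeque<>());
        }
    }

    // A terminal that matched 'length' characters becomes reducible 'length' positions ahead.
    void enqueue(T node, int length) {
        slots.get((queueIndex + length) % slots.size()).push(node);
    }

    // Advancing one input position: every node stored in the current slot is now due for reduction.
    // (The real parser skips ahead to the first non-empty slot instead of stepping one by one.)
    Deque<T> advance() {
        Deque<T> due = slots.get(queueIndex);
        queueIndex = (queueIndex + 1) % slots.size();
        return due;
    }
}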
// Debugging
private IDebugListener debugListener;
-
+ private ParseStateVisualizer visualizer;
+
// Temporary instrumentation for accurate profiling
private long timestamp;
private boolean printTimes = false;
-
- public SGTDBF(){
+
+
+ public SGTDBF() {
super();
-
+
positionStore = new PositionStore();
-
+
stacksToExpand = new Stack >();
-
+
cachedEdgesForExpect = new HashMap >();
+ unmatchableMidProductionNodes =
+ new DoubleStack >();
filteredNodes = new DoubleStack [] invokeExpects(AbstractStackNode nonTerminal){
- String name = nonTerminal.getName();
- AbstractStackNode [] expects = expectCache.get(name);
- if(expects == null){
- try{
- Method method = getClass().getMethod(name);
- try{
+ public AbstractStackNode [] getExpects(String nonTerminal) {
+ AbstractStackNode [] expects = expectCache.get(nonTerminal);
+ if (expects == null) {
+ try {
+ Method method = getClass().getMethod(nonTerminal);
+ try {
method.setAccessible(true); // Try to bypass the 'isAccessible' check to save time.
- }catch(SecurityException sex){
+ }
+ catch (SecurityException sex) {
// Ignore this if it happens.
}
-
+
expects = (AbstractStackNode []) method.invoke(this);
- }catch(NoSuchMethodException nsmex){
- throw new UndeclaredNonTerminalException(name, getClass());
- }catch(IllegalAccessException iaex){
+ }
+ catch (NoSuchMethodException nsmex) {
+ throw new UndeclaredNonTerminalException(nonTerminal, getClass());
+ }
+ catch (IllegalAccessException iaex) {
throw new RuntimeException(iaex);
- }catch(InvocationTargetException itex){
+ }
+ catch (InvocationTargetException itex) {
throw new RuntimeException(itex.getTargetException());
}
-
- expectCache.putUnsafe(name, expects);
+
+ expectCache.putUnsafe(nonTerminal, expects);
}
-
- return expects;
+
+ return expects;
}
-
+
+ /**
+ * Triggers the gathering of alternatives for the given non-terminal.
+ */
+ protected AbstractStackNode [] invokeExpects(AbstractStackNode nonTerminal) {
+ return getExpects(nonTerminal.getName());
+ }
+
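As a reading aid, a simplified sketch of the reflection-plus-cache pattern that getExpects uses above; the element type is a placeholder (Object[]) rather than the parser's AbstractStackNode arrays, and the class name is invented.

import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;

// Illustrative only: generated parsers declare one public no-argument method per non-terminal
// (e.g. "Expression()") that returns the alternatives for that non-terminal.
class ExpectsCacheSketch {
    private final Map<String, Object[]> cache = new HashMap<>();

    Object[] lookup(Object generatedParser, String nonTerminal) throws ReflectiveOperationException {
        Object[] expects = cache.get(nonTerminal);
        if (expects == null) {
            Method method = generatedParser.getClass().getMethod(nonTerminal);
            expects = (Object[]) method.invoke(generatedParser);
            cache.put(nonTerminal, expects);
        }
        return expects;
    }
}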
/**
* Moves to the next symbol in the production.
*/
- private AbstractStackNode updateNextNode(AbstractStackNode next, AbstractStackNode node, AbstractNode result){
- IntegerKeyedDoubleValueHashMap.Entry updateNextNode(AbstractStackNode next, AbstractStackNode node,
+ AbstractNode result) {
+ IntegerKeyedDoubleValueHashMap.Entry alternative = alternativeEntry.value1;
- if(alternative.getStartLocation() == location){
- if(alternative.isMatchable()){
- if(alternative.isEmptyLeafNode()){
+ if (alternative.getStartLocation() == location) {
+ if (alternative.isMatchable()) {
+ if (alternative.isEmptyLeafNode()) {
// Encountered a possible stack 'overtake'.
- if(node.getStartLocation() != location){
+ if (node.getStartLocation() != location) {
propagateEdgesAndPrefixes(node, result, alternative, alternative.getResult());
- }else{
- propagateEdgesAndPrefixesForNullable(node, result, alternative, alternative.getResult(), node.getEdges().size());
+ }
+ else {
+ propagateEdgesAndPrefixesForNullable(node, result, alternative, alternative.getResult(),
+ node.getEdges().size());
}
return alternative;
}
- }else{
+ }
+ else {
EdgesSet alternativeEdgesSet = alternative.getIncomingEdges();
int resultStoreId = getResultStoreId(alternative.getId());
- if(alternativeEdgesSet != null && alternativeEdgesSet.getLastVisitedLevel(resultStoreId) == location){
+ if (alternativeEdgesSet != null
+ && alternativeEdgesSet.getLastVisitedLevel(resultStoreId) == location) {
// Encountered a possible stack 'overtake'.
- if(node.getStartLocation() != location){
- propagateEdgesAndPrefixes(node, result, alternative, alternativeEdgesSet.getLastResult(resultStoreId));
- }else{
- propagateEdgesAndPrefixesForNullable(node, result, alternative, alternativeEdgesSet.getLastResult(resultStoreId), node.getEdges().size());
+ if (node.getStartLocation() != location) {
+ propagateEdgesAndPrefixes(node, result, alternative,
+ alternativeEdgesSet.getLastResult(resultStoreId));
+ }
+ else {
+ propagateEdgesAndPrefixesForNullable(node, result, alternative,
+ alternativeEdgesSet.getLastResult(resultStoreId), node.getEdges().size());
}
return alternative;
}
}
}
-
+
alternative.updateNode(node, result);
-
- if(debugListener != null) debugListener.progressed(node, result, alternative);
-
+
+ debugListener.progressed(node, result, alternative);
+
return alternative;
}
-
- if(next.isMatchable()){ // Eager matching optimization.
- if((location + next.getLength()) > input.length) return null;
-
+
+ if (next.isMatchable()) { // Eager matching optimization.
+ if ((location + next.getLength()) > input.length)
+ return null;
+
AbstractNode nextResult = next.match(input, location);
- if(nextResult == null){
- // Push the node including it's predecessor to the appropriate error tracking collection (and take care of merging when necessary).
- DoubleArrayList next, AbstractStackNode node, AbstractNode result, IntegerObjectList next, AbstractStackNode node, AbstractNode result,
+ IntegerObjectList alternative = alternativeEntry.value1;
- if(result.isEmpty()){
- if(alternative.isMatchable()){
- if(alternative.isEmptyLeafNode()){
+ if (result.isEmpty()) {
+ if (alternative.isMatchable()) {
+ if (alternative.isEmptyLeafNode()) {
// Encountered a possible stack 'overtake'.
- propagateAlternativeEdgesAndPrefixes(node, result, alternative, alternative.getResult(), node.getEdges().size(), edgesMap, prefixesMap);
+ propagateAlternativeEdgesAndPrefixes(node, result, alternative, alternative.getResult(),
+ node.getEdges().size(), edgesMap, prefixesMap);
return true;
}
- }else{
+ }
+ else {
EdgesSet alternativeEdgesSet = alternative.getIncomingEdges();
int resultStoreId = getResultStoreId(alternative.getId());
- if(alternativeEdgesSet != null && alternativeEdgesSet.getLastVisitedLevel(resultStoreId) == location){
+ if (alternativeEdgesSet != null
+ && alternativeEdgesSet.getLastVisitedLevel(resultStoreId) == location) {
AbstractContainerNode nextResult = alternativeEdgesSet.getLastResult(resultStoreId);
// Encountered a possible stack 'overtake'.
- propagateAlternativeEdgesAndPrefixes(node, result, alternative, nextResult, node.getEdges().size(), edgesMap, prefixesMap);
+ propagateAlternativeEdgesAndPrefixes(node, result, alternative, nextResult,
+ node.getEdges().size(), edgesMap, prefixesMap);
return true;
}
}
}
-
- alternative.updatePrefixSharedNode(edgesMap, prefixesMap); // Prevent unnecessary overhead; share whenever possible.
-
- if(debugListener != null) debugListener.progressed(node, result, alternative);
-
+
+ alternative.updatePrefixSharedNode(edgesMap, prefixesMap); // Prevent unnecessary overhead; share whenever
+ // possible.
+
+ debugListener.progressed(node, result, alternative);
+
return true;
}
-
- if(next.isMatchable()){ // Eager matching optimization.
- if((location + next.getLength()) > input.length) return false;
-
+
+ if (next.isMatchable()) { // Eager matching optimization.
+ if ((location + next.getLength()) > input.length)
+ return false;
+
AbstractNode nextResult = next.match(input, location);
- if(nextResult == null){
- // Push the node including it's predecessor to the appropriate error tracking collection (and take care of merging when necessary).
- DoubleArrayList node, AbstractNode nodeResultStore, AbstractStackNode next, AbstractNode nextResultStore, int potentialNewEdges){
+ private void propagateReductions(AbstractStackNode node, AbstractNode nodeResultStore, AbstractStackNode next,
+ AbstractNode nextResultStore, int potentialNewEdges) {
IntegerList propagatedReductions = next.getPropagatedReductions();
-
+
IntegerObjectList edgeSet = edgesMap.getValue(i);
-
- if(debugListener != null) debugListener.reducing(node, resultLink, edgeSet);
-
- if(!hasNestingRestrictions){
+
+ debugListener.reducing(node, resultLink, edgeSet);
+
+ if (!hasNestingRestrictions) {
handleEdgeList(edgeSet, name, production, resultLink, startLocation);
- }else{
+ }
+ else {
handleEdgeListWithRestrictions(edgeSet, name, production, resultLink, startLocation, filteredParents);
}
}
}
-
+
/**
- * Part of the hidden-right-recursion fix.
- * Propagates absent prefixes.
+ * Part of the hidden-right-recursion fix. Propagates absent prefixes.
*/
- private void propagatePrefixes(AbstractStackNode next, AbstractNode nextResult, int nrOfAddedEdges){
+ private void propagatePrefixes(AbstractStackNode next, AbstractNode nextResult, int nrOfAddedEdges) {
// Proceed with the tail of the production.
int nextDot = next.getDot() + 1;
AbstractStackNode [] prod = next.getProduction();
AbstractStackNode nextNext = prod[nextDot];
- IntegerKeyedDoubleValueHashMap.Entry nextNextAlternative = null;
- if(nextNextAlternativeEntry != null){ // Valid continuation.
+ if (nextNextAlternativeEntry != null) { // Valid continuation.
DoubleArrayList nextNextAlternativeEdgesSet = nextNextAlternative.getIncomingEdges();
int resultStoreId = getResultStoreId(nextNextAlternative.getId());
- if(nextNextAlternativeEdgesSet != null && nextNextAlternativeEdgesSet.getLastVisitedLevel(resultStoreId) == location){
- propagateEdgesAndPrefixesForNullable(next, nextResult, nextNextAlternative, nextNextAlternativeEdgesSet.getLastResult(resultStoreId), nrOfAddedEdges);
- }else{
+ if (nextNextAlternativeEdgesSet != null
+ && nextNextAlternativeEdgesSet.getLastVisitedLevel(resultStoreId) == location) {
+ propagateEdgesAndPrefixesForNullable(next, nextResult, nextNextAlternative,
+ nextNextAlternativeEdgesSet.getLastResult(resultStoreId), nrOfAddedEdges);
+ }
+ else {
nextNextAlternative.updateNode(next, nextResult);
-
- if(debugListener != null) debugListener.propagated(next, nextResult, nextNextAlternative);
+
+ debugListener.propagated(next, nextResult, nextNextAlternative);
}
}
- }else{
+ }
+ else {
predecessors.add(next, nextResult);
}
}
-
+
// Handle alternative continuations (related to prefix sharing).
AbstractStackNode [][] alternateProds = next.getAlternateProductions();
- if(alternateProds != null){
- if(nextNextAlternative == null){ // If the first continuation has not been initialized yet (it may be a matchable that didn't match), create a dummy version to construct the necessary edges and prefixes.
- if(!nextNext.isMatchable()) return; // Matchable, abort.
+ if (alternateProds != null) {
+ if (nextNextAlternative == null) { // If the first continuation has not been initialized yet (it may be a
+ // matchable that didn't match), create a dummy version to construct the
+ // necessary edges and prefixes.
+ if (!nextNext.isMatchable())
+ return; // Matchable, abort.
nextNextAlternative = nextNext.getCleanCopy(location);
nextNextAlternative.updateNode(next, nextResult);
-
- if(debugListener != null) debugListener.propagated(next, nextResult, nextNextAlternative);
+
+ debugListener.propagated(next, nextResult, nextNextAlternative);
}
-
+
IntegerObjectList alternativeNextNext = prod[nextDot];
-
- IntegerKeyedDoubleValueHashMap.Entry nextNextAltAlternative = null;
- if(nextNextAltAlternativeEntry != null){
- DoubleArrayList nextAlternativeEdgesSet = nextNextAltAlternative.getIncomingEdges();
int resultStoreId = getResultStoreId(nextNextAltAlternative.getId());
- if(nextAlternativeEdgesSet != null && nextAlternativeEdgesSet.getLastVisitedLevel(resultStoreId) == location){
- propagateAlternativeEdgesAndPrefixes(next, nextResult, nextNextAltAlternative, nextAlternativeEdgesSet.getLastResult(resultStoreId), nrOfAddedEdges, nextNextEdgesMap, nextNextPrefixesMap);
- }else{
+ if (nextAlternativeEdgesSet != null
+ && nextAlternativeEdgesSet.getLastVisitedLevel(resultStoreId) == location) {
+ propagateAlternativeEdgesAndPrefixes(next, nextResult, nextNextAltAlternative,
+ nextAlternativeEdgesSet.getLastResult(resultStoreId), nrOfAddedEdges,
+ nextNextEdgesMap, nextNextPrefixesMap);
+ }
+ else {
nextNextAltAlternative.updatePrefixSharedNode(nextNextEdgesMap, nextNextPrefixesMap);
-
- if(debugListener != null) debugListener.propagated(next, nextResult, nextNextAltAlternative);
+
+ debugListener.propagated(next, nextResult, nextNextAltAlternative);
}
}
- }else{
+ }
+ else {
predecessors.add(next, nextResult);
}
}
}
}
}
-
+
/**
- * Part of the hidden-right-recursion fix.
- * Inserts missing prefixes and triggers reductions where necessary.
+ * Part of the hidden-right-recursion fix. Inserts missing prefixes and triggers reductions where
+ * necessary.
*/
- private void propagateEdgesAndPrefixes(AbstractStackNode node, AbstractNode nodeResult, AbstractStackNode next, AbstractNode nextResult){
+ private void propagateEdgesAndPrefixes(AbstractStackNode node, AbstractNode nodeResult,
+ AbstractStackNode next, AbstractNode nextResult) {
int nrOfAddedEdges = next.updateOvertakenNode(node, nodeResult);
- if(debugListener != null) debugListener.propagated(node, nodeResult, next);
-
- if(nrOfAddedEdges == 0) return;
-
- if(next.isEndNode()){
+ debugListener.propagated(node, nodeResult, next);
+
+ if (nrOfAddedEdges == 0)
+ return;
+
+ if (next.isEndNode()) {
propagateReductions(node, nodeResult, next, nextResult, nrOfAddedEdges);
}
-
- if(next.hasNext()){
+
+ if (next.hasNext()) {
propagatePrefixes(next, nextResult, nrOfAddedEdges);
}
}
-
+
/**
- * Part of the hidden-right-recursion fix.
- * Inserts missing prefixes and triggers reductions where necessary (specific for nullable nodes).
+ * Part of the hidden-right-recursion fix. Inserts missing prefixes and triggers reductions where
+ * necessary (specific for nullable nodes).
*/
- private void propagateEdgesAndPrefixesForNullable(AbstractStackNode node, AbstractNode nodeResult, AbstractStackNode next, AbstractNode nextResult, int potentialNewEdges){
+ private void propagateEdgesAndPrefixesForNullable(AbstractStackNode node, AbstractNode nodeResult,
+ AbstractStackNode next, AbstractNode nextResult, int potentialNewEdges) {
int nrOfAddedEdges = next.updateOvertakenNullableNode(node, nodeResult, potentialNewEdges);
- if(debugListener != null) debugListener.propagated(node, nodeResult, next);
-
- if(nrOfAddedEdges == 0) return;
-
- if(next.isEndNode()){
+ debugListener.propagated(node, nodeResult, next);
+
+ if (nrOfAddedEdges == 0)
+ return;
+
+ if (next.isEndNode()) {
propagateReductions(node, nodeResult, next, nextResult, nrOfAddedEdges);
}
-
- if(next.hasNext()){
+
+ if (next.hasNext()) {
propagatePrefixes(next, nextResult, nrOfAddedEdges);
}
}
-
+
/**
- * Part of the hidden-right-recursion fix.
- * Inserts missing prefixes and triggers reductions where necessary (specifically for alternative continuations of prefix-shared productions).
+ * Part of the hidden-right-recursion fix. Inserts missing prefixes and triggers reductions where
+ * necessary (specifically for alternative continuations of prefix-shared productions).
*/
- private void propagateAlternativeEdgesAndPrefixes(AbstractStackNode node, AbstractNode nodeResult, AbstractStackNode next, AbstractNode nextResult, int potentialNewEdges, IntegerObjectList node, AbstractNode nodeResult,
+ AbstractStackNode next, AbstractNode nextResult, int potentialNewEdges,
+ IntegerObjectList node, AbstractNode result){
+ private void updateEdges(AbstractStackNode node, AbstractNode result) {
IntegerObjectList edgeSet = edgesMap.getValue(i);
-
- if(debugListener != null) debugListener.reducing(node, resultLink, edgeSet);
-
- if(!hasNestingRestrictions){ // Select the optimized path for handling edge sets that don't have nesting restrictions associated with them.
+
+ debugListener.reducing(node, resultLink, edgeSet);
+
+ if (!hasNestingRestrictions) { // Select the optimized path for handling edge sets that don't have nesting
+ // restrictions associated with them.
handleEdgeList(edgeSet, name, production, resultLink, edgesMap.getKey(i));
- }else{
- handleEdgeListWithRestrictions(edgeSet, name, production, resultLink, edgesMap.getKey(i), filteredParents);
+ }
+ else {
+ handleEdgeListWithRestrictions(edgeSet, name, production, resultLink, edgesMap.getKey(i),
+ filteredParents);
}
}
}
-
+
/**
* Initiates the handling of reductions for nullable symbols.
*/
- private void updateNullableEdges(AbstractStackNode node, AbstractNode result){
+ private void updateNullableEdges(AbstractStackNode node, AbstractNode result) {
IntegerList propagatedReductions = node.getPropagatedReductions();
-
+
int initialSize = propagatedReductions.size();
-
+
IntegerObjectList edgeSet = edgesMap.getValue(i);
-
- if(debugListener != null) debugListener.reducing(node, resultLink, edgeSet);
-
- if(!hasNestingRestrictions){ // Select the optimized path for handling edge sets that don't have nesting restrictions associated with them.
+
+ debugListener.reducing(node, resultLink, edgeSet);
+
+ if (!hasNestingRestrictions) { // Select the optimized path for handling edge sets that don't have nesting
+ // restrictions associated with them.
handleEdgeList(edgeSet, name, production, resultLink, startLocation);
- }else{
+ }
+ else {
handleEdgeListWithRestrictions(edgeSet, name, production, resultLink, startLocation, filteredParents);
}
}
}
-
+
/**
* Handles reductions.
*/
- private void handleEdgeList(EdgesSet edgeSet, String name, P production, Link resultLink, int startLocation){
+ private void handleEdgeList(EdgesSet edgeSet, String name, P production, Link resultLink, int startLocation) {
AbstractContainerNode resultStore = null;
int resultStoreId = EdgesSet.DEFAULT_RESULT_STORE_ID;
- if(edgeSet.getLastVisitedLevel(resultStoreId) != location){
+ if (edgeSet.getLastVisitedLevel(resultStoreId) != location) {
AbstractStackNode edge = edgeSet.get(0);
-
- if(edge.isRecovered()){
+
+ if (edge.isRecovered()) {
resultStore = new RecoveredNode (inputURI, startLocation, location);
- }else if(edge.isExpandable()){
- resultStore = new ExpandableContainerNode (inputURI, startLocation, location, startLocation == location, edge.isSeparator(), edge.isLayout());
- }else{
- resultStore = new SortContainerNode (inputURI, startLocation, location, startLocation == location, edge.isSeparator(), edge.isLayout());
}
-
+ else if (edge.isExpandable()) {
+ resultStore = new ExpandableContainerNode (inputURI, startLocation, location,
+ startLocation == location, edge.isSeparator(), edge.isLayout());
+ }
+ else {
+ resultStore = new SortContainerNode (inputURI, startLocation, location, startLocation == location,
+ edge.isSeparator(), edge.isLayout());
+ }
+
stacksWithNonTerminalsToReduce.push(edge, resultStore);
-
- if(debugListener != null) debugListener.reduced(edge);
-
- for(int j = edgeSet.size() - 1; j >= 1; --j){
+
+ debugListener.reduced(edge);
+
+ for (int j = edgeSet.size() - 1; j >= 1; --j) {
edge = edgeSet.get(j);
stacksWithNonTerminalsToReduce.push(edge, resultStore);
-
- if(debugListener != null) debugListener.reduced(edge);
+
+ debugListener.reduced(edge);
}
-
+
edgeSet.setLastVisitedLevel(location, resultStoreId);
edgeSet.setLastResult(resultStore, resultStoreId);
- }else{
+ }
+ else {
resultStore = edgeSet.getLastResult(resultStoreId);
}
-
+
resultStore.addAlternative(production, resultLink);
}
-
+
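A small sketch of the "last visited level" memoization that handleEdgeList relies on: all edges reducing the same non-terminal at the same input position share one result store, and each production is merely added to it as an alternative. The names below are illustrative, not the parser's own.

import java.util.function.Supplier;

// Illustrative only: one shared result store per (edge set, input level).
class LevelMemoSketch<R> {
    private int lastVisitedLevel = -1;
    private R lastResult;

    // Creates the result store at most once per level; later reductions at the same level reuse it.
    R resultFor(int level, Supplier<R> freshStore) {
        if (lastVisitedLevel != level) {
            lastResult = freshStore.get();
            lastVisitedLevel = level;
        }
        return lastResult;
    }
}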
// Reuse these structures.
private final IntegerList firstTimeRegistration = new IntegerList();
private final IntegerList firstTimeReductions = new IntegerList();
@@ -632,89 +759,99 @@ private void handleEdgeList(EdgesSet edgeSet, String name, P production, Link
/**
* Handles reductions which may be associated with nesting restrictions.
*/
- private void handleEdgeListWithRestrictions(EdgesSet edgeSet, String name, P production, Link resultLink, int startLocation, IntegerList filteredParents){
+ private void handleEdgeListWithRestrictions(EdgesSet edgeSet, String name, P production, Link resultLink,
+ int startLocation, IntegerList filteredParents) {
// Only add the result to each resultstore once.
- // Make sure each edge only gets added to the non-terminal reduction list once per level, by keeping track of them.
+ // Make sure each edge only gets added to the non-terminal reduction list once per level, by keeping
+ // track of them.
firstTimeRegistration.clear();
firstTimeReductions.clear();
- for(int j = edgeSet.size() - 1; j >= 0; --j){
+ for (int j = edgeSet.size() - 1; j >= 0; --j) {
AbstractStackNode edge = edgeSet.get(j);
int resultStoreId = getResultStoreId(edge.getId());
-
- if(!firstTimeReductions.contains(resultStoreId)){
- if(firstTimeRegistration.contains(resultStoreId)){
- if(debugListener != null) debugListener.filteredByNestingRestriction(edge);
-
+
+ if (!firstTimeReductions.contains(resultStoreId)) {
+ if (firstTimeRegistration.contains(resultStoreId)) {
+ debugListener.filteredByNestingRestriction(edge);
+
continue;
}
firstTimeRegistration.add(resultStoreId);
-
+
// Check whether or not the nesting is allowed.
- if(filteredParents == null || !filteredParents.contains(edge.getId())){
+ if (filteredParents == null || !filteredParents.contains(edge.getId())) {
AbstractContainerNode resultStore = null;
- if(edgeSet.getLastVisitedLevel(resultStoreId) == location){
+ if (edgeSet.getLastVisitedLevel(resultStoreId) == location) {
resultStore = edgeSet.getLastResult(resultStoreId);
}
- if(resultStore == null){
+ if (resultStore == null) {
if (edge.isRecovered()) {
resultStore = new RecoveredNode (inputURI, startLocation, location);
- }else if (edge.isExpandable()) {
- resultStore = new ExpandableContainerNode (inputURI, startLocation, location, startLocation == location, edge.isSeparator(), edge.isLayout());
- }else {
- resultStore = new SortContainerNode (inputURI, startLocation, location, startLocation == location, edge.isSeparator(), edge.isLayout());
}
-
+ else if (edge.isExpandable()) {
+ resultStore = new ExpandableContainerNode (inputURI, startLocation, location,
+ startLocation == location, edge.isSeparator(), edge.isLayout());
+ }
+ else {
+ resultStore = new SortContainerNode (inputURI, startLocation, location,
+ startLocation == location, edge.isSeparator(), edge.isLayout());
+ }
+
edgeSet.setLastVisitedLevel(location, resultStoreId);
edgeSet.setLastResult(resultStore, resultStoreId);
-
+
stacksWithNonTerminalsToReduce.push(edge, resultStore);
firstTimeReductions.add(resultStoreId);
}
-
+
resultStore.addAlternative(production, resultLink);
-
- if(debugListener != null) debugListener.reduced(edge);
- }else{
- if(debugListener != null) debugListener.filteredByNestingRestriction(edge);
+
+ debugListener.reduced(edge);
+ }
+ else {
+ debugListener.filteredByNestingRestriction(edge);
}
- }else{
+ }
+ else {
AbstractContainerNode resultStore = edgeSet.getLastResult(resultStoreId);
stacksWithNonTerminalsToReduce.push(edge, resultStore);
}
}
}
-
+
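A tiny sketch of the "once per result store per level" bookkeeping used by handleEdgeListWithRestrictions above; the parser's IntegerList is replaced by standard sets purely for illustration.

import java.util.HashSet;
import java.util.Set;

// Illustrative only: deduplicates work per result-store id within a single input level.
class OncePerLevelSketch {
    private final Set<Integer> registered = new HashSet<>();
    private final Set<Integer> reduced = new HashSet<>();

    void startLevel() {
        registered.clear();
        reduced.clear();
    }

    // True the first time this result store is seen at the current level.
    boolean firstRegistration(int resultStoreId) {
        return registered.add(resultStoreId);
    }

    // True the first time this result store is actually queued for reduction at the current level.
    boolean firstReduction(int resultStoreId) {
        return reduced.add(resultStoreId);
    }
}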
/**
* Move to the next symbol(s) in the production.
*/
- private void moveToNext(AbstractStackNode node, AbstractNode result){
+ private void moveToNext(AbstractStackNode node, AbstractNode result) {
int nextDot = node.getDot() + 1;
AbstractStackNode [] prod = node.getProduction();
AbstractStackNode newNext = prod[nextDot];
AbstractStackNode next = updateNextNode(newNext, node, result);
-
+
// Handle alternative continuations of this production (related to prefix-sharing).
AbstractStackNode [][] alternateProds = node.getAlternateProductions();
- if(alternateProds != null){
+ if (alternateProds != null) {
IntegerObjectList newAlternativeNext = prod[nextDot];
-
- if(edgesMap != null){
+
+ if (edgesMap != null) {
updateAlternativeNextNode(newAlternativeNext, node, result, edgesMap, prefixesMap);
- }else{
+ }
+ else {
AbstractStackNode alternativeNext = updateNextNode(newAlternativeNext, node, result);
-
- if(alternativeNext != null){
+
+ if (alternativeNext != null) {
edgesMap = alternativeNext.getEdges();
prefixesMap = alternativeNext.getPrefixesMap();
}
@@ -722,139 +859,186 @@ private void moveToNext(AbstractStackNode node, AbstractNode result){
}
}
}
-
+
/**
- * Progress to the next 'states' associated with the given node.
- * I.e. move to the next symbol(s) in the production (if available) and executed reductions if necessary.
+ * Progress to the next 'states' associated with the given node. I.e. move to the next symbol(s) in
+ * the production (if available) and execute reductions if necessary.
*/
- private void move(AbstractStackNode node, AbstractNode result){
- if(debugListener != null) debugListener.moving(node, result);
-
+ private void move(AbstractStackNode node, AbstractNode result) {
+ debugListener.moving(node, result);
+
// Handle filtering.
ICompletionFilter[] completionFilters = node.getCompletionFilters();
- if(completionFilters != null){
+ if (completionFilters != null) {
int startLocation = node.getStartLocation();
- for(int i = completionFilters.length - 1; i >= 0; --i){
- if(completionFilters[i].isFiltered(input, startLocation, location, positionStore)){
+ for (int i = completionFilters.length - 1; i >= 0; --i) {
+ if (completionFilters[i].isFiltered(input, startLocation, location, positionStore)) {
filteredNodes.push(node, result);
-
- if(debugListener != null) debugListener.filteredByCompletionFilter(node, result);
-
+
+ debugListener.filteredByCompletionFilter(node, result);
+
return;
}
}
}
-
- if(node.isEndNode()){
- if(!result.isEmpty() || node.getId() == AbstractExpandableStackNode.DEFAULT_LIST_EPSILON_ID){ // Only go into the nullable fix path for nullables (special list epsilons can be ignored as well).
+
+ if (node.isEndNode()) {
+ if (!result.isEmpty() || node.getId() == AbstractExpandableStackNode.DEFAULT_LIST_EPSILON_ID) {
+ // Only go into the nullable fix path for nullables (special list epsilons can be ignored as well).
updateEdges(node, result);
- }else{
+ }
+ else {
updateNullableEdges(node, result);
}
}
-
- if(node.hasNext()){
+
+ if (node.hasNext()) {
moveToNext(node, result);
}
}
-
+
/**
* Initiate the handling of stacks.
*/
- private void reduce(){
+ private void reduceTerminals() {
// Reduce terminals
- while(!stacksWithTerminalsToReduce.isEmpty()){
+ visualize("Reducing terminals", ParseStateVisualizer.TERMINALS_TO_REDUCE_ID);
+ while (!stacksWithTerminalsToReduce.isEmpty()) {
move(stacksWithTerminalsToReduce.peekFirst(), stacksWithTerminalsToReduce.popSecond());
}
-
+ }
+
+ private void reduceNonTerminals() {
// Reduce non-terminals
- while(!stacksWithNonTerminalsToReduce.isEmpty()){
+ visualize("Reducing non-terminals", ParseStateVisualizer.NON_TERMINALS_TO_REDUCE_ID);
+ while (!stacksWithNonTerminalsToReduce.isEmpty()) {
move(stacksWithNonTerminalsToReduce.peekFirst(), stacksWithNonTerminalsToReduce.popSecond());
}
}
-
+
/**
- * Locates the initial set of stacks that is queued for handling, for which the least amount of characters needs to be shifted.
+ * Locates the initial set of stacks that is queued for handling, for which the least amount of
+ * characters needs to be shifted.
*/
- private boolean findFirstStacksToReduce(){
- for(int i = 0; i < todoLists.length; ++i){
+ private boolean findFirstStacksToReduce() {
+ for (int i = 0; i < todoLists.length; ++i) {
DoubleStack recovered = recoveredNodes.getFirst(i);
- addTodo(recovered, recovered.getLength(), recoveredNodes.getSecond(i));
+ queueMatchableNode(recovered, recovered.getLength(), recoveredNodes.getSecond(i));
}
+ parseErrorRecovered = true;
return findStacksToReduce();
}
-
- parseErrorOccured = true;
+
+ parseErrorEncountered = true;
}
-
+
return false;
}
-
+
/**
- * Locates the set of stacks that is queued for handling, for which the least amount of characters needs to be shifted.
+ * Locates the set of stacks that is queued for handling, for which the least amount of characters
+ * needs to be shifted.
*/
- private boolean findStacksToReduce(){
+ private boolean findStacksToReduce() {
+ visualize("Finding stacks to reduce", ParseStateVisualizer.TODO_LISTS_ID);
int queueDepth = todoLists.length;
- for(int i = 1; i < queueDepth; ++i){
+ for (int i = 1; i < queueDepth; ++i) {
queueIndex = (queueIndex + 1) % queueDepth;
-
+
DoubleStack recovered = recoveredNodes.getFirst(i);
-
- int levelsFromHere = recovered.getLength() - (location - recovered.getStartLocation());
-
- addTodo(recovered, levelsFromHere, recoveredNodes.getSecond(i));
+ debugListener.reviving(input, location, unexpandableNodes, unmatchableLeafNodes,
+ unmatchableMidProductionNodes, filteredNodes);
+ visualize("Queue recovery node", ParseStateVisualizer.getNodeId(recovered));
+ queueRecoveryNode(recovered, recovered.getStartLocation(), recovered.getLength(),
+ recoveredNodes.getSecond(i));
}
+ parseErrorRecovered = true;
return findStacksToReduce();
}
-
- parseErrorOccured = true;
+
+ parseErrorEncountered = true;
}
-
+
return false;
}
-
- public boolean parseErrorHasOccurred(){
- return parseErrorOccured;
+
+ public boolean parseErrorHasOccurred() {
+ return parseErrorEncountered;
}
-
+
/**
- * Inserts a stack bottom into the todo-list.
+ * Inserts a stack bottom into the todoList.
*/
@SuppressWarnings("unchecked")
- private void addTodo(AbstractStackNode node, int length, AbstractNode result){
- if(result == null) throw new RuntimeException();
+ private void queueMatchableNode(AbstractStackNode node, int length, AbstractNode result) {
+ assert result != null;
+
int queueDepth = todoLists.length;
- if(length >= queueDepth){
+ if (length >= queueDepth) {
DoubleStack node, int length, AbstractNode result)
queueDepth = length + 1;
queueIndex = 0;
}
-
+
int insertLocation = (queueIndex + length) % queueDepth;
DoubleStack node, int startPosition, int length, AbstractNode result) {
+ assert result != null;
+
+ int queueDepth = todoLists.length;
+
+ if (startPosition < location) {
+ // Have to reset the parser to an earlier location to at least
+ // be able to process the new node. Cannot throw away the queue,
+ // because there are possibly already other recovery tokens in the queue.
+ // However, we may assume that the queue before the current index is
+ // done, based on the way we cycle the queue now. The queue is
+ // looking forward to the future and we never re-use past entries.
+
+ int negativeOffset = location - startPosition;
+
+ DoubleStack [] expects, EdgesSet cachedEdges, AbstractStackNode stackBeingWorkedOn){
+ private boolean handleExpects(AbstractStackNode [] expects, EdgesSet cachedEdges,
+ AbstractStackNode stackBeingWorkedOn) {
boolean hasValidAlternatives = false;
-
+
sharedLastExpects.dirtyClear();
-
- EXPECTS: for(int i = expects.length - 1; i >= 0; --i){
+
+ EXPECTS: for (int i = expects.length - 1; i >= 0; --i) {
AbstractStackNode first = expects[i];
-
- if(first.isMatchable()){ // Eager matching optimization.
+
+ if (first.isMatchable()) { // Eager matching optimization.
int length = first.getLength();
int endLocation = location + length;
- if(endLocation > input.length) continue;
-
+ if (endLocation > input.length)
+ continue;
+
AbstractNode result = first.match(input, location);
- if(result == null){
+ if (result == null) {
unmatchableLeafNodes.push(first);
-
- if(debugListener != null) debugListener.failedToMatch(first);
-
+
+ debugListener.failedToMatch(first);
+
continue;
}
-
- if(debugListener != null) debugListener.matched(first, result);
-
+
+ debugListener.matched(first, result);
+
// Handle filtering.
IEnterFilter[] enterFilters = first.getEnterFilters();
- if(enterFilters != null){
- for(int j = enterFilters.length - 1; j >= 0; --j){
- if(enterFilters[j].isFiltered(input, location, positionStore)){
- if(debugListener != null) debugListener.filteredByEnterFilter(first);
-
+ if (enterFilters != null) {
+ for (int j = enterFilters.length - 1; j >= 0; --j) {
+ if (enterFilters[j].isFiltered(input, location, positionStore)) {
+ debugListener.filteredByEnterFilter(first);
+
continue EXPECTS;
}
}
}
-
+
first = first.getCleanCopyWithResult(location, result);
-
- addTodo(first, length, result);
- }else{
+
+ queueMatchableNode(first, length, result);
+ }
+ else {
first = first.getCleanCopy(location);
stacksToExpand.push(first);
}
-
+
first.initEdges();
first.addEdges(cachedEdges, location);
-
+
sharedLastExpects.add(first.getId(), first);
-
+
hasValidAlternatives = true;
-
- if(debugListener != null) debugListener.expanded(stackBeingWorkedOn, first);
+
+ debugListener.expanded(stackBeingWorkedOn, first);
}
-
+
return hasValidAlternatives;
}
-
+
/**
* Check whether or not the given sort name has nesting restrictions associated with it.
*/
- protected boolean hasNestingRestrictions(String name){
+ protected boolean hasNestingRestrictions(String name) {
return false; // Priority and associativity filtering is off by default.
}
-
+
/**
* Retrieves the set of disallowed parents for the given child.
*/
- protected IntegerList getFilteredParents(int childId){
+ protected IntegerList getFilteredParents(int childId) {
return null; // Default implementation; intended to be overwritten in sub-classes.
}
-
+
/**
* Retrieves the resultstore id associated with the given id.
*/
- protected int getResultStoreId(int id){
+ protected int getResultStoreId(int id) {
return EdgesSet.DEFAULT_RESULT_STORE_ID; // Default implementation; intended to be overwritten in sub-classes.
}
-
+
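To illustrate how these default hooks are meant to be overridden, here is a hypothetical grammar-specific subclass; the production ids and the lookup table are invented for the example, and plain int arrays stand in for the parser's IntegerList.

import java.util.HashMap;
import java.util.Map;

// Illustrative only: in practice such a subclass is generated from the grammar, not hand-written.
class NestingRestrictionSketch {
    // child production id -> parent production ids under which it may not nest directly
    // (e.g. to enforce priority between an addition and a multiplication production).
    private static final Map<Integer, int[]> FILTERED_PARENTS = new HashMap<>();
    static {
        FILTERED_PARENTS.put(42, new int[] {7, 13}); // invented ids, for illustration only
    }

    boolean hasNestingRestrictions(String sortName) {
        return true; // this grammar declares priority/associativity restrictions
    }

    int[] getFilteredParents(int childId) {
        return FILTERED_PARENTS.get(childId); // null means: no restrictions for this child
    }
}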
/**
* Expands the given stack node.
*/
- private void expandStack(AbstractStackNode stack){
- if(debugListener != null) debugListener.expanding(stack);
-
+ private void expandStack(AbstractStackNode stack) {
+ debugListener.expanding(stack);
+
// Handle filtering.
IEnterFilter[] enterFilters = stack.getEnterFilters();
- if(enterFilters != null){
- for(int i = enterFilters.length - 1; i >= 0; --i){
- if(enterFilters[i].isFiltered(input, location, positionStore)){
+ if (enterFilters != null) {
+ for (int i = enterFilters.length - 1; i >= 0; --i) {
+ if (enterFilters[i].isFiltered(input, location, positionStore)) {
unexpandableNodes.push(stack);
-
- if(debugListener != null) debugListener.filteredByEnterFilter(stack);
-
+
+ debugListener.filteredByEnterFilter(stack);
+
return;
}
}
}
-
- if(stack.isMatchable()){ // Eager matching optimization related.
- addTodo(stack, stack.getLength(), stack.getResult());
- }else if(!stack.isExpandable()){ // A 'normal' non-terminal.
+
+ if (stack.isMatchable()) { // Eager matching optimization related.
+ queueMatchableNode(stack, stack.getLength(), stack.getResult());
+ }
+ else if (!stack.isExpandable()) { // A 'normal' non-terminal.
EdgesSet cachedEdges = cachedEdgesForExpect.get(stack.getName());
- if(cachedEdges == null){
+ if (cachedEdges == null) {
cachedEdges = new EdgesSet (1);
cachedEdgesForExpect.put(stack.getName(), cachedEdges);
-
+
AbstractStackNode [] expects = invokeExpects(stack);
- if(expects == null){
+ if (expects == null) {
unexpandableNodes.push(stack);
return;
}
-
- if(!handleExpects(expects, cachedEdges, stack)){
+
+ if (!handleExpects(expects, cachedEdges, stack)) {
unexpandableNodes.push(stack);
return;
}
- }else{
+ }
+ else {
int resultStoreId = getResultStoreId(stack.getId());
- if(cachedEdges.getLastVisitedLevel(resultStoreId) == location){ // Is nullable, add the known results.
+ if (cachedEdges.getLastVisitedLevel(resultStoreId) == location) { // Is nullable, add the known results.
stacksWithNonTerminalsToReduce.push(stack, cachedEdges.getLastResult(resultStoreId));
-
- if(debugListener != null) debugListener.foundIterationCachedNullableResult(stack);
+
+ debugListener.foundIterationCachedNullableResult(stack);
}
}
-
+
cachedEdges.add(stack);
-
+
stack.setIncomingEdges(cachedEdges);
- }else{ // Expandable
+ }
+ else { // Expandable
EdgesSet cachedEdges = cachedEdgesForExpect.get(stack.getName());
- if(cachedEdges == null){
+ if (cachedEdges == null) {
boolean expanded = false;
-
+
cachedEdges = new EdgesSet ();
cachedEdgesForExpect.put(stack.getName(), cachedEdges);
-
+
AbstractStackNode [] listChildren = stack.getChildren();
-
- CHILDREN : for(int i = listChildren.length - 1; i >= 0; --i){
+
+ CHILDREN: for (int i = listChildren.length - 1; i >= 0; --i) {
AbstractStackNode child = listChildren[i];
int childId = child.getId();
-
- IntegerKeyedDoubleValueHashMap.Entry sharedChild = sharedChildEntry.value1;
sharedChild.setEdgesSetWithPrefix(cachedEdges, null, location);
- }else{
- if(child.isMatchable()){
+ }
+ else {
+ if (child.isMatchable()) {
int length = child.getLength();
int endLocation = location + length;
- if(endLocation > input.length) continue;
-
+ if (endLocation > input.length)
+ continue;
+
AbstractNode result = child.match(input, location);
- if(result == null){
+ if (result == null) {
unmatchableLeafNodes.push(child);
-
- if(debugListener != null) debugListener.failedToMatch(child);
-
+
+ debugListener.failedToMatch(child);
+
continue;
}
-
- if(debugListener != null) debugListener.matched(child, result);
-
+
+ debugListener.matched(child, result);
+
// Handle filtering
IEnterFilter[] childEnterFilters = child.getEnterFilters();
- if(childEnterFilters != null){
- for(int j = childEnterFilters.length - 1; j >= 0; --j){
- if(childEnterFilters[j].isFiltered(input, location, positionStore)) {
- if(debugListener != null) debugListener.filteredByEnterFilter(child);
-
+ if (childEnterFilters != null) {
+ for (int j = childEnterFilters.length - 1; j >= 0; --j) {
+ if (childEnterFilters[j].isFiltered(input, location, positionStore)) {
+ debugListener.filteredByEnterFilter(child);
+
continue CHILDREN;
}
}
}
-
+
child = child.getCleanCopyWithResult(location, result);
- addTodo(child, length, result);
- }else{
+ queueMatchableNode(child, length, result);
+ }
+ else {
child = child.getCleanCopy(location);
stacksToExpand.push(child);
}
-
+
child.initEdges();
child.setEdgesSetWithPrefix(cachedEdges, null, location);
-
+
sharedNextNodes.putUnsafe(childId, child, null);
-
- if(debugListener != null) debugListener.expanded(stack, child);
+
+ debugListener.expanded(stack, child);
}
-
+
expanded = true;
}
-
- if(stack.canBeEmpty()){ // Star list or optional.
- AbstractStackNode empty = stack.getEmptyChild().getCleanCopyWithResult(location, EpsilonStackNode.EPSILON_RESULT);
+
+ if (stack.canBeEmpty()) { // Star list or optional.
+ AbstractStackNode empty =
+ stack.getEmptyChild().getCleanCopyWithResult(location, EpsilonStackNode.EPSILON_RESULT);
empty.initEdges();
empty.addEdges(cachedEdges, location);
-
+
stacksToExpand.push(empty);
-
- if(debugListener != null) debugListener.expanded(stack, empty);
-
+
+ debugListener.expanded(stack, empty);
+
expanded = true;
}
-
- if(!expanded){
+
+ if (!expanded) {
unexpandableNodes.push(stack);
}
}
int resultStoreId = getResultStoreId(stack.getId());
- if(cachedEdges.getLastVisitedLevel(resultStoreId) == location){ // Is nullable, add the known results.
+ if (cachedEdges.getLastVisitedLevel(resultStoreId) == location) { // Is nullable, add the known results.
stacksWithNonTerminalsToReduce.push(stack, cachedEdges.getLastResult(resultStoreId));
- if(debugListener != null) debugListener.foundIterationCachedNullableResult(stack);
+ debugListener.foundIterationCachedNullableResult(stack);
}
-
+
cachedEdges.add(stack);
-
+
stack.setIncomingEdges(cachedEdges);
}
}
-
+
/**
* Initiate stack expansion for all queued stacks.
*/
- private void expand(){
- while(!stacksToExpand.isEmpty()){
+ private void expand() {
+ visualize("Expanding", ParseStateVisualizer.STACKS_TO_EXPAND_ID);
+ while (!stacksToExpand.isEmpty()) {
expandStack(stacksToExpand.pop());
}
}
-
- protected AbstractNode parse(AbstractStackNode startNode, URI inputURI, int[] input){
+
+ protected AbstractNode parse(AbstractStackNode startNode, URI inputURI, int[] input) {
return parse(startNode, inputURI, input, (IRecoverer ) null, (IDebugListener ) null);
}
-
+
/**
* Initiates parsing.
*/
@SuppressWarnings("unchecked")
- protected AbstractNode parse(AbstractStackNode startNode, URI inputURI, int[] input, IRecoverer recoverer, IDebugListener debugListener){
- initTime();
+ protected AbstractNode parse(AbstractStackNode startNode, URI inputURI, int[] input, IRecoverer recoverer,
+ IDebugListener debugListener) {
+ if (debugListener == null) {
+ debugListener = new NopDebugListener<>();
+ }
+
+ initTime();
- try {
+ try {
+
+ if (invoked) {
+ throw new RuntimeException("Can only invoke 'parse' once.");
+ }
- if(invoked){
- throw new RuntimeException("Can only invoke 'parse' once.");
- }
+ invoked = true;
- invoked = true;
+ // Initialize.
+ this.inputURI = inputURI;
+ this.input = input;
- // Initialize.
- this.inputURI = inputURI;
- this.input = input;
+ this.recoverer = recoverer;
+ this.debugListener = debugListener;
- this.recoverer = recoverer;
- this.debugListener = debugListener;
+ visualizer = ParseStateVisualizer.shouldVisualizeUri(inputURI) ? new ParseStateVisualizer("Parser") : null;
- // Initialzed the position store.
- positionStore.index(input);
+ // Initialize the position store.
+ positionStore.index(input);
- todoLists = new DoubleStack[DEFAULT_TODOLIST_CAPACITY];
+ todoLists = new DoubleStack[DEFAULT_TODOLIST_CAPACITY];
- // Handle the initial expansion of the root node.
- AbstractStackNode rootNode = startNode;
- rootNode.initEdges();
- stacksToExpand.push(rootNode);
- lookAheadChar = (input.length > 0) ? input[0] : 0;
+ // Handle the initial expansion of the root node.
+ AbstractStackNode rootNode = startNode;
+ rootNode.initEdges();
+ stacksToExpand.push(rootNode);
+ lookAheadChar = (input.length > 0) ? input[0] : 0;
- if(debugListener != null) debugListener.shifting(location, input, positionStore);
+ debugListener.shifting(location, input, positionStore);
- expand();
+ expand();
- if(findFirstStacksToReduce()){
- boolean shiftedLevel = (location != 0);
+ if (findFirstStacksToReduce()) {
+ boolean shiftedLevel = (location != 0);
- do{
- lookAheadChar = (location < input.length) ? input[location] : 0;
- if(shiftedLevel){ // Nullable fix for the first level.
- sharedNextNodes.clear();
- cachedEdgesForExpect.clear();
+ do {
+ lookAheadChar = (location < input.length) ? input[location] : 0;
+ if (shiftedLevel) { // Nullable fix for the first level.
+ sharedNextNodes.clear();
+ cachedEdgesForExpect.clear();
- unexpandableNodes.dirtyClear();
- unmatchableLeafNodes.dirtyClear();
- unmatchableMidProductionNodes.dirtyClear();
- filteredNodes.dirtyClear();
+ unexpandableNodes.dirtyClear();
+ unmatchableLeafNodes.dirtyClear();
+ unmatchableMidProductionNodes.dirtyClear();
+ filteredNodes.dirtyClear();
- if(debugListener != null) debugListener.shifting(location, input, positionStore);
- }
+ debugListener.shifting(location, input, positionStore);
+ }
+ // Reduce-expand loop.
+ do {
+ debugListener.iterating();
- // Reduce-expand loop.
- do{
- if(debugListener != null) debugListener.iterating();
+ reduceTerminals();
- reduce();
+ reduceNonTerminals();
- expand();
- }while(!stacksWithNonTerminalsToReduce.isEmpty() || !stacksWithTerminalsToReduce.isEmpty());
+ expand();
+ }
+ while (!stacksWithNonTerminalsToReduce.isEmpty() || !stacksWithTerminalsToReduce.isEmpty());
- shiftedLevel = true;
- }while(findStacksToReduce());
- }
+ shiftedLevel = true;
+ }
+ while (findStacksToReduce());
+ }
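For readability, a schematic of the shift/reduce/expand loop above, written as an abstract skeleton; hasWorkAtCurrentLevel() stands in for the two emptiness checks in the while condition and is not a real method of this class.

// Schematic only: mirrors the control flow above, not the actual implementation.
abstract class ParseLoopSketch {
    abstract void expand();
    abstract void reduceTerminals();
    abstract void reduceNonTerminals();
    abstract boolean findFirstStacksToReduce();
    abstract boolean findStacksToReduce();
    abstract boolean hasWorkAtCurrentLevel(); // stands in for the two emptiness checks above

    final void drive() {
        expand();                               // initial expansion of the start symbol
        if (findFirstStacksToReduce()) {        // jump to the first input level with queued terminals
            do {
                do {
                    reduceTerminals();          // move dots past terminals matched at this level
                    reduceNonTerminals();       // perform the reductions that became possible
                    expand();                   // expand newly predicted non-terminals
                } while (hasWorkAtCurrentLevel());
            } while (findStacksToReduce());     // shift to the next level that has queued work
        }
    }
}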
- // Check if we were successful.
- if(location == input.length){
- EdgesSet startNodeEdgesSet = startNode.getIncomingEdges();
- int resultStoreId = getResultStoreId(startNode.getId());
- if(startNodeEdgesSet != null && startNodeEdgesSet.getLastVisitedLevel(resultStoreId) == input.length){
- // Parsing succeeded.
- return startNodeEdgesSet.getLastResult(resultStoreId); // Success.
- }
- }
- }
- finally {
- checkTime("Parsing");
- }
+ visualize("Done", ParseStateVisualizer.PARSER_ID);
- try {
- // A parse error occured, and recovery failed as well
- parseErrorOccured = true;
-
- int errorLocation = (location == Integer.MAX_VALUE ? 0 : location);
- int line = positionStore.findLine(errorLocation);
- int column = positionStore.getColumn(errorLocation, line);
- if (location == input.length) {
- throw new ParseError("Parse error", inputURI, errorLocation, 0, line + 1, line + 1, column, column, (Stack startNodeEdgesSet = startNode.getIncomingEdges();
+ int resultStoreId = getResultStoreId(startNode.getId());
+ if (startNodeEdgesSet != null && startNodeEdgesSet.getLastVisitedLevel(resultStoreId) == input.length) {
+ // Parsing succeeded.
+ return startNodeEdgesSet.getLastResult(resultStoreId); // Success.
+ }
+ }
+ }
+ finally {
+ checkTime("Parsing");
+ }
+
+ try {
+ // A parse error occurred, and recovery failed as well
+ parseErrorEncountered = true;
+
+ int errorLocation = (location == Integer.MAX_VALUE ? 0 : location);
+ int line = positionStore.findLine(errorLocation);
+ int column = positionStore.getColumn(errorLocation, line);
+ if (location == input.length) {
+ throw new ParseError("Parse error", inputURI, errorLocation, 0, line + 1, line + 1, column, column,
+ (Stack recoverer, IDebugListener debugListener){
- AbstractNode result = parse(new NonTerminalStackNode (AbstractStackNode.START_SYMBOL_ID, 0, nonterminal), inputURI, input, recoverer, debugListener);
+ private T parse(String nonterminal, URI inputURI, int[] input, IActionExecutor recoverer,
+ IDebugListener debugListener) {
+ AbstractNode result = parse(new NonTerminalStackNode (AbstractStackNode.START_SYMBOL_ID, 0, nonterminal),
+ inputURI, input, recoverer, debugListener);
return buildResult(result, converter, nodeConstructorFactory, actionExecutor);
}
-
- public T parse(String nonterminal, URI inputURI, char[] input, IActionExecutor recoverer, IDebugListener debugListener){
- return parse(nonterminal, inputURI, charsToInts(input), actionExecutor, converter, nodeConstructorFactory, recoverer, debugListener);
+
+ public T parse(String nonterminal, URI inputURI, char[] input, IActionExecutor recoverer,
+ IDebugListener debugListener) {
+ return parse(nonterminal, inputURI, charsToInts(input), actionExecutor, converter, nodeConstructorFactory,
+ recoverer, debugListener);
}
-
- public T parse(String nonterminal, URI inputURI, char[] input, IActionExecutor recoverer){
+
+ public T parse(String nonterminal, URI inputURI, char[] input, IActionExecutor recoverer) {
return parse(nonterminal, inputURI, input, actionExecutor, converter, nodeConstructorFactory, recoverer, null);
}
-
- public T parse(String nonterminal, URI inputURI, char[] input, IActionExecutor debugListener){
- return parse(nonterminal, inputURI, input, actionExecutor, converter, nodeConstructorFactory, null, debugListener);
+
+ public T parse(String nonterminal, URI inputURI, char[] input, IActionExecutor debugListener) {
+ return parse(nonterminal, inputURI, input, actionExecutor, converter, nodeConstructorFactory, null,
+ debugListener);
}
- public T parse(String nonterminal, URI inputURI, char[] input, IActionExecutor recoverer, IDebugListener debugListener){
- AbstractNode result = parse(new NonTerminalStackNode (AbstractStackNode.START_SYMBOL_ID, 0, nonterminal), inputURI, input, recoverer, debugListener);
+ private T parse(String nonterminal, URI inputURI, int[] input, INodeFlattener recoverer,
+ IDebugListener debugListener) {
+ AbstractNode result = parse(new NonTerminalStackNode (AbstractStackNode.START_SYMBOL_ID, 0, nonterminal),
+ inputURI, input, recoverer, debugListener);
return buildResult(result, converter, nodeConstructorFactory, new VoidActionExecutor