package bluej.parser.nodes;
import bluej.editor.moe.MoeSyntaxDocument;
import bluej.parser.DocumentReader;
import bluej.parser.EditorParser;
import bluej.parser.EscapedUnicodeReader;
import bluej.parser.lexer.JavaTokenTypes;
import bluej.parser.lexer.LocatableToken;
import bluej.parser.nodes.NodeTree.NodeAndPosition;
import threadchecker.OnThread;
import threadchecker.Tag;
import java.io.IOException;
import java.io.Reader;
import java.util.LinkedList;
import java.util.Stack;
/**
 * An abstract base class for nodes which can do incremental parsing.<p>
 * 
 * We assume that such a node is broken into pieces which can be parsed separately.
 * At least some such pieces will form complete sub-nodes which allow us to determine
 * where we can re-parse from if a modification is made.<p>
 * 
 * Sub-classes must provide implementations for several methods to parse a piece,
 * determine whether a subnode represents a complete piece, etc.<p>
 * 
 * IncrementalParsingNode has basic support for sequential parse states, where a node
 * consists of several parts in sequence and each part must be parsed differently. The
 * "stateMarkers" array contains the offset (from the node beginning) of each state
 * transition; subclasses should assign it an array of appropriate size. A value of -1
 * in any entry means the marker is invalid.
 * 
 * @author Davin McCall
 */
public abstract class IncrementalParsingNode extends JavaParentNode
{
/** The end position of each parse state. -1 means the marker is invalid. */
protected int[] stateMarkers = new int[0];
/**
 * Whether the corresponding state marker specifically marks the end of the state (true), or if it rather
 * marks the beginning of the next state (false).
 */
protected boolean[] marksEnd = new boolean[0];
/** The final token in the last partial parse. Should be set by doPartialParse if possible. */
protected LocatableToken last;
protected final static int PP_OK = 0;
/** Node ends, due to a parser error */
protected final static int PP_INCOMPLETE = 1;
/** Node ends just before the "last" token */
protected final static int PP_ENDS_NODE = 2;
/** Node ends at end of the "last" token */
protected final static int PP_ENDS_NODE_AFTER = 3;
/** Parse completely failed. The node must be removed and the parent re-parsed. */
protected final static int PP_EPIC_FAIL = 4;

/** The "last" token ends the state. The new state begins after it. */
protected final static int PP_ENDS_STATE = 5;
/** The "last" token is the beginning of the next state */
protected final static int PP_BEGINS_NEXT_STATE = 6;
/** The current state fails, requiring a regression to the previous parse state */
protected final static int PP_REGRESS_STATE = 7;

/** Pull the next child up behind the "last" token and continue parsing inside it */
protected final static int PP_PULL_UP_CHILD = 8;
/**
* Abort the parse. This must be safe; either the parse has completed or an
 * appropriate re-parse has been scheduled. params.abortPos must be set.
 */
protected final static int PP_ABORT = 9;
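
// Illustrative sketch only (not part of the original sources): a hypothetical
// subclass with two sequential parts, say a "header" part followed by a "body"
// part, might set up its state tracking with both markers initially invalid:
//
//     stateMarkers = new int[] { -1, -1 };
//     marksEnd = new boolean[] { false, true };
//
// Its doPartialParse() implementation could then return PP_BEGINS_NEXT_STATE when
// the token opening the body is seen, and PP_ENDS_NODE_AFTER once the final token
// of the body has been parsed.
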
/**
 * Construct an incremental parsing node with the given parent node.
 */
public IncrementalParsingNode(JavaParentNode parent)
{
super(parent);
}
/**
 * Check whether the given node represents a complete parsed piece. If
 * it does, we can safely resume incremental parsing just beyond its
 * end. Also, if we are incrementally parsing, and we complete a piece
 * at the boundary with this node, we don't need to continue parsing.
 */
protected abstract boolean isDelimitingNode(NodeAndPosition<ParsedNode> nap);
/**
 * Actually perform a partial parse. If possible, this method should set
 * "last" to the last token forming part of the parsed piece or null if there was a
 * parsing error. (It is safe to always set it to null).<p>
 * 
 * The return value is one of the PP_ constants: PP_OK, PP_ENDS_NODE if the parse
 * succeeds but requires that the node ends immediately, PP_EPIC_FAIL if the parse
 * fails and indicates that the node is not what it purports to be.
 */
@OnThread(Tag.FXPlatform)
protected abstract int doPartialParse(ParseParams params, int state);
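/**
 * Check whether a token of the given type marks the end of this node. By default
 * this is a closing curly bracket.
 */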
protected boolean isNodeEndMarker(int tokenType)
{
return tokenType == JavaTokenTypes.RCURLY;
}
@Override
protected int reparseNode(MoeSyntaxDocument document, int nodePos, int offset, int maxParse, NodeStructureListener listener)
{
int parseEnd = Math.min(offset + maxParse, nodePos + getSize());
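// Determine which parse state the modification falls in, and find the nearest
// preceding delimiting (complete) child node within that state; incremental
// parsing can safely resume from the end of that child, or from the state
// boundary if there is no such child.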
int state = getCurrentState(offset - nodePos);
int originalOffset = offset;
int stateBoundary = (state != 0) ? stateMarkers[state - 1] + nodePos : nodePos;
NodeAndPosition<ParsedNode> nap = null;
if (offset > stateBoundary) {
nap = getNodeTree().findNodeAtOrBefore(offset - 1, nodePos);
}
while (nap != null && !isDelimitingNode(nap)){
if (nap.getPosition() >= stateBoundary) {
nap = getNodeTree().findNodeAtOrBefore(nap.getPosition() - 1, nodePos);
}
else {
nap = null;
}
}
NodeAndPosition<ParsedNode> nextNap = null;
if (nap != null) {
nextNap = nap.nextSibling();
offset = nap.getEnd();
}
else {
offset = stateBoundary;
}
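// Temporarily remove the remaining children from the node tree, but keep them in
// a queue: children which turn out not to be overwritten by the re-parse are
// re-inserted later (see processChildQueue).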
NodeAndPosition<ParsedNode> boundaryNap = nap;
LinkedList<NodeAndPosition<ParsedNode>> childQueue = new LinkedList<NodeAndPosition<ParsedNode>>();
if (nap == null) {
nap = findNodeAtOrAfter(offset + 1, nodePos);
}
else {
nap = nextNap;
}
while (nap != null){
childQueue.add(nap);
nextNap = nap.nextSibling();
nap.getNode().remove();
nap = nextNap;
}
NodeAndPosition<ParsedNode> nextChild = childQueue.peek();
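// Set up a parser over the range to be re-parsed, positioned at the resume offset.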
int pline = document.getDefaultRootElement().getElementIndex(offset) + 1;
int pcol = offset - document.getDefaultRootElement().getElement(pline - 1).getStartOffset() + 1;
Reader r = new DocumentReader(document, offset, parseEnd);
EditorParser parser = new EditorParser(document, r, pline, pcol, offset, buildScopeStack());
LocatableToken laToken = parser.getTokenStream().LA(1);
int ttype = laToken.getType();
int tokpos = lineColToPos(document, laToken.getLine(), laToken.getColumn());
nap = boundaryNap;
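// If the delimiting child we are resuming after is itself incomplete, first try to
// extend that child over the following text instead of parsing new siblings.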
boolean extendPrev = false;
if (nap != null) {
if (! nap.getNode().complete) {
extendPrev = true;
}
}
if (extendPrev) {
int tokend = lineColToPos(document, laToken.getEndLine(), laToken.getEndColumn());
if (ttype == JavaTokenTypes.EOF) {
boolean weCanGrow = nodePos + getSize() < document.getLength();
if (! weCanGrow && tokend == nap.getEnd()) {
((MoeSyntaxDocument) document).markSectionParsed(offset, tokend - offset + 1);
return ALL_OK;
}
if (weCanGrow) {
boolean grew = getParentNode().growChild(document,
new NodeAndPosition<ParsedNode>(this, nodePos, getSize()),
listener);
if (!grew) {
return REMOVE_NODE;
}
complete = false;
return ALL_OK;
}
}
nextChild = removeOverwrittenChildren(childQueue, tokend, listener);
int oldSize = nap.getSize();
nap.setSize(tokend - nap.getPosition());
listener.nodeChangedLength(nap, nap.getPosition(), oldSize);
int pr = nap.getNode().reparseNode(document, nap.getPosition(), tokpos, parseEnd - tokpos, listener);
if (pr == REMOVE_NODE) {
removeChild(nap, listener);
((MoeSyntaxDocument)document).scheduleReparse(originalOffset,
tokend - originalOffset);
return ALL_OK;
}
else {
if (nap.getNode().getSize() != oldSize) {
if (! nap.getNode().complete) {
return ALL_OK;
}
offset = nap.getPosition() + nap.getNode().getSize();
pline = document.getDefaultRootElement().getElementIndex(offset) + 1;
pcol = offset - document.getDefaultRootElement().getElement(pline - 1).getStartOffset() + 1;
r = new DocumentReader(document, offset, parseEnd);
parser = new EditorParser(document, r, pline, pcol, offset, buildScopeStack());
laToken = parser.getTokenStream().LA(1);
tokpos = lineColToPos(document, laToken.getLine(), laToken.getColumn());
}
}
}
int nextStatePos = (state < stateMarkers.length) ? stateMarkers[state] : -1;
nextStatePos += (nextStatePos == -1) ? 0 : nodePos;
ParseParams pparams = new ParseParams();
pparams.listener = listener;
pparams.parser = parser;
pparams.tokenStream = parser.getTokenStream();
pparams.document = (MoeSyntaxDocument) document;
pparams.nodePos = nodePos;
pparams.childQueue = childQueue;
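// Main re-parse loop: perform partial parses until a token which ends the node is
// seen, the end of the parse range is reached, or a partial parse result tells us
// to stop.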
while (! isNodeEndMarker(ttype)){
int ppr = doPartialParse(pparams, state);
nextChild = childQueue.peek();
if (ppr == PP_ENDS_NODE || ppr == PP_ENDS_NODE_AFTER || (ppr == PP_INCOMPLETE
&& last.getType() != JavaTokenTypes.EOF)) {
int pos;
if (ppr == PP_ENDS_NODE_AFTER) {
pos = lineColToPos(document, last.getEndLine(), last.getEndColumn());
}
else {
pos = lineColToPos(document, last.getLine(), last.getColumn());
}
pparams.document.markSectionParsed(offset, pos - offset);
int newsize = pos - nodePos;
if (! complete) {
pparams.document.scheduleReparse(nodePos + newsize, 0);
}
complete = (ppr != PP_INCOMPLETE);
int oldSize = getSize();
endNodeCleanup(pparams, state, Integer.MAX_VALUE, nodePos + newsize);
if (newsize != oldSize) {
setSize(newsize);
nap = new NodeAndPosition<ParsedNode>(this, nodePos, newsize);
listener.nodeChangedLength(nap, nodePos, oldSize);
return NODE_SHRUNK;
}
return ALL_OK;
}
else if (ppr == PP_INCOMPLETE) {
if (parseEnd != nodePos + getSize()) {
pparams.document.scheduleReparse(parseEnd, 0);
pparams.document.markSectionParsed(offset, parseEnd - offset);
return ALL_OK;
}
complete = false;
}
else if (ppr == PP_EPIC_FAIL) {
removeOverwrittenChildren(childQueue, Integer.MAX_VALUE, listener);
return REMOVE_NODE;
}
else if (ppr == PP_ENDS_STATE || ppr == PP_BEGINS_NEXT_STATE) {
int pos;
if (ppr == PP_ENDS_STATE) {
pos = lineColToPos(document, last.getEndLine(), last.getEndColumn());
}
else {
pos = lineColToPos(document, last.getLine(), last.getColumn());
}
if (stateMarkers[state] == (pos - nodePos)) {
nextChild = removeOverwrittenChildren(childQueue, pos, listener);
processChildQueue(nodePos, childQueue, nextChild);
parser.completedNode(this, nodePos, pos - nodePos);
pparams.document.markSectionParsed(offset, pos - offset);
return ALL_OK;
}
stateMarkers[state] = pos - nodePos;
marksEnd[state] = (ppr == PP_ENDS_STATE);
state++;
nextStatePos = (state < stateMarkers.length) ? stateMarkers[state] : -1;
}
else if (ppr == PP_REGRESS_STATE) {
state--;
int rppos = stateMarkers[state] + nodePos;
pparams.document.scheduleReparse(rppos, Math.max(offset - rppos, 0));
stateMarkers[state] = -1;
int epos = lineColToPos(document, last.getLine(), last.getColumn());
removeOverwrittenChildren(childQueue, epos, listener);
processChildQueue(nodePos, childQueue, nextChild);
return ALL_OK;
}
else if (ppr == PP_PULL_UP_CHILD) {
nextChild = childQueue.peek();
processChildQueue(nodePos, childQueue, nextChild);
parser.completedNode(this, nodePos, pparams.abortPos - nodePos);
tokpos = lineColToPos(document, last.getLine(), last.getColumn());
int ncpos = nextChild.getPosition();
if (ncpos != pparams.abortPos) {
int slideAmount = nextChild.getPosition() - pparams.abortPos;
nextChild.slide(-slideAmount);
int rr = nextChild.getNode().textInserted((MoeSyntaxDocument) document,
nextChild.getPosition(),
nextChild.getPosition(),
slideAmount, listener);
if (rr == ParsedNode.REMOVE_NODE) {
removeChild(nextChild, listener);
}
else {
nextChild.setNapSize(nextChild.getNode().getSize());
childResized(pparams.document, nodePos, nextChild);
}
listener.nodeChangedLength(nextChild, ncpos, nextChild.getSize() - slideAmount);
pparams.document.scheduleReparse(nextChild.getPosition(), slideAmount);
}
pparams.document.markSectionParsed(offset, pparams.abortPos - offset);
return ALL_OK;
}
else if (ppr == PP_ABORT) {
nextChild = removeOverwrittenChildren(childQueue, pparams.abortPos, listener);
processChildQueue(nodePos, childQueue, nextChild);
parser.completedNode(this, nodePos, pparams.abortPos - nodePos);
pparams.document.markSectionParsed(offset, pparams.abortPos - offset);
return ALL_OK;
}
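// Make sure the token stream has advanced; if the lookahead token is unchanged,
// consume it so that the loop is guaranteed to make progress.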
LocatableToken nlaToken = parser.getTokenStream().LA(1);
if (nlaToken == laToken) {
parser.getTokenStream().nextToken();
nlaToken = parser.getTokenStream().LA(1);
}
int nlaPos = lineColToPos(document, nlaToken.getLine(), nlaToken.getColumn());
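// State markers which now fall before the parse position have been overwritten;
// invalidate them.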
for (int i = state; i < stateMarkers.length; i++) {
if (stateMarkers[i] + nodePos < nlaPos) {
stateMarkers[i] = -1;
}
}
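// The next token is EOF, i.e. the end of the re-parse range: clean up, re-insert
// surviving children, and either schedule a further re-parse, try to grow this
// node, or finish.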
if (nlaToken.getType() == JavaTokenTypes.EOF) {
endNodeCleanup(pparams, state, parseEnd, parseEnd);
processChildQueue(nodePos, childQueue, childQueue.peek());
if (parseEnd < nodePos + getSize()) {
parser.completedNode(this, nodePos, getSize());
pparams.document.markSectionParsed(offset, parseEnd - offset);
pparams.document.scheduleReparse(parseEnd, 0);
return ALL_OK;
}
if (! complete) {
ParsedNode parentNode = getParentNode();
if (parentNode != null && parentNode.growChild(document,
new NodeAndPosition<ParsedNode>(this, nodePos, getSize()), listener)) {
pparams.document.markSectionParsed(offset, parseEnd - offset);
pparams.document.scheduleReparse(parseEnd, nodePos + getSize() - parseEnd);
return NODE_GREW;
}
else if (nodePos + getSize() < document.getLength()) {
return REMOVE_NODE;
}
}
pparams.document.markSectionParsed(offset, parseEnd - offset);
return checkEnd(document, nodePos, listener);
}
laToken = nlaToken;
ttype = laToken.getType();
tokpos = nlaPos;
}
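// A node-end marker (by default '}') was reached: discard any remaining queued
// children, mark the node complete, and trim it so that it ends at the start of
// the marker token (reporting NODE_SHRUNK if it got smaller).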
removeOverwrittenChildren(childQueue, Integer.MAX_VALUE, listener);
tokpos = lineColToPos(document, laToken.getLine(), laToken.getColumn());
pparams.document.markSectionParsed(offset, tokpos - offset);
int newsize = tokpos - nodePos;
parser.completedNode(this, nodePos, newsize);
if (! complete) {
pparams.document.scheduleReparse(nodePos + newsize, 0);
}
complete = true;
int oldSize = getSize();
if (newsize < oldSize) {
setSize(newsize);
nap = new NodeAndPosition<ParsedNode>(this, nodePos, newsize);
listener.nodeChangedLength(nap, nodePos, oldSize);
return NODE_SHRUNK;
}
return ALL_OK;
}
/**
 * Check whether the next token is the boundary (beginning) of a delimiting node, in which
 * case we may be able to finish re-parsing. Returns true if a boundary has been reached;
 * in this case params.abortPos will be set appropriately.
 * This is intended as a utility for use by subclasses.
 */
protected boolean checkBoundary(ParseParams params, LocatableToken token)
{
int lpos = lineColToPos(params.document, token.getLine(), token.getColumn());
int hpos = lpos;
LocatableToken hidden = token.getHiddenBefore();
if (hidden != null) {
hpos = lineColToPos(params.document, hidden.getLine(), hidden.getColumn());
}
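// lpos is the token's own position; hpos additionally covers any hidden (comment)
// token attached immediately before it.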
NodeAndPosition<ParsedNode> nextChild = params.childQueue.peek();
while (nextChild != null){
if (isDelimitingNode(nextChild)) {
boolean hasComment = nextChild.getNode().isCommentAttached();
if (hasComment && nextChild.getPosition() == hpos) {
params.abortPos = hpos;
return true;
}
if (!hasComment && nextChild.getPosition() == lpos) {
int nextType = nextChild.getNode().getNodeType();
boolean wantsComment = nextType == ParsedNode.NODETYPE_TYPEDEF
|| nextType == ParsedNode.NODETYPE_METHODDEF;
params.abortPos = wantsComment ? hpos : lpos;
return true;
}
}
if (nextChild.getPosition() > lpos) {
break;
}
childRemoved(nextChild, params.listener);
params.childQueue.poll();
nextChild = params.childQueue.peek();
}
return false;
}
/**
 * Perform some general cleanup after a parse operation. Overwritten children are removed,
 * overwritten state markers are invalidated, and comments seen by the parser are added as
 * children of this node.
 * 
 * @param params The parse parameters
 * @param state The current parse state
 * @param rpos The point up to which overwritten children and state markers should be removed
 * @param epos The position where the parse ended
 */
private void endNodeCleanup(ParseParams params, int state, int rpos, int epos)
{
while (state < stateMarkers.length){
if (stateMarkers[state] < rpos || stateMarkers[state] == rpos && marksEnd[state]) {
stateMarkers[state] = -1;
}
state++;
}
removeOverwrittenChildren(params.childQueue, rpos, params.listener);
params.parser.completedNode(this, params.nodePos, epos - params.nodePos);
}
private Stack<JavaParentNode> buildScopeStack()
{
Stack<JavaParentNode> r = new Stack<JavaParentNode>();
JavaParentNode pn = this;
do {
r.add(0, pn);
pn = (JavaParentNode) pn.getParentNode();
} while (pn != null);
return r;
}
/**
 * If during parsing we reach some point (epos) then we have overwritten any old child nodes
 * which overlap or occur before epos, and so we need to remove them properly.
 */
private NodeAndPosition<ParsedNode> removeOverwrittenChildren(LinkedList<NodeAndPosition<ParsedNode>> childQueue,
int epos, NodeStructureListener listener)
{
NodeAndPosition<ParsedNode> nextChild = childQueue.peek();
while (nextChild != null && (epos > nextChild.getPosition() || epos >= nextChild.getEnd())){
childRemoved(nextChild, listener);
childQueue.removeFirst();
nextChild = childQueue.peek();
}
return nextChild;
}
/**
 * Restore children in the child queue which were removed temporarily, but not actually
 * overwritten during parsing.
 */
private void processChildQueue(int nodePos, LinkedList<NodeAndPosition<ParsedNode>> childQueue,
NodeAndPosition<ParsedNode> nextChild)
{
while (nextChild != null){
insertNode(nextChild.getNode(), nextChild.getPosition() - nodePos, nextChild.getSize());
childQueue.removeFirst();
nextChild = childQueue.peek();
}
}
/**
 * Convert a line and column number to an absolute position.
 */
protected static int lineColToPos(MoeSyntaxDocument document, int line, int col)
{
return document.getDefaultRootElement().getElement(line - 1).getStartOffset() + col - 1;
}
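/**
 * Get the parse state at the given position (specified relative to the node start).
 */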
private int getCurrentState(int pos)
{
for (int i = stateMarkers.length - 1; i >= 0; i--) {
if (pos >= stateMarkers[i] && stateMarkers[i] >= 0) {
return i + 1;
}
}
return 0;
}
@Override
public int textInserted(MoeSyntaxDocument document, int nodePos, int insPos,
int length, NodeStructureListener listener)
{
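// Shift any state markers at or beyond the insertion point by the inserted length
// (a marker exactly at the insertion point only moves if it denotes the start of
// the next state rather than the end of the previous one).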
for (int i = 0; i < stateMarkers.length; i++) {
if (stateMarkers[i] > (insPos - nodePos)
|| (stateMarkers[i] == (insPos - nodePos) && !marksEnd[i])) {
stateMarkers[i] += length;
for (; i < stateMarkers.length; i++) {
if (stateMarkers[i] >= 0) {
stateMarkers[i] += length;
}
}
break;
}
}
int result = super.textInserted(document, nodePos, insPos, length, listener);
return result;
}
@Override
public int textRemoved(MoeSyntaxDocument document, int nodePos, int delPos,
int length, NodeStructureListener listener)
{
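// Pull back state markers beyond the deletion point by the deleted length,
// invalidating any that fall back into or before the deleted region; a marker
// landing exactly on the deletion point triggers a re-parse from the last safe
// position before it.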
for (int i = 0; i < stateMarkers.length; i++) {
if (stateMarkers[i] > (delPos - nodePos) || (marksEnd[i] && stateMarkers[i] == (delPos - nodePos))) {
stateMarkers[i] -= length;
if (stateMarkers[i] < (delPos - nodePos)
|| (stateMarkers[i] == (delPos - nodePos) && marksEnd[i])) {
stateMarkers[i] = -1;
}
else if (stateMarkers[i] == (delPos - nodePos)) {
int spos = (i == 0) ? 0 : Math.max(stateMarkers[i-1], 0);
NodeAndPosition<ParsedNode> nap = getNodeTree().findNodeAtOrBefore(stateMarkers[i] - 1 + nodePos, nodePos);
if (nap != null && isDelimitingNode(nap)) {
spos = Math.max(spos, nap.getEnd() - nodePos);
}
if (stateMarkers[i] > spos) {
document.scheduleReparse(spos + nodePos, stateMarkers[i] - spos);
}
}
}
}
return super.textRemoved(document, nodePos, delPos, length, listener);
}
/**
 * Check if a single line comment exists at the end of this node, which is not properly
 * terminated - that is, it ends before the end of the line. This can happen if such a
 * comment is inserted into an existing node which ends on the same line.
 */
@OnThread(Tag.FXPlatform)
private int checkEnd(MoeSyntaxDocument document, int nodePos, NodeStructureListener listener)
{
int end = nodePos + getSize();
if (end >= document.getLength()) {
return ALL_OK;
}
NodeAndPosition<ParsedNode> nap = findNodeAt(end - 1, nodePos);
if (nap == null) {
return ALL_OK;
}
int offset = nap.getPosition();
if (offset + nap.getSize() < end
|| nap.getNode().getNodeType() != ParsedNode.NODETYPE_COMMENT) {
return ALL_OK;
}
Reader r = new DocumentReader(document, offset, nodePos + getSize());
EscapedUnicodeReader eur = new EscapedUnicodeReader(r);
try {
if (eur.read() == '/' && eur.read() == '/') {
eur.close();
String str = document.getText(end, 1);
if (str.charAt(0) != '\n') {
ParsedNode parentNode = getParentNode();
if (parentNode != null && parentNode.growChild(document,
new NodeAndPosition<ParsedNode>(this, nodePos, getSize()), listener)) {
int pr = reparseNode(document, nodePos, offset, getSize(), listener);
return pr == ALL_OK ? NODE_GREW : pr;
}
return REMOVE_NODE;
}
}
}
catch (IOException ioe) {
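// A read failure just means we cannot perform the check; fall through to ALL_OK.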
}
return ALL_OK;
}
@Override
protected int handleDeletion(MoeSyntaxDocument document, int nodePos, int dpos,
NodeStructureListener listener)
{
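// Move the effective deletion position back to the end of the nearest preceding
// delimiting child (or to the current state boundary), removing any zero-size
// children at the deletion point, so that the superclass handles the deletion
// from a known-good re-parse point.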
int offset = dpos;
int state = getCurrentState(offset - nodePos);
int stateBoundary = (state != 0) ? stateMarkers[state - 1] + nodePos : nodePos;
NodeAndPosition<ParsedNode> nap = null;
if (offset > stateBoundary) {
nap = getNodeTree().findNodeAtOrBefore(offset, nodePos);
while (nap != null && nap.getSize() == 0){
NodeAndPosition<ParsedNode> pnap = nap.prevSibling();
removeChild(nap, listener);
nap = pnap;
}
while (nap != null && nap.getEnd() > dpos){
nap = nap.prevSibling();
}
while (nap != null && !isDelimitingNode(nap)){
boolean isLeadingComment = nap != null && nap.getNode().getNodeType() == ParsedNode.NODETYPE_COMMENT
&& nap.getPosition() == stateBoundary;
if (isLeadingComment) {
break;
}
if (nap.getPosition() >= stateBoundary) {
NodeAndPosition<ParsedNode> pnap = nap.prevSibling();
if (pnap != null && isDelimitingNode(pnap) && pnap.getEnd() == dpos && nap.getPosition() == dpos) {
removeChild(nap, listener);
return super.handleDeletion(document, nodePos, dpos, listener);
}
nap = pnap;
}
else {
nap = null;
}
}
}
int adjustedPos = (nap != null) ? nap.getEnd() : stateBoundary;
return super.handleDeletion(document, nodePos, adjustedPos, listener);
}
@Override
@OnThread(Tag.FXPlatform)
protected boolean growChild(MoeSyntaxDocument document, NodeAndPosition<ParsedNode> child,
NodeStructureListener listener)
{
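// Grow the child in the least disruptive way possible: up to the start of the next
// sibling, by absorbing the next sibling, up to the end of this node, or, failing
// that, by first growing this node via its own parent.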
int mypos = child.getPosition() - child.getNode().getOffsetFromParent();
int oldSize = child.getSize();
NodeAndPosition<ParsedNode> nap = child.nextSibling();
if (nap != null && nap.getPosition() > child.getEnd()) {
int newSize = nap.getPosition() - child.getPosition();
child.setSize(newSize);
childResized((MoeSyntaxDocument)document, mypos, child);
listener.nodeChangedLength(child, child.getPosition(), oldSize);
return true;
}
int myEnd = mypos + getSize();
if (nap != null) {
removeChild(nap, listener);
child.setSize(nap.getEnd() - child.getPosition());
if (myEnd == nap.getEnd() && marksOwnEnd()) {
complete = false;
}
childResized((MoeSyntaxDocument)document, mypos, child);
listener.nodeChangedLength(child, child.getPosition(), oldSize);
return true;
}
if (myEnd > child.getEnd()) {
int newsize = myEnd - child.getPosition();
child.resize(newsize);
if (marksOwnEnd()) {
complete = false;
}
childResized((MoeSyntaxDocument)document, mypos, child);
listener.nodeChangedLength(child, child.getPosition(), oldSize);
return true;
}
ParsedNode parentNode = getParentNode();
if (parentNode != null && parentNode.growChild(document,
new NodeAndPosition<ParsedNode>(this, mypos, getSize()), listener)) {
myEnd = mypos + getSize();
((MoeSyntaxDocument) document).scheduleReparse(myEnd, 0);
complete = false;
int newsize = myEnd - child.getPosition();
child.resize(newsize);
childResized((MoeSyntaxDocument)document, mypos, child);
listener.nodeChangedLength(child, child.getPosition(), oldSize);
return true;
}
return false;
}
}