1884 lines
		
	
	
		
			70 KiB
		
	
	
	
		
			JavaScript
		
	
	
	
	
	
			
		
		
	
	
			1884 lines
		
	
	
		
			70 KiB
		
	
	
	
		
			JavaScript
		
	
	
	
	
	
| import { Parser, NodeProp, NodeSet, NodeType, DefaultBufferLength, Tree, IterMode } from '@lezer/common';
 | |
| 
 | |
| /**
 | |
| A parse stack. These are used internally by the parser to track
 | |
| parsing progress. They also provide some properties and methods
 | |
| that external code such as a tokenizer can use to get information
 | |
| about the parse state.
 | |
| */
 | |
| class Stack {
 | |
|     /**
 | |
|     @internal
 | |
|     */
 | |
|     constructor(
 | |
|     /**
 | |
|     The parse that this stack is part of @internal
 | |
|     */
 | |
|     p, 
 | |
|     /**
 | |
|     Holds state, input pos, buffer index triplets for all but the
 | |
|     top state @internal
 | |
|     */
 | |
|     stack, 
 | |
|     /**
 | |
|     The current parse state @internal
 | |
|     */
 | |
|     state, 
 | |
|     // The position at which the next reduce should take place. This
 | |
|     // can be less than `this.pos` when skipped expressions have been
 | |
|     // added to the stack (which should be moved outside of the next
 | |
|     // reduction)
 | |
|     /**
 | |
|     @internal
 | |
|     */
 | |
|     reducePos, 
 | |
|     /**
 | |
|     The input position up to which this stack has parsed.
 | |
|     */
 | |
|     pos, 
 | |
|     /**
 | |
|     The dynamic score of the stack, including dynamic precedence
 | |
|     and error-recovery penalties
 | |
|     @internal
 | |
|     */
 | |
|     score, 
 | |
|     // The output buffer. Holds (type, start, end, size) quads
 | |
|     // representing nodes created by the parser, where `size` is
 | |
|     // amount of buffer array entries covered by this node.
 | |
|     /**
 | |
|     @internal
 | |
|     */
 | |
|     buffer, 
 | |
|     // The base offset of the buffer. When stacks are split, the split
 | |
|     // instance shared the buffer history with its parent up to
 | |
|     // `bufferBase`, which is the absolute offset (including the
 | |
|     // offset of previous splits) into the buffer at which this stack
 | |
|     // starts writing.
 | |
|     /**
 | |
|     @internal
 | |
|     */
 | |
|     bufferBase, 
 | |
|     /**
 | |
|     @internal
 | |
|     */
 | |
|     curContext, 
 | |
|     /**
 | |
|     @internal
 | |
|     */
 | |
|     lookAhead = 0, 
 | |
|     // A parent stack from which this was split off, if any. This is
 | |
|     // set up so that it always points to a stack that has some
 | |
|     // additional buffer content, never to a stack with an equal
 | |
|     // `bufferBase`.
 | |
|     /**
 | |
|     @internal
 | |
|     */
 | |
|     parent) {
 | |
|         this.p = p;
 | |
|         this.stack = stack;
 | |
|         this.state = state;
 | |
|         this.reducePos = reducePos;
 | |
|         this.pos = pos;
 | |
|         this.score = score;
 | |
|         this.buffer = buffer;
 | |
|         this.bufferBase = bufferBase;
 | |
|         this.curContext = curContext;
 | |
|         this.lookAhead = lookAhead;
 | |
|         this.parent = parent;
 | |
|     }
 | |
|     /**
 | |
|     @internal
 | |
|     */
 | |
|     toString() {
 | |
|         return `[${this.stack.filter((_, i) => i % 3 == 0).concat(this.state)}]@${this.pos}${this.score ? "!" + this.score : ""}`;
 | |
|     }
 | |
|     // Start an empty stack
 | |
|     /**
 | |
|     @internal
 | |
|     */
 | |
|     static start(p, state, pos = 0) {
 | |
|         let cx = p.parser.context;
 | |
|         return new Stack(p, [], state, pos, pos, 0, [], 0, cx ? new StackContext(cx, cx.start) : null, 0, null);
 | |
|     }
 | |
|     /**
 | |
|     The stack's current [context](#lr.ContextTracker) value, if
 | |
|     any. Its type will depend on the context tracker's type
 | |
|     parameter, or it will be `null` if there is no context
 | |
|     tracker.
 | |
|     */
 | |
|     get context() { return this.curContext ? this.curContext.context : null; }
 | |
|     // Push a state onto the stack, tracking its start position as well
 | |
|     // as the buffer base at that point.
 | |
|     /**
 | |
|     @internal
 | |
|     */
 | |
|     pushState(state, start) {
 | |
|         this.stack.push(this.state, start, this.bufferBase + this.buffer.length);
 | |
|         this.state = state;
 | |
|     }
 | |
|     // Apply a reduce action
 | |
|     /**
 | |
|     @internal
 | |
|     */
 | |
|     reduce(action) {
 | |
|         var _a;
 | |
|         let depth = action >> 19 /* Action.ReduceDepthShift */, type = action & 65535 /* Action.ValueMask */;
 | |
|         let { parser } = this.p;
 | |
|         let lookaheadRecord = this.reducePos < this.pos - 25 /* Lookahead.Margin */;
 | |
|         if (lookaheadRecord)
 | |
|             this.setLookAhead(this.pos);
 | |
|         let dPrec = parser.dynamicPrecedence(type);
 | |
|         if (dPrec)
 | |
|             this.score += dPrec;
 | |
|         if (depth == 0) {
 | |
|             this.pushState(parser.getGoto(this.state, type, true), this.reducePos);
 | |
|             // Zero-depth reductions are a special case—they add stuff to
 | |
|             // the stack without popping anything off.
 | |
|             if (type < parser.minRepeatTerm)
 | |
|                 this.storeNode(type, this.reducePos, this.reducePos, lookaheadRecord ? 8 : 4, true);
 | |
|             this.reduceContext(type, this.reducePos);
 | |
|             return;
 | |
|         }
 | |
|         // Find the base index into `this.stack`, content after which will
 | |
|         // be dropped. Note that with `StayFlag` reductions we need to
 | |
|         // consume two extra frames (the dummy parent node for the skipped
 | |
|         // expression and the state that we'll be staying in, which should
 | |
|         // be moved to `this.state`).
 | |
|         let base = this.stack.length - ((depth - 1) * 3) - (action & 262144 /* Action.StayFlag */ ? 6 : 0);
 | |
|         let start = base ? this.stack[base - 2] : this.p.ranges[0].from, size = this.reducePos - start;
 | |
|         // This is a kludge to try and detect overly deep left-associative
 | |
|         // trees, which will not increase the parse stack depth and thus
 | |
|         // won't be caught by the regular stack-depth limit check.
 | |
|         if (size >= 2000 /* Recover.MinBigReduction */ && !((_a = this.p.parser.nodeSet.types[type]) === null || _a === void 0 ? void 0 : _a.isAnonymous)) {
 | |
|             if (start == this.p.lastBigReductionStart) {
 | |
|                 this.p.bigReductionCount++;
 | |
|                 this.p.lastBigReductionSize = size;
 | |
|             }
 | |
|             else if (this.p.lastBigReductionSize < size) {
 | |
|                 this.p.bigReductionCount = 1;
 | |
|                 this.p.lastBigReductionStart = start;
 | |
|                 this.p.lastBigReductionSize = size;
 | |
|             }
 | |
|         }
 | |
|         let bufferBase = base ? this.stack[base - 1] : 0, count = this.bufferBase + this.buffer.length - bufferBase;
 | |
|         // Store normal terms or `R -> R R` repeat reductions
 | |
|         if (type < parser.minRepeatTerm || (action & 131072 /* Action.RepeatFlag */)) {
 | |
|             let pos = parser.stateFlag(this.state, 1 /* StateFlag.Skipped */) ? this.pos : this.reducePos;
 | |
|             this.storeNode(type, start, pos, count + 4, true);
 | |
|         }
 | |
|         if (action & 262144 /* Action.StayFlag */) {
 | |
|             this.state = this.stack[base];
 | |
|         }
 | |
|         else {
 | |
|             let baseStateID = this.stack[base - 3];
 | |
|             this.state = parser.getGoto(baseStateID, type, true);
 | |
|         }
 | |
|         while (this.stack.length > base)
 | |
|             this.stack.pop();
 | |
|         this.reduceContext(type, start);
 | |
|     }
 | |
|     // Shift a value into the buffer
 | |
|     /**
 | |
|     @internal
 | |
|     */
 | |
|     storeNode(term, start, end, size = 4, mustSink = false) {
 | |
|         if (term == 0 /* Term.Err */ &&
 | |
|             (!this.stack.length || this.stack[this.stack.length - 1] < this.buffer.length + this.bufferBase)) {
 | |
|             // Try to omit/merge adjacent error nodes
 | |
|             let cur = this, top = this.buffer.length;
 | |
|             if (top == 0 && cur.parent) {
 | |
|                 top = cur.bufferBase - cur.parent.bufferBase;
 | |
|                 cur = cur.parent;
 | |
|             }
 | |
|             if (top > 0 && cur.buffer[top - 4] == 0 /* Term.Err */ && cur.buffer[top - 1] > -1) {
 | |
|                 if (start == end)
 | |
|                     return;
 | |
|                 if (cur.buffer[top - 2] >= start) {
 | |
|                     cur.buffer[top - 2] = end;
 | |
|                     return;
 | |
|                 }
 | |
|             }
 | |
|         }
 | |
|         if (!mustSink || this.pos == end) { // Simple case, just append
 | |
|             this.buffer.push(term, start, end, size);
 | |
|         }
 | |
|         else { // There may be skipped nodes that have to be moved forward
 | |
|             let index = this.buffer.length;
 | |
|             if (index > 0 && this.buffer[index - 4] != 0 /* Term.Err */) {
 | |
|                 let mustMove = false;
 | |
|                 for (let scan = index; scan > 0 && this.buffer[scan - 2] > end; scan -= 4) {
 | |
|                     if (this.buffer[scan - 1] >= 0) {
 | |
|                         mustMove = true;
 | |
|                         break;
 | |
|                     }
 | |
|                 }
 | |
|                 if (mustMove)
 | |
|                     while (index > 0 && this.buffer[index - 2] > end) {
 | |
|                         // Move this record forward
 | |
|                         this.buffer[index] = this.buffer[index - 4];
 | |
|                         this.buffer[index + 1] = this.buffer[index - 3];
 | |
|                         this.buffer[index + 2] = this.buffer[index - 2];
 | |
|                         this.buffer[index + 3] = this.buffer[index - 1];
 | |
|                         index -= 4;
 | |
|                         if (size > 4)
 | |
|                             size -= 4;
 | |
|                     }
 | |
|             }
 | |
|             this.buffer[index] = term;
 | |
|             this.buffer[index + 1] = start;
 | |
|             this.buffer[index + 2] = end;
 | |
|             this.buffer[index + 3] = size;
 | |
|         }
 | |
|     }
 | |
|     // Apply a shift action
 | |
|     /**
 | |
|     @internal
 | |
|     */
 | |
|     shift(action, type, start, end) {
 | |
|         if (action & 131072 /* Action.GotoFlag */) {
 | |
|             this.pushState(action & 65535 /* Action.ValueMask */, this.pos);
 | |
|         }
 | |
|         else if ((action & 262144 /* Action.StayFlag */) == 0) { // Regular shift
 | |
|             let nextState = action, { parser } = this.p;
 | |
|             if (end > this.pos || type <= parser.maxNode) {
 | |
|                 this.pos = end;
 | |
|                 if (!parser.stateFlag(nextState, 1 /* StateFlag.Skipped */))
 | |
|                     this.reducePos = end;
 | |
|             }
 | |
|             this.pushState(nextState, start);
 | |
|             this.shiftContext(type, start);
 | |
|             if (type <= parser.maxNode)
 | |
|                 this.buffer.push(type, start, end, 4);
 | |
|         }
 | |
|         else { // Shift-and-stay, which means this is a skipped token
 | |
|             this.pos = end;
 | |
|             this.shiftContext(type, start);
 | |
|             if (type <= this.p.parser.maxNode)
 | |
|                 this.buffer.push(type, start, end, 4);
 | |
|         }
 | |
|     }
 | |
|     // Apply an action
 | |
|     /**
 | |
|     @internal
 | |
|     */
 | |
|     apply(action, next, nextStart, nextEnd) {
 | |
|         if (action & 65536 /* Action.ReduceFlag */)
 | |
|             this.reduce(action);
 | |
|         else
 | |
|             this.shift(action, next, nextStart, nextEnd);
 | |
|     }
 | |
|     // Add a prebuilt (reused) node into the buffer.
 | |
|     /**
 | |
|     @internal
 | |
|     */
 | |
|     useNode(value, next) {
 | |
|         let index = this.p.reused.length - 1;
 | |
|         if (index < 0 || this.p.reused[index] != value) {
 | |
|             this.p.reused.push(value);
 | |
|             index++;
 | |
|         }
 | |
|         let start = this.pos;
 | |
|         this.reducePos = this.pos = start + value.length;
 | |
|         this.pushState(next, start);
 | |
|         this.buffer.push(index, start, this.reducePos, -1 /* size == -1 means this is a reused value */);
 | |
|         if (this.curContext)
 | |
|             this.updateContext(this.curContext.tracker.reuse(this.curContext.context, value, this, this.p.stream.reset(this.pos - value.length)));
 | |
|     }
 | |
|     // Split the stack. Due to the buffer sharing and the fact
 | |
|     // that `this.stack` tends to stay quite shallow, this isn't very
 | |
|     // expensive.
 | |
|     /**
 | |
|     @internal
 | |
|     */
 | |
|     split() {
 | |
|         let parent = this;
 | |
|         let off = parent.buffer.length;
 | |
|         // Because the top of the buffer (after this.pos) may be mutated
 | |
|         // to reorder reductions and skipped tokens, and shared buffers
 | |
|         // should be immutable, this copies any outstanding skipped tokens
 | |
|         // to the new buffer, and puts the base pointer before them.
 | |
|         while (off > 0 && parent.buffer[off - 2] > parent.reducePos)
 | |
|             off -= 4;
 | |
|         let buffer = parent.buffer.slice(off), base = parent.bufferBase + off;
 | |
|         // Make sure parent points to an actual parent with content, if there is such a parent.
 | |
|         while (parent && base == parent.bufferBase)
 | |
|             parent = parent.parent;
 | |
|         return new Stack(this.p, this.stack.slice(), this.state, this.reducePos, this.pos, this.score, buffer, base, this.curContext, this.lookAhead, parent);
 | |
|     }
 | |
|     // Try to recover from an error by 'deleting' (ignoring) one token.
 | |
|     /**
 | |
|     @internal
 | |
|     */
 | |
|     recoverByDelete(next, nextEnd) {
 | |
|         let isNode = next <= this.p.parser.maxNode;
 | |
|         if (isNode)
 | |
|             this.storeNode(next, this.pos, nextEnd, 4);
 | |
|         this.storeNode(0 /* Term.Err */, this.pos, nextEnd, isNode ? 8 : 4);
 | |
|         this.pos = this.reducePos = nextEnd;
 | |
|         this.score -= 190 /* Recover.Delete */;
 | |
|     }
 | |
|     /**
 | |
|     Check if the given term would be able to be shifted (optionally
 | |
|     after some reductions) on this stack. This can be useful for
 | |
|     external tokenizers that want to make sure they only provide a
 | |
|     given token when it applies.
 | |
|     */
 | |
|     canShift(term) {
 | |
|         for (let sim = new SimulatedStack(this);;) {
 | |
|             let action = this.p.parser.stateSlot(sim.state, 4 /* ParseState.DefaultReduce */) || this.p.parser.hasAction(sim.state, term);
 | |
|             if (action == 0)
 | |
|                 return false;
 | |
|             if ((action & 65536 /* Action.ReduceFlag */) == 0)
 | |
|                 return true;
 | |
|             sim.reduce(action);
 | |
|         }
 | |
|     }
 | |
|     // Apply up to Recover.MaxNext recovery actions that conceptually
 | |
|     // inserts some missing token or rule.
 | |
|     /**
 | |
|     @internal
 | |
|     */
 | |
|     recoverByInsert(next) {
 | |
|         if (this.stack.length >= 300 /* Recover.MaxInsertStackDepth */)
 | |
|             return [];
 | |
|         let nextStates = this.p.parser.nextStates(this.state);
 | |
|         if (nextStates.length > 4 /* Recover.MaxNext */ << 1 || this.stack.length >= 120 /* Recover.DampenInsertStackDepth */) {
 | |
|             let best = [];
 | |
|             for (let i = 0, s; i < nextStates.length; i += 2) {
 | |
|                 if ((s = nextStates[i + 1]) != this.state && this.p.parser.hasAction(s, next))
 | |
|                     best.push(nextStates[i], s);
 | |
|             }
 | |
|             if (this.stack.length < 120 /* Recover.DampenInsertStackDepth */)
 | |
|                 for (let i = 0; best.length < 4 /* Recover.MaxNext */ << 1 && i < nextStates.length; i += 2) {
 | |
|                     let s = nextStates[i + 1];
 | |
|                     if (!best.some((v, i) => (i & 1) && v == s))
 | |
|                         best.push(nextStates[i], s);
 | |
|                 }
 | |
|             nextStates = best;
 | |
|         }
 | |
|         let result = [];
 | |
|         for (let i = 0; i < nextStates.length && result.length < 4 /* Recover.MaxNext */; i += 2) {
 | |
|             let s = nextStates[i + 1];
 | |
|             if (s == this.state)
 | |
|                 continue;
 | |
|             let stack = this.split();
 | |
|             stack.pushState(s, this.pos);
 | |
|             stack.storeNode(0 /* Term.Err */, stack.pos, stack.pos, 4, true);
 | |
|             stack.shiftContext(nextStates[i], this.pos);
 | |
|             stack.reducePos = this.pos;
 | |
|             stack.score -= 200 /* Recover.Insert */;
 | |
|             result.push(stack);
 | |
|         }
 | |
|         return result;
 | |
|     }
 | |
|     // Force a reduce, if possible. Return false if that can't
 | |
|     // be done.
 | |
|     /**
 | |
|     @internal
 | |
|     */
 | |
|     forceReduce() {
 | |
|         let { parser } = this.p;
 | |
|         let reduce = parser.stateSlot(this.state, 5 /* ParseState.ForcedReduce */);
 | |
|         if ((reduce & 65536 /* Action.ReduceFlag */) == 0)
 | |
|             return false;
 | |
|         if (!parser.validAction(this.state, reduce)) {
 | |
|             let depth = reduce >> 19 /* Action.ReduceDepthShift */, term = reduce & 65535 /* Action.ValueMask */;
 | |
|             let target = this.stack.length - depth * 3;
 | |
|             if (target < 0 || parser.getGoto(this.stack[target], term, false) < 0) {
 | |
|                 let backup = this.findForcedReduction();
 | |
|                 if (backup == null)
 | |
|                     return false;
 | |
|                 reduce = backup;
 | |
|             }
 | |
|             this.storeNode(0 /* Term.Err */, this.pos, this.pos, 4, true);
 | |
|             this.score -= 100 /* Recover.Reduce */;
 | |
|         }
 | |
|         this.reducePos = this.pos;
 | |
|         this.reduce(reduce);
 | |
|         return true;
 | |
|     }
 | |
|     /**
 | |
|     Try to scan through the automaton to find some kind of reduction
 | |
|     that can be applied. Used when the regular ForcedReduce field
 | |
|     isn't a valid action. @internal
 | |
|     */
 | |
|     findForcedReduction() {
 | |
|         let { parser } = this.p, seen = [];
 | |
|         let explore = (state, depth) => {
 | |
|             if (seen.includes(state))
 | |
|                 return;
 | |
|             seen.push(state);
 | |
|             return parser.allActions(state, (action) => {
 | |
|                 if (action & (262144 /* Action.StayFlag */ | 131072 /* Action.GotoFlag */)) ;
 | |
|                 else if (action & 65536 /* Action.ReduceFlag */) {
 | |
|                     let rDepth = (action >> 19 /* Action.ReduceDepthShift */) - depth;
 | |
|                     if (rDepth > 1) {
 | |
|                         let term = action & 65535 /* Action.ValueMask */, target = this.stack.length - rDepth * 3;
 | |
|                         if (target >= 0 && parser.getGoto(this.stack[target], term, false) >= 0)
 | |
|                             return (rDepth << 19 /* Action.ReduceDepthShift */) | 65536 /* Action.ReduceFlag */ | term;
 | |
|                     }
 | |
|                 }
 | |
|                 else {
 | |
|                     let found = explore(action, depth + 1);
 | |
|                     if (found != null)
 | |
|                         return found;
 | |
|                 }
 | |
|             });
 | |
|         };
 | |
|         return explore(this.state, 0);
 | |
|     }
 | |
|     /**
 | |
|     @internal
 | |
|     */
 | |
|     forceAll() {
 | |
|         while (!this.p.parser.stateFlag(this.state, 2 /* StateFlag.Accepting */)) {
 | |
|             if (!this.forceReduce()) {
 | |
|                 this.storeNode(0 /* Term.Err */, this.pos, this.pos, 4, true);
 | |
|                 break;
 | |
|             }
 | |
|         }
 | |
|         return this;
 | |
|     }
 | |
|     /**
 | |
|     Check whether this state has no further actions (assumed to be a direct descendant of the
 | |
|     top state, since any other states must be able to continue
 | |
|     somehow). @internal
 | |
|     */
 | |
|     get deadEnd() {
 | |
|         if (this.stack.length != 3)
 | |
|             return false;
 | |
|         let { parser } = this.p;
 | |
|         return parser.data[parser.stateSlot(this.state, 1 /* ParseState.Actions */)] == 65535 /* Seq.End */ &&
 | |
|             !parser.stateSlot(this.state, 4 /* ParseState.DefaultReduce */);
 | |
|     }
 | |
|     /**
 | |
|     Restart the stack (put it back in its start state). Only safe
 | |
|     when this.stack.length == 3 (state is directly below the top
 | |
|     state). @internal
 | |
|     */
 | |
|     restart() {
 | |
|         this.storeNode(0 /* Term.Err */, this.pos, this.pos, 4, true);
 | |
|         this.state = this.stack[0];
 | |
|         this.stack.length = 0;
 | |
|     }
 | |
|     /**
 | |
|     @internal
 | |
|     */
 | |
|     sameState(other) {
 | |
|         if (this.state != other.state || this.stack.length != other.stack.length)
 | |
|             return false;
 | |
|         for (let i = 0; i < this.stack.length; i += 3)
 | |
|             if (this.stack[i] != other.stack[i])
 | |
|                 return false;
 | |
|         return true;
 | |
|     }
 | |
|     /**
 | |
|     Get the parser used by this stack.
 | |
|     */
 | |
|     get parser() { return this.p.parser; }
 | |
|     /**
 | |
|     Test whether a given dialect (by numeric ID, as exported from
 | |
|     the terms file) is enabled.
 | |
|     */
 | |
|     dialectEnabled(dialectID) { return this.p.parser.dialect.flags[dialectID]; }
 | |
|     shiftContext(term, start) {
 | |
|         if (this.curContext)
 | |
|             this.updateContext(this.curContext.tracker.shift(this.curContext.context, term, this, this.p.stream.reset(start)));
 | |
|     }
 | |
|     reduceContext(term, start) {
 | |
|         if (this.curContext)
 | |
|             this.updateContext(this.curContext.tracker.reduce(this.curContext.context, term, this, this.p.stream.reset(start)));
 | |
|     }
 | |
|     /**
 | |
|     @internal
 | |
|     */
 | |
|     emitContext() {
 | |
|         let last = this.buffer.length - 1;
 | |
|         if (last < 0 || this.buffer[last] != -3)
 | |
|             this.buffer.push(this.curContext.hash, this.pos, this.pos, -3);
 | |
|     }
 | |
|     /**
 | |
|     @internal
 | |
|     */
 | |
|     emitLookAhead() {
 | |
|         let last = this.buffer.length - 1;
 | |
|         if (last < 0 || this.buffer[last] != -4)
 | |
|             this.buffer.push(this.lookAhead, this.pos, this.pos, -4);
 | |
|     }
 | |
|     updateContext(context) {
 | |
|         if (context != this.curContext.context) {
 | |
|             let newCx = new StackContext(this.curContext.tracker, context);
 | |
|             if (newCx.hash != this.curContext.hash)
 | |
|                 this.emitContext();
 | |
|             this.curContext = newCx;
 | |
|         }
 | |
|     }
 | |
|     /**
 | |
|     @internal
 | |
|     */
 | |
|     setLookAhead(lookAhead) {
 | |
|         if (lookAhead > this.lookAhead) {
 | |
|             this.emitLookAhead();
 | |
|             this.lookAhead = lookAhead;
 | |
|         }
 | |
|     }
 | |
|     /**
 | |
|     @internal
 | |
|     */
 | |
|     close() {
 | |
|         if (this.curContext && this.curContext.tracker.strict)
 | |
|             this.emitContext();
 | |
|         if (this.lookAhead > 0)
 | |
|             this.emitLookAhead();
 | |
|     }
 | |
| }
 | |
// Immutable pairing of a context tracker with its current context value.
// For strict trackers the context hash is computed eagerly, so that
// context changes can be detected and recorded cheaply; for non-strict
// trackers it is simply left at 0.
class StackContext {
    constructor(tracker, context) {
        this.tracker = tracker;
        this.context = context;
        this.hash = !tracker.strict ? 0 : tracker.hash(context);
    }
}
 | |
// Used to cheaply run some reductions to scan ahead without mutating
// an entire stack
class SimulatedStack {
    constructor(start) {
        this.start = start;
        this.state = start.state;
        this.stack = start.stack;
        this.base = this.stack.length;
    }
    // Apply a reduce action to the simulated state, without touching
    // the stack this was created from.
    reduce(action) {
        const term = action & 65535 /* Action.ValueMask */;
        const depth = action >> 19 /* Action.ReduceDepthShift */;
        if (depth > 0) {
            // Pop `depth - 1` frames by moving the base down
            this.base -= (depth - 1) * 3;
        }
        else {
            // Zero-depth reduction pushes a frame. Copy-on-write so the
            // borrowed stack array stays untouched.
            if (this.stack == this.start.stack)
                this.stack = this.stack.slice();
            this.stack.push(this.state, 0, 0);
            this.base += 3;
        }
        this.state = this.start.p.parser.getGoto(this.stack[this.base - 3], term, true);
    }
}
 | |
// This is given to `Tree.build` to build a buffer, and encapsulates
// the parent-stack-walking necessary to read the nodes. It iterates
// backward over the (id, start, end, size) quads in the shared buffers.
class StackBufferCursor {
    constructor(stack, pos, index) {
        this.stack = stack;
        this.pos = pos;
        this.index = index;
        this.buffer = stack.buffer;
        // If we start at the front of this buffer, immediately move to
        // the parent stack's shared history (when there is one).
        if (this.index == 0)
            this.maybeNext();
    }
    static create(stack, pos = stack.bufferBase + stack.buffer.length) {
        return new StackBufferCursor(stack, pos, pos - stack.bufferBase);
    }
    // Step to the parent stack's buffer, if any, continuing at the point
    // where this stack's shared history ends.
    maybeNext() {
        const parent = this.stack.parent;
        if (parent == null)
            return;
        this.index = this.stack.bufferBase - parent.bufferBase;
        this.stack = parent;
        this.buffer = parent.buffer;
    }
    // Accessors for the quad just before `index`
    get id() { return this.buffer[this.index - 4]; }
    get start() { return this.buffer[this.index - 3]; }
    get end() { return this.buffer[this.index - 2]; }
    get size() { return this.buffer[this.index - 1]; }
    next() {
        this.pos -= 4;
        this.index -= 4;
        if (this.index == 0)
            this.maybeNext();
    }
    fork() {
        return new StackBufferCursor(this.stack, this.pos, this.index);
    }
}
 | |
| 
 | |
// See lezer-generator/src/encode.ts for comments about the encoding
// used here. Values are base-46 digit strings starting at char code 32,
// with gaps at `"` (34) and `\` (92); a digit >= 46 marks the end of a
// value, and `~` (126) encodes the maximum value 65535 directly. The
// first decoded value is the length of the resulting typed array.
function decodeArray(input, Type = Uint16Array) {
    if (typeof input != "string")
        return input; // already decoded
    let array = null;
    let pos = 0, out = 0;
    while (pos < input.length) {
        let value = 0;
        for (;;) {
            const code = input.charCodeAt(pos++);
            if (code == 126 /* Encode.BigValCode */) {
                value = 65535 /* Encode.BigVal */;
                break;
            }
            // Undo the gaps left for the characters that can't appear in
            // a JS string literal without escaping
            let digit = code;
            if (digit >= 92 /* Encode.Gap2 */)
                digit--;
            if (digit >= 34 /* Encode.Gap1 */)
                digit--;
            digit -= 32 /* Encode.Start */;
            const last = digit >= 46 /* Encode.Base */;
            if (last)
                digit -= 46 /* Encode.Base */;
            value += digit;
            if (last)
                break;
            value *= 46 /* Encode.Base */;
        }
        if (array == null)
            array = new Type(value); // first value gives the array length
        else
            array[out++] = value;
    }
    return array;
}
 | |
| 
 | |
// Holds the result (and bookkeeping fields) of reading a token, so it
// can be cached and reused. A value/start/end of -1 means "nothing
// read yet".
class CachedToken {
    constructor() {
        Object.assign(this, {
            start: -1,
            value: -1,
            end: -1,
            extended: -1,
            lookAhead: 0,
            mask: 0,
            context: 0,
        });
    }
}
// Shared sentinel used when no real token object is available
const nullToken = new CachedToken();
 | |
| /**
 | |
| [Tokenizers](#lr.ExternalTokenizer) interact with the input
 | |
| through this interface. It presents the input as a stream of
 | |
| characters, tracking lookahead and hiding the complexity of
 | |
| [ranges](#common.Parser.parse^ranges) from tokenizer code.
 | |
| */
 | |
| class InputStream {
 | |
|     /**
 | |
|     @internal
 | |
|     */
 | |
|     constructor(
 | |
|     /**
 | |
|     @internal
 | |
|     */
 | |
|     input, 
 | |
|     /**
 | |
|     @internal
 | |
|     */
 | |
|     ranges) {
 | |
|         this.input = input;
 | |
|         this.ranges = ranges;
 | |
|         /**
 | |
|         @internal
 | |
|         */
 | |
|         this.chunk = "";
 | |
|         /**
 | |
|         @internal
 | |
|         */
 | |
|         this.chunkOff = 0;
 | |
|         /**
 | |
|         Backup chunk
 | |
|         */
 | |
|         this.chunk2 = "";
 | |
|         this.chunk2Pos = 0;
 | |
|         /**
 | |
|         The character code of the next code unit in the input, or -1
 | |
|         when the stream is at the end of the input.
 | |
|         */
 | |
|         this.next = -1;
 | |
|         /**
 | |
|         @internal
 | |
|         */
 | |
|         this.token = nullToken;
 | |
|         this.rangeIndex = 0;
 | |
|         this.pos = this.chunkPos = ranges[0].from;
 | |
|         this.range = ranges[0];
 | |
|         this.end = ranges[ranges.length - 1].to;
 | |
|         this.readNext();
 | |
|     }
 | |
|     /**
 | |
|     @internal
 | |
|     */
 | |
|     resolveOffset(offset, assoc) {
 | |
|         let range = this.range, index = this.rangeIndex;
 | |
|         let pos = this.pos + offset;
 | |
|         while (pos < range.from) {
 | |
|             if (!index)
 | |
|                 return null;
 | |
|             let next = this.ranges[--index];
 | |
|             pos -= range.from - next.to;
 | |
|             range = next;
 | |
|         }
 | |
|         while (assoc < 0 ? pos > range.to : pos >= range.to) {
 | |
|             if (index == this.ranges.length - 1)
 | |
|                 return null;
 | |
|             let next = this.ranges[++index];
 | |
|             pos += next.from - range.to;
 | |
|             range = next;
 | |
|         }
 | |
|         return pos;
 | |
|     }
 | |
|     /**
 | |
|     @internal
 | |
|     */
 | |
|     clipPos(pos) {
 | |
|         if (pos >= this.range.from && pos < this.range.to)
 | |
|             return pos;
 | |
|         for (let range of this.ranges)
 | |
|             if (range.to > pos)
 | |
|                 return Math.max(pos, range.from);
 | |
|         return this.end;
 | |
|     }
 | |
    /**
    Look at a code unit near the stream position. `.peek(0)` equals
    `.next`, `.peek(-1)` gives you the previous character, and so
    on.
    
    Note that looking around during tokenizing creates dependencies
    on potentially far-away content, which may reduce the
    effectiveness of incremental parsing—when looking forward—or even
    cause invalid reparses when looking backward more than 25 code
    units, since the library does not track lookbehind.
    */
    peek(offset) {
        let idx = this.chunkOff + offset, pos, result;
        if (idx >= 0 && idx < this.chunk.length) {
            // The requested position lies inside the current chunk.
            pos = this.pos + offset;
            result = this.chunk.charCodeAt(idx);
        }
        else {
            // Map the relative offset across range gaps to an absolute
            // document position.
            let resolved = this.resolveOffset(offset, 1);
            if (resolved == null)
                return -1;
            pos = resolved;
            if (pos >= this.chunk2Pos && pos < this.chunk2Pos + this.chunk2.length) {
                // Served from the secondary cached chunk.
                result = this.chunk2.charCodeAt(pos - this.chunk2Pos);
            }
            else {
                // Load a fresh chunk into the secondary slot, clipped so
                // it doesn't extend past the range containing `pos`.
                let i = this.rangeIndex, range = this.range;
                while (range.to <= pos)
                    range = this.ranges[++i];
                this.chunk2 = this.input.chunk(this.chunk2Pos = pos);
                if (pos + this.chunk2.length > range.to)
                    this.chunk2 = this.chunk2.slice(0, range.to - pos);
                result = this.chunk2.charCodeAt(0);
            }
        }
        // Record how far ahead the current token has looked.
        if (pos >= this.token.lookAhead)
            this.token.lookAhead = pos + 1;
        return result;
    }
 | |
|     /**
 | |
|     Accept a token. By default, the end of the token is set to the
 | |
|     current stream position, but you can pass an offset (relative to
 | |
|     the stream position) to change that.
 | |
|     */
 | |
|     acceptToken(token, endOffset = 0) {
 | |
|         let end = endOffset ? this.resolveOffset(endOffset, -1) : this.pos;
 | |
|         if (end == null || end < this.token.start)
 | |
|             throw new RangeError("Token end out of bounds");
 | |
|         this.token.value = token;
 | |
|         this.token.end = end;
 | |
|     }
 | |
    /**
    Accept a token ending at a specific given position.
    */
    acceptTokenTo(token, endPos) {
        // Unlike acceptToken, this performs no bounds checking on endPos.
        this.token.value = token;
        this.token.end = endPos;
    }
 | |
    // Make the chunk that contains `this.pos` current. When the
    // secondary cached chunk already holds that position, the primary
    // and secondary chunks are swapped; otherwise a new chunk is read
    // from the input (clipped to the current range) and the old one is
    // kept as the secondary cache.
    getChunk() {
        if (this.pos >= this.chunk2Pos && this.pos < this.chunk2Pos + this.chunk2.length) {
            // Swap chunk and chunk2 so the cached data becomes current.
            let { chunk, chunkPos } = this;
            this.chunk = this.chunk2;
            this.chunkPos = this.chunk2Pos;
            this.chunk2 = chunk;
            this.chunk2Pos = chunkPos;
            this.chunkOff = this.pos - this.chunkPos;
        }
        else {
            // Keep the old chunk around as chunk2 and fetch a new one.
            this.chunk2 = this.chunk;
            this.chunk2Pos = this.chunkPos;
            let nextChunk = this.input.chunk(this.pos);
            let end = this.pos + nextChunk.length;
            // Clip the chunk so it doesn't extend past the current range.
            this.chunk = end > this.range.to ? nextChunk.slice(0, this.range.to - this.pos) : nextChunk;
            this.chunkPos = this.pos;
            this.chunkOff = 0;
        }
    }
 | |
|     readNext() {
 | |
|         if (this.chunkOff >= this.chunk.length) {
 | |
|             this.getChunk();
 | |
|             if (this.chunkOff == this.chunk.length)
 | |
|                 return this.next = -1;
 | |
|         }
 | |
|         return this.next = this.chunk.charCodeAt(this.chunkOff);
 | |
|     }
 | |
    /**
    Move the stream forward N (defaults to 1) code units. Returns
    the new value of [`next`](#lr.InputStream.next).
    */
    advance(n = 1) {
        this.chunkOff += n;
        // Cross range boundaries, consuming the remainder of each range
        // before jumping to the start of the next one.
        while (this.pos + n >= this.range.to) {
            if (this.rangeIndex == this.ranges.length - 1)
                return this.setDone(); // No further ranges — end of input
            n -= this.range.to - this.pos;
            this.range = this.ranges[++this.rangeIndex];
            this.pos = this.range.from;
        }
        this.pos += n;
        // Keep the token's lookahead watermark up to date.
        if (this.pos >= this.token.lookAhead)
            this.token.lookAhead = this.pos + 1;
        return this.readNext();
    }
 | |
|     setDone() {
 | |
|         this.pos = this.chunkPos = this.end;
 | |
|         this.range = this.ranges[this.rangeIndex = this.ranges.length - 1];
 | |
|         this.chunk = "";
 | |
|         return this.next = -1;
 | |
|     }
 | |
    /**
    Move the stream to the given position and, when `token` is given,
    (re)initialize it to start there. When the position actually
    changes, the current range, chunk, and `next` value are brought
    back in sync. Returns the stream itself.
    @internal
    */
    reset(pos, token) {
        if (token) {
            this.token = token;
            token.start = pos;
            token.lookAhead = pos + 1;
            token.value = token.extended = -1;
        }
        else {
            this.token = nullToken;
        }
        if (this.pos != pos) {
            this.pos = pos;
            if (pos == this.end) {
                this.setDone();
                return this;
            }
            // Walk rangeIndex backward or forward until it points at the
            // range containing pos.
            while (pos < this.range.from)
                this.range = this.ranges[--this.rangeIndex];
            while (pos >= this.range.to)
                this.range = this.ranges[++this.rangeIndex];
            if (pos >= this.chunkPos && pos < this.chunkPos + this.chunk.length) {
                // The cached chunk still covers pos; just move the offset.
                this.chunkOff = pos - this.chunkPos;
            }
            else {
                // Invalidate the chunk so readNext fetches a fresh one.
                this.chunk = "";
                this.chunkOff = 0;
            }
            this.readNext();
        }
        return this;
    }
 | |
|     /**
 | |
|     @internal
 | |
|     */
 | |
|     read(from, to) {
 | |
|         if (from >= this.chunkPos && to <= this.chunkPos + this.chunk.length)
 | |
|             return this.chunk.slice(from - this.chunkPos, to - this.chunkPos);
 | |
|         if (from >= this.chunk2Pos && to <= this.chunk2Pos + this.chunk2.length)
 | |
|             return this.chunk2.slice(from - this.chunk2Pos, to - this.chunk2Pos);
 | |
|         if (from >= this.range.from && to <= this.range.to)
 | |
|             return this.input.read(from, to);
 | |
|         let result = "";
 | |
|         for (let r of this.ranges) {
 | |
|             if (r.from >= to)
 | |
|                 break;
 | |
|             if (r.to > from)
 | |
|                 result += this.input.read(Math.max(r.from, from), Math.min(r.to, to));
 | |
|         }
 | |
|         return result;
 | |
|     }
 | |
| }
 | |
/**
A tokenizer backed by a generated state-machine table, restricted to
one token group (`id`).
@internal
*/
class TokenGroup {
    constructor(data, id) {
        this.data = data;
        this.id = id;
    }
    // Run this group's automaton over the input at the stream position.
    token(input, stack) {
        let parser = stack.p.parser;
        readToken(this.data, input, stack, this.id, parser.data, parser.tokenPrecTable);
    }
}
 | |
// Plain token groups are neither contextual, fallback, nor extending.
TokenGroup.prototype.contextual = TokenGroup.prototype.fallback = TokenGroup.prototype.extend = false;
 | |
/**
A table-backed tokenizer for local token groups. When nothing in the
table matches, it can optionally emit a configured `elseToken`
covering the skipped characters.
@hide
*/
class LocalTokenGroup {
    constructor(data, precTable, elseToken) {
        this.precTable = precTable;
        this.elseToken = elseToken;
        // Table data may arrive as an encoded string; decode it into an
        // array in that case.
        this.data = typeof data == "string" ? decodeArray(data) : data;
    }
    // Try to read a token with this group's automaton. If nothing
    // matches and `elseToken` is configured, advance one position at a
    // time until a token matches or input ends, then emit `elseToken`
    // for the characters that were skipped over.
    token(input, stack) {
        let start = input.pos, skipped = 0;
        for (;;) {
            // Capture EOF status and the next position before the
            // automaton moves the stream.
            let atEof = input.next < 0, nextPos = input.resolveOffset(1, 1);
            readToken(this.data, input, stack, 0, this.data, this.precTable);
            if (input.token.value > -1)
                break;
            if (this.elseToken == null)
                return;
            if (!atEof)
                skipped++;
            if (nextPos == null)
                break;
            input.reset(nextPos, input.token);
        }
        if (skipped) {
            // Rewind and emit the else-token over the skipped stretch.
            input.reset(start, input.token);
            input.acceptToken(this.elseToken, skipped);
        }
    }
}
 | |
// Local token groups are neither contextual, fallback, nor extending.
// NOTE(fix): this previously re-assigned `fallback`/`extend` on
// TokenGroup.prototype instead of LocalTokenGroup.prototype, leaving
// LocalTokenGroup's own flags `undefined`. Since `undefined` and
// `false` are both falsy, setting them properly here is
// behavior-compatible for all boolean checks.
LocalTokenGroup.prototype.contextual = LocalTokenGroup.prototype.fallback = LocalTokenGroup.prototype.extend = false;
 | |
/**
`@external tokens` declarations in the grammar should resolve to
an instance of this class.
*/
class ExternalTokenizer {
    /**
    Create a tokenizer. The first argument is the function that,
    given an input stream, scans for the types of tokens it
    recognizes at the stream's position, and calls
    [`acceptToken`](#lr.InputStream.acceptToken) when it finds
    one.
    */
    constructor(
    /**
    @internal
    */
    token, options = {}) {
        this.token = token;
        // Normalize the option flags to real booleans.
        this.contextual = Boolean(options.contextual);
        this.fallback = Boolean(options.fallback);
        this.extend = Boolean(options.extend);
    }
}
 | |
// Tokenizer data is stored as a big uint16 array containing, for each
 | |
| // state:
 | |
| //
 | |
| //  - A group bitmask, indicating what token groups are reachable from
 | |
| //    this state, so that paths that can only lead to tokens not in
 | |
| //    any of the current groups can be cut off early.
 | |
| //
 | |
| //  - The position of the end of the state's sequence of accepting
 | |
| //    tokens
 | |
| //
 | |
| //  - The number of outgoing edges for the state
 | |
| //
 | |
| //  - The accepting tokens, as (token id, group mask) pairs
 | |
| //
 | |
| //  - The outgoing edges, as (start character, end character, state
 | |
| //    index) triples, with end character being exclusive
 | |
| //
 | |
| // This function interprets that data, running through a stream as
 | |
// long as new states with a matching group mask can be reached,
 | |
| // and updating `input.token` when it matches a token.
 | |
// Interpret the tokenizer state-machine data (see the comment block
// above for its layout), running through the stream as long as states
// reachable for `group` exist, and recording the longest/highest-
// precedence match in `input.token` via acceptToken.
function readToken(data, input, stack, group, precTable, precOffset) {
    let state = 0, groupMask = 1 << group, { dialect } = stack.p.parser;
    scan: for (;;) {
        // Cut off paths that can't lead to a token in the current group.
        if ((groupMask & data[state]) == 0)
            break;
        let accEnd = data[state + 1];
        // Accept tokens in this state, possibly overwriting
        // lower-precedence / shorter tokens
        for (let i = state + 3; i < accEnd; i += 2)
            if ((data[i + 1] & groupMask) > 0) {
                let term = data[i];
                if (dialect.allows(term) &&
                    (input.token.value == -1 || input.token.value == term ||
                        overrides(term, input.token.value, precTable, precOffset))) {
                    input.acceptToken(term);
                    break;
                }
            }
        let next = input.next, low = 0, high = data[state + 2];
        // Special case for EOF
        if (input.next < 0 && high > low && data[accEnd + high * 3 - 3] == 65535 /* Seq.End */) {
            state = data[accEnd + high * 3 - 1];
            continue scan;
        }
        // Do a binary search on the state's edges
        for (; low < high;) {
            let mid = (low + high) >> 1;
            // Each edge is a (from, to, target) triple starting at accEnd.
            let index = accEnd + mid + (mid << 1);
            // A stored `to` of 0 wraps around to mean the top of the
            // code-unit range.
            let from = data[index], to = data[index + 1] || 0x10000;
            if (next < from)
                high = mid;
            else if (next >= to)
                low = mid + 1;
            else {
                state = data[index + 2];
                input.advance();
                continue scan;
            }
        }
        break;
    }
}
 | |
// Return the index of `term`, relative to `start`, within the
// Seq.End-terminated run beginning at `start` in `data`, or -1 when
// the terminator is reached without finding it.
function findOffset(data, start, term) {
    let i = start;
    for (;;) {
        let value = data[i];
        if (value == 65535 /* Seq.End */)
            return -1;
        if (value == term)
            return i - start;
        i++;
    }
}
 | |
// Decide whether `token` should replace the previously matched `prev`
// token, based on their order in the precedence-table run starting at
// `tableOffset` (earlier entries take precedence). A `prev` token that
// is absent from the table is always overridden.
function overrides(token, prev, tableData, tableOffset) {
    let prevIndex = findOffset(tableData, tableOffset, prev);
    if (prevIndex < 0)
        return true;
    return findOffset(tableData, tableOffset, token) < prevIndex;
}
 | |
| 
 | |
// Debug logging switch: enabled when running under Node with a LOG
// environment variable whose value contains the word "parse".
const verbose = typeof process != "undefined" && process.env && /\bparse\b/.test(process.env.LOG);
// NOTE(review): appears to back the short stack labels used in verbose
// log output (see Parse.stackID, defined later in this file) —
// initialized lazily.
let stackIDs = null;
 | |
// Find a safe position near `pos` at which a partially reused tree can
// be cut: at least Lookahead.Margin (25) code units away from `pos` in
// the direction given by `side` (< 0 means before, otherwise after),
// and next to a non-error node.
function cutAt(tree, pos, side) {
    let cursor = tree.cursor(IterMode.IncludeAnonymous);
    cursor.moveTo(pos);
    for (;;) {
        if (!(side < 0 ? cursor.childBefore(pos) : cursor.childAfter(pos)))
            // Can't descend further — scan siblings and parents for a
            // non-error node clear of `pos`.
            for (;;) {
                if ((side < 0 ? cursor.to < pos : cursor.from > pos) && !cursor.type.isError)
                    return side < 0 ? Math.max(0, Math.min(cursor.to - 1, pos - 25 /* Lookahead.Margin */))
                        : Math.min(tree.length, Math.max(cursor.from + 1, pos + 25 /* Lookahead.Margin */));
                if (side < 0 ? cursor.prevSibling() : cursor.nextSibling())
                    break;
                if (!cursor.parent())
                    return side < 0 ? 0 : tree.length; // Ran out of tree
            }
    }
}
 | |
// Cursor over the reusable nodes in a set of tree fragments, used by
// the parser to find cached subtrees that can be reused at a given
// position instead of being reparsed.
class FragmentCursor {
    constructor(fragments, nodeSet) {
        this.fragments = fragments;
        this.nodeSet = nodeSet;
        // Index of the next fragment to move to.
        this.i = 0;
        this.fragment = null;
        // Bounds between which nodes of the current fragment may be
        // safely reused.
        this.safeFrom = -1;
        this.safeTo = -1;
        // Parallel stacks describing the current descent into the
        // fragment's tree: node, its absolute start, next child index.
        this.trees = [];
        this.start = [];
        this.index = [];
        this.nextFragment();
    }
    // Move to the next fragment (or a "no fragment" state), resetting
    // the descent stacks and computing the safe reuse bounds.
    nextFragment() {
        let fr = this.fragment = this.i == this.fragments.length ? null : this.fragments[this.i++];
        if (fr) {
            // Open fragment edges must be cut back to a safe position.
            this.safeFrom = fr.openStart ? cutAt(fr.tree, fr.from + fr.offset, 1) - fr.offset : fr.from;
            this.safeTo = fr.openEnd ? cutAt(fr.tree, fr.to + fr.offset, -1) - fr.offset : fr.to;
            while (this.trees.length) {
                this.trees.pop();
                this.start.pop();
                this.index.pop();
            }
            this.trees.push(fr.tree);
            this.start.push(-fr.offset);
            this.index.push(0);
            this.nextStart = this.safeFrom;
        }
        else {
            // Sentinel meaning "no reusable content from here on".
            this.nextStart = 1e9;
        }
    }
    // `pos` must be >= any previously given `pos` for this cursor
    nodeAt(pos) {
        if (pos < this.nextStart)
            return null;
        while (this.fragment && this.safeTo <= pos)
            this.nextFragment();
        if (!this.fragment)
            return null;
        for (;;) {
            let last = this.trees.length - 1;
            if (last < 0) { // End of tree
                this.nextFragment();
                return null;
            }
            let top = this.trees[last], index = this.index[last];
            if (index == top.children.length) {
                // Done with this node's children; pop back to the parent.
                this.trees.pop();
                this.start.pop();
                this.index.pop();
                continue;
            }
            let next = top.children[index];
            let start = this.start[last] + top.positions[index];
            if (start > pos) {
                // Nothing reusable until `start`; remember that.
                this.nextStart = start;
                return null;
            }
            if (next instanceof Tree) {
                if (start == pos) {
                    if (start < this.safeFrom)
                        return null;
                    let end = start + next.length;
                    if (end <= this.safeTo) {
                        // Only reuse nodes whose recorded lookahead stays
                        // within the fragment.
                        let lookAhead = next.prop(NodeProp.lookAhead);
                        if (!lookAhead || end + lookAhead < this.fragment.to)
                            return next;
                    }
                }
                this.index[last]++;
                if (start + next.length >= Math.max(this.safeFrom, pos)) { // Enter this node
                    this.trees.push(next);
                    this.start.push(start);
                    this.index.push(0);
                }
            }
            else {
                // Buffer child — not reusable, skip over it entirely.
                this.index[last]++;
                this.nextStart = start + next.length;
            }
        }
    }
}
 | |
// Caches tokens read by the parser's tokenizers and translates them
// into parse actions for the current stack state.
class TokenCache {
    constructor(parser, stream) {
        this.stream = stream;
        this.tokens = [];
        // The main (non-extending) token found by the last getActions
        // call, if any.
        this.mainToken = null;
        // Holds (action, term, end-position) triples.
        this.actions = [];
        // One cache slot per tokenizer.
        this.tokens = parser.tokenizers.map(_ => new CachedToken);
    }
    // Compute the parse actions available for `stack` as (action, term,
    // end) triples in `this.actions`, running each tokenizer enabled by
    // the state's tokenizer mask and reusing cached tokens when valid.
    getActions(stack) {
        let actionIndex = 0;
        let main = null;
        let { parser } = stack.p, { tokenizers } = parser;
        let mask = parser.stateSlot(stack.state, 3 /* ParseState.TokenizerMask */);
        let context = stack.curContext ? stack.curContext.hash : 0;
        let lookAhead = 0;
        for (let i = 0; i < tokenizers.length; i++) {
            if (((1 << i) & mask) == 0)
                continue;
            let tokenizer = tokenizers[i], token = this.tokens[i];
            // Once a main token is found, only fallback tokenizers run.
            if (main && !tokenizer.fallback)
                continue;
            // Contextual tokenizers can't reuse cached tokens, and
            // neither can entries for a different position/mask/context.
            if (tokenizer.contextual || token.start != stack.pos || token.mask != mask || token.context != context) {
                this.updateCachedToken(token, tokenizer, stack);
                token.mask = mask;
                token.context = context;
            }
            if (token.lookAhead > token.end + 25 /* Lookahead.Margin */)
                lookAhead = Math.max(token.lookAhead, lookAhead);
            if (token.value != 0 /* Term.Err */) {
                let startIndex = actionIndex;
                if (token.extended > -1)
                    actionIndex = this.addActions(stack, token.extended, token.end, actionIndex);
                actionIndex = this.addActions(stack, token.value, token.end, actionIndex);
                if (!tokenizer.extend) {
                    main = token;
                    if (actionIndex > startIndex)
                        break;
                }
            }
        }
        // Drop stale entries from a previous, longer action list.
        while (this.actions.length > actionIndex)
            this.actions.pop();
        if (lookAhead)
            stack.setLookAhead(lookAhead);
        // At the end of the input with no token found, synthesize EOF.
        if (!main && stack.pos == this.stream.end) {
            main = new CachedToken;
            main.value = stack.p.parser.eofTerm;
            main.start = main.end = stack.pos;
            actionIndex = this.addActions(stack, main.value, main.end, actionIndex);
        }
        this.mainToken = main;
        return this.actions;
    }
    // Return the main token from the last getActions call, or
    // synthesize a one-character error (or EOF) token when none exists.
    getMainToken(stack) {
        if (this.mainToken)
            return this.mainToken;
        let main = new CachedToken, { pos, p } = stack;
        main.start = pos;
        main.end = Math.min(pos + 1, p.stream.end);
        main.value = pos == p.stream.end ? p.parser.eofTerm : 0 /* Term.Err */;
        return main;
    }
    // Run `tokenizer` at the stack position, then apply any matching
    // specializer to the token it produced.
    updateCachedToken(token, tokenizer, stack) {
        let start = this.stream.clipPos(stack.pos);
        tokenizer.token(this.stream.reset(start, token), stack);
        if (token.value > -1) {
            let { parser } = stack.p;
            for (let i = 0; i < parser.specialized.length; i++)
                if (parser.specialized[i] == token.value) {
                    let result = parser.specializers[i](this.stream.read(token.start, token.end), stack);
                    if (result >= 0 && stack.p.parser.dialect.allows(result >> 1)) {
                        // The low bit selects replace-vs-extend semantics.
                        if ((result & 1) == 0 /* Specialize.Specialize */)
                            token.value = result >> 1;
                        else
                            token.extended = result >> 1;
                        break;
                    }
                }
        }
        else {
            // Nothing matched — record a one-character error token.
            token.value = 0 /* Term.Err */;
            token.end = this.stream.clipPos(start + 1);
        }
    }
    // Append an (action, term, end) triple at `index` unless the same
    // action is already present; returns the new length.
    putAction(action, token, end, index) {
        // Don't add duplicate actions
        for (let i = 0; i < index; i += 3)
            if (this.actions[i] == action)
                return index;
        this.actions[index++] = action;
        this.actions[index++] = token;
        this.actions[index++] = end;
        return index;
    }
    // Collect the actions for `token` in the stack's state from both
    // the regular and the skip action tables; returns the new length.
    addActions(stack, token, end, index) {
        let { state } = stack, { parser } = stack.p, { data } = parser;
        for (let set = 0; set < 2; set++) {
            for (let i = parser.stateSlot(state, set ? 2 /* ParseState.Skip */ : 1 /* ParseState.Actions */);; i += 3) {
                if (data[i] == 65535 /* Seq.End */) {
                    if (data[i + 1] == 1 /* Seq.Next */) {
                        // Table continues at another position.
                        i = pair(data, i + 2);
                    }
                    else {
                        // A catch-all action only applies when no other
                        // action was found.
                        if (index == 0 && data[i + 1] == 2 /* Seq.Other */)
                            index = this.putAction(pair(data, i + 2), token, end, index);
                        break;
                    }
                }
                if (data[i] == token)
                    index = this.putAction(pair(data, i + 1), token, end, index);
            }
        }
        return index;
    }
}
 | |
| class Parse {
 | |
    constructor(parser, input, fragments, ranges) {
        this.parser = parser;
        this.input = input;
        this.ranges = ranges;
        // Error-recovery countdown; 0 when parsing normally.
        this.recovering = 0;
        this.nextStackID = 0x2654; // ♔, ♕, ♖, ♗, ♘, ♙, ♠, ♡, ♢, ♣, ♤, ♥, ♦, ♧
        this.minStackPos = 0;
        this.reused = [];
        this.stoppedAt = null;
        // Bookkeeping used to detect degenerate runs of reductions at a
        // single position (see advance()).
        this.lastBigReductionStart = -1;
        this.lastBigReductionSize = 0;
        this.bigReductionCount = 0;
        this.stream = new InputStream(input, ranges);
        this.tokens = new TokenCache(parser, this.stream);
        this.topTerm = parser.top[1];
        let { from } = ranges[0];
        this.stacks = [Stack.start(this, parser.top[0], from)];
        // Only set up fragment reuse when the input is long enough for
        // it to pay off.
        this.fragments = fragments.length && this.stream.end - from > parser.bufferLength * 4
            ? new FragmentCursor(fragments, parser.nodeSet) : null;
    }
 | |
    // The position the parse has reached: the minimum position over
    // the active stacks.
    get parsedPos() {
        return this.minStackPos;
    }
 | |
    // Move the parser forward. This will process all parse stacks at
    // `this.pos` and try to advance them to a further position. If no
    // stack for such a position is found, it'll start error-recovery.
    //
    // When the parse is finished, this will return a syntax tree. When
    // not, it returns `null`.
    advance() {
        let stacks = this.stacks, pos = this.minStackPos;
        // This will hold stacks beyond `pos`.
        let newStacks = this.stacks = [];
        // `stoppedTokens` holds a (token value, token end) pair for each
        // stack in `stopped`.
        let stopped, stoppedTokens;
        // If a large amount of reductions happened with the same start
        // position, force the stack out of that production in order to
        // avoid creating a tree too deep to recurse through.
        // (This is an ugly kludge, because unfortunately there is no
        // straightforward, cheap way to check for this happening, due to
        // the history of reductions only being available in an
        // expensive-to-access format in the stack buffers.)
        if (this.bigReductionCount > 300 /* Rec.MaxLeftAssociativeReductionCount */ && stacks.length == 1) {
            let [s] = stacks;
            while (s.forceReduce() && s.stack.length && s.stack[s.stack.length - 2] >= this.lastBigReductionStart) { }
            this.bigReductionCount = this.lastBigReductionSize = 0;
        }
        // Keep advancing any stacks at `pos` until they either move
        // forward or can't be advanced. Gather stacks that can't be
        // advanced further in `stopped`.
        for (let i = 0; i < stacks.length; i++) {
            let stack = stacks[i];
            for (;;) {
                this.tokens.mainToken = null;
                if (stack.pos > pos) {
                    newStacks.push(stack);
                }
                else if (this.advanceStack(stack, newStacks, stacks)) {
                    continue;
                }
                else {
                    if (!stopped) {
                        stopped = [];
                        stoppedTokens = [];
                    }
                    stopped.push(stack);
                    let tok = this.tokens.getMainToken(stack);
                    stoppedTokens.push(tok.value, tok.end);
                }
                break;
            }
        }
        if (!newStacks.length) {
            // No stack could move forward — either some stack finished
            // the parse, or error-recovery must kick in.
            let finished = stopped && findFinished(stopped);
            if (finished) {
                if (verbose)
                    console.log("Finish with " + this.stackID(finished));
                return this.stackToTree(finished);
            }
            if (this.parser.strict) {
                if (verbose && stopped)
                    console.log("Stuck with token " + (this.tokens.mainToken ? this.parser.getName(this.tokens.mainToken.value) : "none"));
                throw new SyntaxError("No parse at " + pos);
            }
            if (!this.recovering)
                this.recovering = 5 /* Rec.Distance */;
        }
        if (this.recovering && stopped) {
            let finished = this.stoppedAt != null && stopped[0].pos > this.stoppedAt ? stopped[0]
                : this.runRecovery(stopped, stoppedTokens, newStacks);
            if (finished) {
                if (verbose)
                    console.log("Force-finish " + this.stackID(finished));
                return this.stackToTree(finished.forceAll());
            }
        }
        if (this.recovering) {
            // Limit the number of surviving recovery stacks, keeping the
            // best-scoring ones.
            let maxRemaining = this.recovering == 1 ? 1 : this.recovering * 3 /* Rec.MaxRemainingPerStep */;
            if (newStacks.length > maxRemaining) {
                newStacks.sort((a, b) => b.score - a.score);
                while (newStacks.length > maxRemaining)
                    newStacks.pop();
            }
            if (newStacks.some(s => s.reducePos > pos))
                this.recovering--;
        }
        else if (newStacks.length > 1) {
            // Prune stacks that are in the same state, or that have been
            // running without splitting for a while, to avoid getting stuck
            // with multiple successful stacks running endlessly on.
            outer: for (let i = 0; i < newStacks.length - 1; i++) {
                let stack = newStacks[i];
                for (let j = i + 1; j < newStacks.length; j++) {
                    let other = newStacks[j];
                    if (stack.sameState(other) ||
                        stack.buffer.length > 500 /* Rec.MinBufferLengthPrune */ && other.buffer.length > 500 /* Rec.MinBufferLengthPrune */) {
                        if (((stack.score - other.score) || (stack.buffer.length - other.buffer.length)) > 0) {
                            newStacks.splice(j--, 1);
                        }
                        else {
                            newStacks.splice(i--, 1);
                            continue outer;
                        }
                    }
                }
            }
            if (newStacks.length > 12 /* Rec.MaxStackCount */)
                newStacks.splice(12 /* Rec.MaxStackCount */, newStacks.length - 12 /* Rec.MaxStackCount */);
        }
        // Track the minimum position over the surviving stacks.
        this.minStackPos = newStacks[0].pos;
        for (let i = 1; i < newStacks.length; i++)
            if (newStacks[i].pos < this.minStackPos)
                this.minStackPos = newStacks[i].pos;
        return null;
    }
 | |
|     stopAt(pos) {
 | |
|         if (this.stoppedAt != null && this.stoppedAt < pos)
 | |
|             throw new RangeError("Can't move stoppedAt forward");
 | |
|         this.stoppedAt = pos;
 | |
|     }
 | |
    // Returns an updated version of the given stack, or null if the
    // stack can't advance normally. When `split` and `stacks` are
    // given, stacks split off by ambiguous operations will be pushed to
    // `split`, or added to `stacks` if they move `pos` forward.
    advanceStack(stack, stacks, split) {
        let start = stack.pos, { parser } = this;
        let base = verbose ? this.stackID(stack) + " -> " : "";
        if (this.stoppedAt != null && start > this.stoppedAt)
            return stack.forceReduce() ? stack : null;
        // First try to reuse a cached node from a previous parse.
        if (this.fragments) {
            let strictCx = stack.curContext && stack.curContext.tracker.strict, cxHash = strictCx ? stack.curContext.hash : 0;
            for (let cached = this.fragments.nodeAt(start); cached;) {
                let match = this.parser.nodeSet.types[cached.type.id] == cached.type ? parser.getGoto(stack.state, cached.type.id) : -1;
                if (match > -1 && cached.length && (!strictCx || (cached.prop(NodeProp.contextHash) || 0) == cxHash)) {
                    stack.useNode(cached, match);
                    if (verbose)
                        console.log(base + this.stackID(stack) + ` (via reuse of ${parser.getName(cached.type.id)})`);
                    return true;
                }
                // No match at this level — descend into the node's
                // leftmost child when it starts at the same position.
                if (!(cached instanceof Tree) || cached.children.length == 0 || cached.positions[0] > 0)
                    break;
                let inner = cached.children[0];
                if (inner instanceof Tree && cached.positions[0] == 0)
                    cached = inner;
                else
                    break;
            }
        }
        // States with a default reduce don't need any tokenizing.
        let defaultReduce = parser.stateSlot(stack.state, 4 /* ParseState.DefaultReduce */);
        if (defaultReduce > 0) {
            stack.reduce(defaultReduce);
            if (verbose)
                console.log(base + this.stackID(stack) + ` (via always-reduce ${parser.getName(defaultReduce & 65535 /* Action.ValueMask */)})`);
            return true;
        }
        // Keep the state stack from growing without bound.
        if (stack.stack.length >= 8400 /* Rec.CutDepth */) {
            while (stack.stack.length > 6000 /* Rec.CutTo */ && stack.forceReduce()) { }
        }
        // Apply the actions for the current tokens, splitting the stack
        // for every action but the last.
        let actions = this.tokens.getActions(stack);
        for (let i = 0; i < actions.length;) {
            let action = actions[i++], term = actions[i++], end = actions[i++];
            let last = i == actions.length || !split;
            let localStack = last ? stack : stack.split();
            let main = this.tokens.mainToken;
            localStack.apply(action, term, main ? main.start : localStack.pos, end);
            if (verbose)
                console.log(base + this.stackID(localStack) + ` (via ${(action & 65536 /* Action.ReduceFlag */) == 0 ? "shift"
                    : `reduce of ${parser.getName(action & 65535 /* Action.ValueMask */)}`} for ${parser.getName(term)} @ ${start}${localStack == stack ? "" : ", split"})`);
            if (last)
                return true;
            else if (localStack.pos > start)
                stacks.push(localStack);
            else
                split.push(localStack);
        }
        return false;
    }
 | |
|     // Advance a given stack forward as far as it will go. Returns the
 | |
|     // (possibly updated) stack if it got stuck, or null if it moved
 | |
|     // forward and was given to `pushStackDedup`.
 | |
|     advanceFully(stack, newStacks) {
 | |
|         let pos = stack.pos;
 | |
|         for (;;) {
 | |
|             if (!this.advanceStack(stack, null, null))
 | |
|                 return false;
 | |
|             if (stack.pos > pos) {
 | |
|                 pushStackDedup(stack, newStacks);
 | |
|                 return true;
 | |
|             }
 | |
|         }
 | |
|     }
 | |
    // Run error recovery on the given stuck stacks, using the paired
    // (token, tokenEnd) entries in `tokens`. Stacks that get unstuck are
    // pushed into `newStacks`; returns the best-scoring stack that reached
    // the end of the input (or null if none did).
    runRecovery(stacks, tokens, newStacks) {
        let finished = null, restarted = false;
        for (let i = 0; i < stacks.length; i++) {
            let stack = stacks[i], token = tokens[i << 1], tokenEnd = tokens[(i << 1) + 1];
            let base = verbose ? this.stackID(stack) + " -> " : "";
            if (stack.deadEnd) {
                // Restart at most one dead-end stack per recovery round.
                if (restarted)
                    continue;
                restarted = true;
                stack.restart();
                if (verbose)
                    console.log(base + this.stackID(stack) + " (restarted)");
                let done = this.advanceFully(stack, newStacks);
                if (done)
                    continue;
            }
            // Strategy 1: force reductions on a split copy, bounded by
            // ForceReduceLimit attempts, and see if that unsticks it.
            let force = stack.split(), forceBase = base;
            for (let j = 0; force.forceReduce() && j < 10 /* Rec.ForceReduceLimit */; j++) {
                if (verbose)
                    console.log(forceBase + this.stackID(force) + " (via force-reduce)");
                let done = this.advanceFully(force, newStacks);
                if (done)
                    break;
                if (verbose)
                    forceBase = this.stackID(force) + " -> ";
            }
            // Strategy 2: insert a token the grammar would accept here.
            for (let insert of stack.recoverByInsert(token)) {
                if (verbose)
                    console.log(base + this.stackID(insert) + " (via recover-insert)");
                this.advanceFully(insert, newStacks);
            }
            if (this.stream.end > stack.pos) {
                // Strategy 3: input remains, so delete (skip) the offending
                // token. A zero-length token is widened by one position and
                // marked as an error token so progress is guaranteed.
                if (tokenEnd == stack.pos) {
                    tokenEnd++;
                    token = 0 /* Term.Err */;
                }
                stack.recoverByDelete(token, tokenEnd);
                if (verbose)
                    console.log(base + this.stackID(stack) + ` (via recover-delete ${this.parser.getName(token)})`);
                pushStackDedup(stack, newStacks);
            }
            else if (!finished || finished.score < stack.score) {
                // End of input reached — keep the highest-scoring candidate.
                finished = stack;
            }
        }
        return finished;
    }
 | |
|     // Convert the stack's buffer to a syntax tree.
 | |
|     stackToTree(stack) {
 | |
|         stack.close();
 | |
|         return Tree.build({ buffer: StackBufferCursor.create(stack),
 | |
|             nodeSet: this.parser.nodeSet,
 | |
|             topID: this.topTerm,
 | |
|             maxBufferLength: this.parser.bufferLength,
 | |
|             reused: this.reused,
 | |
|             start: this.ranges[0].from,
 | |
|             length: stack.pos - this.ranges[0].from,
 | |
|             minRepeatType: this.parser.minRepeatTerm });
 | |
|     }
 | |
|     stackID(stack) {
 | |
|         let id = (stackIDs || (stackIDs = new WeakMap)).get(stack);
 | |
|         if (!id)
 | |
|             stackIDs.set(stack, id = String.fromCodePoint(this.nextStackID++));
 | |
|         return id + stack;
 | |
|     }
 | |
| }
 | |
// Add `stack` to `newStacks`, deduplicating against any stack that sits
// at the same input position in the same parse state. When a duplicate
// exists, the higher-scoring of the two is kept.
function pushStackDedup(stack, newStacks) {
    for (let i = 0; i < newStacks.length; i++) {
        const other = newStacks[i];
        if (other.pos != stack.pos || !other.sameState(stack))
            continue;
        if (other.score < stack.score)
            newStacks[i] = stack;
        return;
    }
    newStacks.push(stack);
}
 | |
// Represents a parsed dialect configuration: the raw source string, the
// per-dialect enabled flags, and an optional table of disabled terms.
class Dialect {
    constructor(source, flags, disabled) {
        this.source = source;
        this.flags = flags;
        this.disabled = disabled;
    }
    // A term is allowed when no disabled table exists or its entry is 0.
    allows(term) {
        if (!this.disabled)
            return true;
        return this.disabled[term] == 0;
    }
}
 | |
| const id = x => x;
 | |
/**
Context trackers are used to track stateful context (such as
indentation in the Python grammar, or parent elements in the XML
grammar) needed by external tokenizers. You declare them in a
grammar file as `@context exportName from "module"`.

Context values should be immutable, and can be updated (replaced)
on shift or reduce actions.

The export used in a `@context` declaration should be of this
type.
*/
class ContextTracker {
    /**
    Define a context tracker. Hooks omitted from `spec` default to the
    identity function (`hash` to a constant 0, `strict` to true).
    */
    constructor(spec) {
        this.start = spec.start;
        // Computes the context value after a shift action.
        this.shift = spec.shift || id;
        // Computes the context value after a reduce action.
        this.reduce = spec.reduce || id;
        // Computes the context value when a cached node is reused.
        this.reuse = spec.reuse || id;
        // Hashes a context value so node-reuse safety can be checked cheaply.
        this.hash = spec.hash || (() => 0);
        // When strict, cached nodes are only reused if context hashes match.
        this.strict = spec.strict !== false;
    }
}
 | |
/**
Holds the parse tables for a given grammar, as generated by
`lezer-generator`, and provides [methods](#common.Parser) to parse
content with.
*/
class LRParser extends Parser {
    /**
    @internal
    */
    constructor(spec) {
        super();
        /**
        @internal
        */
        this.wrappers = [];
        if (spec.version != 14 /* File.Version */)
            throw new RangeError(`Parser version (${spec.version}) doesn't match runtime version (${14 /* File.Version */})`);
        let nodeNames = spec.nodeNames.split(" ");
        this.minRepeatTerm = nodeNames.length;
        // Repeat nodes (ids >= minRepeatTerm) are anonymous.
        for (let i = 0; i < spec.repeatNodeCount; i++)
            nodeNames.push("");
        let topTerms = Object.keys(spec.topRules).map(r => spec.topRules[r][1]);
        let nodeProps = [];
        for (let i = 0; i < nodeNames.length; i++)
            nodeProps.push([]);
        // Record a (prop, value) pair for the given node id.
        function setProp(nodeID, prop, value) {
            nodeProps[nodeID].push([prop, prop.deserialize(String(value))]);
        }
        if (spec.nodeProps)
            for (let propSpec of spec.nodeProps) {
                let prop = propSpec[0];
                // String prop names refer to built-in NodeProp instances.
                if (typeof prop == "string")
                    prop = NodeProp[prop];
                for (let i = 1; i < propSpec.length;) {
                    let next = propSpec[i++];
                    if (next >= 0) {
                        // Positive entry: a single (nodeID, value) pair.
                        setProp(next, prop, propSpec[i++]);
                    }
                    else {
                        // Negative entry: the following -next node ids all
                        // share the one value stored after them.
                        let value = propSpec[i + -next];
                        for (let j = -next; j > 0; j--)
                            setProp(propSpec[i++], prop, value);
                        i++;
                    }
                }
            }
        this.nodeSet = new NodeSet(nodeNames.map((name, i) => NodeType.define({
            name: i >= this.minRepeatTerm ? undefined : name,
            id: i,
            props: nodeProps[i],
            top: topTerms.indexOf(i) > -1,
            error: i == 0,
            skipped: spec.skippedNodes && spec.skippedNodes.indexOf(i) > -1
        })));
        if (spec.propSources)
            this.nodeSet = this.nodeSet.extend(...spec.propSources);
        this.strict = false;
        this.bufferLength = DefaultBufferLength;
        let tokenArray = decodeArray(spec.tokenData);
        this.context = spec.context;
        this.specializerSpecs = spec.specialized || [];
        this.specialized = new Uint16Array(this.specializerSpecs.length);
        for (let i = 0; i < this.specializerSpecs.length; i++)
            this.specialized[i] = this.specializerSpecs[i].term;
        this.specializers = this.specializerSpecs.map(getSpecializer);
        this.states = decodeArray(spec.states, Uint32Array);
        this.data = decodeArray(spec.stateData);
        this.goto = decodeArray(spec.goto);
        this.maxTerm = spec.maxTerm;
        // Numeric tokenizer entries are offsets into the shared token data.
        this.tokenizers = spec.tokenizers.map(value => typeof value == "number" ? new TokenGroup(tokenArray, value) : value);
        this.topRules = spec.topRules;
        this.dialects = spec.dialects || {};
        this.dynamicPrecedences = spec.dynamicPrecedences || null;
        this.tokenPrecTable = spec.tokenPrec;
        this.termNames = spec.termNames || null;
        this.maxNode = this.nodeSet.types.length - 1;
        this.dialect = this.parseDialect();
        // Default top rule is the first one declared.
        this.top = this.topRules[Object.keys(this.topRules)[0]];
    }
    // Create a parse object, letting registered wrappers transform it.
    createParse(input, fragments, ranges) {
        let parse = new Parse(this, input, fragments, ranges);
        for (let w of this.wrappers)
            parse = w(parse, input, fragments, ranges);
        return parse;
    }
    /**
    Get a goto table entry @internal
    */
    getGoto(state, term, loose = false) {
        let table = this.goto;
        // table[0] holds the number of terms that have goto entries.
        if (term >= table[0])
            return -1;
        // A term's entries are groups of (tag, target, ...states); the
        // tag's low bit marks the last group, the rest is the state count.
        for (let pos = table[term + 1];;) {
            let groupTag = table[pos++], last = groupTag & 1;
            let target = table[pos++];
            if (last && loose)
                return target;
            for (let end = pos + (groupTag >> 1); pos < end; pos++)
                if (table[pos] == state)
                    return target;
            if (last)
                return -1;
        }
    }
    /**
    Check if this state has an action for a given terminal @internal
    */
    hasAction(state, terminal) {
        let data = this.data;
        // Scan both the regular action table and the skip table.
        for (let set = 0; set < 2; set++) {
            for (let i = this.stateSlot(state, set ? 2 /* ParseState.Skip */ : 1 /* ParseState.Actions */), next;; i += 3) {
                if ((next = data[i]) == 65535 /* Seq.End */) {
                    if (data[i + 1] == 1 /* Seq.Next */)
                        next = data[i = pair(data, i + 2)];
                    else if (data[i + 1] == 2 /* Seq.Other */)
                        return pair(data, i + 2);
                    else
                        break;
                }
                if (next == terminal || next == 0 /* Term.Err */)
                    return pair(data, i + 1);
            }
        }
        return 0;
    }
    /**
    @internal
    */
    stateSlot(state, slot) {
        return this.states[(state * 6 /* ParseState.Size */) + slot];
    }
    /**
    @internal
    */
    stateFlag(state, flag) {
        return (this.stateSlot(state, 0 /* ParseState.Flags */) & flag) > 0;
    }
    /**
    @internal
    */
    validAction(state, action) {
        return !!this.allActions(state, a => a == action ? true : null);
    }
    /**
    Call `action` for every action in `state`, stopping at (and
    returning) the first non-null result. @internal
    */
    allActions(state, action) {
        let deflt = this.stateSlot(state, 4 /* ParseState.DefaultReduce */);
        let result = deflt ? action(deflt) : undefined;
        for (let i = this.stateSlot(state, 1 /* ParseState.Actions */); result == null; i += 3) {
            if (this.data[i] == 65535 /* Seq.End */) {
                if (this.data[i + 1] == 1 /* Seq.Next */)
                    i = pair(this.data, i + 2);
                else
                    break;
            }
            result = action(pair(this.data, i + 1));
        }
        return result;
    }
    /**
    Get the states that can follow this one through shift actions or
    goto jumps. @internal
    */
    nextStates(state) {
        let result = [];
        for (let i = this.stateSlot(state, 1 /* ParseState.Actions */);; i += 3) {
            if (this.data[i] == 65535 /* Seq.End */) {
                if (this.data[i + 1] == 1 /* Seq.Next */)
                    i = pair(this.data, i + 2);
                else
                    break;
            }
            // Only collect shift actions (reduce flag bit clear).
            if ((this.data[i + 2] & (65536 /* Action.ReduceFlag */ >> 16)) == 0) {
                let value = this.data[i + 1];
                // Result holds (term, state) pairs; dedupe on the state.
                if (!result.some((v, i) => (i & 1) && v == value))
                    result.push(this.data[i], value);
            }
        }
        return result;
    }
    /**
    Configure the parser. Returns a new parser instance that has the
    given settings modified. Settings not provided in `config` are
    kept from the original parser.
    */
    configure(config) {
        // Hideous reflection-based kludge to make it easy to create a
        // slightly modified copy of a parser.
        let copy = Object.assign(Object.create(LRParser.prototype), this);
        if (config.props)
            copy.nodeSet = this.nodeSet.extend(...config.props);
        if (config.top) {
            let info = this.topRules[config.top];
            if (!info)
                throw new RangeError(`Invalid top rule name ${config.top}`);
            copy.top = info;
        }
        if (config.tokenizers)
            copy.tokenizers = this.tokenizers.map(t => {
                let found = config.tokenizers.find(r => r.from == t);
                return found ? found.to : t;
            });
        if (config.specializers) {
            copy.specializers = this.specializers.slice();
            copy.specializerSpecs = this.specializerSpecs.map((s, i) => {
                let found = config.specializers.find(r => r.from == s.external);
                if (!found)
                    return s;
                let spec = Object.assign(Object.assign({}, s), { external: found.to });
                copy.specializers[i] = getSpecializer(spec);
                return spec;
            });
        }
        if (config.contextTracker)
            copy.context = config.contextTracker;
        if (config.dialect)
            copy.dialect = this.parseDialect(config.dialect);
        if (config.strict != null)
            copy.strict = config.strict;
        if (config.wrap)
            copy.wrappers = copy.wrappers.concat(config.wrap);
        if (config.bufferLength != null)
            copy.bufferLength = config.bufferLength;
        return copy;
    }
    /**
    Tells you whether any [parse wrappers](#lr.ParserConfig.wrap)
    are registered for this parser.
    */
    hasWrappers() {
        return this.wrappers.length > 0;
    }
    /**
    Returns the name associated with a given term. This will only
    work for all terms when the parser was generated with the
    `--names` option. By default, only the names of tagged terms are
    stored.
    */
    getName(term) {
        return this.termNames ? this.termNames[term] : String(term <= this.maxNode && this.nodeSet.types[term].name || term);
    }
    /**
    The eof term id is always allocated directly after the node
    types. @internal
    */
    get eofTerm() { return this.maxNode + 1; }
    /**
    The type of top node produced by the parser.
    */
    get topNode() { return this.nodeSet.types[this.top[1]]; }
    /**
    @internal
    */
    dynamicPrecedence(term) {
        let prec = this.dynamicPrecedences;
        return prec == null ? 0 : prec[term] || 0;
    }
    /**
    Parse a space-separated dialect string into a Dialect object,
    computing which terms are disabled. @internal
    */
    parseDialect(dialect) {
        let values = Object.keys(this.dialects), flags = values.map(() => false);
        if (dialect)
            for (let part of dialect.split(" ")) {
                let id = values.indexOf(part);
                if (id >= 0)
                    flags[id] = true;
            }
        // Every term listed for a dialect that is NOT enabled gets disabled.
        let disabled = null;
        for (let i = 0; i < values.length; i++)
            if (!flags[i]) {
                for (let j = this.dialects[values[i]], id; (id = this.data[j++]) != 65535 /* Seq.End */;)
                    (disabled || (disabled = new Uint8Array(this.maxTerm + 1)))[id] = 1;
            }
        return new Dialect(dialect, flags, disabled);
    }
    /**
    Used by the output of the parser generator. Not available to
    user code. @hide
    */
    static deserialize(spec) {
        return new LRParser(spec);
    }
}
 | |
| function pair(data, off) { return data[off] | (data[off + 1] << 16); }
 | |
// Find the highest-scoring stack that has consumed all input (or moved
// past the explicit stop position) while sitting in an accepting state.
// Returns null when no stack qualifies.
function findFinished(stacks) {
    let best = null;
    for (const stack of stacks) {
        const { stoppedAt, stream, parser } = stack.p;
        const atEnd = stack.pos == stream.end || (stoppedAt != null && stack.pos > stoppedAt);
        if (!atEnd)
            continue;
        if (!parser.stateFlag(stack.state, 2 /* StateFlag.Accepting */))
            continue;
        if (!best || best.score < stack.score)
            best = stack;
    }
    return best;
}
 | |
// Build the runtime specializer function for a specialized-token spec.
// External specializers get their result shifted left one bit, with the
// low bit distinguishing extend (1) from specialize (0); table-based
// specs just expose their `get` function directly.
function getSpecializer(spec) {
    if (!spec.external)
        return spec.get;
    const mask = spec.extend ? 1 /* Specialize.Extend */ : 0 /* Specialize.Specialize */;
    return (value, stack) => (spec.external(value, stack) << 1) | mask;
}
 | |

// Public API of the LR parser runtime.
export { ContextTracker, ExternalTokenizer, InputStream, LRParser, LocalTokenGroup, Stack };
 |