diff --git a/server/src/code/highlight.ts b/server/src/code/highlight.ts index 1efa05a5..a2c29358 100644 --- a/server/src/code/highlight.ts +++ b/server/src/code/highlight.ts @@ -1,6 +1,6 @@ // https://code.visualstudio.com/api/language-extensions/semantic-highlight-guide -export enum HighlightToken { +export enum HighlightForToken { Invalid, Namespace, // For identifiers that declare or reference a namespace, module, or package. Class, // For identifiers that declare or reference a class type. @@ -25,11 +25,12 @@ export enum HighlightToken { Number, // For tokens that represent a number literal. Regexp, // For tokens that represent a regular expression literal. Operator, // For tokens that represent an operator. - Builtin, - Directive, + // The following are specific to AngelScript Language Server: + Builtin, // For tokens that represent a built-in type or function. + Directive, // For tokens that represent a preprocessor directive. } -export const highlightTokens = [ +export const highlightForTokenList = [ '', 'namespace', 'class', @@ -58,7 +59,7 @@ export const highlightTokens = [ 'directive', ]; -export enum HighlightModifier { +export enum HighlightForModifier { Declaration, // For declarations of symbols. Definition, // For definitions of symbols, for example, in header files. Readonly, // For readonly variables and member fields (constants). 
@@ -72,7 +73,7 @@ export enum HighlightModifier { Nothing, } -export const highlightModifiers = [ +export const highlightForModifierList = [ 'declaration', 'definition', 'readonly', diff --git a/server/src/compiler_analyzer/analyzer.ts b/server/src/compiler_analyzer/analyzer.ts index e2f47bef..269f282d 100644 --- a/server/src/compiler_analyzer/analyzer.ts +++ b/server/src/compiler_analyzer/analyzer.ts @@ -35,8 +35,7 @@ import { NodeType, NodeVar, NodeVarAccess, - NodeWhile, - ParsedRange + NodeWhile } from "../compiler_parser/nodes"; import { getSourceNodeName, @@ -58,11 +57,6 @@ import { } from "./symbolScope"; import {checkFunctionMatch} from "./checkFunction"; import {canTypeConvert, checkTypeMatch, isAllowedToAccessMember} from "./checkType"; -import { - getLocationBetween, - getNextTokenIfExist, - getNodeLocation -} from "../compiler_parser/nodesUtils"; import { builtinBoolType, resolvedBuiltinBool, @@ -88,6 +82,7 @@ import assert = require("node:assert"); import {ResolvedType} from "./resolvedType"; import {analyzerDiagnostic} from "./analyzerDiagnostic"; import {TextLocation} from "../compiler_tokenizer/textLocation"; +import {getBoundingLocationBetween, TokenRange} from "../compiler_parser/tokenRange"; export type HoistQueue = (() => void)[]; @@ -294,7 +289,7 @@ function analyzeTemplateTypes(scope: SymbolScope, nodeType: NodeType[], template for (let i = 0; i < nodeType.length; i++) { if (i >= templateTypes.length) { analyzerDiagnostic.add( - getNodeLocation(nodeType[nodeType.length - 1].nodeRange), + (nodeType[nodeType.length - 1].nodeRange.getBoundingLocation()), `Too many template types.`); break; } @@ -349,7 +344,7 @@ function analyzeScope(parentScope: SymbolScope, nodeScope: NodeScope): SymbolSco // Append a hint for completion of the namespace to the scope. 
const complementRange: TextLocation = nextScope.location.withEnd( - getNextTokenIfExist(getNextTokenIfExist(nextScope)).location.start); + nextScope.getNextOrSelf().getNextOrSelf().location.start); parentScope.completionHints.push({ complementKind: ComplementKind.Namespace, complementLocation: complementRange, @@ -472,7 +467,7 @@ function analyzeExprStat(scope: SymbolScope, exprStat: NodeExprStat) { if (exprStat.assign === undefined) return; const assign = analyzeAssign(scope, exprStat.assign); if (assign?.isHandler !== true && assign?.symbolType instanceof SymbolFunction) { - analyzerDiagnostic.add(getNodeLocation(exprStat.assign.nodeRange), `Function call without handler.`); + analyzerDiagnostic.add(exprStat.assign.nodeRange.getBoundingLocation(), `Function call without handler.`); } } @@ -504,7 +499,7 @@ function analyzeReturn(scope: SymbolScope, nodeReturn: NodeReturn) { const expectedReturn = functionReturn.returnType?.symbolType; if (expectedReturn instanceof SymbolType && expectedReturn?.identifierText === 'void') { if (nodeReturn.assign === undefined) return; - analyzerDiagnostic.add(getNodeLocation(nodeReturn.nodeRange), `Function does not return a value.`); + analyzerDiagnostic.add(nodeReturn.nodeRange.getBoundingLocation(), `Function does not return a value.`); } else { checkTypeMatch(returnType, functionReturn.returnType, nodeReturn.nodeRange); } @@ -513,7 +508,9 @@ function analyzeReturn(scope: SymbolScope, nodeReturn: NodeReturn) { const isGetter = key.startsWith('get_'); if (isGetter === false) { if (nodeReturn.assign === undefined) return; - analyzerDiagnostic.add(getNodeLocation(nodeReturn.nodeRange), `Property setter does not return a value.`); + analyzerDiagnostic.add( + nodeReturn.nodeRange.getBoundingLocation(), + `Property setter does not return a value.`); return; } @@ -538,7 +535,7 @@ function analyzeExpr(scope: SymbolScope, expr: NodeExpr): ResolvedType | undefin // Evaluate by Shunting Yard Algorithm // 
https://qiita.com/phenan/items/df157fef2fea590e3fa9 - type Term = [ResolvedType | undefined, ParsedRange]; + type Term = [ResolvedType | undefined, TokenRange]; type Op = TokenObject; function isOp(termOrOp: (Term | Op)): termOrOp is Op { @@ -580,7 +577,7 @@ function analyzeExpr(scope: SymbolScope, expr: NodeExpr): ResolvedType | undefin if (lhs === undefined || rhs === undefined) return undefined; outputTerm.push([analyzeExprOp( - scope, item, lhs[0], rhs[0], lhs[1], rhs[1]), {start: lhs[1].start, end: rhs[1].end}]); + scope, item, lhs[0], rhs[0], lhs[1], rhs[1]), new TokenRange(lhs[1].start, rhs[1].end)]); } else { outputTerm.push(item); } @@ -742,7 +739,7 @@ function analyzeBuiltinConstructorCaller( // EXPRPREOP ::= '-' | '+' | '!' | '++' | '--' | '~' | '@' // EXPRPOSTOP ::= ('.' (FUNCCALL | IDENTIFIER)) | ('[' [IDENTIFIER ':'] ASSIGN {',' [IDENTIFIER ':' ASSIGN} ']') | ARGLIST | '++' | '--' -function analyzeExprPostOp(scope: SymbolScope, exprPostOp: NodeExprPostOp, exprValue: ResolvedType, exprRange: ParsedRange) { +function analyzeExprPostOp(scope: SymbolScope, exprPostOp: NodeExprPostOp, exprValue: ResolvedType, exprRange: TokenRange) { if (exprPostOp.postOp === 1) { return analyzeExprPostOp1(scope, exprPostOp, exprValue); } else if (exprPostOp.postOp === 2) { @@ -753,14 +750,14 @@ function analyzeExprPostOp(scope: SymbolScope, exprPostOp: NodeExprPostOp, exprV // ('.' (FUNCCALL | IDENTIFIER)) function analyzeExprPostOp1(scope: SymbolScope, exprPostOp: NodeExprPostOp1, exprValue: ResolvedType) { if (exprValue.symbolType instanceof SymbolType === false) { - analyzerDiagnostic.add(getNodeLocation(exprPostOp.nodeRange), `Invalid access to type.`); + analyzerDiagnostic.add(exprPostOp.nodeRange.getBoundingLocation(), `Invalid access to type.`); return undefined; } // Append a hint for complement of class members. 
- const complementRange = getLocationBetween( + const complementRange = getBoundingLocationBetween( exprPostOp.nodeRange.start, - getNextTokenIfExist(exprPostOp.nodeRange.start)); + exprPostOp.nodeRange.start.getNextOrSelf()); scope.completionHints.push({ complementKind: ComplementKind.Type, complementLocation: complementRange, @@ -802,7 +799,7 @@ function analyzeExprPostOp1(scope: SymbolScope, exprPostOp: NodeExprPostOp1, exp } // ('[' [IDENTIFIER ':'] ASSIGN {',' [IDENTIFIER ':' ASSIGN} ']') -function analyzeExprPostOp2(scope: SymbolScope, exprPostOp: NodeExprPostOp2, exprValue: ResolvedType, exprRange: ParsedRange) { +function analyzeExprPostOp2(scope: SymbolScope, exprPostOp: NodeExprPostOp2, exprValue: ResolvedType, exprRange: TokenRange) { const args = exprPostOp.indexerList.map(indexer => analyzeAssign(scope, indexer.assign)); return analyzeOperatorAlias( scope, @@ -850,7 +847,7 @@ function analyzeLambda(scope: SymbolScope, lambda: NodeLambda): ResolvedType | u function analyzeLiteral(scope: SymbolScope, literal: NodeLiteral): ResolvedType | undefined { const literalValue = literal.value; if (literalValue.isNumberToken()) { - switch (literalValue.numeric) { + switch (literalValue.numberLiteral) { case NumberLiterals.Integer: return resolvedBuiltinInt; case NumberLiterals.Float: @@ -955,9 +952,9 @@ function analyzeFunctionCaller( } // Append a hint for completion of function arguments to the scope. 
- const complementRange = getLocationBetween( + const complementRange = getBoundingLocationBetween( callerArgList.nodeRange.start, - getNextTokenIfExist(callerArgList.nodeRange.end)); + callerArgList.nodeRange.end.getNextOrSelf()); scope.completionHints.push({ complementKind: ComplementKind.Arguments, complementLocation: complementRange, @@ -1076,7 +1073,9 @@ export function analyzeCondition(scope: SymbolScope, condition: NodeCondition): if (canTypeConvert(falseAssign, trueAssign)) return trueAssign; analyzerDiagnostic.add( - getLocationBetween(condition.ternary.trueAssign.nodeRange.start, condition.ternary.falseAssign.nodeRange.end), + getBoundingLocationBetween( + condition.ternary.trueAssign.nodeRange.start, + condition.ternary.falseAssign.nodeRange.end), `Type mismatches between '${stringifyResolvedType(trueAssign)}' and '${stringifyResolvedType(falseAssign)}'.`); return undefined; } @@ -1085,7 +1084,7 @@ export function analyzeCondition(scope: SymbolScope, condition: NodeCondition): function analyzeExprOp( scope: SymbolScope, operator: TokenObject, lhs: ResolvedType | undefined, rhs: ResolvedType | undefined, - leftRange: ParsedRange, rightRange: ParsedRange + leftRange: TokenRange, rightRange: TokenRange ): ResolvedType | undefined { if (operator.isReservedToken() === false) return undefined; if (lhs === undefined || rhs === undefined) return undefined; @@ -1105,7 +1104,7 @@ function analyzeExprOp( function analyzeOperatorAlias( scope: SymbolScope, operator: TokenObject, lhs: ResolvedType, rhs: ResolvedType | (ResolvedType | undefined)[], - leftRange: ParsedRange, rightRange: ParsedRange, + leftRange: TokenRange, rightRange: TokenRange, alias: string ) { const rhsArgs = Array.isArray(rhs) ? 
rhs : [rhs]; @@ -1140,7 +1139,7 @@ function analyzeOperatorAlias( return checkFunctionMatch({ scope: scope, callerIdentifier: operator, - callerRange: {start: operator, end: operator}, + callerRange: new TokenRange(operator, operator), callerArgRanges: [rightRange], callerArgTypes: rhsArgs, calleeFunc: aliasFunction, @@ -1152,7 +1151,7 @@ function analyzeOperatorAlias( function analyzeBitOp( scope: SymbolScope, operator: TokenObject, lhs: ResolvedType, rhs: ResolvedType, - leftRange: ParsedRange, rightRange: ParsedRange + leftRange: TokenRange, rightRange: TokenRange ): ResolvedType | undefined { if (lhs.symbolType instanceof SymbolType && rhs.symbolType instanceof SymbolType) { if (canTypeConvert(lhs, resolvedBuiltinInt) && canTypeConvert( @@ -1182,7 +1181,7 @@ const bitOpAliases = new Map([ function analyzeMathOp( scope: SymbolScope, operator: TokenObject, lhs: ResolvedType, rhs: ResolvedType, - leftRange: ParsedRange, rightRange: ParsedRange + leftRange: TokenRange, rightRange: TokenRange ): ResolvedType | undefined { if (lhs.symbolType instanceof SymbolType && rhs.symbolType instanceof SymbolType) { if (canTypeConvert(lhs, resolvedBuiltinInt) && canTypeConvert( @@ -1212,7 +1211,7 @@ const mathOpAliases = new Map([ function analyzeCompOp( scope: SymbolScope, operator: TokenObject, lhs: ResolvedType, rhs: ResolvedType, - leftRange: ParsedRange, rightRange: ParsedRange + leftRange: TokenRange, rightRange: TokenRange ): ResolvedType | undefined { if (lhs.symbolType instanceof SymbolType && rhs.symbolType instanceof SymbolType) { if (canTypeConvert(lhs, rhs) || canTypeConvert(rhs, lhs)) { @@ -1240,7 +1239,7 @@ const compOpAliases = new Map([ function analyzeLogicOp( scope: SymbolScope, operator: TokenObject, lhs: ResolvedType, rhs: ResolvedType, - leftRange: ParsedRange, rightRange: ParsedRange + leftRange: TokenRange, rightRange: TokenRange ): ResolvedType | undefined { checkTypeMatch(lhs, new ResolvedType(builtinBoolType), leftRange); checkTypeMatch(rhs, new 
ResolvedType(builtinBoolType), rightRange); @@ -1251,7 +1250,7 @@ function analyzeLogicOp( function analyzeAssignOp( scope: SymbolScope, operator: TokenObject, lhs: ResolvedType | undefined, rhs: ResolvedType | undefined, - leftRange: ParsedRange, rightRange: ParsedRange + leftRange: TokenRange, rightRange: TokenRange ): ResolvedType | undefined { if (lhs === undefined || rhs === undefined) return undefined; if (lhs.symbolType instanceof SymbolType && rhs.symbolType instanceof SymbolType) { diff --git a/server/src/compiler_analyzer/checkFunction.ts b/server/src/compiler_analyzer/checkFunction.ts index 34f5d895..962eccc9 100644 --- a/server/src/compiler_analyzer/checkFunction.ts +++ b/server/src/compiler_analyzer/checkFunction.ts @@ -1,21 +1,20 @@ -import {diagnostic} from "../code/diagnostic"; -import {ParsedRange} from "../compiler_parser/nodes"; import { SymbolFunction, } from "./symbolObject"; import {canTypeConvert} from "./checkType"; -import {getNodeLocation, stringifyNodeType} from "../compiler_parser/nodesUtils"; +import {stringifyNodeType} from "../compiler_parser/nodesUtils"; import {resolveTemplateTypes, stringifyResolvedType, stringifyResolvedTypes, TemplateTranslation} from "./symbolUtils"; import {SymbolScope} from "./symbolScope"; import {ResolvedType} from "./resolvedType"; import {analyzerDiagnostic} from "./analyzerDiagnostic"; import {TokenObject} from "../compiler_tokenizer/tokenObject"; +import {TokenRange} from "../compiler_parser/tokenRange"; export interface FunctionMatchingArgs { scope: SymbolScope; callerIdentifier: TokenObject; - callerRange: ParsedRange; - callerArgRanges: ParsedRange[]; + callerRange: TokenRange; + callerArgRanges: TokenRange[]; callerArgTypes: (ResolvedType | undefined)[]; calleeFunc: SymbolFunction; templateTranslators: (TemplateTranslation | undefined)[]; @@ -70,7 +69,7 @@ function checkFunctionMatchInternal( overloadedHead, templateTranslators) === false) { analyzerDiagnostic.add( - getNodeLocation(callerRange), + 
callerRange.getBoundingLocation(), `Missing argument for parameter '${stringifyNodeType(param.type)}'.`); } @@ -96,7 +95,7 @@ function checkFunctionMatchInternal( overloadedHead, templateTranslators) === false) { analyzerDiagnostic.add( - getNodeLocation(callerRange), + callerRange.getBoundingLocation(), `Cannot convert '${stringifyResolvedType(actualType)}' to parameter type '${stringifyResolvedType( expectedType)}'.`); } @@ -120,7 +119,7 @@ function handleTooMuchCallerArgs(args: FunctionMatchingArgs, overloadedHead: Sym overloadedHead, templateTranslators) === false) { analyzerDiagnostic.add( - getNodeLocation(callerRange), + callerRange.getBoundingLocation(), `Function has ${calleeFunc.sourceNode.paramList.length} parameters, but ${callerArgTypes.length} were provided.`); } @@ -128,7 +127,7 @@ function handleTooMuchCallerArgs(args: FunctionMatchingArgs, overloadedHead: Sym } function handleErrorWhenOverloaded( - callerRange: ParsedRange, + callerRange: TokenRange, callerArgs: (ResolvedType | undefined)[], calleeFunc: SymbolFunction, overloadedHead: SymbolFunction, @@ -147,6 +146,6 @@ function handleErrorWhenOverloaded( cursor = cursor.nextOverload; } - analyzerDiagnostic.add(getNodeLocation(callerRange), message); + analyzerDiagnostic.add(callerRange.getBoundingLocation(), message); return true; } diff --git a/server/src/compiler_analyzer/checkType.ts b/server/src/compiler_analyzer/checkType.ts index 821d6fb0..4e506fdb 100644 --- a/server/src/compiler_analyzer/checkType.ts +++ b/server/src/compiler_analyzer/checkType.ts @@ -4,14 +4,13 @@ import { SymbolObject, SymbolType, isSourceNodeClassOrInterface, } from "./symbolObject"; -import {AccessModifier, NodeName, ParsedRange} from "../compiler_parser/nodes"; -import {getNodeLocation} from "../compiler_parser/nodesUtils"; +import {AccessModifier, NodeName} from "../compiler_parser/nodes"; import {findScopeShallowly, findScopeWithParentByNodes, isScopeChildOrGrandchild, SymbolScope} from "./symbolScope"; import 
assert = require("assert"); import {findSymbolShallowly, resolveTemplateType, stringifyResolvedType} from "./symbolUtils"; import {ResolvedType} from "./resolvedType"; -import {isSameToken} from "../compiler_tokenizer/tokenUtils"; import {analyzerDiagnostic} from "./analyzerDiagnostic"; +import {TokenRange} from "../compiler_parser/tokenRange"; /** * Check if the source type can be converted to the destination type. @@ -23,12 +22,12 @@ import {analyzerDiagnostic} from "./analyzerDiagnostic"; export function checkTypeMatch( src: ResolvedType | undefined, dest: ResolvedType | undefined, - nodeRange: ParsedRange, + nodeRange: TokenRange, ): boolean { if (canTypeConvert(src, dest)) return true; analyzerDiagnostic.add( - getNodeLocation(nodeRange), + nodeRange.getBoundingLocation(), `'${stringifyResolvedType(src)}' cannot be converted to '${stringifyResolvedType(dest)}'.`); return false; } @@ -151,7 +150,7 @@ function canCastFromPrimitiveType( const destNode = destType.sourceNode; if (srcType.isTypeParameter) { - return destType.isTypeParameter && isSameToken(srcType.declaredPlace, destType.declaredPlace); + return destType.isTypeParameter && srcType.declaredPlace.equals(destType.declaredPlace); } if (srcType.identifierText === 'void') { diff --git a/server/src/compiler_analyzer/symbolComplement.ts b/server/src/compiler_analyzer/symbolComplement.ts index 79284123..d24874a8 100644 --- a/server/src/compiler_analyzer/symbolComplement.ts +++ b/server/src/compiler_analyzer/symbolComplement.ts @@ -1,10 +1,9 @@ -import {ParsedRange} from "../compiler_parser/nodes"; -import {getNodeLocation} from "../compiler_parser/nodesUtils"; import {TokenObject} from "../compiler_tokenizer/tokenObject"; import {SymbolType, SymbolFunction} from "./symbolObject"; import {TemplateTranslation} from "./symbolUtils"; import {SymbolScope} from "./symbolScope"; import {TextLocation} from "../compiler_tokenizer/textLocation"; +import {TokenRange} from "../compiler_parser/tokenRange"; /** * Types of 
autocomplete targets @@ -55,7 +54,7 @@ export interface CompletionNamespace extends ComplementBase { export interface CompletionArgument extends ComplementBase { complementKind: ComplementKind.Arguments; expectedCallee: SymbolFunction; - passingRanges: ParsedRange[]; + passingRanges: TokenRange[]; templateTranslate: TemplateTranslation | undefined; } @@ -66,11 +65,11 @@ export type ComplementHints = | CompletionArgument; export function pushHintOfCompletionScopeToParent( - parentScope: SymbolScope | undefined, targetScope: SymbolScope, nodeRange: ParsedRange + parentScope: SymbolScope | undefined, targetScope: SymbolScope, nodeRange: TokenRange ) { parentScope?.completionHints.push({ complementKind: ComplementKind.Scope, - complementLocation: getNodeLocation(nodeRange), + complementLocation: nodeRange.getBoundingLocation(), targetScope: targetScope }); } \ No newline at end of file diff --git a/server/src/compiler_analyzer/symbolScope.ts b/server/src/compiler_analyzer/symbolScope.ts index 99ac6100..a3596045 100644 --- a/server/src/compiler_analyzer/symbolScope.ts +++ b/server/src/compiler_analyzer/symbolScope.ts @@ -129,7 +129,9 @@ function isSourceBuiltinString(source: TypeSourceNode | undefined): boolean { // Check if the class has a metadata that indicates it is a built-in string type. const builtinStringMetadata = "BuiltinString"; - if (source.metadata.length === 1 && source.metadata[0].text === builtinStringMetadata) return true; + if (source.metadata.some(m => m.length === 1 && m[0].text === builtinStringMetadata)) { + return true; + } // Check whether the class name is a built-in string type with global settings. 
return getGlobalSettings().builtinStringTypes.includes(source.identifier.text); diff --git a/server/src/compiler_parser/nodes.ts b/server/src/compiler_parser/nodes.ts index 51d2b949..08ec6a14 100644 --- a/server/src/compiler_parser/nodes.ts +++ b/server/src/compiler_parser/nodes.ts @@ -1,4 +1,5 @@ import {TokenObject} from "../compiler_tokenizer/tokenObject"; +import {TokenRange} from "./tokenRange"; export enum AccessModifier { Private = 'Private', @@ -16,18 +17,6 @@ export enum ReferenceModifier { AtConst = 'AtConst', } -export interface ParsedRange { - readonly start: TokenObject; - readonly end: TokenObject; -} - -export function makeParsedRange(start: TokenObject, end: TokenObject): ParsedRange { - return { - start: start, - end: end - }; -} - export interface EntityAttribute { readonly isShared: boolean, readonly isExternal: boolean, @@ -107,7 +96,7 @@ export enum NodeName { export interface NodesBase { readonly nodeName: NodeName; - readonly nodeRange: ParsedRange; + readonly nodeRange: TokenRange; } // SCRIPT ::= {IMPORT | ENUM | TYPEDEF | CLASS | MIXIN | INTERFACE | FUNCDEF | VIRTPROP | VAR | FUNC | NAMESPACE | ';'} @@ -136,7 +125,7 @@ export interface NodeNamespace extends NodesBase { // ENUM ::= {'shared' | 'external'} 'enum' IDENTIFIER (';' | ('{' IDENTIFIER ['=' EXPR] {',' IDENTIFIER ['=' EXPR]} '}')) export interface NodeEnum extends NodesBase { readonly nodeName: NodeName.Enum; - readonly scopeRange: ParsedRange; + readonly scopeRange: TokenRange; readonly entity: EntityAttribute | undefined; readonly identifier: TokenObject; readonly memberList: ParsedEnumMember[]; @@ -150,8 +139,8 @@ export interface ParsedEnumMember { // CLASS ::= {'shared' | 'abstract' | 'final' | 'external'} 'class' IDENTIFIER (';' | ([':' IDENTIFIER {',' IDENTIFIER}] '{' {VIRTPROP | FUNC | VAR | FUNCDEF} '}')) export interface NodeClass extends NodesBase { readonly nodeName: NodeName.Class; - readonly scopeRange: ParsedRange; - readonly metadata: TokenObject[]; + readonly 
scopeRange: TokenRange; + readonly metadata: TokenObject[][]; readonly entity: EntityAttribute | undefined; readonly identifier: TokenObject; readonly typeTemplates: NodeType[] | undefined; diff --git a/server/src/compiler_parser/nodesUtils.ts b/server/src/compiler_parser/nodesUtils.ts index 47824f4c..92345fa2 100644 --- a/server/src/compiler_parser/nodesUtils.ts +++ b/server/src/compiler_parser/nodesUtils.ts @@ -1,24 +1,6 @@ import {TokenObject} from "../compiler_tokenizer/tokenObject"; -import {EntityAttribute, FunctionAttribute, NodeType, ParsedRange, ReferenceModifier} from "./nodes"; +import {EntityAttribute, FunctionAttribute, NodeType, ReferenceModifier} from "./nodes"; import {Mutable} from "../utils/utilities"; -import {TextLocation} from "../compiler_tokenizer/textLocation"; - -export function getNextTokenIfExist(token: TokenObject): TokenObject { - if (token.next !== undefined) return token.next; - return token; -} - -export function isRangeInOneLine(range: ParsedRange): boolean { - return range.start.location.start.line === range.end.location.end.line; -} - -export function getLocationBetween(start: TokenObject, end: TokenObject): TextLocation { - return new TextLocation(start.location.path, start.location.start, end.location.end); -} - -export function getNodeLocation(range: ParsedRange): TextLocation { - return getLocationBetween(range.start, range.end); -} export function setEntityAttribute(attribute: Mutable, token: 'shared' | 'external' | 'abstract' | 'final') { if (token === 'shared') attribute.isShared = true; diff --git a/server/src/compiler_parser/parser.ts b/server/src/compiler_parser/parser.ts index 2000ea79..27361bd1 100644 --- a/server/src/compiler_parser/parser.ts +++ b/server/src/compiler_parser/parser.ts @@ -8,7 +8,6 @@ import { FuncHeads, FunctionAttribute, isFunctionHeadReturnValue, - makeParsedRange, NodeArgList, NodeAssign, NodeBreak, @@ -66,20 +65,21 @@ import { ReferenceModifier, TypeModifier } from "./nodes"; -import 
{HighlightToken} from "../code/highlight"; +import {HighlightForToken} from "../code/highlight"; import {TokenKind, TokenObject, TokenReserved} from "../compiler_tokenizer/tokenObject"; import {BreakOrThrough, ParsedResult, ParseFailure, ParserState} from "./parserState"; import {ParsedCacheKind} from "./parsedCache"; -import {isTokensLinkedBy} from "../compiler_tokenizer/tokenUtils"; +import {areTokensJoinedBy} from "../compiler_tokenizer/tokenUtils"; import {Mutable} from "../utils/utilities"; -import {getLocationBetween, setEntityAttribute, setFunctionAttribute} from "./nodesUtils"; +import {setEntityAttribute, setFunctionAttribute} from "./nodesUtils"; +import {getBoundingLocationBetween, TokenRange} from "./tokenRange"; // SCRIPT ::= {IMPORT | ENUM | TYPEDEF | CLASS | MIXIN | INTERFACE | FUNCDEF | VIRTPROP | VAR | FUNC | NAMESPACE | ';'} function parseScript(parser: ParserState): NodeScript { const script: NodeScript = []; while (parser.isEnd() === false) { if (parser.next().text === ';') { - parser.commit(HighlightToken.Operator); + parser.commit(HighlightForToken.Operator); continue; } @@ -167,11 +167,11 @@ function parseScript(parser: ParserState): NodeScript { function parseNamespace(parser: ParserState): ParsedResult { if (parser.next().text !== 'namespace') return ParseFailure.Mismatch; const rangeStart = parser.next(); - parser.commit(HighlightToken.Builtin); + parser.commit(HighlightForToken.Builtin); const namespaceList: TokenObject[] = []; while (parser.isEnd() === false) { - const identifier = expectIdentifier(parser, HighlightToken.Namespace); + const identifier = expectIdentifier(parser, HighlightForToken.Namespace); if (identifier !== undefined) namespaceList.push(identifier); if (expectContinuousOrClose(parser, '::', '{', true) === BreakOrThrough.Break) break; @@ -185,24 +185,24 @@ function parseNamespace(parser: ParserState): ParsedResult { const script = parseScript(parser); - parser.expect('}', HighlightToken.Operator); + parser.expect('}', 
HighlightForToken.Operator); return { nodeName: NodeName.Namespace, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), namespaceList: namespaceList, script: script }; } -function parseIdentifier(parser: ParserState, kind: HighlightToken): TokenObject | undefined { +function parseIdentifier(parser: ParserState, kind: HighlightForToken): TokenObject | undefined { const identifier = parser.next(); if (identifier.kind !== TokenKind.Identifier) return undefined; parser.commit(kind); return identifier; } -function expectIdentifier(parser: ParserState, kind: HighlightToken): TokenObject | undefined { +function expectIdentifier(parser: ParserState, kind: HighlightForToken): TokenObject | undefined { const identifier = parseIdentifier(parser, kind); if (identifier === undefined) { parser.error("Expected identifier."); @@ -215,7 +215,7 @@ function expectContextualKeyword(parser: ParserState, keyword: string): boolean parser.error(`Expected '${keyword}'.`); return false; } - parser.commit(HighlightToken.Keyword); + parser.commit(HighlightForToken.Keyword); return true; } @@ -229,24 +229,24 @@ function parseEnum(parser: ParserState): ParsedResult { parser.backtrack(rangeStart); return ParseFailure.Mismatch; } - parser.commit(HighlightToken.Builtin); + parser.commit(HighlightForToken.Builtin); - const identifier = expectIdentifier(parser, HighlightToken.Enum); + const identifier = expectIdentifier(parser, HighlightForToken.Enum); if (identifier === undefined) return ParseFailure.Pending; let memberList: ParsedEnumMember[] = []; const scopeStart = parser.next(); if (parser.next().text === ';') { - parser.commit(HighlightToken.Operator); + parser.commit(HighlightForToken.Operator); } else { memberList = expectEnumMembers(parser); } return { nodeName: NodeName.Enum, - nodeRange: {start: rangeStart, end: parser.prev()}, - scopeRange: {start: scopeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, 
parser.prev()), + scopeRange: new TokenRange(scopeStart, parser.prev()), entity: entity, identifier: identifier, memberList: memberList @@ -256,21 +256,21 @@ function parseEnum(parser: ParserState): ParsedResult { // '{' IDENTIFIER ['=' EXPR] {',' IDENTIFIER ['=' EXPR]} [','] '}' function expectEnumMembers(parser: ParserState): ParsedEnumMember[] { const members: ParsedEnumMember[] = []; - parser.expect('{', HighlightToken.Operator); + parser.expect('{', HighlightForToken.Operator); while (parser.isEnd() === false) { if (expectContinuousOrClose(parser, ',', '}', members.length > 0) === BreakOrThrough.Break) break; if (parser.next().text === '}') { - parser.commit(HighlightToken.Operator); + parser.commit(HighlightForToken.Operator); break; } - const identifier = expectIdentifier(parser, HighlightToken.EnumMember); + const identifier = expectIdentifier(parser, HighlightForToken.EnumMember); if (identifier === undefined) break; let expr: NodeExpr | undefined = undefined; if (parser.next().text === '=') { - parser.commit(HighlightToken.Operator); + parser.commit(HighlightForToken.Operator); expr = expectExpr(parser); } @@ -298,7 +298,7 @@ function parseEntityAttribute(parser: ParserState): EntityAttribute | undefined isFinal: false }; setEntityAttribute(attribute, next); - parser.commit(HighlightToken.Builtin); + parser.commit(HighlightForToken.Builtin); } cache.store(attribute); @@ -317,18 +317,18 @@ function parseClass(parser: ParserState): ParsedResult { parser.backtrack(rangeStart); return ParseFailure.Mismatch; } - parser.commit(HighlightToken.Builtin); + parser.commit(HighlightForToken.Builtin); - const identifier = expectIdentifier(parser, HighlightToken.Class); + const identifier = expectIdentifier(parser, HighlightForToken.Class); if (identifier === undefined) return ParseFailure.Pending; const typeTemplates = parseTypeTemplates(parser); const baseList: TokenObject[] = []; if (parser.next().text === ':') { - parser.commit(HighlightToken.Operator); + 
parser.commit(HighlightForToken.Operator); while (parser.isEnd() === false) { - const identifier = expectIdentifier(parser, HighlightToken.Type); + const identifier = expectIdentifier(parser, HighlightForToken.Type); if (identifier !== undefined) baseList.push(identifier); if (expectContinuousOrClose(parser, ',', '{', true) === BreakOrThrough.Break) break; @@ -336,7 +336,7 @@ function parseClass(parser: ParserState): ParsedResult { if (identifier === undefined) parser.step(); } } else { - parser.expect('{', HighlightToken.Operator); + parser.expect('{', HighlightForToken.Operator); } const scopeStart = parser.next(); @@ -345,8 +345,8 @@ function parseClass(parser: ParserState): ParsedResult { return { nodeName: NodeName.Class, - nodeRange: {start: rangeStart, end: parser.prev()}, - scopeRange: {start: scopeStart, end: scopeEnd}, + nodeRange: new TokenRange(rangeStart, parser.prev()), + scopeRange: new TokenRange(scopeStart, scopeEnd), metadata: metadata, entity: entity, identifier: identifier, @@ -399,7 +399,7 @@ function expectClassMembers(parser: ParserState) { function parseTypeDef(parser: ParserState): ParsedResult { if (parser.next().text !== 'typedef') return ParseFailure.Mismatch; const rangeStart = parser.next(); - parser.commit(HighlightToken.Builtin); + parser.commit(HighlightForToken.Builtin); const primeType = parsePrimeType(parser); if (primeType === undefined) { @@ -408,13 +408,13 @@ function parseTypeDef(parser: ParserState): ParsedResult { } const identifier = parser.next(); - parser.commit(HighlightToken.Type); + parser.commit(HighlightForToken.Type); - parser.expect(';', HighlightToken.Operator); + parser.expect(';', HighlightForToken.Operator); return { nodeName: NodeName.TypeDef, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), type: primeType, identifier: identifier }; @@ -432,7 +432,7 @@ function parseFunc(parser: ParserState): NodeFunc | undefined { let head: FuncHeads; if 
(parser.next().text === '~') { - parser.commit(HighlightToken.Operator); + parser.commit(HighlightForToken.Operator); head = funcHeadDestructor; } else if (parser.next(0).kind === TokenKind.Identifier && parser.next(1).text === '(') { head = funcHeadConstructor; @@ -448,7 +448,7 @@ function parseFunc(parser: ParserState): NodeFunc | undefined { head = {returnType: returnType, isRef: isRef}; } const identifier = parser.next(); - parser.commit(isFunctionHeadReturnValue(head) ? HighlightToken.Function : HighlightToken.Type); + parser.commit(isFunctionHeadReturnValue(head) ? HighlightForToken.Function : HighlightForToken.Type); const paramList = parseParamList(parser); if (paramList === undefined) { @@ -464,20 +464,20 @@ function parseFunc(parser: ParserState): NodeFunc | undefined { let statBlock: NodeStatBlock | undefined = undefined; if (statStart === ';') { - parser.commit(HighlightToken.Operator); + parser.commit(HighlightForToken.Operator); } else { statBlock = expectStatBlock(parser); } if (statBlock === undefined) statBlock = { nodeName: NodeName.StatBlock, - nodeRange: {start: parser.next(), end: parser.next()}, + nodeRange: new TokenRange(parser.next(), parser.next()), statementList: [] }; return { nodeName: NodeName.Func, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), entity: entityAttribute, accessor: accessor, head: head, @@ -491,49 +491,50 @@ function parseFunc(parser: ParserState): NodeFunc | undefined { function parseConst(parser: ParserState): boolean { if (parser.next().text !== 'const') return false; - parser.commit(HighlightToken.Keyword); + parser.commit(HighlightForToken.Keyword); return true; } function parseRef(parser: ParserState) { const isRef = parser.next().text === '&'; - if (isRef) parser.commit(HighlightToken.Builtin); + if (isRef) parser.commit(HighlightForToken.Builtin); return isRef; } // Metadata declarations in the same place and the only other rule is the matching count 
of '[' and ']' -// eg. '[Hello[]]' is ok but '[Hello[]' is not. -function parseMetadata(parser: ParserState): TokenObject[] { +// e.g., '[Hello[]]' is ok but '[Hello[]' is not. +function parseMetadata(parser: ParserState): TokenObject[][] { const rangeStart = parser.next(); if (parser.next().text !== '[') return []; let level = 0; - let metadata: TokenObject[] = []; + const metadata: TokenObject[][] = [[]]; while (parser.isEnd() === false) { if (parser.next().text === '[') { - if (level > 0) metadata.push(parser.next()); + if (level > 0) metadata.at(-1)!.push(parser.next()); level++; - parser.commit(HighlightToken.Operator); + parser.commit(HighlightForToken.Operator); } else if (parser.next().text === ']') { level--; - parser.commit(HighlightToken.Operator); + parser.commit(HighlightForToken.Operator); if (level === 0) { // Since AngelScript supports multiple metadata declarations in subsequent pairs of '[' and ']', we recursively parse those declarations here. - // eg. '[Hello][World]' is valid, as is + // e.g., '[Hello][World]' is valid, as is // [Hello] // [World] if (parser.next().text === '[') { - metadata = [...metadata, ...parseMetadata(parser)]; + metadata.push([]); + continue; } return metadata; - } else metadata.push(parser.next()); + } else metadata.at(-1)!.push(parser.next()); } else { - metadata.push(parser.next()); - parser.commit(HighlightToken.Decorator); + metadata.at(-1)!.push(parser.next()); + parser.commit(HighlightForToken.Decorator); } } @@ -546,7 +547,7 @@ function parseMetadata(parser: ParserState): TokenObject[] { function parseAccessModifier(parser: ParserState): AccessModifier | undefined { const next = parser.next().text; if (next === 'private' || next === 'protected') { - parser.commit(HighlightToken.Builtin); + parser.commit(HighlightForToken.Builtin); return next === 'private' ? 
AccessModifier.Private : AccessModifier.Protected; } return undefined; @@ -562,14 +563,14 @@ function parseInterface(parser: ParserState): ParsedResult { parser.backtrack(rangeStart); return ParseFailure.Mismatch; } - parser.commit(HighlightToken.Builtin); + parser.commit(HighlightForToken.Builtin); - const identifier = expectIdentifier(parser, HighlightToken.Interface); + const identifier = expectIdentifier(parser, HighlightForToken.Interface); if (identifier === undefined) return ParseFailure.Pending; const result: Mutable = { nodeName: NodeName.Interface, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), entity: entity, identifier: identifier, baseList: [], @@ -577,14 +578,14 @@ function parseInterface(parser: ParserState): ParsedResult { }; if (parser.next().text === ';') { - parser.commit(HighlightToken.Operator); + parser.commit(HighlightForToken.Operator); return result; } if (parser.next().text === ':') { - parser.commit(HighlightToken.Operator); + parser.commit(HighlightForToken.Operator); while (parser.isEnd() === false) { - const identifier = expectIdentifier(parser, HighlightToken.Type); + const identifier = expectIdentifier(parser, HighlightForToken.Type); if (identifier !== undefined) result.baseList.push(identifier); if (expectContinuousOrClose(parser, ',', '{', true) === BreakOrThrough.Break) break; @@ -592,7 +593,7 @@ function parseInterface(parser: ParserState): ParsedResult { if (identifier === undefined) parser.step(); } } else { - parser.expect('{', HighlightToken.Operator); + parser.expect('{', HighlightForToken.Operator); } result.memberList = expectInterfaceMembers(parser); @@ -648,12 +649,12 @@ function parseVar(parser: ParserState): NodeVar | undefined { const variables: ParsedVariableInit[] = []; while (parser.isEnd() === false) { // 識別子 - const identifier = expectIdentifier(parser, HighlightToken.Variable); + const identifier = expectIdentifier(parser, 
HighlightForToken.Variable); if (identifier === undefined) break; // 初期化子 if (parser.next().text === '=') { - parser.commit(HighlightToken.Operator); + parser.commit(HighlightForToken.Operator); const initListOrExpr = expectInitListOrExpr(parser); variables.push({identifier: identifier, initializer: initListOrExpr}); @@ -668,7 +669,7 @@ function parseVar(parser: ParserState): NodeVar | undefined { return { nodeName: NodeName.Var, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), accessor: accessor, type: type, variables: variables @@ -694,14 +695,14 @@ function parseImport(parser: ParserState): ParsedResult { const rangeStart = parser.next(); if (parser.next().text !== 'import') return ParseFailure.Mismatch; - parser.commit(HighlightToken.Keyword); + parser.commit(HighlightForToken.Keyword); const type = expectType(parser); if (type === undefined) return ParseFailure.Pending; const isRef = parseRef(parser); - const identifier = expectIdentifier(parser, HighlightToken.Variable); + const identifier = expectIdentifier(parser, HighlightForToken.Variable); if (identifier === undefined) return ParseFailure.Pending; const paramList = expectParamList(parser); @@ -716,13 +717,13 @@ function parseImport(parser: ParserState): ParsedResult { parser.error("Expected string path."); return ParseFailure.Pending; } - parser.commit(HighlightToken.String); + parser.commit(HighlightForToken.String); - parser.expect(';', HighlightToken.Operator); + parser.expect(';', HighlightForToken.Operator); return { nodeName: NodeName.Import, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), type: type, isRef: isRef, identifier: identifier, @@ -742,7 +743,7 @@ function parseFuncDef(parser: ParserState): ParsedResult { parser.backtrack(rangeStart); return ParseFailure.Mismatch; } - parser.commit(HighlightToken.Builtin); + parser.commit(HighlightForToken.Builtin); const returnType 
= expectType(parser); if (returnType === undefined) return ParseFailure.Pending; @@ -750,16 +751,16 @@ function parseFuncDef(parser: ParserState): ParsedResult { const isRef = parseRef(parser); const identifier = parser.next(); - parser.commit(HighlightToken.Function); + parser.commit(HighlightForToken.Function); const paramList = expectParamList(parser); if (paramList === undefined) return ParseFailure.Pending; - parser.expect(';', HighlightToken.Operator); + parser.expect(';', HighlightForToken.Operator); return { nodeName: NodeName.FuncDef, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), entity: entity, returnType: returnType, isRef: isRef, @@ -784,7 +785,7 @@ function parseVirtualProp(parser: ParserState): NodeVirtualProp | undefined { const isRef = parseRef(parser); - const identifier = parseIdentifier(parser, HighlightToken.Variable); + const identifier = parseIdentifier(parser, HighlightForToken.Variable); if (identifier === undefined) { parser.backtrack(rangeStart); return undefined; @@ -794,7 +795,7 @@ function parseVirtualProp(parser: ParserState): NodeVirtualProp | undefined { parser.backtrack(rangeStart); return undefined; } - parser.commit(HighlightToken.Operator); + parser.commit(HighlightForToken.Operator); let getter: ParsedGetterSetter | undefined = undefined; let setter: ParsedGetterSetter | undefined = undefined; @@ -811,7 +812,7 @@ function parseVirtualProp(parser: ParserState): NodeVirtualProp | undefined { return { nodeName: NodeName.VirtualProp, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), accessor: accessor, type: type, isRef: isRef, @@ -823,7 +824,7 @@ function parseVirtualProp(parser: ParserState): NodeVirtualProp | undefined { // ('get' | 'set') ['const'] FUNCATTR (STATBLOCK | ';') function expectGetterSetter(parser: ParserState): ParsedGetterSetter { - parser.commit(HighlightToken.Builtin); + 
parser.commit(HighlightForToken.Builtin); const isConst = parseConst(parser); const funcAttr = parseFuncAttr(parser); @@ -840,7 +841,7 @@ function expectGetterSetter(parser: ParserState): ParsedGetterSetter { function parseMixin(parser: ParserState): ParsedResult { if (parser.next().text !== 'mixin') return ParseFailure.Mismatch; const rangeStart = parser.next(); - parser.commit(HighlightToken.Builtin); + parser.commit(HighlightForToken.Builtin); const parsedClass = parseClass(parser); if (parsedClass === ParseFailure.Pending) return ParseFailure.Pending; @@ -851,7 +852,7 @@ function parseMixin(parser: ParserState): ParsedResult { return { nodeName: NodeName.Mixin, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), mixinClass: parsedClass }; } @@ -865,7 +866,7 @@ function parseIntfMethod(parser: ParserState): NodeIntfMethod | undefined { const isRef = parseRef(parser); - const identifier = parseIdentifier(parser, HighlightToken.Function); + const identifier = parseIdentifier(parser, HighlightForToken.Function); if (identifier === undefined) return undefined; const paramList = parseParamList(parser); @@ -873,11 +874,11 @@ function parseIntfMethod(parser: ParserState): NodeIntfMethod | undefined { const isConst = parseConst(parser); - parser.expect(';', HighlightToken.Operator); + parser.expect(';', HighlightForToken.Operator); return { nodeName: NodeName.IntfMethod, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), returnType: returnType, isRef: isRef, identifier: identifier, @@ -890,7 +891,7 @@ function parseIntfMethod(parser: ParserState): NodeIntfMethod | undefined { function parseStatBlock(parser: ParserState): NodeStatBlock | undefined { if (parser.next().text !== '{') return undefined; const rangeStart = parser.next(); - parser.commit(HighlightToken.Operator); + parser.commit(HighlightForToken.Operator); const statementList: (NodeVar | 
NodeStatement)[] = []; while (parser.isEnd() === false) { @@ -915,7 +916,7 @@ function parseStatBlock(parser: ParserState): NodeStatBlock | undefined { return { nodeName: NodeName.StatBlock, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), statementList: statementList }; } @@ -931,11 +932,11 @@ function expectStatBlock(parser: ParserState): NodeStatBlock | undefined { // PARAMLIST ::= '(' ['void' | (TYPE TYPEMOD [IDENTIFIER] ['=' EXPR] {',' TYPE TYPEMOD [IDENTIFIER] ['=' EXPR]})] ')' function parseParamList(parser: ParserState): NodeParamList | undefined { if (parser.next().text !== '(') return undefined; - parser.commit(HighlightToken.Operator); + parser.commit(HighlightForToken.Operator); if (parser.next().text === 'void') { - parser.commit(HighlightToken.Builtin); - parser.expect(')', HighlightToken.Operator); + parser.commit(HighlightForToken.Builtin); + parser.expect(')', HighlightForToken.Operator); return []; } @@ -954,12 +955,12 @@ function parseParamList(parser: ParserState): NodeParamList | undefined { let identifier: TokenObject | undefined = undefined; if (parser.next().kind === TokenKind.Identifier) { identifier = parser.next(); - parser.commit(HighlightToken.Variable); + parser.commit(HighlightForToken.Variable); } let defaultExpr: NodeExpr | undefined = undefined; if (parser.next().text === '=') { - parser.commit(HighlightToken.Operator); + parser.commit(HighlightForToken.Operator); defaultExpr = expectExpr(parser); } paramList.push({type: type, modifier: typeMod, identifier: identifier, defaultExpr: defaultExpr}); @@ -989,11 +990,11 @@ function parseContinuousOrClose( ): BreakOrThrough | undefined { const next = parser.next().text; if (next === closeOp) { - parser.commit(HighlightToken.Operator); + parser.commit(HighlightForToken.Operator); return BreakOrThrough.Break; } else if (canColon) { if (next !== continuousOp) return undefined; - parser.commit(HighlightToken.Operator); + 
parser.commit(HighlightForToken.Operator); } return BreakOrThrough.Through; } @@ -1011,7 +1012,7 @@ function expectContinuousOrClose( function parseCloseOperator(parser: ParserState, closeOp: string): BreakOrThrough { const next = parser.next().text; if (next === closeOp) { - parser.commit(HighlightToken.Operator); + parser.commit(HighlightForToken.Operator); return BreakOrThrough.Break; } return BreakOrThrough.Through; @@ -1020,11 +1021,11 @@ function parseCloseOperator(parser: ParserState, closeOp: string): BreakOrThroug // TYPEMOD ::= ['&' ['in' | 'out' | 'inout']] function parseTypeMod(parser: ParserState): TypeModifier | undefined { if (parser.next().text !== '&') return undefined; - parser.commit(HighlightToken.Builtin); + parser.commit(HighlightForToken.Builtin); const next = parser.next().text; if (next === 'in' || next === 'out' || next === 'inout') { - parser.commit(HighlightToken.Builtin); + parser.commit(HighlightForToken.Builtin); if (next === 'in') return TypeModifier.In; if (next === 'out') return TypeModifier.Out; } @@ -1051,7 +1052,7 @@ function parseType(parser: ParserState): NodeType | undefined { return { nodeName: NodeName.Type, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), isConst: isConst, scope: scope, dataType: datatype, @@ -1066,14 +1067,14 @@ function parseTypeTail(parser: ParserState) { let refModifier: ReferenceModifier | undefined = undefined; while (parser.isEnd() === false) { if (parser.next(0).text === '[' && parser.next(1).text === ']') { - parser.commit(HighlightToken.Operator); - parser.commit(HighlightToken.Operator); + parser.commit(HighlightForToken.Operator); + parser.commit(HighlightForToken.Operator); isArray = true; continue; } else if (parser.next().text === '@') { - parser.commit(HighlightToken.Builtin); + parser.commit(HighlightForToken.Builtin); if (parser.next().text === 'const') { - parser.commit(HighlightToken.Builtin); + 
parser.commit(HighlightForToken.Builtin); refModifier = ReferenceModifier.AtConst; } else { refModifier = ReferenceModifier.At; @@ -1100,7 +1101,7 @@ function parseTypeTemplates(parser: ParserState): NodeType[] | undefined { const rangeStart = parser.next(); if (parser.next().text !== '<') return undefined; - parser.commit(HighlightToken.Operator); + parser.commit(HighlightForToken.Operator); const typeTemplates: NodeType[] = []; while (parser.isEnd() === false) { @@ -1129,7 +1130,7 @@ function parseTypeTemplates(parser: ParserState): NodeType[] | undefined { function parseInitList(parser: ParserState): NodeInitList | undefined { if (parser.next().text !== '{') return undefined; const rangeStart = parser.next(); - parser.commit(HighlightToken.Operator); + parser.commit(HighlightForToken.Operator); const initList: (NodeAssign | NodeInitList)[] = []; while (parser.isEnd() === false) { @@ -1152,7 +1153,7 @@ function parseInitList(parser: ParserState): NodeInitList | undefined { } return { nodeName: NodeName.InitList, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), initList: initList }; } @@ -1166,7 +1167,7 @@ function parseScope(parser: ParserState): NodeScope | undefined { let isGlobal = false; if (parser.next().text === '::') { - parser.commit(HighlightToken.Operator); + parser.commit(HighlightForToken.Operator); isGlobal = true; } @@ -1179,19 +1180,19 @@ function parseScope(parser: ParserState): NodeScope | undefined { } if (parser.next(1).text === '::') { - parser.commit(HighlightToken.Namespace); - parser.commit(HighlightToken.Operator); + parser.commit(HighlightForToken.Namespace); + parser.commit(HighlightForToken.Operator); scopeList.push(identifier); continue; } else if (parser.next(1).text === '<') { const typesStart = parser.next(); - parser.commit(HighlightToken.Class); + parser.commit(HighlightForToken.Class); typeTemplates = parseTypeTemplates(parser); if (typeTemplates === undefined || 
parser.next().text !== '::') { parser.backtrack(typesStart); } else { - parser.commit(HighlightToken.Operator); + parser.commit(HighlightForToken.Operator); scopeList.push(identifier); } } @@ -1205,7 +1206,7 @@ function parseScope(parser: ParserState): NodeScope | undefined { const nodeScope: NodeScope = { nodeName: NodeName.Scope, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), isGlobal: isGlobal, scopeList: scopeList, typeTemplates: typeTemplates ?? [] @@ -1218,19 +1219,19 @@ function parseScope(parser: ParserState): NodeScope | undefined { function parseDatatype(parser: ParserState): NodeDataType | undefined { const next = parser.next(); if (next.kind === TokenKind.Identifier) { - parser.commit(HighlightToken.Type); + parser.commit(HighlightForToken.Type); return { nodeName: NodeName.DataType, - nodeRange: {start: next, end: next}, + nodeRange: new TokenRange(next, next), identifier: next }; } if (next.text === '?' || next.text === 'auto') { - parser.commit(HighlightToken.Builtin); + parser.commit(HighlightForToken.Builtin); return { nodeName: NodeName.DataType, - nodeRange: {start: next, end: next}, + nodeRange: new TokenRange(next, next), identifier: next }; } @@ -1238,7 +1239,7 @@ function parseDatatype(parser: ParserState): NodeDataType | undefined { const primType = parsePrimeType(parser); if (primType !== undefined) return { nodeName: NodeName.DataType, - nodeRange: {start: next, end: next}, + nodeRange: new TokenRange(next, next), identifier: primType }; @@ -1249,7 +1250,7 @@ function parseDatatype(parser: ParserState): NodeDataType | undefined { function parsePrimeType(parser: ParserState) { const next = parser.next(); if (next.isReservedToken() === false || next.property.isPrimeType === false) return undefined; - parser.commit(HighlightToken.Builtin); + parser.commit(HighlightForToken.Builtin); return next; } @@ -1267,7 +1268,7 @@ function parseFuncAttr(parser: ParserState): FunctionAttribute 
| undefined { isProperty: false }; setFunctionAttribute(attribute, next); - parser.commit(HighlightToken.Builtin); + parser.commit(HighlightForToken.Builtin); } return attribute; } @@ -1331,15 +1332,15 @@ function expectStatement(parser: ParserState): NodeStatement | undefined { function parseSwitch(parser: ParserState): ParsedResult { if (parser.next().text !== 'switch') return ParseFailure.Mismatch; const rangeStart = parser.next(); - parser.commit(HighlightToken.Keyword); + parser.commit(HighlightForToken.Keyword); - parser.expect('(', HighlightToken.Operator); + parser.expect('(', HighlightForToken.Operator); const assign = expectAssign(parser); if (assign === undefined) return ParseFailure.Pending; - parser.expect(')', HighlightToken.Operator); - parser.expect('{', HighlightToken.Operator); + parser.expect(')', HighlightForToken.Operator); + parser.expect('{', HighlightForToken.Operator); const cases: NodeCase[] = []; while (parser.isEnd() === false) { @@ -1357,7 +1358,7 @@ function parseSwitch(parser: ParserState): ParsedResult { return { nodeName: NodeName.Switch, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), assign: assign, caseList: cases }; @@ -1367,19 +1368,19 @@ function parseSwitch(parser: ParserState): ParsedResult { function parseBreak(parser: ParserState): NodeBreak | undefined { if (parser.next().text !== 'break') return undefined; const rangeStart = parser.next(); - parser.commit(HighlightToken.Keyword); + parser.commit(HighlightForToken.Keyword); - parser.expect(';', HighlightToken.Operator); - return {nodeName: NodeName.Break, nodeRange: {start: rangeStart, end: parser.prev()}}; + parser.expect(';', HighlightForToken.Operator); + return {nodeName: NodeName.Break, nodeRange: new TokenRange(rangeStart, parser.prev())}; } // FOR ::= 'for' '(' (VAR | EXPRSTAT) EXPRSTAT [ASSIGN {',' ASSIGN}] ')' STATEMENT function parseFor(parser: ParserState): ParsedResult { if (parser.next().text !== 
'for') return ParseFailure.Mismatch; const rangeStart = parser.next(); - parser.commit(HighlightToken.Keyword); + parser.commit(HighlightForToken.Keyword); - if (parser.expect('(', HighlightToken.Operator) === false) return ParseFailure.Pending; + if (parser.expect('(', HighlightForToken.Operator) === false) return ParseFailure.Pending; const initial: NodeExprStat | NodeVar | undefined = parseVar(parser) ?? parseExprStat(parser); if (initial === undefined) { @@ -1389,7 +1390,7 @@ function parseFor(parser: ParserState): ParsedResult { const result: Mutable = { nodeName: NodeName.For, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), initial: initial, condition: undefined, incrementList: [], @@ -1416,21 +1417,21 @@ function parseFor(parser: ParserState): ParsedResult { function parseWhile(parser: ParserState): ParsedResult { if (parser.next().text !== 'while') return ParseFailure.Mismatch; const rangeStart = parser.next(); - parser.commit(HighlightToken.Keyword); + parser.commit(HighlightForToken.Keyword); - if (parser.expect('(', HighlightToken.Operator) === false) return ParseFailure.Pending; + if (parser.expect('(', HighlightForToken.Operator) === false) return ParseFailure.Pending; const assign = expectAssign(parser); if (assign === undefined) return ParseFailure.Pending; const result: Mutable = { nodeName: NodeName.While, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), assign: assign, statement: undefined }; - if (parser.expect(')', HighlightToken.Operator) === false) return appliedNodeEnd(parser, result); + if (parser.expect(')', HighlightForToken.Operator) === false) return appliedNodeEnd(parser, result); result.statement = expectStatement(parser); return appliedNodeEnd(parser, result); @@ -1440,27 +1441,27 @@ function parseWhile(parser: ParserState): ParsedResult { function parseDoWhile(parser: ParserState): ParsedResult { if 
(parser.next().text !== 'do') return ParseFailure.Mismatch; const rangeStart = parser.next(); - parser.commit(HighlightToken.Keyword); + parser.commit(HighlightForToken.Keyword); const statement = expectStatement(parser); if (statement === undefined) return ParseFailure.Pending; const result: Mutable = { nodeName: NodeName.DoWhile, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), statement: statement, assign: undefined }; - if (parser.expect('while', HighlightToken.Keyword) === false) return appliedNodeEnd(parser, result); - if (parser.expect('(', HighlightToken.Operator) === false) return appliedNodeEnd(parser, result); + if (parser.expect('while', HighlightForToken.Keyword) === false) return appliedNodeEnd(parser, result); + if (parser.expect('(', HighlightForToken.Operator) === false) return appliedNodeEnd(parser, result); result.assign = expectAssign(parser); if (result.assign === undefined) return appliedNodeEnd(parser, result); - if (parser.expect(')', HighlightToken.Operator) === false) return appliedNodeEnd(parser, result); + if (parser.expect(')', HighlightForToken.Operator) === false) return appliedNodeEnd(parser, result); - parser.expect(';', HighlightToken.Operator); + parser.expect(';', HighlightForToken.Operator); return appliedNodeEnd(parser, result); } @@ -1468,28 +1469,28 @@ function parseDoWhile(parser: ParserState): ParsedResult { function parseIf(parser: ParserState): ParsedResult { if (parser.next().text !== 'if') return ParseFailure.Mismatch; const rangeStart = parser.next(); - parser.commit(HighlightToken.Keyword); + parser.commit(HighlightForToken.Keyword); - if (parser.expect('(', HighlightToken.Operator) === false) return ParseFailure.Pending; + if (parser.expect('(', HighlightForToken.Operator) === false) return ParseFailure.Pending; const assign = expectAssign(parser); if (assign === undefined) return ParseFailure.Pending; const result: Mutable = { nodeName: NodeName.If, - 
nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), condition: assign, thenStat: undefined, elseStat: undefined }; - if (parser.expect(')', HighlightToken.Operator) === false) return appliedNodeEnd(parser, result); + if (parser.expect(')', HighlightForToken.Operator) === false) return appliedNodeEnd(parser, result); result.thenStat = expectStatement(parser); if (result.thenStat === undefined) return appliedNodeEnd(parser, result); if (parser.next().text === 'else') { - parser.commit(HighlightToken.Keyword); + parser.commit(HighlightForToken.Keyword); result.elseStat = expectStatement(parser); } @@ -1498,7 +1499,7 @@ function parseIf(parser: ParserState): ParsedResult { } function appliedNodeEnd(parser: ParserState, node: Mutable): T { - node.nodeRange = makeParsedRange(node.nodeRange.start, parser.prev()); + node.nodeRange = new TokenRange(node.nodeRange.start, parser.prev()); return node; } @@ -1506,19 +1507,19 @@ function appliedNodeEnd(parser: ParserState, node: Mutable< function parseContinue(parser: ParserState): NodeContinue | undefined { if (parser.next().text !== 'continue') return undefined; const rangeStart = parser.next(); - parser.commit(HighlightToken.Keyword); - parser.expect(';', HighlightToken.Operator); - return {nodeName: NodeName.Continue, nodeRange: {start: rangeStart, end: parser.prev()}}; + parser.commit(HighlightForToken.Keyword); + parser.expect(';', HighlightForToken.Operator); + return {nodeName: NodeName.Continue, nodeRange: new TokenRange(rangeStart, parser.prev())}; } // EXPRSTAT ::= [ASSIGN] ';' function parseExprStat(parser: ParserState): NodeExprStat | undefined { const rangeStart = parser.next(); if (parser.next().text === ';') { - parser.commit(HighlightToken.Operator); + parser.commit(HighlightForToken.Operator); return { nodeName: NodeName.ExprStat, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), assign: undefined 
}; } @@ -1526,11 +1527,11 @@ function parseExprStat(parser: ParserState): NodeExprStat | undefined { const assign = parseAssign(parser); if (assign === undefined) return undefined; - parser.expect(';', HighlightToken.Operator); + parser.expect(';', HighlightForToken.Operator); return { nodeName: NodeName.ExprStat, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), assign: assign }; } @@ -1547,19 +1548,19 @@ function expectExprStat(parser: ParserState): NodeExprStat | undefined { function parseTry(parser: ParserState): ParsedResult { if (parser.next().text !== 'try') return ParseFailure.Mismatch; const rangeStart = parser.next(); - parser.commit(HighlightToken.Keyword); + parser.commit(HighlightForToken.Keyword); const tryBlock = expectStatBlock(parser); if (tryBlock === undefined) return ParseFailure.Pending; const result: Mutable = { nodeName: NodeName.Try, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), tryBlock: tryBlock, catchBlock: undefined }; - if (parser.expect('catch', HighlightToken.Keyword) === false) return appliedNodeEnd(parser, result); + if (parser.expect('catch', HighlightForToken.Keyword) === false) return appliedNodeEnd(parser, result); result.catchBlock = expectStatBlock(parser); return appliedNodeEnd(parser, result); @@ -1569,23 +1570,23 @@ function parseTry(parser: ParserState): ParsedResult { function parseReturn(parser: ParserState): ParsedResult { if (parser.next().text !== 'return') return ParseFailure.Mismatch; const rangeStart = parser.next(); - parser.commit(HighlightToken.Keyword); + parser.commit(HighlightForToken.Keyword); const result: Mutable = { nodeName: NodeName.Return, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), assign: undefined }; if (parser.next().text === ';') { - parser.commit(HighlightToken.Operator); + 
parser.commit(HighlightForToken.Operator); return appliedNodeEnd(parser, result); } result.assign = expectAssign(parser); if (result.assign === undefined) return appliedNodeEnd(parser, result); - parser.expect(';', HighlightToken.Operator); + parser.expect(';', HighlightForToken.Operator); return appliedNodeEnd(parser, result); } @@ -1595,17 +1596,17 @@ function parseCase(parser: ParserState): ParsedResult { let expr = undefined; if (parser.next().text === 'case') { - parser.commit(HighlightToken.Keyword); + parser.commit(HighlightForToken.Keyword); expr = expectExpr(parser); if (expr === undefined) return ParseFailure.Pending; } else if (parser.next().text === 'default') { - parser.commit(HighlightToken.Keyword); + parser.commit(HighlightForToken.Keyword); } else { return ParseFailure.Mismatch; } - parser.expect(':', HighlightToken.Operator); + parser.expect(':', HighlightForToken.Operator); const statements: NodeStatement[] = []; while (parser.isEnd() === false) { @@ -1617,7 +1618,7 @@ function parseCase(parser: ParserState): ParsedResult { return { nodeName: NodeName.Case, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), expr: expr, statementList: statements }; @@ -1633,7 +1634,7 @@ function parseExpr(parser: ParserState): NodeExpr | undefined { const exprOp = parseExprOp(parser); if (exprOp === undefined) return { nodeName: NodeName.Expr, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), head: exprTerm, tail: undefined }; @@ -1641,14 +1642,14 @@ function parseExpr(parser: ParserState): NodeExpr | undefined { const tail = expectExpr(parser); if (tail === undefined) return { nodeName: NodeName.Expr, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), head: exprTerm, tail: undefined }; return { nodeName: NodeName.Expr, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new 
TokenRange(rangeStart, parser.prev()), head: exprTerm, tail: { operator: exprOp, @@ -1686,7 +1687,7 @@ function parseExprTerm1(parser: ParserState): NodeExprTerm1 | undefined { parser.backtrack(rangeStart); return undefined; } - parser.commit(HighlightToken.Operator); + parser.commit(HighlightForToken.Operator); } const initList = parseInitList(parser); @@ -1697,7 +1698,7 @@ function parseExprTerm1(parser: ParserState): NodeExprTerm1 | undefined { return { nodeName: NodeName.ExprTerm, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), exprTerm: 1, type: type, initList: initList @@ -1713,7 +1714,7 @@ function parseExprTerm2(parser: ParserState): NodeExprTerm2 | undefined { const next = parser.next(); if (next.isReservedToken() === false || next.property.isExprPreOp === false) break; preOps.push(parser.next()); - parser.commit(HighlightToken.Operator); + parser.commit(HighlightForToken.Operator); } const exprValue = parseExprValue(parser); @@ -1731,7 +1732,7 @@ function parseExprTerm2(parser: ParserState): NodeExprTerm2 | undefined { return { nodeName: NodeName.ExprTerm, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), exprTerm: 2, preOps: preOps, value: exprValue, @@ -1746,12 +1747,12 @@ function parseExprValue(parser: ParserState): ParsedResult { if (cast !== ParseFailure.Mismatch) return cast; if (parser.next().text === '(') { - parser.commit(HighlightToken.Operator); + parser.commit(HighlightForToken.Operator); const assign = expectAssign(parser); if (assign === undefined) return ParseFailure.Pending; - parser.expect(')', HighlightToken.Operator); + parser.expect(')', HighlightForToken.Operator); return assign; } @@ -1788,7 +1789,7 @@ function parseConstructCall(parser: ParserState): NodeConstructCall | undefined return { nodeName: NodeName.ConstructCall, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new 
TokenRange(rangeStart, parser.prev()), type: type, argList: argList }; @@ -1810,17 +1811,17 @@ function parseExprPostOp(parser: ParserState): NodeExprPostOp | undefined { if (argList !== undefined) return { nodeName: NodeName.ExprPostOp, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), postOp: 3, args: argList }; const maybeOperator = parser.next().text; if (maybeOperator === '++' || maybeOperator === '--') { - parser.commit(HighlightToken.Operator); + parser.commit(HighlightForToken.Operator); return { nodeName: NodeName.ExprPostOp, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), postOp: 4, operator: maybeOperator }; @@ -1833,21 +1834,21 @@ function parseExprPostOp(parser: ParserState): NodeExprPostOp | undefined { function parseExprPostOp1(parser: ParserState): NodeExprPostOp1 | undefined { if (parser.next().text !== '.') return undefined; const rangeStart = parser.next(); - parser.commit(HighlightToken.Operator); + parser.commit(HighlightForToken.Operator); const funcCall = parseFuncCall(parser); if (funcCall !== undefined) return { nodeName: NodeName.ExprPostOp, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), postOp: 1, member: funcCall, }; - const identifier = expectIdentifier(parser, HighlightToken.Variable); + const identifier = expectIdentifier(parser, HighlightForToken.Variable); return { nodeName: NodeName.ExprPostOp, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), postOp: 1, member: identifier }; @@ -1857,7 +1858,7 @@ function parseExprPostOp1(parser: ParserState): NodeExprPostOp1 | undefined { function parseExprPostOp2(parser: ParserState): NodeExprPostOp2 | undefined { if (parser.next().text !== '[') return undefined; const rangeStart = parser.next(); - parser.commit(HighlightToken.Operator); + 
parser.commit(HighlightForToken.Operator); const indexerList: ParsedPostIndexer[] = []; while (parser.isEnd() === false) { @@ -1876,7 +1877,7 @@ function parseExprPostOp2(parser: ParserState): NodeExprPostOp2 | undefined { return { nodeName: NodeName.ExprPostOp, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), postOp: 2, indexerList: indexerList }; @@ -1886,8 +1887,8 @@ function parseExprPostOp2(parser: ParserState): NodeExprPostOp2 | undefined { function parseIdentifierWithColon(parser: ParserState): TokenObject | undefined { if (parser.next(0).kind === TokenKind.Identifier && parser.next(1).text === ':') { const identifier = parser.next(); - parser.commit(HighlightToken.Parameter); - parser.commit(HighlightToken.Operator); + parser.commit(HighlightForToken.Parameter); + parser.commit(HighlightForToken.Operator); return identifier; } return undefined; @@ -1897,24 +1898,24 @@ function parseIdentifierWithColon(parser: ParserState): TokenObject | undefined function parseCast(parser: ParserState): ParsedResult { if (parser.next().text !== 'cast') return ParseFailure.Mismatch; const rangeStart = parser.next(); - parser.commit(HighlightToken.Keyword); + parser.commit(HighlightForToken.Keyword); - if (parser.expect('<', HighlightToken.Operator) === false) return ParseFailure.Pending; + if (parser.expect('<', HighlightForToken.Operator) === false) return ParseFailure.Pending; const type = expectType(parser); if (type === undefined) return ParseFailure.Pending; - if (parser.expect('>', HighlightToken.Operator) === false) return ParseFailure.Pending; - if (parser.expect('(', HighlightToken.Operator) === false) return ParseFailure.Pending; + if (parser.expect('>', HighlightForToken.Operator) === false) return ParseFailure.Pending; + if (parser.expect('(', HighlightForToken.Operator) === false) return ParseFailure.Pending; const assign = expectAssign(parser); if (assign === undefined) return ParseFailure.Pending; - 
parser.expect(')', HighlightToken.Operator); + parser.expect(')', HighlightForToken.Operator); return { nodeName: NodeName.Cast, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), type: type, assign: assign }; @@ -1926,13 +1927,13 @@ const parseLambda = (parser: ParserState): ParsedResult => { if (canParseLambda(parser) === false) return ParseFailure.Mismatch; const rangeStart = parser.next(); - parser.commit(HighlightToken.Builtin); + parser.commit(HighlightForToken.Builtin); - parser.expect('(', HighlightToken.Operator); + parser.expect('(', HighlightForToken.Operator); const result: Mutable = { nodeName: NodeName.Lambda, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), paramList: [], statBlock: undefined }; @@ -1942,13 +1943,13 @@ const parseLambda = (parser: ParserState): ParsedResult => { if (parser.next(0).kind === TokenKind.Identifier && isCommaOrParensClose(parser.next(1).text)) { result.paramList.push({type: undefined, typeMod: undefined, identifier: parser.next()}); - parser.commit(HighlightToken.Parameter); + parser.commit(HighlightForToken.Parameter); continue; } const type = parseType(parser); const typeMod = type !== undefined ? 
parseTypeMod(parser) : undefined; - const identifier: TokenObject | undefined = parseIdentifier(parser, HighlightToken.Parameter); + const identifier: TokenObject | undefined = parseIdentifier(parser, HighlightForToken.Parameter); result.paramList.push({type: type, typeMod: typeMod, identifier: identifier}); } @@ -1973,16 +1974,16 @@ function canParseLambda(parser: ParserState): boolean { function parseLiteral(parser: ParserState): NodeLiteral | undefined { const next = parser.next(); if (next.kind === TokenKind.Number) { - parser.commit(HighlightToken.Number); - return {nodeName: NodeName.Literal, nodeRange: {start: next, end: next}, value: next}; + parser.commit(HighlightForToken.Number); + return {nodeName: NodeName.Literal, nodeRange: new TokenRange(next, next), value: next}; } if (next.kind === TokenKind.String) { - parser.commit(HighlightToken.String); - return {nodeName: NodeName.Literal, nodeRange: {start: next, end: next}, value: next}; + parser.commit(HighlightForToken.String); + return {nodeName: NodeName.Literal, nodeRange: new TokenRange(next, next), value: next}; } if (next.text === 'true' || next.text === 'false' || next.text === 'null') { - parser.commit(HighlightToken.Builtin); - return {nodeName: NodeName.Literal, nodeRange: {start: next, end: next}, value: next}; + parser.commit(HighlightForToken.Builtin); + return {nodeName: NodeName.Literal, nodeRange: new TokenRange(next, next), value: next}; } return undefined; } @@ -1992,7 +1993,7 @@ function parseFuncCall(parser: ParserState): NodeFuncCall | undefined { const rangeStart = parser.next(); const scope = parseScope(parser); - const identifier = parseIdentifier(parser, HighlightToken.Function); + const identifier = parseIdentifier(parser, HighlightForToken.Function); if (identifier === undefined) { parser.backtrack(rangeStart); return undefined; @@ -2006,7 +2007,7 @@ function parseFuncCall(parser: ParserState): NodeFuncCall | undefined { return { nodeName: NodeName.FuncCall, - nodeRange: {start: 
rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), scope: scope, identifier: identifier, argList: argList @@ -2024,17 +2025,17 @@ function parseVarAccess(parser: ParserState): NodeVarAccess | undefined { parser.error("Expected identifier."); return { nodeName: NodeName.VarAccess, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), scope: scope, identifier: undefined }; } const isBuiltin: boolean = scope === undefined && next.text === 'this'; - parser.commit(isBuiltin ? HighlightToken.Builtin : HighlightToken.Variable); + parser.commit(isBuiltin ? HighlightForToken.Builtin : HighlightForToken.Variable); return { nodeName: NodeName.VarAccess, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), scope: scope, identifier: next }; @@ -2044,7 +2045,7 @@ function parseVarAccess(parser: ParserState): NodeVarAccess | undefined { function parseArgList(parser: ParserState): NodeArgList | undefined { if (parser.next().text !== '(') return undefined; const rangeStart = parser.next(); - parser.commit(HighlightToken.Operator); + parser.commit(HighlightForToken.Operator); const argList: ParsedArgument[] = []; while (parser.isEnd() === false) { @@ -2060,7 +2061,7 @@ function parseArgList(parser: ParserState): NodeArgList | undefined { return { nodeName: NodeName.ArgList, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), argList: argList }; } @@ -2076,7 +2077,7 @@ function parseAssign(parser: ParserState): NodeAssign | undefined { const result: Mutable = { nodeName: NodeName.Assign, - nodeRange: {start: rangeStart, end: parser.prev()}, + nodeRange: new TokenRange(rangeStart, parser.prev()), condition: condition, tail: undefined }; @@ -2087,7 +2088,7 @@ function parseAssign(parser: ParserState): NodeAssign | undefined { if (assign === undefined) return result; result.tail = 
{operator: operator, assign: assign}; - result.nodeRange = makeParsedRange(rangeStart, parser.prev()); + result.nodeRange = new TokenRange(rangeStart, parser.prev()); return result; } @@ -2109,18 +2110,18 @@ function parseCondition(parser: ParserState): NodeCondition | undefined { const result: Mutable = { nodeName: NodeName.Condition, - nodeRange: {start: rangeStart, end: rangeStart}, + nodeRange: new TokenRange(rangeStart, rangeStart), expr: expr, ternary: undefined }; if (parser.next().text === '?') { - parser.commit(HighlightToken.Operator); + parser.commit(HighlightForToken.Operator); const trueAssign = expectAssign(parser); if (trueAssign === undefined) return result; - parser.expect(':', HighlightToken.Operator); + parser.expect(':', HighlightForToken.Operator); const falseAssign = expectAssign(parser); if (falseAssign === undefined) return result; @@ -2128,7 +2129,7 @@ function parseCondition(parser: ParserState): NodeCondition | undefined { result.ternary = {trueAssign: trueAssign, falseAssign: falseAssign}; } - result.nodeRange = makeParsedRange(rangeStart, parser.prev()); + result.nodeRange = new TokenRange(rangeStart, parser.prev()); return result; } @@ -2137,17 +2138,17 @@ function parseExprOp(parser: ParserState) { const next = getNextLinkedGreaterThan(parser); if (next.isReservedToken() === false) return undefined; if (next.property.isExprOp === false) return parseNotIsOperator(parser); - parser.commit(next.text === 'is' ? HighlightToken.Builtin : HighlightToken.Operator); + parser.commit(next.text === 'is' ? HighlightForToken.Builtin : HighlightForToken.Operator); return next; } // '!is' requires special handling. 
function parseNotIsOperator(parser: ParserState) { - if (isTokensLinkedBy(parser.next(), ['!', 'is']) === false) return undefined; + if (areTokensJoinedBy(parser.next(), ['!', 'is']) === false) return undefined; - const location = getLocationBetween(parser.next(0), parser.next(1)); - parser.commit(HighlightToken.Builtin); - parser.commit(HighlightToken.Builtin); + const location = getBoundingLocationBetween(parser.next(0), parser.next(1)); + parser.commit(HighlightForToken.Builtin); + parser.commit(HighlightForToken.Builtin); return TokenReserved.createVirtual('!is', location); } @@ -2164,7 +2165,7 @@ function parseNotIsOperator(parser: ParserState) { function parseAssignOp(parser: ParserState) { const next = getNextLinkedGreaterThan(parser); if (next.isReservedToken() === false || next.property.isAssignOp === false) return undefined; - parser.commit(HighlightToken.Operator); + parser.commit(HighlightForToken.Operator); return next; } @@ -2172,9 +2173,9 @@ function getNextLinkedGreaterThan(parser: ParserState) { if (parser.next().text !== '>') return parser.next(); const check = (targets: string[], uniqueTokenText: string) => { - if (isTokensLinkedBy(parser.next(1), targets) === false) return undefined; - const location = getLocationBetween(parser.next(0), parser.next(targets.length)); - for (let i = 0; i < targets.length; ++i) parser.commit(HighlightToken.Operator); + if (areTokensJoinedBy(parser.next(1), targets) === false) return undefined; + const location = getBoundingLocationBetween(parser.next(0), parser.next(targets.length)); + for (let i = 0; i < targets.length; ++i) parser.commit(HighlightForToken.Operator); return TokenReserved.createVirtual(uniqueTokenText, location); }; diff --git a/server/src/compiler_parser/parserPreprocess.ts b/server/src/compiler_parser/parserPreprocess.ts index 3c0242c6..332d9613 100644 --- a/server/src/compiler_parser/parserPreprocess.ts +++ b/server/src/compiler_parser/parserPreprocess.ts @@ -1,6 +1,6 @@ import {TokenKind, 
TokenObject, TokenString} from "../compiler_tokenizer/tokenObject"; import {diagnostic} from "../code/diagnostic"; -import {HighlightToken} from "../code/highlight"; +import {HighlightForToken} from "../code/highlight"; import {TextLocation} from "../compiler_tokenizer/textLocation"; /** @@ -66,10 +66,10 @@ function preprocessDirectives(tokens: TokenObject[]): TokenObject[] { } function handleDirectiveTokens(directiveTokens: TokenObject[], includeFiles: TokenObject[]) { - directiveTokens[0].highlight.token = HighlightToken.Directive; + directiveTokens[0].setHighlight(HighlightForToken.Directive); if (directiveTokens[1]?.text === 'include') { - directiveTokens[1].highlight.token = HighlightToken.Directive; + directiveTokens[1].setHighlight(HighlightForToken.Directive); // Check the include directive. const fileName = directiveTokens[2]; @@ -85,7 +85,7 @@ function handleDirectiveTokens(directiveTokens: TokenObject[], includeFiles: Tok includeFiles.push(fileName); } else { - if (directiveTokens[1] != null) directiveTokens[1].highlight.token = HighlightToken.Label; + if (directiveTokens[1] != null) directiveTokens[1].setHighlight(HighlightForToken.Label); } } diff --git a/server/src/compiler_parser/parserState.ts b/server/src/compiler_parser/parserState.ts index c022795f..bec2f78b 100644 --- a/server/src/compiler_parser/parserState.ts +++ b/server/src/compiler_parser/parserState.ts @@ -1,4 +1,4 @@ -import {HighlightToken} from "../code/highlight"; +import {HighlightForToken} from "../code/highlight"; import {diagnostic} from "../code/diagnostic"; import {TokenKind, TokenObject} from "../compiler_tokenizer/tokenObject"; import { @@ -57,13 +57,13 @@ export class ParserState { this.cursorIndex++; } - public commit(analyzeToken: HighlightToken) { + public commit(highlightForToken: HighlightForToken) { const next = this.next(); - if (next.isVirtual() === false) next.highlight.token = analyzeToken; + if (next.isVirtual() === false) next.setHighlight(highlightForToken); 
this.step(); } - public expect(word: string, analyzeToken: HighlightToken) { + public expect(word: string, analyzeToken: HighlightForToken) { if (this.isEnd()) { diagnostic.addError(this.next().location, "Unexpected end of file."); return false; diff --git a/server/src/compiler_parser/tokenRange.ts b/server/src/compiler_parser/tokenRange.ts new file mode 100644 index 00000000..2084512a --- /dev/null +++ b/server/src/compiler_parser/tokenRange.ts @@ -0,0 +1,28 @@ +import {TokenObject} from "../compiler_tokenizer/tokenObject"; +import {TextLocation} from "../compiler_tokenizer/textLocation"; + +export function getBoundingLocationBetween(start: TokenObject, end: TokenObject): TextLocation { + return start.location.withEnd(end.location.end); +} + +export class TokenRange { + public constructor( + public readonly start: TokenObject, + public readonly end: TokenObject + ) { + } + + /** + * Get text range covering two tokens + */ + public getBoundingLocation(): TextLocation { + return getBoundingLocationBetween(this.start, this.end); + } + + /** + * Checks if the token spans a single line. + */ + public isOneLine(): boolean { + return this.start.location.start.line === this.end.location.end.line; + } +} \ No newline at end of file diff --git a/server/src/compiler_tokenizer/textLocation.ts b/server/src/compiler_tokenizer/textLocation.ts index 6b350221..2e52740f 100644 --- a/server/src/compiler_tokenizer/textLocation.ts +++ b/server/src/compiler_tokenizer/textLocation.ts @@ -28,6 +28,27 @@ export class TextPosition implements languageserver.Position { } } +/** + * Represents a mutable text position. + * This does not satisfy `languageserver.Position`, + * so please make it immutable when passing it to `languageserver.Position`. 
+ */ +export class MutableTextPosition { + public constructor( + public line_: number, + public character_: number + ) { + } + + public static create(position: languageserver.Position): MutableTextPosition { + return new MutableTextPosition(position.line, position.character); + } + + public freeze(): TextPosition { + return new TextPosition(this.line_, this.character_); + } +} + export class TextRange implements languageserver.Range { constructor( public readonly start: TextPosition, @@ -48,6 +69,22 @@ export class TextRange implements languageserver.Range { } } +export class MutableTextRange { + public constructor( + public start: MutableTextPosition, + public end: MutableTextPosition + ) { + } + + public static create(range: languageserver.Range): MutableTextRange { + return new MutableTextRange(MutableTextPosition.create(range.start), MutableTextPosition.create(range.end)); + } + + public freeze(): TextRange { + return new TextRange(this.start.freeze(), this.end.freeze()); + } +} + /** * Represents a location in a text file. 
*/ @@ -68,6 +105,10 @@ export class TextLocation extends TextRange { return Object.assign(Object.create(Object.getPrototypeOf(this)), this); } + public equals(other: TextLocation): boolean { + return this.path === other.path && this.start.equals(other.start) && this.end.equals(other.end); + } + public withEnd(newEnd: TextPosition): TextLocation { return new TextLocation(this.path, this.start, newEnd); } diff --git a/server/src/compiler_tokenizer/tokenObject.ts b/server/src/compiler_tokenizer/tokenObject.ts index c5b5c031..4bfc6331 100644 --- a/server/src/compiler_tokenizer/tokenObject.ts +++ b/server/src/compiler_tokenizer/tokenObject.ts @@ -1,4 +1,4 @@ -import {HighlightModifier, HighlightToken} from "../code/highlight"; +import {HighlightForModifier, HighlightForToken} from "../code/highlight"; import {findAllReservedWordProperty, ReservedWordProperty} from "./reservedWord"; import {TextLocation} from "./textLocation"; @@ -15,19 +15,8 @@ export enum TokenKind { } export interface HighlightInfo { - token: HighlightToken; - modifier: HighlightModifier; -} - -/** - * Creates virtual highlight information. - * Used to treat built-in keywords like 'int' as tokens, even though they don't actually exist in the code. - */ -export function createVirtualHighlight(): HighlightInfo { - return { - token: HighlightToken.Invalid, - modifier: HighlightModifier.Nothing, - }; + token: HighlightForToken; + modifier: HighlightForModifier; } /** @@ -46,19 +35,23 @@ export abstract class TokenBase { public readonly text: string, // The location information of a token including the file path and the position within the file. 
public readonly location: TextLocation, - highlightToken: HighlightToken, - highlightModifier: HighlightModifier = HighlightModifier.Nothing, + highlightToken: HighlightForToken, + highlightModifier: HighlightForModifier = HighlightForModifier.Nothing, ) { this._highlight = {token: highlightToken, modifier: highlightModifier}; } public abstract get kind(): TokenKind; - public setHighlight(token: HighlightToken, modifier: HighlightModifier = HighlightModifier.Nothing) { - this._highlight = {token: token, modifier: modifier}; + public setHighlight(token: HighlightForToken, modifier?: HighlightForModifier) { + if (modifier === undefined) { + this._highlight.token = token; + } else { + this._highlight = {token: token, modifier: modifier}; + } } - public get highlight(): HighlightInfo { + public get highlight(): Readonly { return this._highlight; } @@ -68,14 +61,14 @@ export abstract class TokenBase { */ protected markVirtual() { // We recognize the virtual token by whether the highlight is invalid. - this._highlight.token = HighlightToken.Invalid; + this._highlight.token = HighlightForToken.Invalid; } /** * Returns whether the token does not exist in the original code. */ public isVirtual(): boolean { - return this._highlight.token === HighlightToken.Invalid; + return this._highlight.token === HighlightForToken.Invalid; } public isReservedToken(): this is TokenReserved { @@ -104,6 +97,17 @@ export abstract class TokenBase { public get next() { return this._nextPreprocessedToken; } + + /** + * Returns the next token if it exists; otherwise, returns the current token. + */ + public getNextOrSelf() { + return this.next ?? 
this; + } + + public equals(other: TokenBase): boolean { + return this === other || (this.location.equals(other.location)); + } } export class TokenReserved extends TokenBase { @@ -114,7 +118,7 @@ export class TokenReserved extends TokenBase { location: TextLocation, property?: ReservedWordProperty, ) { - super(text, location, HighlightToken.Keyword); + super(text, location, HighlightForToken.Keyword); this.property = property ?? findAllReservedWordProperty(text); } @@ -135,7 +139,7 @@ export class TokenIdentifier extends TokenBase { text: string, location: TextLocation, ) { - super(text, location, HighlightToken.Variable); + super(text, location, HighlightForToken.Variable); } public static createVirtual(text: string, location?: TextLocation): TokenIdentifier { @@ -159,9 +163,9 @@ export class TokenNumber extends TokenBase { public constructor( text: string, location: TextLocation, - public readonly numeric: NumberLiterals, + public readonly numberLiteral: NumberLiterals, ) { - super(text, location, HighlightToken.Number); + super(text, location, HighlightForToken.Number); } public get kind(): TokenKind { @@ -174,7 +178,7 @@ export class TokenString extends TokenBase { text: string, location: TextLocation, ) { - super(text, location, HighlightToken.String); + super(text, location, HighlightForToken.String); } public static createVirtual(text: string, location?: TextLocation): TokenString { @@ -193,7 +197,7 @@ export class TokenComment extends TokenBase { text: string, location: TextLocation, ) { - super(text, location, HighlightToken.Comment); + super(text, location, HighlightForToken.Comment); } public get kind(): TokenKind { diff --git a/server/src/compiler_tokenizer/tokenUtils.ts b/server/src/compiler_tokenizer/tokenUtils.ts index 685c0b06..da9ccb49 100644 --- a/server/src/compiler_tokenizer/tokenUtils.ts +++ b/server/src/compiler_tokenizer/tokenUtils.ts @@ -1,40 +1,26 @@ import { - TokenBase, TokenObject } from "./tokenObject"; -/** - * Determines if two 
tokens are identical. - * This function does not check if they are the same instance; - * instead, it compares the members of each token object individually. - */ -export function isSameToken(l: TokenBase, r: TokenBase): boolean { - return l.text === r.text - && l.location.path === r.location.path - && l.location.start.line === r.location.start.line - && l.location.start.character === r.location.start.character - && l.location.end.line === r.location.end.line - && l.location.end.character === r.location.end.character; -} - /** * Determines if a given sequence of tokens matches the specified string sequence. * For example, this can be used to check if tokens like ['>', '>'] form the string '>>'. * - * @param head The starting token to check. - * @param targets The expected string sequence. - * @returns `true` if the tokens match the target sequence, otherwise `false`. + * @param headToken The starting token to check. + * @param expectedTexts The expected string sequence. + * @returns `true` if the tokens match the expected sequence, otherwise `false`. 
*/ -export function isTokensLinkedBy(head: TokenObject, targets: string[]): boolean { - if (head.text !== targets[0]) return false; +export function areTokensJoinedBy(headToken: TokenObject, expectedTexts: string[]): boolean { + if (headToken.text !== expectedTexts[0]) return false; + + let cursor = headToken.next; + let tailColumn = headToken.location.end.character; + for (let i = 1; i < expectedTexts.length; i++) { + if (cursor === undefined || cursor.text !== expectedTexts[i]) return false; + if (cursor.location.start.line !== headToken.location.start.line) return false; + if (cursor.location.start.character !== tailColumn) return false; - let cursor = head.next; - let column = head.location.end.character; - for (let i = 1; i < targets.length; i++) { - if (cursor === undefined || cursor.text !== targets[i]) return false; - if (cursor.location.start.line !== head.location.start.line) return false; - if (cursor.location.start.character !== column) return false; - column = cursor.location.end.character; + tailColumn = cursor.location.end.character; cursor = cursor.next; } diff --git a/server/src/compiler_tokenizer/tokenizer.ts b/server/src/compiler_tokenizer/tokenizer.ts index ead5e35d..aabffa0c 100644 --- a/server/src/compiler_tokenizer/tokenizer.ts +++ b/server/src/compiler_tokenizer/tokenizer.ts @@ -1,11 +1,8 @@ -import {HighlightModifier, HighlightToken} from "../code/highlight"; import { - HighlightInfo, NumberLiterals, TokenComment, TokenIdentifier, TokenObject, - TokenKind, TokenNumber, TokenReserved, TokenString @@ -13,7 +10,7 @@ import { import {diagnostic} from "../code/diagnostic"; import {TokenizerState, UnknownBuffer} from "./tokenizerState"; import {findReservedKeywordProperty, findReservedWeakMarkProperty, ReservedWordProperty} from "./reservedWord"; -import {TextLocation, TextPosition} from "./textLocation"; +import {TextLocation} from "./textLocation"; function isDigit(c: string): boolean { return /^[0-9]$/.test(c); @@ -42,22 +39,23 @@ function 
tryComment(tokenizer: TokenizerState, location: TextLocation): TokenCom } else if (tokenizer.isNext('/*')) { return tokenizeBlockComment(tokenizer, location); } + return undefined; } function tokenizeLineComment(tokenizer: TokenizerState, location: TextLocation) { - const start = tokenizer.getCursor(); + const start = tokenizer.getCursorOffset(); tokenizer.stepFor(2); for (; ;) { if (tokenizer.isEnd() || tokenizer.isNextWrap()) break; tokenizer.stepNext(); } - return new TokenComment(tokenizer.substrFrom(start), location.withEnd(tokenizer.copyHead())); + return new TokenComment(tokenizer.substrToCursor(start), location.withEnd(tokenizer.getCursorPosition())); } function tokenizeBlockComment(tokenizer: TokenizerState, location: TextLocation) { - const start = tokenizer.getCursor(); + const start = tokenizer.getCursorOffset(); tokenizer.stepFor(2); for (; ;) { if (tokenizer.isEnd()) break; @@ -68,21 +66,21 @@ function tokenizeBlockComment(tokenizer: TokenizerState, location: TextLocation) tokenizer.stepNext(); } - return new TokenComment(tokenizer.substrFrom(start), location.withEnd(tokenizer.copyHead())); + return new TokenComment(tokenizer.substrToCursor(start), location.withEnd(tokenizer.getCursorPosition())); } // Check if the next token is a number and tokenize it. 
function tryNumber(tokenizer: TokenizerState, location: TextLocation): TokenNumber | undefined { - const start = tokenizer.getCursor(); + const start = tokenizer.getCursorOffset(); - const numeric = consumeNumber(tokenizer); + const numberLiteral = consumeNumber(tokenizer); - if (start === tokenizer.getCursor()) return undefined; + if (start === tokenizer.getCursorOffset()) return undefined; return new TokenNumber( - tokenizer.substrFrom(start), - location.withEnd(tokenizer.copyHead()), - numeric); + tokenizer.substrToCursor(start), + location.withEnd(tokenizer.getCursorPosition()), + numberLiteral); } function consumeNumber(tokenizer: TokenizerState) { @@ -115,14 +113,14 @@ function consumeNumber(tokenizer: TokenizerState) { // Read until it is 0-9. while (tokenizer.isEnd() === false && isDigit(tokenizer.next())) tokenizer.stepNext(); - let numeric = NumberLiterals.Integer; + let numberLiteral = NumberLiterals.Integer; // Check if it is a floating point number let f = 0; if (tokenizer.next() === '.') { f++; while (isDigit(tokenizer.next(f))) f++; - numeric = NumberLiterals.Double; + numberLiteral = NumberLiterals.Double; } // Check if it has an exponent @@ -130,14 +128,14 @@ function consumeNumber(tokenizer: TokenizerState) { if (/^[eE]$/.test(tokenizer.next(f)) && /^[+-]$/.test(tokenizer.next(f + 1)) && isDigit(tokenizer.next(f + 2))) { f += 3; while (isDigit(tokenizer.next(f))) f++; - numeric = NumberLiterals.Double; + numberLiteral = NumberLiterals.Double; } if (f >= 1) { tokenizer.stepFor(f); // Check half precision floating point - if (numeric === NumberLiterals.Double) { + if (numberLiteral === NumberLiterals.Double) { if (/^[fF]$/.test(tokenizer.next())) { tokenizer.stepNext(); return NumberLiterals.Float; @@ -145,13 +143,13 @@ function consumeNumber(tokenizer: TokenizerState) { } } - return numeric; + return numberLiteral; } // Check if the next token is a string and tokenize it. 
function tryString(tokenizer: TokenizerState, location: TextLocation): TokenString | undefined { - const start = tokenizer.getCursor(); + const start = tokenizer.getCursorOffset(); if (tokenizer.next() !== '\'' && tokenizer.next() !== '"') return undefined; const startQuote: '\'' | '"' | '"""' = (() => { if (tokenizer.isNext('"""')) return '"""'; @@ -166,8 +164,8 @@ function tryString(tokenizer: TokenizerState, location: TextLocation): TokenStri if (startQuote !== '"""' && tokenizer.isNextWrap()) { diagnostic.addError({ - start: tokenizer.copyHead(), - end: tokenizer.copyHead(), + start: tokenizer.getCursorPosition(), + end: tokenizer.getCursorPosition(), }, 'Missing end quote ' + startQuote); break; } else if (isEscaping === false && tokenizer.isNext(startQuote)) { @@ -183,17 +181,17 @@ function tryString(tokenizer: TokenizerState, location: TextLocation): TokenStri } } - return new TokenString(tokenizer.substrFrom(start), location.withEnd(tokenizer.copyHead())); + return new TokenString(tokenizer.substrToCursor(start), location.withEnd(tokenizer.getCursorPosition())); } // Check if the next token is a mark and tokenize it. function tryMark(tokenizer: TokenizerState, location: TextLocation): TokenReserved | undefined { - const mark = findReservedWeakMarkProperty(tokenizer.content, tokenizer.getCursor()); + const mark = findReservedWeakMarkProperty(tokenizer._fileContent, tokenizer.getCursorOffset()); if (mark === undefined) return undefined; tokenizer.stepFor(mark.key.length); - return createTokenReserved(mark.key, mark.value, location.withEnd(tokenizer.copyHead())); + return createTokenReserved(mark.key, mark.value, location.withEnd(tokenizer.getCursorPosition())); } function createTokenReserved(text: string, property: ReservedWordProperty, location: TextLocation): TokenReserved { @@ -202,28 +200,21 @@ function createTokenReserved(text: string, property: ReservedWordProperty, locat // Check if the next token is an identifier and tokenize it. 
function tryIdentifier(tokenizer: TokenizerState, location: TextLocation): TokenObject | TokenIdentifier | undefined { - const start = tokenizer.getCursor(); + const start = tokenizer.getCursorOffset(); while (tokenizer.isEnd() === false && isAlphanumeric(tokenizer.next())) { tokenizer.stepFor(1); } - const identifier = tokenizer.substrFrom(start); + const identifier = tokenizer.substrToCursor(start); if (identifier === "") return undefined; - const tokenLocation = location.withEnd(tokenizer.copyHead()); + const tokenLocation = location.withEnd(tokenizer.getCursorPosition()); const reserved = findReservedKeywordProperty(identifier); if (reserved !== undefined) return createTokenReserved(identifier, reserved, tokenLocation); return new TokenIdentifier(identifier, tokenLocation); } -function createHighlight(token: HighlightToken, modifier: HighlightModifier): HighlightInfo { - return { - token: token, - modifier: modifier, - }; -} - /** * The entry point for the tokenizer. * @param content The content of the file to tokenize. 
@@ -244,45 +235,46 @@ export function tokenize(content: string, path: string): TokenObject[] { const location: TextLocation = new TextLocation( path, - tokenizer.copyHead(), - tokenizer.copyHead(), + tokenizer.getCursorPosition(), + tokenizer.getCursorPosition(), ); - // Tokenize Comment + // Tokenize a comment const triedComment = tryComment(tokenizer, location); if (triedComment !== undefined) { tokens.push(triedComment); continue; } - // Tokenize Number + // Tokenize a number const triedNumber = tryNumber(tokenizer, location); if (triedNumber !== undefined) { tokens.push(triedNumber); continue; } - // Tokenize String + // Tokenize a string const triedString = tryString(tokenizer, location); if (triedString !== undefined) { tokens.push(triedString); continue; } - // Tokenize Non-alphabetic Symbol + // Tokenize a non-alphabetic symbol const triedMark = tryMark(tokenizer, location); if (triedMark !== undefined) { tokens.push(triedMark); continue; } - // Tokenize Identifier or Reserved Keyword + // Tokenize an identifier or reserved keyword const triedIdentifier = tryIdentifier(tokenizer, location); if (triedIdentifier !== undefined) { tokens.push(triedIdentifier); continue; } + // If the token is unknown, buffer it. 
unknownBuffer.append(location, tokenizer.next()); tokenizer.stepNext(); } diff --git a/server/src/compiler_tokenizer/tokenizerState.ts b/server/src/compiler_tokenizer/tokenizerState.ts index 8a2a4aa6..4c3f2ada 100644 --- a/server/src/compiler_tokenizer/tokenizerState.ts +++ b/server/src/compiler_tokenizer/tokenizerState.ts @@ -1,107 +1,112 @@ import {diagnostic} from "../code/diagnostic"; -import {TextLocation, TextPosition, TextRange} from "./textLocation"; -import {DeepMutable, Mutable} from "../utils/utilities"; +import {MutableTextPosition, MutableTextRange, TextLocation, TextPosition, TextRange} from "./textLocation"; export class TokenizerState { // The content of the file to be tokenized - public readonly content: string; + public readonly _fileContent: string; - // Index of the current cursor position in the content string - private cursor: number; + // Current offset position of the head in the file content string + private _cursorOffset: number; - // Same as cursor, but expressed in terms of line and character position rather than index - private readonly head: Mutable; + // Same as _cursorOffset, but expressed in terms of line and character position + private readonly _cursorPosition: MutableTextPosition; - public getCursor() { - return this.cursor; + public getCursorOffset() { + return this._cursorOffset; } - constructor(content: string) { - this.content = content; - this.cursor = 0; - this.head = new TextPosition(0, 0); + public getCursorPosition(): TextPosition { + return this._cursorPosition.freeze(); } - next(offset: number = 0) { - return this.content[this.cursor + offset]; + public constructor(content: string) { + this._fileContent = content; + this._cursorOffset = 0; + this._cursorPosition = new MutableTextPosition(0, 0); } - isEnd() { - return this.cursor >= this.content.length; + public next(offset: number = 0) { + return this._fileContent[this._cursorOffset + offset]; } - isNext(expected: string) { - return this.content.substring(this.cursor, 
this.cursor + expected.length) === expected; +    public isEnd() { +        return this._cursorOffset >= this._fileContent.length;     } -    isNextWrap() { +    public isNext(expected: string) { +        return this._fileContent.substring(this._cursorOffset, this._cursorOffset + expected.length) === expected; +    } + +    public isNextWrap() {         const next = this.next();         return next === '\r' || next === '\n';     } -    isNextWhitespace() { -        const next = this.content[this.cursor]; +    public isNextWhitespace() { +        const next = this._fileContent[this._cursorOffset];         return next === ' ' || next === '\t';     } -    stepNext() { +    public stepNext() {         if (this.isEnd()) return;         if (this.isNextWrap()) { -            this.head.line++; -            this.head.character = 0; -            if (this.isNext('\r\n')) this.cursor += 2; -            else this.cursor += 1; +            this._cursorPosition.line_++; +            this._cursorPosition.character_ = 0; +            if (this.isNext('\r\n')) this._cursorOffset += 2; +            else this._cursorOffset += 1;         } else { -            this.head.character++; -            this.cursor += 1; +            this._cursorPosition.character_++; +            this._cursorOffset += 1;         }     } -    stepFor(count: number) { -        this.head.character += count; -        this.cursor += count; -    } - -    substrFrom(start: number) { -        return this.content.substring(start, this.cursor); +    public stepFor(count: number) { +        this._cursorPosition.character_ += count; +        this._cursorOffset += count;     } -    copyHead(): TextPosition { -        return this.head.clone(); +    /** +     * Returns the substring from the specified start position to the current cursor position +     */ +    public substrToCursor(start: number) { +        return this._fileContent.substring(start, this._cursorOffset);     } } /** - * Buffer for strings that are not Alphabets, numbers, or symbols + * Buffer for strings that are not alphabetic characters, numbers, or symbols  */ export class UnknownBuffer { -    private buffer: string = ""; -    private location: DeepMutable<TextRange> | null = null; -
-    public append(head: TextRange, next: string) { -        if (this.location === null) { -            this.location = head; -        } else if (head.start.line !==
this.location.start.line - || head.start.character - this.location.end.character > 1 + private _bufferText: string = ""; + private _bufferLocation: MutableTextRange | null = null; + + public append(cursor: TextRange, next: string) { + if (this._bufferLocation === null) { + // Initialize the location + this._bufferLocation = MutableTextRange.create(cursor); + } else if ( + cursor.start.line !== this._bufferLocation.end.line_ // if the line is different + || cursor.start.character - this._bufferLocation.end.character_ > 1 // or if there is a space gap between the last token ) { + // Flushes the buffer this.flush(); - this.location = head; + this._bufferLocation.start = MutableTextPosition.create(cursor.start); } - this.location.end = head.end; - this.buffer += next; + this._bufferLocation.end = MutableTextPosition.create(cursor.end); + this._bufferText += next; } /** * Flushes the buffer and reports an error if the buffer is not empty */ public flush() { - if (this.buffer.length === 0) return; - if (this.location === null) return; + if (this._bufferText.length === 0) return; + if (this._bufferLocation === null) return; - this.location.end.character++; - diagnostic.addError(this.location, 'Unknown token: ' + this.buffer); - this.buffer = ""; + this._bufferLocation.end.character_++; + diagnostic.addError(this._bufferLocation.freeze(), 'Unknown token: ' + this._bufferText); + this._bufferText = ""; } } \ No newline at end of file diff --git a/server/src/formatter/formatter.ts b/server/src/formatter/formatter.ts index 8cc9ca12..47524596 100644 --- a/server/src/formatter/formatter.ts +++ b/server/src/formatter/formatter.ts @@ -35,7 +35,6 @@ import {FormatterState, isEditedWrapAt} from "./formatterState"; import {TextEdit} from "vscode-languageserver-types/lib/esm/main"; import {formatMoveToNonComment, formatMoveUntil, formatMoveUntilNodeStart, formatTargetBy} from "./formatterDetail"; import {TokenObject} from "../compiler_tokenizer/tokenObject"; -import 
{isRangeInOneLine} from "../compiler_parser/nodesUtils"; // SCRIPT ::= {IMPORT | ENUM | TYPEDEF | CLASS | MIXIN | INTERFACE | FUNCDEF | VIRTPROP | VAR | FUNC | NAMESPACE | ';'} function formatScript(format: FormatterState, nodeScript: NodeScript) { @@ -403,7 +402,7 @@ function formatIntfMethod(format: FormatterState, intfMethod: NodeIntfMethod) { function formatStatBlock(format: FormatterState, statBlock: NodeStatBlock) { formatMoveUntilNodeStart(format, statBlock); - const isOneLine = isRangeInOneLine(statBlock.nodeRange); + const isOneLine = statBlock.nodeRange.isOneLine(); formatBraceBlock(format, () => { for (const statement of statBlock.statementList) { diff --git a/server/src/server.ts b/server/src/server.ts index 4b6b07c9..40978a29 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -15,7 +15,7 @@ import { import { TextDocument } from 'vscode-languageserver-textdocument'; -import {highlightModifiers, highlightTokens} from "./code/highlight"; +import {highlightForModifierList, highlightForTokenList} from "./code/highlight"; import {getFileLocationOfToken, serveDefinition, serveDefinitionAsToken} from "./services/definition"; import { requestCleanInspectedResults, @@ -84,8 +84,8 @@ connection.onInitialize((params: InitializeParams) => { }, semanticTokensProvider: { legend: { - tokenTypes: highlightTokens, - tokenModifiers: highlightModifiers + tokenTypes: highlightForTokenList, + tokenModifiers: highlightForModifierList }, range: false, // if true, the server supports range-based requests full: true diff --git a/server/src/services/reference.ts b/server/src/services/reference.ts index 88ffc132..3408a967 100644 --- a/server/src/services/reference.ts +++ b/server/src/services/reference.ts @@ -1,7 +1,6 @@ import {Position} from "vscode-languageserver"; import {serveDefinitionAsToken} from "./definition"; import {AnalyzedScope, SymbolScope} from "../compiler_analyzer/symbolScope"; -import {isSameToken} from "../compiler_tokenizer/tokenUtils"; import 
{TokenObject} from "../compiler_tokenizer/tokenObject"; export function serveReferences(targetScope: AnalyzedScope, analyzedScopes: SymbolScope[], caret: Position): TokenObject[] { @@ -21,9 +20,7 @@ function collectReferencesInScope(scope: SymbolScope, targetDefinition: TokenObj for (const reference of scope.referencedList) { // Search for reference locations in the scope (since the token instance changes every time it is compiled, strict comparison is required) if (reference.declaredSymbol.declaredPlace === targetDefinition - || isSameToken( - reference.declaredSymbol.declaredPlace, - targetDefinition) + || reference.declaredSymbol.declaredPlace.equals(targetDefinition) ) { references.push(reference.referencedToken); } diff --git a/server/src/utils/utilities.ts b/server/src/utils/utilities.ts index 44858362..38281229 100644 --- a/server/src/utils/utilities.ts +++ b/server/src/utils/utilities.ts @@ -8,13 +8,13 @@ export type Mutable = { -readonly [P in keyof T]: T[P]; }; -export type DeepMutable = { - -readonly [P in keyof T]: T[P] extends (infer U)[] - ? Array> - : T[P] extends object - ? DeepMutable - : T[P]; -}; +// export type DeepMutable = { +// -readonly [P in keyof T]: T[P] extends (infer U)[] +// ? Array> +// : T[P] extends object +// ? DeepMutable +// : T[P]; +// }; export type DeepReadonly = { readonly [P in keyof T]: T[P] extends (infer U)[]