From 72d45625b99578273c7950017bf00efc35859a2c Mon Sep 17 00:00:00 2001 From: efekos Date: Mon, 15 Apr 2024 17:27:31 +0300 Subject: [PATCH 01/47] update package version and bump dependecy versions to the latest --- package-lock.json | 10 +++++----- package.json | 10 +++++----- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/package-lock.json b/package-lock.json index 76d750e..0188781 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,21 +1,21 @@ { "name": "@syntaxs/compiler", - "version": "0.0.1-alpha", + "version": "0.0.2-alpha", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@syntaxs/compiler", - "version": "0.0.1-alpha", + "version": "0.0.2-alpha", "license": "MIT", "dependencies": { "js-levenshtein": "^1.1.6" }, "devDependencies": { "@types/js-levenshtein": "^1.1.3", - "@types/node": "^20.12.5", - "@typescript-eslint/eslint-plugin": "^7.4.0", - "@typescript-eslint/parser": "^7.4.0", + "@types/node": "^20.12.7", + "@typescript-eslint/eslint-plugin": "^7.6.0", + "@typescript-eslint/parser": "^7.6.0", "eslint": "^8.57.0", "eslint-plugin-jsdoc": "^48.2.3", "lsp-types": "^3.17.0-f3" diff --git a/package.json b/package.json index bb98339..7b034b6 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@syntaxs/compiler", - "version": "0.0.1-alpha", + "version": "0.0.2-alpha", "description": "Compiler used to compile Syntax Script projects.", "main": "dist/index.js", "types": "dist/index.d.ts", @@ -36,9 +36,9 @@ "homepage": "https://github.com/syntaxs/compiler#readme", "devDependencies": { "@types/js-levenshtein": "^1.1.3", - "@types/node": "^20.12.5", - "@typescript-eslint/eslint-plugin": "^7.4.0", - "@typescript-eslint/parser": "^7.4.0", + "@types/node": "^20.12.7", + "@typescript-eslint/eslint-plugin": "^7.6.0", + "@typescript-eslint/parser": "^7.6.0", "eslint": "^8.57.0", "eslint-plugin-jsdoc": "^48.2.3", "lsp-types": "^3.17.0-f3" @@ -46,4 +46,4 @@ "dependencies": { "js-levenshtein": "^1.1.6" } -} +} \ 
No newline at end of file From 3c92765d7afd116db4803663256f0a5ae6d6e778 Mon Sep 17 00:00:00 2001 From: efekos Date: Mon, 15 Apr 2024 17:52:40 +0300 Subject: [PATCH 02/47] remove ExportStatement and add modifiers --- src/types.ts | 66 +++++++++++++++++++++++++++++++++++++--------------- 1 file changed, 47 insertions(+), 19 deletions(-) diff --git a/src/types.ts b/src/types.ts index e7ac27a..fba0624 100644 --- a/src/types.ts +++ b/src/types.ts @@ -204,11 +204,6 @@ export enum NodeType { */ Imports, // imports() method - /** - * {@link ExportStatement}. - */ - Export, - /** * {@link FunctionStatement}. */ @@ -281,12 +276,13 @@ export interface ProgramStatement extends Statement { /** * Base statement interface. * @author efekos - * @version 1.0.3 + * @version 1.0.4 * @since 0.0.1-alpha */ export interface Statement { type: NodeType; range: Range; + modifiers: Token[]; } /** @@ -454,17 +450,6 @@ export interface ImportStatement extends Statement { path: string; } -/** - * Export statements are used to export a certain statement, such as an operator or a keyword. Uses type {@link NodeType.Export} - * @author efekos - * @version 1.0.0 - * @since 0.0.1-alpha - */ -export interface ExportStatement extends Statement { - type: NodeType.Export, - body: Statement; -} - /** * Function statements are used to define possible function calls. How the function is called depends on the place this statement is * used. Uses type {@link NodeType.Function}. @@ -479,6 +464,18 @@ export interface FunctionStatement extends Statement { body: Statement[]; } +/** + * Global statements are used to define values that are global. They can be global classes, interfaces, or just global methods depending on + * the language. But the only thing that matters here is that they are global, and can be used from anywhere. 
+ * @author efekos + * @version 1.0.0 + * @since 0.0.2-alpha + */ +export interface GlobalStatement extends Statement { + body: Statement[]; + name: string; +} + /** * Represents any interface that is a node. @@ -487,7 +484,7 @@ export interface FunctionStatement extends Statement { * @since 0.0.1-alpha */ export type Node = - ProgramStatement | OperatorStatement | CompileStatement | ImportStatement | ExportStatement | ImportsStatement | FunctionStatement | KeywordStatement | RuleStatement | + ProgramStatement | OperatorStatement | CompileStatement | ImportStatement | ImportsStatement | FunctionStatement | KeywordStatement | RuleStatement | StringExpression | PrimitiveTypeExpression | VariableExpression | WhitespaceIdentifierExpression | BraceExpression | SquareExpression | ParenExpression; /** @@ -516,7 +513,6 @@ export interface SyxConfigCompile { } - /** * An error that occured while tokenizing, parsing or compiling a file. * @author efekos @@ -553,4 +549,36 @@ export class CompilerError extends Error { */ export function isCompilerError(error: Error): error is CompilerError { return error.name === 'CompilerError'; +} + +interface NodeTypes { + [NodeType.Brace]: BraceExpression; + [NodeType.Compile]: CompileStatement; + [NodeType.Function]: FunctionStatement; + [NodeType.Import]: ImportStatement; + [NodeType.Imports]: ImportsStatement; + [NodeType.Keyword]: KeywordStatement; + [NodeType.Operator]: OperatorStatement; + [NodeType.Paren]: ParenExpression; + [NodeType.PrimitiveType]: PrimitiveTypeExpression; + [NodeType.Program]: ProgramStatement; + [NodeType.Rule]: RuleStatement; + [NodeType.Square]: SquareExpression; + [NodeType.String]: StringExpression; + [NodeType.Variable]: VariableExpression; + [NodeType.WhitespaceIdentifier]: WhitespaceIdentifierExpression; + [NodeType.Global]: GlobalStatement; +} + +/** + * Determines whether the given node matches the expected node type. + * @param {Node} node Any node. + * @param {NodeType} nodeType Expected node type. 
+ * @returns {boolean} True if the given node is of the expected node type, otherwise false. + * @author efekos + * @since 0.0.2-alpha + * @version 1.0.0 + */ +export function statementIsA(node:Statement,nodeType:T): node is NodeTypes[T] { + return node.type === nodeType; } \ No newline at end of file From 47a537baf34371903a5104f635f13ddf179038e7 Mon Sep 17 00:00:00 2001 From: efekos Date: Mon, 15 Apr 2024 17:54:58 +0300 Subject: [PATCH 03/47] Use modifiers to handle export statements --- src/ast.ts | 74 ++++++++++++++++++++++++------------------------- src/compiler.ts | 57 +++++++++++++++++-------------------- 2 files changed, 63 insertions(+), 68 deletions(-) diff --git a/src/ast.ts b/src/ast.ts index 23ced50..26b7cb9 100644 --- a/src/ast.ts +++ b/src/ast.ts @@ -1,4 +1,4 @@ -import { BraceExpression, CompileStatement, CompilerError, ExportStatement, Expression, FunctionStatement, ImportsStatement, KeywordStatement, Node, NodeType, OperatorStatement, ParenExpression, PrimitiveTypeExpression, ProgramStatement, SquareExpression, StringExpression, Token, TokenType, VariableExpression } from './types.js'; +import { BraceExpression, CompileStatement, CompilerError, Expression, FunctionStatement, ImportsStatement, KeywordStatement, Node, NodeType, OperatorStatement, ParenExpression, PrimitiveTypeExpression, ProgramStatement, SquareExpression, StringExpression, Token, TokenType, VariableExpression, statementIsA } from './types.js'; import { CodeAction, CodeActionKind, Range } from 'lsp-types'; import { dictionary } from './dictionary/dictionary.js'; import levenshtein from 'js-levenshtein'; @@ -7,9 +7,10 @@ import { subRange } from './diagnostic.js'; const caf = { mk: (keyword: string, program: ProgramStatement, range: Range, filePath: string): CodeAction[] => { const existingKeywords = program.body - .filter(r => r.type === NodeType.Keyword || (r.type === NodeType.Export && (r as ExportStatement).body.type === NodeType.Keyword)) - .map(stmt => stmt.type === 
NodeType.Export ? ((stmt as ExportStatement).body as KeywordStatement).word : (stmt as KeywordStatement).word) - .filter(a => levenshtein(a, keyword)); + .filter(r => statementIsA(r,NodeType.Keyword)) + .map(r => r as KeywordStatement) + .map(r => r.word) + .sort(a=>levenshtein(keyword,a)); return existingKeywords.map(word => { return { @@ -43,7 +44,7 @@ export namespace syxparser { if (ex.type !== NodeType.String) throw new CompilerError(ex.range, 'Expected file path after import statement.', filePath); if (at().type !== TokenType.Semicolon) throw new CompilerError(at().range, `Expected ';' after import statement, found '${at().value}'.`, filePath); tokens.shift(); - return node({ type: NodeType.Import, path: (ex as Expression).value, range: combineTwo(token, ex.range) }, put); + return node({ type: NodeType.Import, path: (ex as Expression).value, range: combineTwo(token, ex.range), modifiers: [] }, put); } /** @@ -64,19 +65,18 @@ export namespace syxparser { if (at().type !== TokenType.Semicolon) throw new CompilerError(at().range, `Expected semicolon after rule statement, found '${at().value}'.`, filePath); - return node({ type: NodeType.Rule, rule: ruleExpr.value, value: boolEx.value, range: combineTwo(token, tokens.shift()) }, put); + return node({ type: NodeType.Rule, rule: ruleExpr.value, value: boolEx.value, range: combineTwo(token, tokens.shift()), modifiers:[] }, put); } else if (rule.type === 'keyword') { const keyEx = parseExpression(false, false, true) as Expression; if (!( keyEx.type === NodeType.String && program.body.some(s => - (s.type === NodeType.Keyword && (s as KeywordStatement).word === keyEx.value) || - (s.type === NodeType.Export && (s as ExportStatement).body.type === NodeType.Keyword && ((s as ExportStatement).body as KeywordStatement).word === keyEx.value) + statementIsA(s,NodeType.Keyword) && s.word === keyEx.value ) )) throw new CompilerError(keyEx.range, `Can't find keyword '${keyEx.value}'.`, filePath, caf.mk(keyEx.value, program, 
keyEx.range, filePath)); if (at().type !== TokenType.Semicolon) throw new CompilerError(at().range, `Expected semicolon after rule statement, found ${at().value}.`, filePath); - return node({ type: NodeType.Rule, rule: ruleExpr.value, value: keyEx.value, range: combineTwo(token, tokens.shift()) }, put); + return node({ type: NodeType.Rule, rule: ruleExpr.value, value: keyEx.value, range: combineTwo(token, tokens.shift()), modifiers: [] }, put); } } @@ -89,7 +89,7 @@ export namespace syxparser { if (ex.type !== NodeType.String) throw new CompilerError(ex.range, `Expected identifier after keyword statement, found '${ex.value}'.`, filePath); if (at().type !== TokenType.Semicolon) throw new CompilerError(at().range, `Expected ';' after statement, found '${at().value}'.`, filePath); tokens.shift(); // skip semicolon - return node({ type: NodeType.Keyword, word: ex.value, range: combineTwo(token, ex.range) }, put); + return node({ type: NodeType.Keyword, word: ex.value, range: combineTwo(token, ex.range), modifiers: [] }, put); } /** @@ -98,8 +98,9 @@ export namespace syxparser { */ export function parseExportStatement(token: Token, put: boolean): Node { const stmt = parseStatement(false); - if (!exportable.includes(stmt.type)) throw new CompilerError(stmt.range, 'Expected exportable statement after \'export\'.', filePath); - return node({ type: NodeType.Export, body: stmt, range: combineTwo(token, stmt.range) }, put); + stmt.range = combineTwo(token,stmt.range); + stmt.modifiers.push(token); + return node(stmt, put); } /** @@ -107,7 +108,7 @@ export namespace syxparser { * @returns Parsed node. 
*/ export function parseFunctionStatement(token: Token, put: boolean): Node { - const statement: FunctionStatement = { type: NodeType.Function, arguments: [], name: '', body: [], range: defaultRange }; + const statement: FunctionStatement = { type: NodeType.Function, arguments: [], name: '', body: [], range: defaultRange,modifiers:[] }; if (at().type !== TokenType.Identifier) throw new CompilerError(at().range, `Expected identifier after function statement, found '${at().value}'.`, filePath); statement.name = at().value; @@ -134,7 +135,7 @@ export namespace syxparser { * @returns Parsed node. */ export function parseImportsStatement(token: Token, put: boolean) { - const statement: ImportsStatement = { type: NodeType.Imports, formats: [], module: '', range: defaultRange }; + const statement: ImportsStatement = { type: NodeType.Imports, formats: [], module: '', range: defaultRange, modifiers:[] }; if (at().type !== TokenType.OpenParen) throw new CompilerError(at().range, 'Imports statement require parens.', filePath); @@ -171,7 +172,7 @@ export namespace syxparser { * @returns Parsed node. */ export function parseCompileStatement(token: Token, put: boolean): Node { - const statement: CompileStatement = { type: NodeType.Compile, formats: [], body: [], range: defaultRange }; + const statement: CompileStatement = { type: NodeType.Compile, formats: [], body: [], range: defaultRange, modifiers:[] }; if (at().type !== TokenType.OpenParen) throw new CompilerError(at().range, 'Compile statement require parens.', filePath); @@ -203,7 +204,7 @@ export namespace syxparser { * @returns Parsed node. 
*/ export function parseOperatorStatement(token: Token, put: boolean) { - const statement: OperatorStatement = { type: NodeType.Operator, regex: [], body: [], range: defaultRange }; + const statement: OperatorStatement = { type: NodeType.Operator, regex: [], body: [], range: defaultRange, modifiers:[] }; while (at().type !== TokenType.OpenBrace) { @@ -241,7 +242,7 @@ export namespace syxparser { s += _t.value; } - return node({ type: NodeType.String, value: s, range: combineTwo(range, tokens.shift()) }, put); + return node({ type: NodeType.String, value: s, range: combineTwo(range, tokens.shift()), modifiers:[] }, put); } /** @@ -260,7 +261,7 @@ export namespace syxparser { s += _t.value; } - return node({ type: NodeType.String, value: s, range: combineTwo(range, tokens.shift()) }, put); + return node({ type: NodeType.String, value: s, range: combineTwo(range, tokens.shift()),modifiers:[] }, put); } /** @@ -274,7 +275,7 @@ export namespace syxparser { if (at(2).type !== TokenType.CloseDiamond) throw new CompilerError(at(2).range, `Expected '>' after primitive type identifier, found '${at(2).value}'`, filePath); const t = tokens.shift(); tokens.shift(); - return node({ type: NodeType.PrimitiveType, value: newToken.value, range: combineTwo(t, tokens.shift()) }, put); + return node({ type: NodeType.PrimitiveType, value: newToken.value, range: combineTwo(t, tokens.shift()),modifiers:[] }, put); } /** @@ -283,7 +284,7 @@ export namespace syxparser { */ export function parseWhitespaceIdentifier(put: boolean): Node { const { range } = tokens.shift(); - return node({ type: NodeType.WhitespaceIdentifier, value: '+s', range }, put); + return node({ type: NodeType.WhitespaceIdentifier, value: '+s', range,modifiers:[] }, put); } /** @@ -293,7 +294,7 @@ export namespace syxparser { export function parseBraceExpression(put: boolean, dr: Range) { const { range } = tokens.shift(); - const expr: BraceExpression = { type: NodeType.Brace, body: [], value: '{', range: dr }; + const 
expr: BraceExpression = { type: NodeType.Brace, body: [], value: '{', range: dr ,modifiers:[]}; while (at().type !== TokenType.CloseBrace) { const stmt = parseStatement(false); @@ -310,7 +311,7 @@ export namespace syxparser { export function parseSquareExpression(put: boolean, dr: Range) { const { range } = tokens.shift(); - const expr: SquareExpression = { type: NodeType.Square, body: [], value: '[', range: dr }; + const expr: SquareExpression = { type: NodeType.Square, body: [], value: '[', range: dr ,modifiers:[]}; while (at().type !== TokenType.CloseSquare) { const stmt = parseStatement(false); @@ -327,7 +328,7 @@ export namespace syxparser { export function parseParenExpression(put: boolean, dr: Range) { const { range } = tokens.shift(); - const expr: ParenExpression = { type: NodeType.Paren, body: [], value: '(', range: dr }; + const expr: ParenExpression = { type: NodeType.Paren, body: [], value: '(', range: dr,modifiers:[] }; while (at().type !== TokenType.CloseParen) { const stmt = parseStatement(false); @@ -348,7 +349,7 @@ export namespace syxparser { const id = tokens.shift(); // id tokens.shift(); // sep const index = tokens.shift(); // index - const expr: VariableExpression = { index: parseInt(index.value), type: NodeType.Variable, value: id.value, range: combineTwo(id, index) }; + const expr: VariableExpression = { index: parseInt(index.value), type: NodeType.Variable, value: id.value, range: combineTwo(id, index),modifiers:[] }; return node(expr, put); } @@ -379,14 +380,14 @@ export namespace syxparser { * @param {string} _filePath Path of the file that is being parsed. * @returns Main {@link ProgramStatement} containing all other statements. 
* @author efekos - * @version 1.0.3 - * @since 0.0.1-alpha + * @version 1.0.4 + * @since 0.0.2-alpha */ export function parseTokens(t: Token[], _filePath: string): ProgramStatement { tokens = t; const eof = t.find(r => r.type === TokenType.EndOfFile); - program = { body: [], type: NodeType.Program, range: { end: eof.range.end, start: { line: 0, character: 0 } } }; + program = { body: [], type: NodeType.Program, range: { end: eof.range.end, start: { line: 0, character: 0 } },modifiers:[] }; filePath = _filePath; while (canGo()) { @@ -409,7 +410,6 @@ export namespace syxparser { return tokens[i]; } - const exportable = [NodeType.Operator, NodeType.Function, NodeType.Keyword]; const defaultRange: Range = { end: { line: 0, character: 0 }, start: { character: 0, line: 0 } }; @@ -472,8 +472,8 @@ export namespace syxparser { * @param {boolean} expectIdentifier Whether identifiers should be allowed. Unknown identifiers will stop the function with this value set to `false`, returning the identifier as a {@link StringExpression} otherwise. * @returns The parsed node. 
* @author efekos - * @version 1.0.8 - * @since 0.0.1-alpha + * @version 1.0.9 + * @since 0.0.2-alpha */ export function parseExpression(put: boolean = true, statements: boolean = true, expectIdentifier: boolean = false): Node { const tt = at().type; @@ -491,7 +491,7 @@ export namespace syxparser { return parseStatement(); } else if (tt === TokenType.Identifier && expectIdentifier) { const { value, range } = tokens.shift(); - return node({ type: NodeType.String, value, range }, put); + return node({ type: NodeType.String, value, range, modifiers:[] }, put); } else throw new CompilerError(at().range, `Unexpected expression: '${at().value}'`, filePath); @@ -521,7 +521,7 @@ export namespace sysparser { if (ex.type !== NodeType.String) throw new CompilerError(ex.range, 'Expected file path after import statement.', filePath); if (at().type !== TokenType.Semicolon) throw new CompilerError(at().range, `Expected ';' after import statement, found '${at().value}'.`, filePath); tokens.shift(); - return node({ type: NodeType.Import, path: (ex as Expression).value, range: combineTwo(token, ex.range) }, put); + return node({ type: NodeType.Import, path: (ex as Expression).value, range: combineTwo(token, ex.range),modifiers:[] }, put); } //# @@ -543,7 +543,7 @@ export namespace sysparser { s += _t.value; } - return node({ type: NodeType.String, value: s, range: combineTwo(range, tokens.shift()) }, put); + return node({ type: NodeType.String, value: s, range: combineTwo(range, tokens.shift()) ,modifiers:[]}, put); } /** @@ -562,7 +562,7 @@ export namespace sysparser { s += _t.value; } - return node({ type: NodeType.String, value: s, range: combineTwo(range, tokens.shift()) }, put); + return node({ type: NodeType.String, value: s, range: combineTwo(range, tokens.shift()) ,modifiers:[]}, put); } @@ -600,14 +600,14 @@ export namespace sysparser { * @param {Token[]} t Token list to parse. * @returns Main {@link ProgramStatement} containing all other statements. 
* @author efekos - * @version 1.0.2 - * @since 0.0.1-alpha + * @version 1.0.3 + * @since 0.0.2-alpha */ export function parseTokens(t: Token[], _filePath: string): ProgramStatement { tokens = t; const eof = t.find(r => r.type === TokenType.EndOfFile); - program = { body: [], type: NodeType.Program, range: { start: { character: 0, line: 0 }, end: eof.range.end } }; + program = { body: [], type: NodeType.Program, range: { start: { character: 0, line: 0 }, end: eof.range.end },modifiers:[] }; filePath = _filePath; while (canGo()) { diff --git a/src/compiler.ts b/src/compiler.ts index 967de98..d76492f 100644 --- a/src/compiler.ts +++ b/src/compiler.ts @@ -1,4 +1,4 @@ -import { CompileStatement, CompilerError, ExportStatement, FunctionStatement, ImportStatement, ImportsStatement, KeywordStatement, NodeType, OperatorStatement, PrimitiveTypeExpression, StringExpression, VariableExpression } from './types.js'; +import { CompileStatement, CompilerError, FunctionStatement, ImportStatement, ImportsStatement, KeywordStatement, NodeType, OperatorStatement, PrimitiveTypeExpression, StringExpression, TokenType, VariableExpression, statementIsA } from './types.js'; import { dirname, join } from 'path'; import { existsSync, readFileSync, readdirSync, statSync, writeFileSync } from 'fs'; import { sysparser, syxparser } from './ast.js'; @@ -65,23 +65,23 @@ export class SyntaxScriptCompiler { * Compiles one .syx file from the path given. * @param {string} file Path to a file to compile. 
* @author efekos - * @version 1.0.4 - * @since 0.0.1-alpha + * @version 1.0.5 + * @since 0.0.2-alpha */ public compileSyx(file: string) { const ast = syxparser.parseTokens(tokenizeSyx(readFileSync(file).toString()), file); const out: AnyExportable[] = []; ast.body.forEach(statement => { - if (statement.type !== NodeType.Export) return; - const exported = (statement as ExportStatement).body; + if (!statement.modifiers.some(token=>token.type===TokenType.ExportKeyword)) return; + - if (exported.type === NodeType.Operator) { - const operatorStmt = exported as OperatorStatement; + if (statementIsA(statement,NodeType.Operator)) { + //# Generate regexMatcher let regexMatcher: RegExp = new RegExp(''); - operatorStmt.regex.forEach(regexStatement => { + statement.regex.forEach(regexStatement => { if (regexStatement.type === NodeType.PrimitiveType) { regexMatcher = new RegExp(regexMatcher.source + regexes[(regexStatement as PrimitiveTypeExpression).value].source); @@ -98,7 +98,7 @@ export class SyntaxScriptCompiler { const operatorStmtExport: Operator = { imports: {}, outputGenerators: {}, regexMatcher, type: ExportType.Operator }; //# Handle statements - operatorStmt.body.forEach(stmt => { + statement.body.forEach(stmt => { if (stmt.type === NodeType.Compile) { const compileStmt = stmt as CompileStatement; @@ -136,24 +136,21 @@ export class SyntaxScriptCompiler { }); out.push(operatorStmtExport); - } else if (exported.type === NodeType.Function) { - const stmt = exported as FunctionStatement; - const statementExport: Function = { type: ExportType.Function, args: stmt.arguments.map(s => regexes[s]), name: stmt.name, formatNames: {}, imports: {} }; - - stmt.body.forEach(statement => { - - if (statement.type === NodeType.Compile) { - const compileStatement = statement as CompileStatement; - if (compileStatement.body[0].type !== NodeType.String) throw new CompilerError(compileStatement.range, 'Expected a string after compile statement parens'); - 
compileStatement.formats.forEach(each => { - if (statementExport.formatNames[each] !== undefined) throw new CompilerError(compileStatement.range, `Encountered multiple compile statements for target language '${each}'`); - statementExport.formatNames[each] = compileStatement.body[0].value; + } else if (statementIsA(statement,NodeType.Function)) { + const statementExport: Function = { type: ExportType.Function, args: statement.arguments.map(s => regexes[s]), name: statement.name, formatNames: {}, imports: {} }; + + statement.body.forEach(stmt => { + + if (statementIsA(stmt,NodeType.Compile)) { + if (stmt.body[0].type !== NodeType.String) throw new CompilerError(stmt.range, 'Expected a string after compile statement parens'); + stmt.formats.forEach(each => { + if (statementExport.formatNames[each] !== undefined) throw new CompilerError(stmt.range, `Encountered multiple compile statements for target language '${each}'`); + statementExport.formatNames[each] = stmt.body[0].value; }); - } else if (statement.type === NodeType.Imports) { - const importsStatement = statement as ImportsStatement; - importsStatement.formats.forEach(each => { - if (statementExport.imports[each] !== undefined) throw new CompilerError(importsStatement.range, `Encountered multiple import statements for target language '${each}'`); - statementExport.imports[each] = importsStatement.module; + } else if (statementIsA(stmt,NodeType.Imports)) { + stmt.formats.forEach(each => { + if (statementExport.imports[each] !== undefined) throw new CompilerError(stmt.range, `Encountered multiple import statements for target language '${each}'`); + statementExport.imports[each] = stmt.module; }); } @@ -161,11 +158,9 @@ export class SyntaxScriptCompiler { out.push(statementExport); - } else if (exported.type === NodeType.Keyword) { - const stmt = exported as KeywordStatement; - - out.push({ type: ExportType.Keyword, word: stmt.word }); - } else throw new CompilerError(statement.range, `Unexpected 
\'${statement.type}\' statement after export statement.`); + } else if (statementIsA(statement,NodeType.Keyword)) { + out.push({ type: ExportType.Keyword, word: statement.word }); + } else throw new CompilerError(statement.range, `Unexpected \'${statement.type}\' statement after export statement.`,file); }); From 0ff00511afed38c35bbde530a019440fd0911b85 Mon Sep 17 00:00:00 2001 From: efekos Date: Tue, 16 Apr 2024 07:44:27 +0300 Subject: [PATCH 04/47] clean up some code and use statementIsA in most conditions --- src/ast.ts | 39 ++++++++++++++++++--------------------- 1 file changed, 18 insertions(+), 21 deletions(-) diff --git a/src/ast.ts b/src/ast.ts index 26b7cb9..9ac5bbf 100644 --- a/src/ast.ts +++ b/src/ast.ts @@ -41,10 +41,10 @@ export namespace syxparser { */ export function parseImportStatement(put: boolean, token: Token): Node { const ex = parseExpression(false, false); - if (ex.type !== NodeType.String) throw new CompilerError(ex.range, 'Expected file path after import statement.', filePath); + if (!statementIsA(ex,NodeType.String)) throw new CompilerError(ex.range, 'Expected file path after import statement.', filePath); if (at().type !== TokenType.Semicolon) throw new CompilerError(at().range, `Expected ';' after import statement, found '${at().value}'.`, filePath); tokens.shift(); - return node({ type: NodeType.Import, path: (ex as Expression).value, range: combineTwo(token, ex.range), modifiers: [] }, put); + return node({ type: NodeType.Import, path: ex.value, range: combineTwo(token, ex.range), modifiers: [] }, put); } /** @@ -53,27 +53,22 @@ export namespace syxparser { */ export function parseRuleStatement(token: Token, put: boolean): Node { const ruleExpr = parseExpression(false, false) as Expression; - if (ruleExpr.type !== NodeType.String) { throw new CompilerError(ruleExpr.range, `Expected rule name as string after 'rule', found ${ruleExpr.value}.`, filePath); } + if (!statementIsA(ruleExpr,NodeType.String)) throw new 
CompilerError(ruleExpr.range, `Expected rule name as string after 'rule', found ${ruleExpr.value}.`, filePath); if (at().value !== ':') throw new CompilerError(at().range, `Expected \':\' after rule name, found ${at().value}.`, filePath); tokens.shift(); - if (!dictionary.Rules.find(r => r.name === ruleExpr.value)) throw new CompilerError(ruleExpr.range, `Unknown rule '${ruleExpr.value}'.`, filePath); + if (!dictionary.Rules.some(r => r.name === ruleExpr.value)) throw new CompilerError(ruleExpr.range, `Unknown rule '${ruleExpr.value}'.`, filePath); const rule = dictionary.Rules.find(r => r.name === ruleExpr.value); if (rule.type === 'boolean') { const boolEx = parseExpression(false, false, true) as Expression; - if (!(boolEx.type === NodeType.String && dictionary.RuleTypeRegexes.boolean.test(boolEx.value))) { throw new CompilerError(boolEx.range, `Rule '${rule.name}' requires a boolean value, found '${boolEx.value}'.`, filePath); } - + if (!(statementIsA(boolEx,NodeType.String) && dictionary.RuleTypeRegexes.boolean.test(boolEx.value))) throw new CompilerError(boolEx.range, `Rule '${rule.name}' requires a boolean value, found '${boolEx.value}'.`, filePath); if (at().type !== TokenType.Semicolon) throw new CompilerError(at().range, `Expected semicolon after rule statement, found '${at().value}'.`, filePath); return node({ type: NodeType.Rule, rule: ruleExpr.value, value: boolEx.value, range: combineTwo(token, tokens.shift()), modifiers:[] }, put); } else if (rule.type === 'keyword') { - const keyEx = parseExpression(false, false, true) as Expression; - if (!( - keyEx.type === NodeType.String && - program.body.some(s => - statementIsA(s,NodeType.Keyword) && s.word === keyEx.value - ) - )) throw new CompilerError(keyEx.range, `Can't find keyword '${keyEx.value}'.`, filePath, caf.mk(keyEx.value, program, keyEx.range, filePath)); + const keyEx = parseExpression(false, false, true); + if(!statementIsA(keyEx,NodeType.String)) throw new CompilerError(keyEx.range,'Excepted 
keyword.',filePath); + if (!program.body.some(s =>statementIsA(s,NodeType.Keyword) && s.word === keyEx.value)) throw new CompilerError(keyEx.range, `Can't find keyword '${keyEx.value}'.`, filePath, caf.mk(keyEx.value, program, keyEx.range, filePath)); if (at().type !== TokenType.Semicolon) throw new CompilerError(at().range, `Expected semicolon after rule statement, found ${at().value}.`, filePath); return node({ type: NodeType.Rule, rule: ruleExpr.value, value: keyEx.value, range: combineTwo(token, tokens.shift()), modifiers: [] }, put); @@ -85,10 +80,12 @@ export namespace syxparser { * @returns Parsed node. */ export function parseKeywordStatement(put: boolean, token: Token): Node { - const ex = parseExpression(false, false, true) as Expression; - if (ex.type !== NodeType.String) throw new CompilerError(ex.range, `Expected identifier after keyword statement, found '${ex.value}'.`, filePath); + const ex = parseExpression(false, false, true); + if (!statementIsA(ex,NodeType.String)) throw new CompilerError(ex.range, 'Expected identifier after keyword statement.', filePath); + if (at().type !== TokenType.Semicolon) throw new CompilerError(at().range, `Expected ';' after statement, found '${at().value}'.`, filePath); tokens.shift(); // skip semicolon + return node({ type: NodeType.Keyword, word: ex.value, range: combineTwo(token, ex.range), modifiers: [] }, put); } @@ -116,12 +113,12 @@ export namespace syxparser { while (at().type !== TokenType.OpenBrace) { const expr = parseExpression(false, false) as Expression; - if (expr.type !== NodeType.PrimitiveType) throw new CompilerError(expr.range, `Expected argument types after function name, found ${expr.value}.`, filePath); - statement.arguments.push((expr as PrimitiveTypeExpression).value); + if (!statementIsA(expr,NodeType.PrimitiveType)) throw new CompilerError(expr.range, `Expected argument types after function name, found ${expr.value}.`, filePath); + statement.arguments.push(expr.value); } const braceExpr = 
parseExpression(false); - if (braceExpr.type !== NodeType.Brace) throw new CompilerError(braceExpr.range, 'Function statement requires braces.', filePath); + if (!statementIsA(braceExpr,NodeType.Brace)) throw new CompilerError(braceExpr.range, 'Function statement requires braces.', filePath); braceExpr.body.forEach(s => { if (!([NodeType.Compile, NodeType.Imports].includes(s.type))) throw new CompilerError(s.range, 'Statement not allowed inside a function statement.', filePath); }); statement.body = braceExpr.body; @@ -156,7 +153,7 @@ export namespace syxparser { const moduleExpr = parseExpression(false, false) as Expression; - if (moduleExpr.type !== NodeType.String) throw new CompilerError(moduleExpr.range, `Expected string after parens of imports statement, found '${moduleExpr.value}'.`, filePath); + if (!statementIsA(moduleExpr,NodeType.String)) throw new CompilerError(moduleExpr.range, `Expected string after parens of imports statement, found '${moduleExpr.value}'.`, filePath); statement.module = moduleExpr.value; statement.range = combineTwo(token, moduleExpr.range); @@ -213,7 +210,7 @@ export namespace syxparser { } const braceExpr = parseExpression(false); - if (braceExpr.type !== NodeType.Brace) throw new CompilerError(braceExpr.range, 'Expected braces after operator regex.', filePath); + if (!statementIsA(braceExpr,NodeType.Brace)) throw new CompilerError(braceExpr.range, 'Expected braces after operator regex.', filePath); braceExpr.body.forEach(s => { if (!([NodeType.Compile, NodeType.Imports].includes(s.type))) throw new CompilerError(s.range, 'Statement not allowed inside of operator statement.'); }, filePath); statement.body = braceExpr.body; @@ -518,7 +515,7 @@ export namespace sysparser { */ export function parseImportStatement(put: boolean, token: Token): Node { const ex = parseExpression(false, false); - if (ex.type !== NodeType.String) throw new CompilerError(ex.range, 'Expected file path after import statement.', filePath); + if 
(!statementIsA(ex,NodeType.String)) throw new CompilerError(ex.range, 'Expected file path after import statement.', filePath); if (at().type !== TokenType.Semicolon) throw new CompilerError(at().range, `Expected ';' after import statement, found '${at().value}'.`, filePath); tokens.shift(); return node({ type: NodeType.Import, path: (ex as Expression).value, range: combineTwo(token, ex.range),modifiers:[] }, put); From fcb9b8b5b3904ad61841ec47a201f1445eb85c22 Mon Sep 17 00:00:00 2001 From: efekos Date: Tue, 16 Apr 2024 08:03:36 +0300 Subject: [PATCH 05/47] use switch statements and parse global statement --- src/ast.ts | 141 ++++++++++++++++++++++++++++++--------------------- src/types.ts | 2 +- 2 files changed, 84 insertions(+), 59 deletions(-) diff --git a/src/ast.ts b/src/ast.ts index 9ac5bbf..227301c 100644 --- a/src/ast.ts +++ b/src/ast.ts @@ -1,4 +1,4 @@ -import { BraceExpression, CompileStatement, CompilerError, Expression, FunctionStatement, ImportsStatement, KeywordStatement, Node, NodeType, OperatorStatement, ParenExpression, PrimitiveTypeExpression, ProgramStatement, SquareExpression, StringExpression, Token, TokenType, VariableExpression, statementIsA } from './types.js'; +import { BraceExpression, CompileStatement, CompilerError, Expression, FunctionStatement, GlobalStatement, ImportsStatement, KeywordStatement, Node, NodeType, OperatorStatement, ParenExpression, PrimitiveTypeExpression, ProgramStatement, SquareExpression, StringExpression, Token, TokenType, VariableExpression, statementIsA } from './types.js'; import { CodeAction, CodeActionKind, Range } from 'lsp-types'; import { dictionary } from './dictionary/dictionary.js'; import levenshtein from 'js-levenshtein'; @@ -7,10 +7,10 @@ import { subRange } from './diagnostic.js'; const caf = { mk: (keyword: string, program: ProgramStatement, range: Range, filePath: string): CodeAction[] => { const existingKeywords = program.body - .filter(r => statementIsA(r,NodeType.Keyword)) + .filter(r => 
statementIsA(r, NodeType.Keyword)) .map(r => r as KeywordStatement) .map(r => r.word) - .sort(a=>levenshtein(keyword,a)); + .sort(a => levenshtein(keyword, a)); return existingKeywords.map(word => { return { @@ -41,7 +41,7 @@ export namespace syxparser { */ export function parseImportStatement(put: boolean, token: Token): Node { const ex = parseExpression(false, false); - if (!statementIsA(ex,NodeType.String)) throw new CompilerError(ex.range, 'Expected file path after import statement.', filePath); + if (!statementIsA(ex, NodeType.String)) throw new CompilerError(ex.range, 'Expected file path after import statement.', filePath); if (at().type !== TokenType.Semicolon) throw new CompilerError(at().range, `Expected ';' after import statement, found '${at().value}'.`, filePath); tokens.shift(); return node({ type: NodeType.Import, path: ex.value, range: combineTwo(token, ex.range), modifiers: [] }, put); @@ -53,7 +53,7 @@ export namespace syxparser { */ export function parseRuleStatement(token: Token, put: boolean): Node { const ruleExpr = parseExpression(false, false) as Expression; - if (!statementIsA(ruleExpr,NodeType.String)) throw new CompilerError(ruleExpr.range, `Expected rule name as string after 'rule', found ${ruleExpr.value}.`, filePath); + if (!statementIsA(ruleExpr, NodeType.String)) throw new CompilerError(ruleExpr.range, `Expected rule name as string after 'rule', found ${ruleExpr.value}.`, filePath); if (at().value !== ':') throw new CompilerError(at().range, `Expected \':\' after rule name, found ${at().value}.`, filePath); tokens.shift(); if (!dictionary.Rules.some(r => r.name === ruleExpr.value)) throw new CompilerError(ruleExpr.range, `Unknown rule '${ruleExpr.value}'.`, filePath); @@ -61,14 +61,14 @@ export namespace syxparser { if (rule.type === 'boolean') { const boolEx = parseExpression(false, false, true) as Expression; - if (!(statementIsA(boolEx,NodeType.String) && dictionary.RuleTypeRegexes.boolean.test(boolEx.value))) throw new 
CompilerError(boolEx.range, `Rule '${rule.name}' requires a boolean value, found '${boolEx.value}'.`, filePath); + if (!(statementIsA(boolEx, NodeType.String) && dictionary.RuleTypeRegexes.boolean.test(boolEx.value))) throw new CompilerError(boolEx.range, `Rule '${rule.name}' requires a boolean value, found '${boolEx.value}'.`, filePath); if (at().type !== TokenType.Semicolon) throw new CompilerError(at().range, `Expected semicolon after rule statement, found '${at().value}'.`, filePath); - return node({ type: NodeType.Rule, rule: ruleExpr.value, value: boolEx.value, range: combineTwo(token, tokens.shift()), modifiers:[] }, put); + return node({ type: NodeType.Rule, rule: ruleExpr.value, value: boolEx.value, range: combineTwo(token, tokens.shift()), modifiers: [] }, put); } else if (rule.type === 'keyword') { const keyEx = parseExpression(false, false, true); - if(!statementIsA(keyEx,NodeType.String)) throw new CompilerError(keyEx.range,'Excepted keyword.',filePath); - if (!program.body.some(s =>statementIsA(s,NodeType.Keyword) && s.word === keyEx.value)) throw new CompilerError(keyEx.range, `Can't find keyword '${keyEx.value}'.`, filePath, caf.mk(keyEx.value, program, keyEx.range, filePath)); + if (!statementIsA(keyEx, NodeType.String)) throw new CompilerError(keyEx.range, 'Excepted keyword.', filePath); + if (!program.body.some(s => statementIsA(s, NodeType.Keyword) && s.word === keyEx.value)) throw new CompilerError(keyEx.range, `Can't find keyword '${keyEx.value}'.`, filePath, caf.mk(keyEx.value, program, keyEx.range, filePath)); if (at().type !== TokenType.Semicolon) throw new CompilerError(at().range, `Expected semicolon after rule statement, found ${at().value}.`, filePath); return node({ type: NodeType.Rule, rule: ruleExpr.value, value: keyEx.value, range: combineTwo(token, tokens.shift()), modifiers: [] }, put); @@ -81,7 +81,7 @@ export namespace syxparser { */ export function parseKeywordStatement(put: boolean, token: Token): Node { const ex = 
parseExpression(false, false, true); - if (!statementIsA(ex,NodeType.String)) throw new CompilerError(ex.range, 'Expected identifier after keyword statement.', filePath); + if (!statementIsA(ex, NodeType.String)) throw new CompilerError(ex.range, 'Expected identifier after keyword statement.', filePath); if (at().type !== TokenType.Semicolon) throw new CompilerError(at().range, `Expected ';' after statement, found '${at().value}'.`, filePath); tokens.shift(); // skip semicolon @@ -95,7 +95,7 @@ export namespace syxparser { */ export function parseExportStatement(token: Token, put: boolean): Node { const stmt = parseStatement(false); - stmt.range = combineTwo(token,stmt.range); + stmt.range = combineTwo(token, stmt.range); stmt.modifiers.push(token); return node(stmt, put); } @@ -105,7 +105,7 @@ export namespace syxparser { * @returns Parsed node. */ export function parseFunctionStatement(token: Token, put: boolean): Node { - const statement: FunctionStatement = { type: NodeType.Function, arguments: [], name: '', body: [], range: defaultRange,modifiers:[] }; + const statement: FunctionStatement = { type: NodeType.Function, arguments: [], name: '', body: [], range: defaultRange, modifiers: [] }; if (at().type !== TokenType.Identifier) throw new CompilerError(at().range, `Expected identifier after function statement, found '${at().value}'.`, filePath); statement.name = at().value; @@ -113,12 +113,12 @@ export namespace syxparser { while (at().type !== TokenType.OpenBrace) { const expr = parseExpression(false, false) as Expression; - if (!statementIsA(expr,NodeType.PrimitiveType)) throw new CompilerError(expr.range, `Expected argument types after function name, found ${expr.value}.`, filePath); + if (!statementIsA(expr, NodeType.PrimitiveType)) throw new CompilerError(expr.range, `Expected argument types after function name, found ${expr.value}.`, filePath); statement.arguments.push(expr.value); } const braceExpr = parseExpression(false); - if 
(!statementIsA(braceExpr,NodeType.Brace)) throw new CompilerError(braceExpr.range, 'Function statement requires braces.', filePath); + if (!statementIsA(braceExpr, NodeType.Brace)) throw new CompilerError(braceExpr.range, 'Function statement requires braces.', filePath); braceExpr.body.forEach(s => { if (!([NodeType.Compile, NodeType.Imports].includes(s.type))) throw new CompilerError(s.range, 'Statement not allowed inside a function statement.', filePath); }); statement.body = braceExpr.body; @@ -132,7 +132,7 @@ export namespace syxparser { * @returns Parsed node. */ export function parseImportsStatement(token: Token, put: boolean) { - const statement: ImportsStatement = { type: NodeType.Imports, formats: [], module: '', range: defaultRange, modifiers:[] }; + const statement: ImportsStatement = { type: NodeType.Imports, formats: [], module: '', range: defaultRange, modifiers: [] }; if (at().type !== TokenType.OpenParen) throw new CompilerError(at().range, 'Imports statement require parens.', filePath); @@ -153,7 +153,7 @@ export namespace syxparser { const moduleExpr = parseExpression(false, false) as Expression; - if (!statementIsA(moduleExpr,NodeType.String)) throw new CompilerError(moduleExpr.range, `Expected string after parens of imports statement, found '${moduleExpr.value}'.`, filePath); + if (!statementIsA(moduleExpr, NodeType.String)) throw new CompilerError(moduleExpr.range, `Expected string after parens of imports statement, found '${moduleExpr.value}'.`, filePath); statement.module = moduleExpr.value; statement.range = combineTwo(token, moduleExpr.range); @@ -169,7 +169,7 @@ export namespace syxparser { * @returns Parsed node. 
*/ export function parseCompileStatement(token: Token, put: boolean): Node { - const statement: CompileStatement = { type: NodeType.Compile, formats: [], body: [], range: defaultRange, modifiers:[] }; + const statement: CompileStatement = { type: NodeType.Compile, formats: [], body: [], range: defaultRange, modifiers: [] }; if (at().type !== TokenType.OpenParen) throw new CompilerError(at().range, 'Compile statement require parens.', filePath); @@ -201,7 +201,7 @@ export namespace syxparser { * @returns Parsed node. */ export function parseOperatorStatement(token: Token, put: boolean) { - const statement: OperatorStatement = { type: NodeType.Operator, regex: [], body: [], range: defaultRange, modifiers:[] }; + const statement: OperatorStatement = { type: NodeType.Operator, regex: [], body: [], range: defaultRange, modifiers: [] }; while (at().type !== TokenType.OpenBrace) { @@ -210,7 +210,7 @@ export namespace syxparser { } const braceExpr = parseExpression(false); - if (!statementIsA(braceExpr,NodeType.Brace)) throw new CompilerError(braceExpr.range, 'Expected braces after operator regex.', filePath); + if (!statementIsA(braceExpr, NodeType.Brace)) throw new CompilerError(braceExpr.range, 'Expected braces after operator regex.', filePath); braceExpr.body.forEach(s => { if (!([NodeType.Compile, NodeType.Imports].includes(s.type))) throw new CompilerError(s.range, 'Statement not allowed inside of operator statement.'); }, filePath); statement.body = braceExpr.body; @@ -219,6 +219,24 @@ export namespace syxparser { return node(statement, put); } + /** + * Parses an operator statement. Parameters are related to the environment of {@link syxparser.parseStatement} or {@link sysparser.parseStatement}. + * @returns Parsed node. 
+ */ + export function parseGlobalStatement(token: Token, put: boolean) { + const stmt: GlobalStatement = { type: NodeType.Global, range: token.range, body: [], modifiers: [], name: '' }; + + if (at().type !== TokenType.Identifier) throw new CompilerError(at().range, `Expected identifier after function statement, found '${at().value}'.`, filePath); + stmt.name = tokens.shift().value; + + const braceExpr = parseExpression(false, false, false); + if (!statementIsA(braceExpr, NodeType.Brace)) throw new CompilerError(braceExpr.range, 'Expected braces after global name.', filePath); + + stmt.body = braceExpr.body; + stmt.range = combineTwo(token, braceExpr.range); + return node(stmt, put); + } + //# //# EXPRESSION PARSERS @@ -239,7 +257,7 @@ export namespace syxparser { s += _t.value; } - return node({ type: NodeType.String, value: s, range: combineTwo(range, tokens.shift()), modifiers:[] }, put); + return node({ type: NodeType.String, value: s, range: combineTwo(range, tokens.shift()), modifiers: [] }, put); } /** @@ -258,7 +276,7 @@ export namespace syxparser { s += _t.value; } - return node({ type: NodeType.String, value: s, range: combineTwo(range, tokens.shift()),modifiers:[] }, put); + return node({ type: NodeType.String, value: s, range: combineTwo(range, tokens.shift()), modifiers: [] }, put); } /** @@ -272,7 +290,7 @@ export namespace syxparser { if (at(2).type !== TokenType.CloseDiamond) throw new CompilerError(at(2).range, `Expected '>' after primitive type identifier, found '${at(2).value}'`, filePath); const t = tokens.shift(); tokens.shift(); - return node({ type: NodeType.PrimitiveType, value: newToken.value, range: combineTwo(t, tokens.shift()),modifiers:[] }, put); + return node({ type: NodeType.PrimitiveType, value: newToken.value, range: combineTwo(t, tokens.shift()), modifiers: [] }, put); } /** @@ -281,7 +299,7 @@ export namespace syxparser { */ export function parseWhitespaceIdentifier(put: boolean): Node { const { range } = tokens.shift(); - 
return node({ type: NodeType.WhitespaceIdentifier, value: '+s', range,modifiers:[] }, put); + return node({ type: NodeType.WhitespaceIdentifier, value: '+s', range, modifiers: [] }, put); } /** @@ -291,7 +309,7 @@ export namespace syxparser { export function parseBraceExpression(put: boolean, dr: Range) { const { range } = tokens.shift(); - const expr: BraceExpression = { type: NodeType.Brace, body: [], value: '{', range: dr ,modifiers:[]}; + const expr: BraceExpression = { type: NodeType.Brace, body: [], value: '{', range: dr, modifiers: [] }; while (at().type !== TokenType.CloseBrace) { const stmt = parseStatement(false); @@ -308,7 +326,7 @@ export namespace syxparser { export function parseSquareExpression(put: boolean, dr: Range) { const { range } = tokens.shift(); - const expr: SquareExpression = { type: NodeType.Square, body: [], value: '[', range: dr ,modifiers:[]}; + const expr: SquareExpression = { type: NodeType.Square, body: [], value: '[', range: dr, modifiers: [] }; while (at().type !== TokenType.CloseSquare) { const stmt = parseStatement(false); @@ -325,7 +343,7 @@ export namespace syxparser { export function parseParenExpression(put: boolean, dr: Range) { const { range } = tokens.shift(); - const expr: ParenExpression = { type: NodeType.Paren, body: [], value: '(', range: dr,modifiers:[] }; + const expr: ParenExpression = { type: NodeType.Paren, body: [], value: '(', range: dr, modifiers: [] }; while (at().type !== TokenType.CloseParen) { const stmt = parseStatement(false); @@ -346,7 +364,7 @@ export namespace syxparser { const id = tokens.shift(); // id tokens.shift(); // sep const index = tokens.shift(); // index - const expr: VariableExpression = { index: parseInt(index.value), type: NodeType.Variable, value: id.value, range: combineTwo(id, index),modifiers:[] }; + const expr: VariableExpression = { index: parseInt(index.value), type: NodeType.Variable, value: id.value, range: combineTwo(id, index), modifiers: [] }; return node(expr, put); } @@ 
-384,7 +402,7 @@ export namespace syxparser { tokens = t; const eof = t.find(r => r.type === TokenType.EndOfFile); - program = { body: [], type: NodeType.Program, range: { end: eof.range.end, start: { line: 0, character: 0 } },modifiers:[] }; + program = { body: [], type: NodeType.Program, range: { end: eof.range.end, start: { line: 0, character: 0 } }, modifiers: [] }; filePath = _filePath; while (canGo()) { @@ -427,22 +445,25 @@ export namespace syxparser { * @param {boolean} put Whether the result should be added to the program statement. * @returns A node that is either a statement or an expression if a statement wasn't present. * @author efekos - * @version 1.0.9 - * @since 0.0.1-alpha + * @version 1.1.0 + * @since 0.0.2-alpha */ export function parseStatement(put: boolean = true): Node { if (keywords.includes(at().type)) { const token = at(); tokens.shift(); - if (token.type === TokenType.ImportKeyword) return parseImportStatement(put, token); - else if (token.type === TokenType.OperatorKeyword) return parseOperatorStatement(token, put); - else if (token.type === TokenType.CompileKeyword) return parseCompileStatement(token, put); - else if (token.type === TokenType.ExportKeyword) return parseExportStatement(token, put); - else if (token.type === TokenType.ImportsKeyword) return parseImportsStatement(token, put); - else if (token.type === TokenType.FunctionKeyword) return parseFunctionStatement(token, put); - else if (token.type === TokenType.KeywordKeyword) return parseKeywordStatement(put, token); - else if (token.type === TokenType.RuleKeyword) return parseRuleStatement(token, put); + switch (token.type) { + case TokenType.ImportKeyword: return parseImportStatement(put, token); + case TokenType.OperatorKeyword: return parseOperatorStatement(token, put); + case TokenType.CompileKeyword: return parseCompileStatement(token, put); + case TokenType.ExportKeyword: return parseExportStatement(token, put); + case TokenType.ImportsKeyword: return 
parseImportsStatement(token, put); + case TokenType.FunctionKeyword: return parseFunctionStatement(token, put); + case TokenType.KeywordKeyword: return parseKeywordStatement(put, token); + case TokenType.RuleKeyword: return parseRuleStatement(token, put); + case TokenType.GlobalKeyword: return parseGlobalStatement(token, put); + } } else parseExpression(); @@ -475,22 +496,26 @@ export namespace syxparser { export function parseExpression(put: boolean = true, statements: boolean = true, expectIdentifier: boolean = false): Node { const tt = at().type; - if (tt === TokenType.SingleQuote) return parseSingleQuotedString(put); - else if (tt === TokenType.DoubleQuote) return parseDoubleQuotedString(put); - else if (tt === TokenType.OpenDiamond) return parsePrimitiveType(primitiveTypes, put); - else if (tt === TokenType.WhitespaceIdentifier) return parseWhitespaceIdentifier(put); - else if (tt === TokenType.OpenBrace) return parseBraceExpression(put, defaultRange); - else if (tt === TokenType.OpenSquare) return parseSquareExpression(put, defaultRange); - else if (tt === TokenType.OpenParen) return parseParenExpression(put, defaultRange); - else if (tt === TokenType.Identifier && at(1).type === TokenType.VarSeperator) return parsePrimitiveVariable(put); - else if (keywords.includes(tt)) { - if (!statements) throw new CompilerError(at().range, 'Statement not allowed here.', filePath); - return parseStatement(); - } else if (tt === TokenType.Identifier && expectIdentifier) { - const { value, range } = tokens.shift(); - return node({ type: NodeType.String, value, range, modifiers:[] }, put); + switch (tt) { + case TokenType.SingleQuote: return parseSingleQuotedString(put); + case TokenType.DoubleQuote: return parseDoubleQuotedString(put); + case TokenType.OpenDiamond: return parsePrimitiveType(primitiveTypes, put); + case TokenType.WhitespaceIdentifier: return parseWhitespaceIdentifier(put); + case TokenType.OpenBrace: return parseBraceExpression(put, defaultRange); + case 
TokenType.OpenSquare: return parseSquareExpression(put, defaultRange); + case TokenType.OpenParen: return parseParenExpression(put, defaultRange); + case TokenType.Identifier: + if (at(1).type === TokenType.VarSeperator) return parsePrimitiveVariable(put); + else if (keywords.includes(tt)) { + if (!statements) throw new CompilerError(at().range, 'Statement not allowed here.', filePath); + return parseStatement(); + } else if (expectIdentifier) { + const { value, range } = tokens.shift(); + return node({ type: NodeType.String, value, range, modifiers: [] }, put); + } } - else throw new CompilerError(at().range, `Unexpected expression: '${at().value}'`, filePath); + + throw new CompilerError(at().range, `Unexpected expression: '${at().value}'`, filePath); } @@ -515,10 +540,10 @@ export namespace sysparser { */ export function parseImportStatement(put: boolean, token: Token): Node { const ex = parseExpression(false, false); - if (!statementIsA(ex,NodeType.String)) throw new CompilerError(ex.range, 'Expected file path after import statement.', filePath); + if (!statementIsA(ex, NodeType.String)) throw new CompilerError(ex.range, 'Expected file path after import statement.', filePath); if (at().type !== TokenType.Semicolon) throw new CompilerError(at().range, `Expected ';' after import statement, found '${at().value}'.`, filePath); tokens.shift(); - return node({ type: NodeType.Import, path: (ex as Expression).value, range: combineTwo(token, ex.range),modifiers:[] }, put); + return node({ type: NodeType.Import, path: (ex as Expression).value, range: combineTwo(token, ex.range), modifiers: [] }, put); } //# @@ -540,7 +565,7 @@ export namespace sysparser { s += _t.value; } - return node({ type: NodeType.String, value: s, range: combineTwo(range, tokens.shift()) ,modifiers:[]}, put); + return node({ type: NodeType.String, value: s, range: combineTwo(range, tokens.shift()), modifiers: [] }, put); } /** @@ -559,7 +584,7 @@ export namespace sysparser { s += _t.value; } - 
return node({ type: NodeType.String, value: s, range: combineTwo(range, tokens.shift()) ,modifiers:[]}, put); + return node({ type: NodeType.String, value: s, range: combineTwo(range, tokens.shift()), modifiers: [] }, put); } @@ -604,7 +629,7 @@ export namespace sysparser { tokens = t; const eof = t.find(r => r.type === TokenType.EndOfFile); - program = { body: [], type: NodeType.Program, range: { start: { character: 0, line: 0 }, end: eof.range.end },modifiers:[] }; + program = { body: [], type: NodeType.Program, range: { start: { character: 0, line: 0 }, end: eof.range.end }, modifiers: [] }; filePath = _filePath; while (canGo()) { diff --git a/src/types.ts b/src/types.ts index fba0624..be935bc 100644 --- a/src/types.ts +++ b/src/types.ts @@ -484,7 +484,7 @@ export interface GlobalStatement extends Statement { * @since 0.0.1-alpha */ export type Node = - ProgramStatement | OperatorStatement | CompileStatement | ImportStatement | ImportsStatement | FunctionStatement | KeywordStatement | RuleStatement | + ProgramStatement | OperatorStatement | CompileStatement | ImportStatement | ImportsStatement | FunctionStatement | KeywordStatement | RuleStatement | GlobalStatement | StringExpression | PrimitiveTypeExpression | VariableExpression | WhitespaceIdentifierExpression | BraceExpression | SquareExpression | ParenExpression; /** From b23a68dedb1bdef717b29ee076bc0f3ddc06ad18 Mon Sep 17 00:00:00 2001 From: efekos Date: Tue, 16 Apr 2024 08:04:40 +0300 Subject: [PATCH 06/47] remove ExportStatement from exports --- src/index.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/index.ts b/src/index.ts index 6e9d7ca..924b8f4 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,6 +1,6 @@ import { AnyExportable, Export, ExportType, Function, Keyword, OneParameterMethod, Operator, ReturnerMethod, SyntaxScriptCompiler, escapeRegex } from './compiler.js'; import { BaseRule, BooleanRule, Functionary, FunctionaryValueType, Rule, RuleType, StringRule, dictionary 
} from './dictionary/index.js'; -import { BraceExpression, CompileStatement, ExportStatement, Expression, FunctionStatement, ImportStatement, ImportsStatement, KeywordStatement, OperatorStatement, ParenExpression, PrimitiveTypeExpression, ProgramStatement, RuleStatement, SquareExpression, Statement, StringExpression, VariableExpression, WhitespaceIdentifierExpression } from './types.js'; +import { BraceExpression, CompileStatement, Expression, FunctionStatement, ImportStatement, ImportsStatement, KeywordStatement, OperatorStatement, ParenExpression, PrimitiveTypeExpression, ProgramStatement, RuleStatement, SquareExpression, Statement, StringExpression, VariableExpression, WhitespaceIdentifierExpression } from './types.js'; import { CompilerError, isCompilerError } from './types.js'; import { Node, NodeType, Token, TokenType } from './types.js'; import { SyxConfig, SyxConfigCompile } from './types.js'; @@ -22,5 +22,5 @@ export { AnyExportable, Export, Function, Keyword, OneParameterMethod, Operator, export { CompilerError }; export { Token, TokenType, Node, NodeType }; export { Expression, BraceExpression, ParenExpression, SquareExpression, StringExpression, VariableExpression, PrimitiveTypeExpression, WhitespaceIdentifierExpression }; -export { Statement, RuleStatement, ExportStatement, ImportStatement, ImportsStatement, CompileStatement, OperatorStatement, FunctionStatement, KeywordStatement, ProgramStatement }; +export { Statement, RuleStatement, ImportStatement, ImportsStatement, CompileStatement, OperatorStatement, FunctionStatement, KeywordStatement, ProgramStatement }; export { SyxConfig, SyxConfigCompile }; \ No newline at end of file From 07730767a52b617fd40af70558837a6665fd4192 Mon Sep 17 00:00:00 2001 From: efekos Date: Tue, 16 Apr 2024 08:06:32 +0300 Subject: [PATCH 07/47] remove unnecessary import statements and format code --- src/ast.ts | 2 +- src/compiler.ts | 20 ++++++++++---------- src/diagnostic.ts | 5 +---- src/index.ts | 1 - src/lexer.ts | 2 +- 
src/types.ts | 6 +++--- 6 files changed, 16 insertions(+), 20 deletions(-) diff --git a/src/ast.ts b/src/ast.ts index 227301c..01ba8db 100644 --- a/src/ast.ts +++ b/src/ast.ts @@ -1,4 +1,4 @@ -import { BraceExpression, CompileStatement, CompilerError, Expression, FunctionStatement, GlobalStatement, ImportsStatement, KeywordStatement, Node, NodeType, OperatorStatement, ParenExpression, PrimitiveTypeExpression, ProgramStatement, SquareExpression, StringExpression, Token, TokenType, VariableExpression, statementIsA } from './types.js'; +import { BraceExpression, CompileStatement, CompilerError, Expression, FunctionStatement, GlobalStatement, ImportsStatement, KeywordStatement, Node, NodeType, OperatorStatement, ParenExpression, ProgramStatement, SquareExpression, StringExpression, Token, TokenType, VariableExpression, statementIsA } from './types.js'; import { CodeAction, CodeActionKind, Range } from 'lsp-types'; import { dictionary } from './dictionary/dictionary.js'; import levenshtein from 'js-levenshtein'; diff --git a/src/compiler.ts b/src/compiler.ts index d76492f..5bbe26d 100644 --- a/src/compiler.ts +++ b/src/compiler.ts @@ -1,4 +1,4 @@ -import { CompileStatement, CompilerError, FunctionStatement, ImportStatement, ImportsStatement, KeywordStatement, NodeType, OperatorStatement, PrimitiveTypeExpression, StringExpression, TokenType, VariableExpression, statementIsA } from './types.js'; +import { CompileStatement, CompilerError, ImportStatement, ImportsStatement, NodeType, PrimitiveTypeExpression, StringExpression, TokenType, VariableExpression, statementIsA } from './types.js'; import { dirname, join } from 'path'; import { existsSync, readFileSync, readdirSync, statSync, writeFileSync } from 'fs'; import { sysparser, syxparser } from './ast.js'; @@ -73,11 +73,11 @@ export class SyntaxScriptCompiler { const out: AnyExportable[] = []; ast.body.forEach(statement => { - if (!statement.modifiers.some(token=>token.type===TokenType.ExportKeyword)) return; - + if 
(!statement.modifiers.some(token => token.type === TokenType.ExportKeyword)) return; + + + if (statementIsA(statement, NodeType.Operator)) { - if (statementIsA(statement,NodeType.Operator)) { - //# Generate regexMatcher let regexMatcher: RegExp = new RegExp(''); @@ -136,18 +136,18 @@ export class SyntaxScriptCompiler { }); out.push(operatorStmtExport); - } else if (statementIsA(statement,NodeType.Function)) { + } else if (statementIsA(statement, NodeType.Function)) { const statementExport: Function = { type: ExportType.Function, args: statement.arguments.map(s => regexes[s]), name: statement.name, formatNames: {}, imports: {} }; statement.body.forEach(stmt => { - if (statementIsA(stmt,NodeType.Compile)) { + if (statementIsA(stmt, NodeType.Compile)) { if (stmt.body[0].type !== NodeType.String) throw new CompilerError(stmt.range, 'Expected a string after compile statement parens'); stmt.formats.forEach(each => { if (statementExport.formatNames[each] !== undefined) throw new CompilerError(stmt.range, `Encountered multiple compile statements for target language '${each}'`); statementExport.formatNames[each] = stmt.body[0].value; }); - } else if (statementIsA(stmt,NodeType.Imports)) { + } else if (statementIsA(stmt, NodeType.Imports)) { stmt.formats.forEach(each => { if (statementExport.imports[each] !== undefined) throw new CompilerError(stmt.range, `Encountered multiple import statements for target language '${each}'`); statementExport.imports[each] = stmt.module; @@ -158,9 +158,9 @@ export class SyntaxScriptCompiler { out.push(statementExport); - } else if (statementIsA(statement,NodeType.Keyword)) { + } else if (statementIsA(statement, NodeType.Keyword)) { out.push({ type: ExportType.Keyword, word: statement.word }); - } else throw new CompilerError(statement.range, `Unexpected \'${statement.type}\' statement after export statement.`,file); + } else throw new CompilerError(statement.range, `Unexpected \'${statement.type}\' statement after export statement.`, file); 
}); diff --git a/src/diagnostic.ts b/src/diagnostic.ts index 6d08d00..a9c8141 100644 --- a/src/diagnostic.ts +++ b/src/diagnostic.ts @@ -1,11 +1,9 @@ -import { CodeAction, CodeActionKind, Diagnostic, DiagnosticSeverity, DocumentDiagnosticReportKind, FullDocumentDiagnosticReport, Range } from 'lsp-types'; +import { Diagnostic, DiagnosticSeverity, DocumentDiagnosticReportKind, FullDocumentDiagnosticReport, Range } from 'lsp-types'; import { sysparser, syxparser } from './ast.js'; import { tokenizeSys, tokenizeSyx } from './lexer.js'; import { isCompilerError } from './types.js'; import { readFileSync } from 'fs'; - - /** * Creates a diagnostic report from the file path given. * @param {string} filePath Path of the file to create a report. @@ -39,7 +37,6 @@ export function createSyntaxScriptDiagnosticReport(filePath: string, fileContent } - /** * Modifies the given range to be zero-based. * @param {Range} r Any range. diff --git a/src/index.ts b/src/index.ts index 924b8f4..88ebc2d 100644 --- a/src/index.ts +++ b/src/index.ts @@ -15,7 +15,6 @@ export { escapeRegex, createSyntaxScriptDiagnosticReport, tokenizeSys, tokenizeS export { BaseRule, BooleanRule, Rule, RuleType, StringRule }; export { Functionary, FunctionaryValueType }; - export { SyntaxScriptCompiler, ExportType }; export { AnyExportable, Export, Function, Keyword, OneParameterMethod, Operator, ReturnerMethod }; diff --git a/src/lexer.ts b/src/lexer.ts index 90b3b19..29f0cb1 100644 --- a/src/lexer.ts +++ b/src/lexer.ts @@ -1,5 +1,5 @@ -import { CompilerError, Token, TokenType } from './types.js'; import { Position, Range } from 'lsp-types'; +import { Token, TokenType } from './types.js'; const keywords: Record = { operator: TokenType.OperatorKeyword, diff --git a/src/types.ts b/src/types.ts index be935bc..8edd771 100644 --- a/src/types.ts +++ b/src/types.ts @@ -1,4 +1,4 @@ -import { CodeAction, CodeActionKind, Range } from 'lsp-types'; +import { CodeAction, Range } from 'lsp-types'; /** * Every token type a 
syntax script declaration file can contain. If something can't be recognized as a token, @@ -484,7 +484,7 @@ export interface GlobalStatement extends Statement { * @since 0.0.1-alpha */ export type Node = - ProgramStatement | OperatorStatement | CompileStatement | ImportStatement | ImportsStatement | FunctionStatement | KeywordStatement | RuleStatement | GlobalStatement | + ProgramStatement | OperatorStatement | CompileStatement | ImportStatement | ImportsStatement | FunctionStatement | KeywordStatement | RuleStatement | GlobalStatement | StringExpression | PrimitiveTypeExpression | VariableExpression | WhitespaceIdentifierExpression | BraceExpression | SquareExpression | ParenExpression; /** @@ -579,6 +579,6 @@ interface NodeTypes { * @since 0.0.2-alpha * @version 1.0.0 */ -export function statementIsA(node:Statement,nodeType:T): node is NodeTypes[T] { +export function statementIsA(node: Statement, nodeType: T): node is NodeTypes[T] { return node.type === nodeType; } \ No newline at end of file From a3ea4e70ea901b5126a2f5dceb6190683d7492ca Mon Sep 17 00:00:00 2001 From: efekos Date: Tue, 16 Apr 2024 18:58:31 +0300 Subject: [PATCH 08/47] rename export interfacted to exported interfaces --- src/compiler.ts | 41 ++++++++++++++++++++++++----------------- src/index.ts | 4 ++-- 2 files changed, 26 insertions(+), 19 deletions(-) diff --git a/src/compiler.ts b/src/compiler.ts index 5bbe26d..4d4dda6 100644 --- a/src/compiler.ts +++ b/src/compiler.ts @@ -95,7 +95,7 @@ export class SyntaxScriptCompiler { }); - const operatorStmtExport: Operator = { imports: {}, outputGenerators: {}, regexMatcher, type: ExportType.Operator }; + const operatorStmtExport: ExportedOperator = { imports: {}, outputGenerators: {}, regexMatcher, type: ExportType.Operator }; //# Handle statements statement.body.forEach(stmt => { @@ -137,7 +137,7 @@ export class SyntaxScriptCompiler { out.push(operatorStmtExport); } else if (statementIsA(statement, NodeType.Function)) { - const statementExport: 
Function = { type: ExportType.Function, args: statement.arguments.map(s => regexes[s]), name: statement.name, formatNames: {}, imports: {} }; + const statementExport: ExportedFunction = { type: ExportType.Function, args: statement.arguments.map(s => regexes[s]), name: statement.name, formatNames: {}, imports: {} }; statement.body.forEach(stmt => { @@ -160,6 +160,8 @@ export class SyntaxScriptCompiler { out.push(statementExport); } else if (statementIsA(statement, NodeType.Keyword)) { out.push({ type: ExportType.Keyword, word: statement.word }); + } else if (statementIsA(statement,NodeType.Global)) { + //TODO } else throw new CompilerError(statement.range, `Unexpected \'${statement.type}\' statement after export statement.`, file); }); @@ -204,7 +206,7 @@ export class SyntaxScriptCompiler { if (!existsSync(pathToImport)) throw new CompilerError(importStmt.range, `File \'${pathToImport}\' imported from \'${file}\' does not exist.`); this.exportData[pathToImport].forEach(exported => { if (exported.type === ExportType.Operator) - if (imported.filter(r => r.type === ExportType.Operator).some(i => exported.regexMatcher === (i as Operator).regexMatcher)) throw new CompilerError(importStmt.range, `There are more than one operators with the same syntax imported to \'${file}\'.`); + if (imported.filter(r => r.type === ExportType.Operator).some(i => exported.regexMatcher === (i as ExportedOperator).regexMatcher)) throw new CompilerError(importStmt.range, `There are more than one operators with the same syntax imported to \'${file}\'.`); imported.push(exported); }); } @@ -245,46 +247,51 @@ export class SyntaxScriptCompiler { /** * Type of something that can be exported. - * @version 1.0.0 + * @version 1.0.1 * @since 0.0.1-alpha * @author efekos */ export enum ExportType { /** - * {@link Operator}. + * {@link ExportedOperator}. */ Operator, /** - * {@link Function}. + * {@link ExportedFunction}. */ Function, /** - * {@link Keyword}. + * {@link ExportedKeyword}. 
*/ - Keyword + Keyword, + + /** + * {@link ExportedGlobal}. + */ + Global } /** * Base exportable interface. * @author efekos - * @version 1.0.0 + * @version 1.0.1 * @since 0.0.1-alpha */ -export interface Export { +export interface Exported { type: ExportType; } /** * Represents an exported operator. Uses type {@link ExportType.Operator}. * @author efekos - * @version 1.0.0 + * @version 1.0.1 * @since 0.0.1-alpha */ -export interface Operator extends Export { +export interface ExportedOperator extends Exported { type: ExportType.Operator, regexMatcher: RegExp; outputGenerators: Record>; @@ -294,10 +301,10 @@ export interface Operator extends Export { /** * Represents an exported function. Uses type {@link ExportType.Function}. * @author efekos - * @version 1.0.0 + * @version 1.0.1 * @since 0.0.1-alpha */ -export interface Function extends Export { +export interface ExportedFunction extends Exported { type: ExportType.Function; name: string; args: RegExp[]; @@ -308,10 +315,10 @@ export interface Function extends Export { /** * Represents an exported keyword. Uses type {@link ExportType.Keyword}. * @author efekos - * @version 1.0.0 + * @version 1.0.1 * @since 0.0.1-alpha */ -export interface Keyword extends Export { +export interface ExportedKeyword extends Exported { type: ExportType.Keyword; word: string; } @@ -335,7 +342,7 @@ export type ReturnerMethod = () => R; /** * Any interface that represents something exportable. 
*/ -export type AnyExportable = Operator | Function | Keyword; +export type AnyExportable = ExportedOperator | ExportedFunction | ExportedKeyword; export const regexes: Record = { /** diff --git a/src/index.ts b/src/index.ts index 88ebc2d..5457c70 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,4 +1,4 @@ -import { AnyExportable, Export, ExportType, Function, Keyword, OneParameterMethod, Operator, ReturnerMethod, SyntaxScriptCompiler, escapeRegex } from './compiler.js'; +import { AnyExportable, Exported, ExportType, ExportedFunction, ExportedKeyword, OneParameterMethod, ExportedOperator, ReturnerMethod, SyntaxScriptCompiler, escapeRegex } from './compiler.js'; import { BaseRule, BooleanRule, Functionary, FunctionaryValueType, Rule, RuleType, StringRule, dictionary } from './dictionary/index.js'; import { BraceExpression, CompileStatement, Expression, FunctionStatement, ImportStatement, ImportsStatement, KeywordStatement, OperatorStatement, ParenExpression, PrimitiveTypeExpression, ProgramStatement, RuleStatement, SquareExpression, Statement, StringExpression, VariableExpression, WhitespaceIdentifierExpression } from './types.js'; import { CompilerError, isCompilerError } from './types.js'; @@ -16,7 +16,7 @@ export { BaseRule, BooleanRule, Rule, RuleType, StringRule }; export { Functionary, FunctionaryValueType }; export { SyntaxScriptCompiler, ExportType }; -export { AnyExportable, Export, Function, Keyword, OneParameterMethod, Operator, ReturnerMethod }; +export { AnyExportable, Exported, ExportedFunction, ExportedKeyword, OneParameterMethod, ExportedOperator, ReturnerMethod }; export { CompilerError }; export { Token, TokenType, Node, NodeType }; From a29c7a148eea43ef27de7cb49c4c663496667dc7 Mon Sep 17 00:00:00 2001 From: efekos Date: Tue, 16 Apr 2024 20:29:53 +0300 Subject: [PATCH 09/47] sort imports --- src/index.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/index.ts b/src/index.ts index 5457c70..19677a6 100644 --- a/src/index.ts 
+++ b/src/index.ts @@ -1,4 +1,4 @@ -import { AnyExportable, Exported, ExportType, ExportedFunction, ExportedKeyword, OneParameterMethod, ExportedOperator, ReturnerMethod, SyntaxScriptCompiler, escapeRegex } from './compiler.js'; +import { AnyExportable, ExportType, Exported, ExportedFunction, ExportedKeyword, ExportedOperator, OneParameterMethod, ReturnerMethod, SyntaxScriptCompiler, escapeRegex } from './compiler.js'; import { BaseRule, BooleanRule, Functionary, FunctionaryValueType, Rule, RuleType, StringRule, dictionary } from './dictionary/index.js'; import { BraceExpression, CompileStatement, Expression, FunctionStatement, ImportStatement, ImportsStatement, KeywordStatement, OperatorStatement, ParenExpression, PrimitiveTypeExpression, ProgramStatement, RuleStatement, SquareExpression, Statement, StringExpression, VariableExpression, WhitespaceIdentifierExpression } from './types.js'; import { CompilerError, isCompilerError } from './types.js'; From 35159a40a359119461015fc65e2bc4757d319289 Mon Sep 17 00:00:00 2001 From: efekos Date: Fri, 19 Apr 2024 21:23:47 +0300 Subject: [PATCH 10/47] setup es-test module --- .npmignore | 1 + package-lock.json | 384 +++++++++++++++++++++++++++++++++++ package.json | 10 +- src/dictionary/dictionary.ts | 2 +- src/dictionary/index.ts | 6 +- 5 files changed, 397 insertions(+), 6 deletions(-) diff --git a/.npmignore b/.npmignore index b4e4e00..2cb1551 100644 --- a/.npmignore +++ b/.npmignore @@ -1,6 +1,7 @@ # Folders /src /package +/dist/test # Dev-only or unrelated files .gitignore diff --git a/package-lock.json b/package-lock.json index 0188781..4f8b653 100644 --- a/package-lock.json +++ b/package-lock.json @@ -12,10 +12,13 @@ "js-levenshtein": "^1.1.6" }, "devDependencies": { + "@efekos/es-test": "^1.0.2", + "@types/chai": "^4.3.14", "@types/js-levenshtein": "^1.1.3", "@types/node": "^20.12.7", "@typescript-eslint/eslint-plugin": "^7.6.0", "@typescript-eslint/parser": "^7.6.0", + "chai": "^5.1.0", "eslint": "^8.57.0", 
"eslint-plugin-jsdoc": "^48.2.3", "lsp-types": "^3.17.0-f3" @@ -30,6 +33,31 @@ "node": ">=0.10.0" } }, + "node_modules/@efekos/es-test": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@efekos/es-test/-/es-test-1.0.2.tgz", + "integrity": "sha512-0CmdQSyifFyuG8AWpO6nDnkvYXH2Q9kfCbXUl5RUZTcLWS7x0rj7cinG8cFEmneGxl3MGdvA4gzCF6goPkmVGA==", + "dev": true, + "dependencies": { + "chalk": "^5.3.0", + "log-update": "^6.0.0" + }, + "bin": { + "estest": "bin/index.js" + } + }, + "node_modules/@efekos/es-test/node_modules/chalk": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.3.0.tgz", + "integrity": "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==", + "dev": true, + "engines": { + "node": "^12.17.0 || ^14.13 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, "node_modules/@es-joy/jsdoccomment": { "version": "0.42.0", "resolved": "https://registry.npmjs.org/@es-joy/jsdoccomment/-/jsdoccomment-0.42.0.tgz", @@ -212,6 +240,12 @@ "node": ">= 8" } }, + "node_modules/@types/chai": { + "version": "4.3.14", + "resolved": "https://registry.npmjs.org/@types/chai/-/chai-4.3.14.tgz", + "integrity": "sha512-Wj71sXE4Q4AkGdG9Tvq1u/fquNz9EdG4LIJMwVVII7ashjD/8cf8fyIfJAjRr6YcsXnSE8cOGQPq1gqeR8z+3w==", + "dev": true + }, "node_modules/@types/js-levenshtein": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/@types/js-levenshtein/-/js-levenshtein-1.1.3.tgz", @@ -472,6 +506,18 @@ "url": "https://github.com/sponsors/epoberezkin" } }, + "node_modules/ansi-escapes": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-6.2.1.tgz", + "integrity": "sha512-4nJ3yixlEthEJ9Rk4vPcdBRkZvQZlYyu8j4/Mqz5sgIkddmEnH2Yj2ZrnP9S3tQOvSNRUIgVNF/1yPpRAGNRig==", + "dev": true, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/ansi-regex": { 
"version": "5.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", @@ -520,6 +566,15 @@ "node": ">=8" } }, + "node_modules/assertion-error": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-2.0.1.tgz", + "integrity": "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==", + "dev": true, + "engines": { + "node": ">=12" + } + }, "node_modules/balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", @@ -568,6 +623,22 @@ "node": ">=6" } }, + "node_modules/chai": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/chai/-/chai-5.1.0.tgz", + "integrity": "sha512-kDZ7MZyM6Q1DhR9jy7dalKohXQ2yrlXkk59CR52aRKxJrobmlBNqnFQxX9xOX8w+4mz8SYlKJa/7D7ddltFXCw==", + "dev": true, + "dependencies": { + "assertion-error": "^2.0.1", + "check-error": "^2.0.0", + "deep-eql": "^5.0.1", + "loupe": "^3.1.0", + "pathval": "^2.0.0" + }, + "engines": { + "node": ">=12" + } + }, "node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -584,6 +655,30 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, + "node_modules/check-error": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-2.0.0.tgz", + "integrity": "sha512-tjLAOBHKVxtPoHe/SA7kNOMvhCRdCJ3vETdeY0RuAc9popf+hyaSV6ZEg9hr4cpWF7jmo/JSWEnLDrnijS9Tog==", + "dev": true, + "engines": { + "node": ">= 16" + } + }, + "node_modules/cli-cursor": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-4.0.0.tgz", + "integrity": "sha512-VGtlMu3x/4DOtIUwEkRezxUZ2lBacNJCHash0N0WeZDBS+7Ux1dm3XWAgWYxLJFMMdOeXMHXorshEFhbMSGelg==", + "dev": true, + "dependencies": { + "restore-cursor": "^4.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" 
+ } + }, "node_modules/color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", @@ -648,6 +743,15 @@ } } }, + "node_modules/deep-eql": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-5.0.1.tgz", + "integrity": "sha512-nwQCf6ne2gez3o1MxWifqkciwt0zhl0LO1/UwVu4uMBuPmflWM4oQ70XMqHqnBJA+nhzncaqL9HVL6KkHJ28lw==", + "dev": true, + "engines": { + "node": ">=6" + } + }, "node_modules/deep-is": { "version": "0.1.4", "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", @@ -678,6 +782,12 @@ "node": ">=6.0.0" } }, + "node_modules/emoji-regex": { + "version": "10.3.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.3.0.tgz", + "integrity": "sha512-QpLs9D9v9kArv4lfDEgg1X/gN5XLnf/A6l9cs8SPZLRZR3ZkY9+kwIQTxm+fsSej5UMYGE8fdoaZVIBlqG0XTw==", + "dev": true + }, "node_modules/escape-string-regexp": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", @@ -998,6 +1108,27 @@ "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", "dev": true }, + "node_modules/get-east-asian-width": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.2.0.tgz", + "integrity": "sha512-2nk+7SIVb14QrgXFHcm84tD4bKQz0RxPuMT8Ag5KPOq7J5fEmAg0UbXdTOSHqNuHSU28k55qnceesxXRZGzKWA==", + "dev": true, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/get-func-name": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.2.tgz", + "integrity": "sha512-8vXOvuE167CtIc3OyItco7N/dpRtBbYOsPsXCz7X/PMnlGjYjSGuZJgM1Y7mmew7BKf9BqvLX2tnOVy1BBUsxQ==", + "dev": true, + "engines": { + "node": "*" + } + }, "node_modules/glob": { "version": "7.2.3", "resolved": 
"https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", @@ -1176,6 +1307,21 @@ "node": ">=0.10.0" } }, + "node_modules/is-fullwidth-code-point": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-5.0.0.tgz", + "integrity": "sha512-OVa3u9kkBbw7b8Xw5F9P+D/T9X+Z4+JruYVNapTjPYZYUznQ5YfWeFkOj606XYYW8yugTfC8Pj0hYqvi4ryAhA==", + "dev": true, + "dependencies": { + "get-east-asian-width": "^1.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/is-glob": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", @@ -1302,6 +1448,61 @@ "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", "dev": true }, + "node_modules/log-update": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/log-update/-/log-update-6.0.0.tgz", + "integrity": "sha512-niTvB4gqvtof056rRIrTZvjNYE4rCUzO6X/X+kYjd7WFxXeJ0NwEFnRxX6ehkvv3jTwrXnNdtAak5XYZuIyPFw==", + "dev": true, + "dependencies": { + "ansi-escapes": "^6.2.0", + "cli-cursor": "^4.0.0", + "slice-ansi": "^7.0.0", + "strip-ansi": "^7.1.0", + "wrap-ansi": "^9.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/log-update/node_modules/ansi-regex": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/log-update/node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": 
"sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/loupe": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-3.1.0.tgz", + "integrity": "sha512-qKl+FrLXUhFuHUoDJG7f8P8gEMHq9NFS0c6ghXG1J0rldmZFQZoNVv/vyirE9qwCIhWZDsvEFd1sbFu3GvRQFg==", + "dev": true, + "dependencies": { + "get-func-name": "^2.0.1" + } + }, "node_modules/lru-cache": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", @@ -1342,6 +1543,15 @@ "node": ">=8.6" } }, + "node_modules/mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, "node_modules/minimatch": { "version": "9.0.4", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.4.tgz", @@ -1378,6 +1588,21 @@ "wrappy": "1" } }, + "node_modules/onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/optionator": { "version": "0.9.3", "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.3.tgz", @@ -1473,6 +1698,15 @@ "node": ">=8" } }, + "node_modules/pathval": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-2.0.0.tgz", + "integrity": 
"sha512-vE7JKRyES09KiunauX7nd2Q9/L7lhok4smP9RZTDeD4MVs72Dp2qNFVz39Nz5a0FVEW0BJR6C0DYrq6unoziZA==", + "dev": true, + "engines": { + "node": ">= 14.16" + } + }, "node_modules/picomatch": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", @@ -1532,6 +1766,22 @@ "node": ">=4" } }, + "node_modules/restore-cursor": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-4.0.0.tgz", + "integrity": "sha512-I9fPXU9geO9bHOt9pHHOhOkYerIMsmVaWB0rA2AI9ERh/+x/i7MV5HKBNrg+ljO5eoPVgCcnFuRjJ9uH6I/3eg==", + "dev": true, + "dependencies": { + "onetime": "^5.1.0", + "signal-exit": "^3.0.2" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/reusify": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", @@ -1616,6 +1866,12 @@ "node": ">=8" } }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true + }, "node_modules/slash": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", @@ -1625,6 +1881,34 @@ "node": ">=8" } }, + "node_modules/slice-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-7.1.0.tgz", + "integrity": "sha512-bSiSngZ/jWeX93BqeIAbImyTbEihizcwNjFoRUIY/T1wWQsfsm2Vw1agPKylXvQTU7iASGdHhyqRlqQzfz+Htg==", + "dev": true, + "dependencies": { + "ansi-styles": "^6.2.1", + "is-fullwidth-code-point": "^5.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/chalk/slice-ansi?sponsor=1" + } + }, + "node_modules/slice-ansi/node_modules/ansi-styles": { + "version": "6.2.1", + "resolved": 
"https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, "node_modules/spdx-exceptions": { "version": "2.5.0", "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz", @@ -1647,6 +1931,50 @@ "integrity": "sha512-sh8PWc/ftMqAAdFiBu6Fy6JUOYjqDJBJvIhpfDMyHrr0Rbp5liZqd4TjtQ/RgfLjKFZb+LMx5hpml5qOWy0qvg==", "dev": true }, + "node_modules/string-width": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-7.1.0.tgz", + "integrity": "sha512-SEIJCWiX7Kg4c129n48aDRwLbFb2LJmXXFrWBG4NGaRtMQ3myKPKbwrD1BKqQn74oCoNMBVrfDEr5M9YxCsrkw==", + "dev": true, + "dependencies": { + "emoji-regex": "^10.3.0", + "get-east-asian-width": "^1.0.0", + "strip-ansi": "^7.1.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/string-width/node_modules/ansi-regex": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/string-width/node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, "node_modules/strip-ansi": { "version": 
"6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", @@ -1781,6 +2109,62 @@ "node": ">= 8" } }, + "node_modules/wrap-ansi": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.0.tgz", + "integrity": "sha512-G8ura3S+3Z2G+mkgNRq8dqaFZAuxfsxpBB8OCTGRTCtp+l/v9nbFNmCUP1BZMts3G1142MsZfn6eeUKrr4PD1Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^6.2.1", + "string-width": "^7.0.0", + "strip-ansi": "^7.1.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi/node_modules/ansi-regex": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/wrap-ansi/node_modules/ansi-styles": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/wrap-ansi/node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, "node_modules/wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", diff --git 
a/package.json b/package.json index 7b034b6..93f8c90 100644 --- a/package.json +++ b/package.json @@ -4,12 +4,15 @@ "description": "Compiler used to compile Syntax Script projects.", "main": "dist/index.js", "types": "dist/index.d.ts", + "type": "module", "scripts": { "lint": "eslint ./src/**/*.ts --fix", "prebuild": "npm run lint && node distDeletor.mjs", "build": "tsc", "postbuild": "cls && echo Builded", - "prepack": "npm run build" + "prepack": "npm run build", + "test": "estest", + "pretest": "tsc" }, "repository": { "type": "git", @@ -35,10 +38,13 @@ }, "homepage": "https://github.com/syntaxs/compiler#readme", "devDependencies": { + "@efekos/es-test": "^1.0.2", + "@types/chai": "^4.3.14", "@types/js-levenshtein": "^1.1.3", "@types/node": "^20.12.7", "@typescript-eslint/eslint-plugin": "^7.6.0", "@typescript-eslint/parser": "^7.6.0", + "chai": "^5.1.0", "eslint": "^8.57.0", "eslint-plugin-jsdoc": "^48.2.3", "lsp-types": "^3.17.0-f3" @@ -46,4 +52,4 @@ "dependencies": { "js-levenshtein": "^1.1.6" } -} \ No newline at end of file +} diff --git a/src/dictionary/dictionary.ts b/src/dictionary/dictionary.ts index ff2ab68..53ba752 100644 --- a/src/dictionary/dictionary.ts +++ b/src/dictionary/dictionary.ts @@ -1,4 +1,4 @@ -import { Functionary, Rule, RuleType } from './index'; +import { Functionary, Rule, RuleType } from './index.js'; const rules: Rule[] = [ { diff --git a/src/dictionary/index.ts b/src/dictionary/index.ts index 998507e..02349ec 100644 --- a/src/dictionary/index.ts +++ b/src/dictionary/index.ts @@ -1,6 +1,6 @@ -import { BaseRule, BooleanRule, Rule, RuleType, StringRule } from './rules'; -import { Functionary, FunctionaryValueType } from './functionaries'; -import { dictionary } from './dictionary'; +import { BaseRule, BooleanRule, Rule, RuleType, StringRule } from './rules.js'; +import { Functionary, FunctionaryValueType } from './functionaries.js'; +import { dictionary } from './dictionary.js'; export { dictionary }; export { BaseRule, RuleType, 
Functionary, FunctionaryValueType, BooleanRule, Rule, StringRule }; \ No newline at end of file From afa5ca13fe82a8425da02112924d21d42492cd0e Mon Sep 17 00:00:00 2001 From: efekos Date: Fri, 19 Apr 2024 21:25:50 +0300 Subject: [PATCH 11/47] create a test for compiler statements (3 failed) --- src/test/compiler.test.ts | 139 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 139 insertions(+) create mode 100644 src/test/compiler.test.ts diff --git a/src/test/compiler.test.ts b/src/test/compiler.test.ts new file mode 100644 index 0000000..618fc33 --- /dev/null +++ b/src/test/compiler.test.ts @@ -0,0 +1,139 @@ +import { CompileStatement, FunctionStatement, GlobalStatement, ImportStatement, ImportsStatement, KeywordStatement, NodeType, ProgramStatement, RuleStatement, TokenType } from '../types.js'; +import { describe, it } from '@efekos/es-test/bin/testRunner.js'; +import { tokenizeSys, tokenizeSyx } from '../lexer.js'; +import { expect } from 'chai'; +import { syxparser } from '../ast.js'; + +describe('Compiler module', () => { + + it('should provide correct ranges', () => { + + const tokens = tokenizeSyx('keyword hello;'); + const r = tokens[0].range; + + expect(r).to.have.property('start').to.be.a('object'); + expect(r).to.have.property('end').to.be.a('object'); + expect(r.start).to.have.property('line').to.be.a('number'); + expect(r.start).to.have.property('character').to.be.a('number'); + expect(r.end).to.have.property('character').to.be.a('number'); + expect(r.end).to.have.property('line').to.be.a('number'); + expect(r).to.deep.equal({ end: { line: 1, character: 7 }, start: { line: 1, character: 0 } }); + + }); + + it('should provide correct tokenization', () => { + const t = tokenizeSyx('class } > ) ] , compile " export function global random import imports 1 keyword { < ( [ operator * rule ; \' | +s'); + const tList = [ + TokenType.ClassKeyword, TokenType.CloseBrace, TokenType.CloseDiamond, TokenType.CloseParen, TokenType.CloseSquare, TokenType.Comma, 
TokenType.CompileKeyword, TokenType.DoubleQuote, + TokenType.ExportKeyword, TokenType.FunctionKeyword, TokenType.GlobalKeyword, TokenType.Identifier, TokenType.ImportKeyword, TokenType.ImportsKeyword, TokenType.IntNumber, TokenType.KeywordKeyword, + TokenType.OpenBrace, TokenType.OpenDiamond, TokenType.OpenParen, TokenType.OpenSquare, TokenType.OperatorKeyword, TokenType.Raw, TokenType.RuleKeyword, TokenType.Semicolon, TokenType.SingleQuote, + TokenType.VarSeperator, TokenType.WhitespaceIdentifier, TokenType.EndOfFile + ]; + + expect(t).to.be.a('array'); + expect(t).to.have.lengthOf(tList.length); + expect(t.map(tt => tt.type)).to.be.deep.equal(tList); + + const sys = tokenizeSys('import \' " ; :::'); + const sysList = [TokenType.ImportKeyword, TokenType.SingleQuote, TokenType.DoubleQuote, TokenType.Semicolon, TokenType.EndOfFile]; + + expect(sys).to.be.a('array'); + expect(sys).to.have.lengthOf(sysList.length); + expect(sys.map(tt => tt.type)).to.be.deep.equal(sysList); + }); + + describe('should provide correct parsing', () => { + + function astTypeExpectations(ast:ProgramStatement){ + expect(ast).to.be.a('object'); + expect(ast).to.have.property('type').to.be.a('number').to.be.equal(NodeType.Program); + expect(ast).to.have.property('modifiers').to.be.a('array').to.have.lengthOf(0); + expect(ast).to.have.property('body').to.be.a('array').to.have.lengthOf(1); + expect(ast).to.have.property('range').to.be.a('object'); + expect(ast.range).to.have.property('start').to.be.a('object'); + expect(ast.range).to.have.property('end').to.be.a('object'); + expect(ast.range.start).to.have.property('line').to.be.a('number'); + expect(ast.range.start).to.have.property('character').to.be.a('number'); + expect(ast.range.end).to.have.property('line').to.be.a('number'); + expect(ast.range.end).to.have.property('character').to.be.a('number'); + } + + it('for keyword statements', () => { + + const tokens = tokenizeSyx('keyword ruleish;'); + const ast = syxparser.parseTokens(tokens, 
'TEST_FILE'); + const stmt: KeywordStatement = { type: NodeType.Keyword, modifiers: [], range: { end: { line: 1, character: 14 }, start: { line: 1, character: 0 } }, word: 'ruleish' }; + + astTypeExpectations(ast); + expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); + + }); + + it('for rule statements', () => { + + const tokens = tokenizeSyx('rule \'function-value-return-enabled\': true;'); + const ast = syxparser.parseTokens(tokens, 'TEST_FILE'); + const stmt:RuleStatement = {range:{start:{line:1,character:0},end:{line:1,character:41}},modifiers:[],rule:'function-value-return-enabled',value:'true',type:NodeType.Rule}; + + astTypeExpectations(ast); + expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); + + }); + + it('for compile statements',()=>{ + + const tokens = tokenizeSyx('compile(ts,js) \'test\';'); + const ast = syxparser.parseTokens(tokens,'TEST_FILE'); + const stmt:CompileStatement = {range:{start:{line:1,character:0},end:{line:1,character:20}},formats:['ts','js'],type:NodeType.Compile,modifiers:[],body:[{type:NodeType.String,modifiers:[],range:{start:{line:1,character:15},end:{line:1,character:20}},value:'test'}]}; + + astTypeExpectations(ast); + expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); + + }); + + it('for imports statements',()=>{ + + const tokens = tokenizeSyx('imports(ts,js) \'math\';'); + const ast = syxparser.parseTokens(tokens,'TEST_FILE'); + const stmt:ImportsStatement = {range:{start:{line:1,character:0},end:{line:1,character:20}},formats:['ts','js'],type:NodeType.Imports,modifiers:[],module:'math'}; + + astTypeExpectations(ast); + expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); + + }); + + it('for global statements',()=>{ + + const tokens = tokenizeSyx('global randomizer {}'); + const ast = syxparser.parseTokens(tokens,'TEST_FILE'); + const stmt:GlobalStatement = {range:{start:{line:1,character:0},end:{line:1,character:19}},name:'randomizer',type:NodeType.Global,modifiers:[],body:[]}; + + 
astTypeExpectations(ast); + expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); + + }); + + it('for function statements',()=>{ + + const tokens = tokenizeSyx('function randomizer {}'); + const ast = syxparser.parseTokens(tokens,'TEST_FILE'); + const stmt:FunctionStatement = {range:{start:{line:1,character:0},end:{line:1,character:27}},name:'randomizer',type:NodeType.Function,modifiers:[],body:[],arguments:['int']}; + + astTypeExpectations(ast); + expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); + + }); + + it('for import statements',()=>{ + + const tokens = tokenizeSyx('import \'./math\';'); + const ast = syxparser.parseTokens(tokens,'TEST_FILE'); + const stmt:ImportStatement = {range:{start:{line:1,character:0},end:{line:1,character:14}},type:NodeType.Import,modifiers:[],path:'./math'}; + + astTypeExpectations(ast); + expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); + + }); + + }); +}); \ No newline at end of file From 09e046d38061cbad9749cda7e97f08f51eefdc39 Mon Sep 17 00:00:00 2001 From: efekos Date: Fri, 19 Apr 2024 21:46:19 +0300 Subject: [PATCH 12/47] fix range ends in parser tests (4 failed) --- src/test/compiler.test.ts | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/src/test/compiler.test.ts b/src/test/compiler.test.ts index 618fc33..6cb67a2 100644 --- a/src/test/compiler.test.ts +++ b/src/test/compiler.test.ts @@ -62,7 +62,7 @@ describe('Compiler module', () => { const tokens = tokenizeSyx('keyword ruleish;'); const ast = syxparser.parseTokens(tokens, 'TEST_FILE'); - const stmt: KeywordStatement = { type: NodeType.Keyword, modifiers: [], range: { end: { line: 1, character: 14 }, start: { line: 1, character: 0 } }, word: 'ruleish' }; + const stmt: KeywordStatement = { type: NodeType.Keyword, modifiers: [], range: { end: { line: 1, character: 15 }, start: { line: 1, character: 0 } }, word: 'ruleish' }; astTypeExpectations(ast); expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); 
@@ -73,7 +73,7 @@ describe('Compiler module', () => { const tokens = tokenizeSyx('rule \'function-value-return-enabled\': true;'); const ast = syxparser.parseTokens(tokens, 'TEST_FILE'); - const stmt:RuleStatement = {range:{start:{line:1,character:0},end:{line:1,character:41}},modifiers:[],rule:'function-value-return-enabled',value:'true',type:NodeType.Rule}; + const stmt:RuleStatement = {range:{start:{line:1,character:0},end:{line:1,character:42}},modifiers:[],rule:'function-value-return-enabled',value:'true',type:NodeType.Rule}; astTypeExpectations(ast); expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); @@ -84,7 +84,7 @@ describe('Compiler module', () => { const tokens = tokenizeSyx('compile(ts,js) \'test\';'); const ast = syxparser.parseTokens(tokens,'TEST_FILE'); - const stmt:CompileStatement = {range:{start:{line:1,character:0},end:{line:1,character:20}},formats:['ts','js'],type:NodeType.Compile,modifiers:[],body:[{type:NodeType.String,modifiers:[],range:{start:{line:1,character:15},end:{line:1,character:20}},value:'test'}]}; + const stmt:CompileStatement = {range:{start:{line:1,character:0},end:{line:1,character:21}},formats:['ts','js'],type:NodeType.Compile,modifiers:[],body:[{type:NodeType.String,modifiers:[],range:{start:{line:1,character:15},end:{line:1,character:20}},value:'test'}]}; astTypeExpectations(ast); expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); @@ -95,7 +95,7 @@ describe('Compiler module', () => { const tokens = tokenizeSyx('imports(ts,js) \'math\';'); const ast = syxparser.parseTokens(tokens,'TEST_FILE'); - const stmt:ImportsStatement = {range:{start:{line:1,character:0},end:{line:1,character:20}},formats:['ts','js'],type:NodeType.Imports,modifiers:[],module:'math'}; + const stmt:ImportsStatement = {range:{start:{line:1,character:0},end:{line:1,character:21}},formats:['ts','js'],type:NodeType.Imports,modifiers:[],module:'math'}; astTypeExpectations(ast); expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); 
@@ -106,7 +106,7 @@ describe('Compiler module', () => { const tokens = tokenizeSyx('global randomizer {}'); const ast = syxparser.parseTokens(tokens,'TEST_FILE'); - const stmt:GlobalStatement = {range:{start:{line:1,character:0},end:{line:1,character:19}},name:'randomizer',type:NodeType.Global,modifiers:[],body:[]}; + const stmt:GlobalStatement = {range:{start:{line:1,character:0},end:{line:1,character:20}},name:'randomizer',type:NodeType.Global,modifiers:[],body:[]}; astTypeExpectations(ast); expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); @@ -117,7 +117,7 @@ describe('Compiler module', () => { const tokens = tokenizeSyx('function randomizer {}'); const ast = syxparser.parseTokens(tokens,'TEST_FILE'); - const stmt:FunctionStatement = {range:{start:{line:1,character:0},end:{line:1,character:27}},name:'randomizer',type:NodeType.Function,modifiers:[],body:[],arguments:['int']}; + const stmt:FunctionStatement = {range:{start:{line:1,character:0},end:{line:1,character:28}},name:'randomizer',type:NodeType.Function,modifiers:[],body:[],arguments:['int']}; astTypeExpectations(ast); expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); @@ -128,7 +128,7 @@ describe('Compiler module', () => { const tokens = tokenizeSyx('import \'./math\';'); const ast = syxparser.parseTokens(tokens,'TEST_FILE'); - const stmt:ImportStatement = {range:{start:{line:1,character:0},end:{line:1,character:14}},type:NodeType.Import,modifiers:[],path:'./math'}; + const stmt:ImportStatement = {range:{start:{line:1,character:0},end:{line:1,character:15}},type:NodeType.Import,modifiers:[],path:'./math'}; astTypeExpectations(ast); expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); From 209451763e6eef25382f9cae5ff7cd32a0ee9a7b Mon Sep 17 00:00:00 2001 From: efekos Date: Fri, 19 Apr 2024 22:34:21 +0300 Subject: [PATCH 13/47] pass 4 tests related to wrong range issues --- src/ast.ts | 9 +++-- src/lexer.ts | 4 +-- src/test/compiler.test.ts | 69 
++++++++++++++++++++------------------- 3 files changed, 43 insertions(+), 39 deletions(-) diff --git a/src/ast.ts b/src/ast.ts index 01ba8db..2b002d0 100644 --- a/src/ast.ts +++ b/src/ast.ts @@ -64,14 +64,16 @@ export namespace syxparser { if (!(statementIsA(boolEx, NodeType.String) && dictionary.RuleTypeRegexes.boolean.test(boolEx.value))) throw new CompilerError(boolEx.range, `Rule '${rule.name}' requires a boolean value, found '${boolEx.value}'.`, filePath); if (at().type !== TokenType.Semicolon) throw new CompilerError(at().range, `Expected semicolon after rule statement, found '${at().value}'.`, filePath); - return node({ type: NodeType.Rule, rule: ruleExpr.value, value: boolEx.value, range: combineTwo(token, tokens.shift()), modifiers: [] }, put); + tokens.shift(); + return node({ type: NodeType.Rule, rule: ruleExpr.value, value: boolEx.value, range: combineTwo(token, boolEx.range), modifiers: [] }, put); } else if (rule.type === 'keyword') { const keyEx = parseExpression(false, false, true); if (!statementIsA(keyEx, NodeType.String)) throw new CompilerError(keyEx.range, 'Excepted keyword.', filePath); if (!program.body.some(s => statementIsA(s, NodeType.Keyword) && s.word === keyEx.value)) throw new CompilerError(keyEx.range, `Can't find keyword '${keyEx.value}'.`, filePath, caf.mk(keyEx.value, program, keyEx.range, filePath)); if (at().type !== TokenType.Semicolon) throw new CompilerError(at().range, `Expected semicolon after rule statement, found ${at().value}.`, filePath); - return node({ type: NodeType.Rule, rule: ruleExpr.value, value: keyEx.value, range: combineTwo(token, tokens.shift()), modifiers: [] }, put); + tokens.shift(); + return node({ type: NodeType.Rule, rule: ruleExpr.value, value: keyEx.value, range: combineTwo(token, keyEx.range), modifiers: [] }, put); } } @@ -190,8 +192,9 @@ export namespace syxparser { while (at().type !== TokenType.Semicolon) { const expr = parseExpression(false, false); statement.body.push(expr as Expression); + 
statement.range = combineTwo(token, expr.range); } - statement.range = combineTwo(token, tokens.shift()); // Skip semicolon and make it the end of the range. + tokens.shift(); return node(statement, put); } diff --git a/src/lexer.ts b/src/lexer.ts index 29f0cb1..fc934c5 100644 --- a/src/lexer.ts +++ b/src/lexer.ts @@ -53,7 +53,7 @@ function isInt(src: string) { } function opr(line: number, character: number): Range { - return { end: { line, character }, start: { line, character } }; + return { end: { line, character:character+1 }, start: { line, character:character } }; } function pos(line: number, character: number): Position { @@ -76,7 +76,7 @@ function tpr(start: Position, end: Position): Range { export function tokenizeSyx(source: string): Token[] { const tokens: Token[] = []; const src = source.split(''); - let curPos = 0; + let curPos = 1; let curLine = 1; while (src.length > 0) { diff --git a/src/test/compiler.test.ts b/src/test/compiler.test.ts index 6cb67a2..a47838e 100644 --- a/src/test/compiler.test.ts +++ b/src/test/compiler.test.ts @@ -1,23 +1,29 @@ import { CompileStatement, FunctionStatement, GlobalStatement, ImportStatement, ImportsStatement, KeywordStatement, NodeType, ProgramStatement, RuleStatement, TokenType } from '../types.js'; import { describe, it } from '@efekos/es-test/bin/testRunner.js'; import { tokenizeSys, tokenizeSyx } from '../lexer.js'; +import { Range } from 'lsp-types'; import { expect } from 'chai'; import { syxparser } from '../ast.js'; describe('Compiler module', () => { + function rangeExpectations(r: Range) { + expect(r).to.have.property('start').to.be.a('object'); + expect(r).to.have.property('end').to.be.a('object'); + expect(r.start).to.have.property('line').to.be.a('number').to.be.greaterThanOrEqual(0); + expect(r.start).to.have.property('character').to.be.a('number').to.be.greaterThanOrEqual(0); + expect(r.end).to.have.property('character').to.be.a('number').to.be.greaterThanOrEqual(0); + 
expect(r.end).to.have.property('line').to.be.a('number').to.be.greaterThanOrEqual(0); + } + it('should provide correct ranges', () => { const tokens = tokenizeSyx('keyword hello;'); - const r = tokens[0].range; - expect(r).to.have.property('start').to.be.a('object'); - expect(r).to.have.property('end').to.be.a('object'); - expect(r.start).to.have.property('line').to.be.a('number'); - expect(r.start).to.have.property('character').to.be.a('number'); - expect(r.end).to.have.property('character').to.be.a('number'); - expect(r.end).to.have.property('line').to.be.a('number'); - expect(r).to.deep.equal({ end: { line: 1, character: 7 }, start: { line: 1, character: 0 } }); + rangeExpectations(tokens[0].range); + expect(tokens[0].range).to.deep.equal({ end: { line: 1, character: 8 }, start: { line: 1, character: 1 } }); + expect(tokens[1].range).to.deep.equal({ end: { line: 1, character: 14 }, start: { line: 1, character: 9 } }); + expect(tokens[2].range).to.deep.equal({ end: { line: 1, character: 15 }, start: { line: 1, character: 14 } }); }); @@ -44,25 +50,20 @@ describe('Compiler module', () => { describe('should provide correct parsing', () => { - function astTypeExpectations(ast:ProgramStatement){ + function astTypeExpectations(ast: ProgramStatement) { expect(ast).to.be.a('object'); expect(ast).to.have.property('type').to.be.a('number').to.be.equal(NodeType.Program); expect(ast).to.have.property('modifiers').to.be.a('array').to.have.lengthOf(0); expect(ast).to.have.property('body').to.be.a('array').to.have.lengthOf(1); expect(ast).to.have.property('range').to.be.a('object'); - expect(ast.range).to.have.property('start').to.be.a('object'); - expect(ast.range).to.have.property('end').to.be.a('object'); - expect(ast.range.start).to.have.property('line').to.be.a('number'); - expect(ast.range.start).to.have.property('character').to.be.a('number'); - expect(ast.range.end).to.have.property('line').to.be.a('number'); - 
expect(ast.range.end).to.have.property('character').to.be.a('number'); + rangeExpectations(ast.range); } it('for keyword statements', () => { const tokens = tokenizeSyx('keyword ruleish;'); const ast = syxparser.parseTokens(tokens, 'TEST_FILE'); - const stmt: KeywordStatement = { type: NodeType.Keyword, modifiers: [], range: { end: { line: 1, character: 15 }, start: { line: 1, character: 0 } }, word: 'ruleish' }; + const stmt: KeywordStatement = { type: NodeType.Keyword, modifiers: [], range: { end: { line: 1, character: 16 }, start: { line: 1, character: 1 } }, word: 'ruleish' }; astTypeExpectations(ast); expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); @@ -73,62 +74,62 @@ describe('Compiler module', () => { const tokens = tokenizeSyx('rule \'function-value-return-enabled\': true;'); const ast = syxparser.parseTokens(tokens, 'TEST_FILE'); - const stmt:RuleStatement = {range:{start:{line:1,character:0},end:{line:1,character:42}},modifiers:[],rule:'function-value-return-enabled',value:'true',type:NodeType.Rule}; + const stmt: RuleStatement = { range: { start: { line: 1, character: 1 }, end: { line: 1, character: 43 } }, modifiers: [], rule: 'function-value-return-enabled', value: 'true', type: NodeType.Rule }; astTypeExpectations(ast); expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); }); - it('for compile statements',()=>{ + it('for compile statements', () => { const tokens = tokenizeSyx('compile(ts,js) \'test\';'); - const ast = syxparser.parseTokens(tokens,'TEST_FILE'); - const stmt:CompileStatement = {range:{start:{line:1,character:0},end:{line:1,character:21}},formats:['ts','js'],type:NodeType.Compile,modifiers:[],body:[{type:NodeType.String,modifiers:[],range:{start:{line:1,character:15},end:{line:1,character:20}},value:'test'}]}; + const ast = syxparser.parseTokens(tokens, 'TEST_FILE'); + const stmt: CompileStatement = { range: { start: { line: 1, character: 1 }, end: { line: 1, character: 22 } }, formats: ['ts', 'js'], type: 
NodeType.Compile, modifiers: [], body: [{ type: NodeType.String, modifiers: [], range: { start: { line: 1, character: 16 }, end: { line: 1, character: 22 } }, value: 'test' }] }; astTypeExpectations(ast); expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); }); - it('for imports statements',()=>{ + it('for imports statements', () => { const tokens = tokenizeSyx('imports(ts,js) \'math\';'); - const ast = syxparser.parseTokens(tokens,'TEST_FILE'); - const stmt:ImportsStatement = {range:{start:{line:1,character:0},end:{line:1,character:21}},formats:['ts','js'],type:NodeType.Imports,modifiers:[],module:'math'}; + const ast = syxparser.parseTokens(tokens, 'TEST_FILE'); + const stmt: ImportsStatement = { range: { start: { line: 1, character: 1 }, end: { line: 1, character: 22 } }, formats: ['ts', 'js'], type: NodeType.Imports, modifiers: [], module: 'math' }; astTypeExpectations(ast); expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); }); - it('for global statements',()=>{ + it('for global statements', () => { const tokens = tokenizeSyx('global randomizer {}'); - const ast = syxparser.parseTokens(tokens,'TEST_FILE'); - const stmt:GlobalStatement = {range:{start:{line:1,character:0},end:{line:1,character:20}},name:'randomizer',type:NodeType.Global,modifiers:[],body:[]}; - + const ast = syxparser.parseTokens(tokens, 'TEST_FILE'); + const stmt: GlobalStatement = { range: { start: { line: 1, character: 1 }, end: { line: 1, character: 21 } }, name: 'randomizer', type: NodeType.Global, modifiers: [], body: [] }; + astTypeExpectations(ast); expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); }); - it('for function statements',()=>{ + it('for function statements', () => { const tokens = tokenizeSyx('function randomizer {}'); - const ast = syxparser.parseTokens(tokens,'TEST_FILE'); - const stmt:FunctionStatement = 
{range:{start:{line:1,character:0},end:{line:1,character:28}},name:'randomizer',type:NodeType.Function,modifiers:[],body:[],arguments:['int']}; - + const ast = syxparser.parseTokens(tokens, 'TEST_FILE'); + const stmt: FunctionStatement = { range: { start: { line: 1, character: 1 }, end: { line: 1, character: 29 } }, name: 'randomizer', type: NodeType.Function, modifiers: [], body: [], arguments: ['int'] }; + astTypeExpectations(ast); expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); }); - it('for import statements',()=>{ + it('for import statements', () => { const tokens = tokenizeSyx('import \'./math\';'); - const ast = syxparser.parseTokens(tokens,'TEST_FILE'); - const stmt:ImportStatement = {range:{start:{line:1,character:0},end:{line:1,character:15}},type:NodeType.Import,modifiers:[],path:'./math'}; + const ast = syxparser.parseTokens(tokens, 'TEST_FILE'); + const stmt: ImportStatement = { range: { start: { line: 1, character: 1 }, end: { line: 1, character: 16 } }, type: NodeType.Import, modifiers: [], path: './math' }; astTypeExpectations(ast); expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); From 27ab0c8cf133e6456db61354b42521fb2db8f95f Mon Sep 17 00:00:00 2001 From: efekos Date: Sat, 20 Apr 2024 02:57:51 +0300 Subject: [PATCH 14/47] update exports --- src/index.ts | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/src/index.ts b/src/index.ts index 19677a6..9f75f3b 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,16 +1,12 @@ -import { AnyExportable, ExportType, Exported, ExportedFunction, ExportedKeyword, ExportedOperator, OneParameterMethod, ReturnerMethod, SyntaxScriptCompiler, escapeRegex } from './compiler.js'; +import { AnyExportable, ExportType, Exported, ExportedFunction, ExportedKeyword, ExportedOperator, OneParameterMethod, ReturnerMethod, SyntaxScriptCompiler, escapeRegex, regexes } from './compiler.js'; import { BaseRule, BooleanRule, Functionary, FunctionaryValueType, Rule, RuleType, 
StringRule, dictionary } from './dictionary/index.js'; -import { BraceExpression, CompileStatement, Expression, FunctionStatement, ImportStatement, ImportsStatement, KeywordStatement, OperatorStatement, ParenExpression, PrimitiveTypeExpression, ProgramStatement, RuleStatement, SquareExpression, Statement, StringExpression, VariableExpression, WhitespaceIdentifierExpression } from './types.js'; -import { CompilerError, isCompilerError } from './types.js'; -import { Node, NodeType, Token, TokenType } from './types.js'; -import { SyxConfig, SyxConfigCompile } from './types.js'; +import { BraceExpression, CompileStatement, CompilerError, Expression, FunctionStatement, GlobalStatement, ImportStatement, ImportsStatement, KeywordStatement, Node, NodeType, OperatorStatement, ParenExpression, PrimitiveTypeExpression, ProgramStatement, RuleStatement, SquareExpression,Statement,StringExpression,SyxConfig,SyxConfigCompile,Token,TokenType,VariableExpression,WhitespaceIdentifierExpression,isCompilerError,statementIsA } from './types.js'; +import { createSyntaxScriptDiagnosticReport,subRange } from './diagnostic.js'; import { sysparser, syxparser } from './ast.js'; import { tokenizeSys, tokenizeSyx } from './lexer.js'; -import { createSyntaxScriptDiagnosticReport } from './diagnostic.js'; - export { sysparser, syxparser, dictionary }; -export { escapeRegex, createSyntaxScriptDiagnosticReport, tokenizeSys, tokenizeSyx, isCompilerError }; +export { escapeRegex, createSyntaxScriptDiagnosticReport, tokenizeSys, tokenizeSyx, isCompilerError, statementIsA, subRange }; export { BaseRule, BooleanRule, Rule, RuleType, StringRule }; export { Functionary, FunctionaryValueType }; @@ -21,5 +17,5 @@ export { AnyExportable, Exported, ExportedFunction, ExportedKeyword, OneParamete export { CompilerError }; export { Token, TokenType, Node, NodeType }; export { Expression, BraceExpression, ParenExpression, SquareExpression, StringExpression, VariableExpression, PrimitiveTypeExpression, 
WhitespaceIdentifierExpression }; -export { Statement, RuleStatement, ImportStatement, ImportsStatement, CompileStatement, OperatorStatement, FunctionStatement, KeywordStatement, ProgramStatement }; +export { Statement, RuleStatement, ImportStatement, ImportsStatement, CompileStatement, OperatorStatement, FunctionStatement, KeywordStatement, ProgramStatement, GlobalStatement }; export { SyxConfig, SyxConfigCompile }; \ No newline at end of file From 41b800d556d932b8c2aa0c56a889880d2f68c667 Mon Sep 17 00:00:00 2001 From: efekos Date: Sat, 20 Apr 2024 03:00:51 +0300 Subject: [PATCH 15/47] add conflicts to rules and two new rules --- src/dictionary/dictionary.ts | 21 ++++++++++++++++++--- src/dictionary/rules.ts | 1 + 2 files changed, 19 insertions(+), 3 deletions(-) diff --git a/src/dictionary/dictionary.ts b/src/dictionary/dictionary.ts index 53ba752..6bd889c 100644 --- a/src/dictionary/dictionary.ts +++ b/src/dictionary/dictionary.ts @@ -4,17 +4,32 @@ const rules: Rule[] = [ { name: 'imports-keyword', type: 'keyword', - default: 'import' + default: 'import', + conflicts:[] }, { name: 'function-value-return-enabled', type: 'boolean', - default: false + default: false, + conflicts:[] }, { name: 'function-value-return-keyword', type: 'keyword', - default: 'return' + default: 'return', + conflicts:[] + }, + { + name: 'enforce-single-string-quotes', + type: 'boolean', + default: false, + conflicts:['enforge-double-string-quotes'] + }, + { + name: 'enforce-double-string-quotes', + type: 'boolean', + default: false, + conflicts:['enforce-single-string-quotes'] } ]; diff --git a/src/dictionary/rules.ts b/src/dictionary/rules.ts index 7b98648..d5fa1bd 100644 --- a/src/dictionary/rules.ts +++ b/src/dictionary/rules.ts @@ -12,6 +12,7 @@ export type RuleType = 'keyword' | 'boolean'; export interface BaseRule { name: string; type: RuleType; + conflicts: string[]; } /** From 51263dd99bdead3a211f90edbdaa0f6ffeddc50d Mon Sep 17 00:00:00 2001 From: efekos Date: Sat, 20 Apr 2024 
03:01:16 +0300 Subject: [PATCH 16/47] add 2 new test --- src/index.ts | 2 +- src/test/compiler.test.ts | 39 ++++++++++++++++++++++++++++++++++++--- 2 files changed, 37 insertions(+), 4 deletions(-) diff --git a/src/index.ts b/src/index.ts index 9f75f3b..cdffeba 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,4 +1,4 @@ -import { AnyExportable, ExportType, Exported, ExportedFunction, ExportedKeyword, ExportedOperator, OneParameterMethod, ReturnerMethod, SyntaxScriptCompiler, escapeRegex, regexes } from './compiler.js'; +import { AnyExportable, ExportType, Exported, ExportedFunction, ExportedKeyword, ExportedOperator, OneParameterMethod, ReturnerMethod, SyntaxScriptCompiler, escapeRegex } from './compiler.js'; import { BaseRule, BooleanRule, Functionary, FunctionaryValueType, Rule, RuleType, StringRule, dictionary } from './dictionary/index.js'; import { BraceExpression, CompileStatement, CompilerError, Expression, FunctionStatement, GlobalStatement, ImportStatement, ImportsStatement, KeywordStatement, Node, NodeType, OperatorStatement, ParenExpression, PrimitiveTypeExpression, ProgramStatement, RuleStatement, SquareExpression,Statement,StringExpression,SyxConfig,SyxConfigCompile,Token,TokenType,VariableExpression,WhitespaceIdentifierExpression,isCompilerError,statementIsA } from './types.js'; import { createSyntaxScriptDiagnosticReport,subRange } from './diagnostic.js'; diff --git a/src/test/compiler.test.ts b/src/test/compiler.test.ts index a47838e..22349b5 100644 --- a/src/test/compiler.test.ts +++ b/src/test/compiler.test.ts @@ -1,7 +1,8 @@ -import { CompileStatement, FunctionStatement, GlobalStatement, ImportStatement, ImportsStatement, KeywordStatement, NodeType, ProgramStatement, RuleStatement, TokenType } from '../types.js'; -import { describe, it } from '@efekos/es-test/bin/testRunner.js'; +import { CompileStatement, FunctionStatement, GlobalStatement, ImportStatement, ImportsStatement, KeywordStatement, NodeType, ProgramStatement, RuleStatement, 
TokenType, isCompilerError } from '../types.js'; +import { Diagnostic, DiagnosticSeverity, DocumentDiagnosticReportKind, Range } from 'lsp-types'; +import { describe, it, onError } from '@efekos/es-test/bin/testRunner.js'; import { tokenizeSys, tokenizeSyx } from '../lexer.js'; -import { Range } from 'lsp-types'; +import { createSyntaxScriptDiagnosticReport } from '../diagnostic.js'; import { expect } from 'chai'; import { syxparser } from '../ast.js'; @@ -136,5 +137,37 @@ describe('Compiler module', () => { }); + it('for export statements', () => { + + const tokens = tokenizeSyx('export keyword ruleish;'); + const ast = syxparser.parseTokens(tokens, 'TEST_FILE'); + const stmt: KeywordStatement = { type: NodeType.Keyword, modifiers: [{range:{end:{line:1,character:7},start:{line:1,character:1}},type:TokenType.ExportKeyword,value:'export'}], range: { end: { line: 1, character: 23 }, start: { line: 1, character: 1 } }, word: 'ruleish' }; + + astTypeExpectations(ast); + expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); + + }); + }); + + it('should provide correct diagnostic reports',()=>{ + + const report = createSyntaxScriptDiagnosticReport('TEST_FILE.syx','keyword ruleis'); + + expect(report).to.be.a('object'); + expect(report).to.have.property('items').to.be.a('array').to.have.lengthOf(1); + expect(report).to.have.property('kind').to.be.a('string').to.be.equal(DocumentDiagnosticReportKind.Full); + + const diag = report.items[0]; + const item: Diagnostic = {message:'Expected \';\' after statement, found \'EOF\'.',range:{start:{line:0,character:0},end:{line:0,character:0}},severity:DiagnosticSeverity.Error,source:'syntax-script',data:[]}; + + expect(diag).to.have.property('message').to.be.a('string'); + expect(diag).to.have.property('range'); + expect(diag).to.have.property('severity').to.be.a('number').to.be.equal(DiagnosticSeverity.Error); + rangeExpectations(diag.range); + 
expect(diag).to.have.property('source').to.be.a('string').to.be.equal('syntax-script'); + expect(diag).to.be.a('object').to.be.deep.equal(item); + + }); + }); \ No newline at end of file From c6017f4270e4160b8644581fd046807a339df3e7 Mon Sep 17 00:00:00 2001 From: efekos Date: Sat, 20 Apr 2024 22:35:47 +0300 Subject: [PATCH 17/47] add a script to auto-update vscode extension module --- package.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/package.json b/package.json index 93f8c90..b3caa58 100644 --- a/package.json +++ b/package.json @@ -12,7 +12,8 @@ "postbuild": "cls && echo Builded", "prepack": "npm run build", "test": "estest", - "pretest": "tsc" + "pretest": "tsc", + "postpack": "cd ..&&cd ..&&cd .\\LanguageServer\\syntax-script\\server\\&&npm update @syntaxs/compiler" }, "repository": { "type": "git", From de4d2441be9901438c206dcdfe513bedbc9645ba Mon Sep 17 00:00:00 2001 From: efekos Date: Sat, 20 Apr 2024 22:36:30 +0300 Subject: [PATCH 18/47] add ExportableNodeTypes and StatementTypesWithBody --- src/dictionary/dictionary.ts | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/dictionary/dictionary.ts b/src/dictionary/dictionary.ts index 6bd889c..bb43924 100644 --- a/src/dictionary/dictionary.ts +++ b/src/dictionary/dictionary.ts @@ -1,4 +1,5 @@ import { Functionary, Rule, RuleType } from './index.js'; +import { NodeType } from '../types.js'; const rules: Rule[] = [ { @@ -53,5 +54,7 @@ export namespace dictionary { export const PrimitiveTypes: string[] = ['int', 'decimal', 'boolean', 'string']; export const Keywords: string[] = ['export', 'rule', 'keyword', 'import', 'operator', 'function', 'global']; export const Functionaries: Functionary[] = func; + export const ExportableNodeTypes: NodeType[] = [NodeType.Function,NodeType.Operator,NodeType.Keyword,NodeType.Rule]; + export const StatementTypesWithBody: NodeType[] = [NodeType.Operator,NodeType.Function,NodeType.Global]; } \ No newline at end of file From 
d05152f839d1798b9f4ee3e74c6771a9ef947435 Mon Sep 17 00:00:00 2001 From: efekos Date: Sat, 20 Apr 2024 22:36:47 +0300 Subject: [PATCH 19/47] Create some post-diagnostics --- src/diagnostic.ts | 232 +++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 228 insertions(+), 4 deletions(-) diff --git a/src/diagnostic.ts b/src/diagnostic.ts index a9c8141..f75671e 100644 --- a/src/diagnostic.ts +++ b/src/diagnostic.ts @@ -1,8 +1,15 @@ -import { Diagnostic, DiagnosticSeverity, DocumentDiagnosticReportKind, FullDocumentDiagnosticReport, Range } from 'lsp-types'; +import { CodeAction, CodeActionKind, Diagnostic, DiagnosticSeverity, DocumentDiagnosticReportKind, FullDocumentDiagnosticReport, Range } from 'lsp-types'; +import { ImportStatement, NodeType, ProgramStatement, RuleStatement, Statement, TokenType, isCompilerError, statementIsA } from './types.js'; +import { existsSync, readFileSync, statSync } from 'fs'; import { sysparser, syxparser } from './ast.js'; import { tokenizeSys, tokenizeSyx } from './lexer.js'; -import { isCompilerError } from './types.js'; -import { readFileSync } from 'fs'; +import { dictionary } from './dictionary/index.js'; +import { fileURLToPath } from 'url'; +import { join } from 'path'; + + +// Use with addRange to include semicolons +const semiRange: Range = { end: { line: 0, character: 1 }, start: { line: 0, character: 0 } }; /** * Creates a diagnostic report from the file path given. @@ -19,8 +26,12 @@ export function createSyntaxScriptDiagnosticReport(filePath: string, fileContent const content = fileContent ?? readFileSync(filePath).toString(); const tokens = (isSyx ? tokenizeSyx : tokenizeSys)(content); - (isSyx ? syxparser : sysparser).parseTokens(tokens, filePath); + const ast = (isSyx ? 
syxparser : sysparser).parseTokens(tokens, filePath); + items.push(...exportableCheck(ast, filePath)); + items.push(...ruleConflictCheck(ast, filePath)); + items.push(...sameRuleCheck(ast, filePath)); + items.push(...importedExistentCheck(ast, filePath)); } catch (error) { if (isCompilerError(error)) { items.push({ @@ -37,6 +48,215 @@ export function createSyntaxScriptDiagnosticReport(filePath: string, fileContent } +// Checks rule conflicts and adds warnings when there is two defined rules that conflict each other +function ruleConflictCheck(ast: ProgramStatement, filePath: string): Diagnostic[] { + const items: Diagnostic[] = []; + + ast.body.forEach(stmt => { + if (statementIsA(stmt, NodeType.Rule)) { + const dictRule = dictionary.Rules.find(r => r.name === stmt.rule); + + ast.body.filter(r => statementIsA(r, NodeType.Rule)).filter(r => r.range !== stmt.range).map(r => r as RuleStatement).forEach(otherRules => { + if (dictRule.conflicts.includes(otherRules.rule)) items.push({ + message: `Rule '${otherRules.rule}' conflicts with '${stmt.rule}', Both of them should not be defined.`, + range: subRange(otherRules.range), + source: 'syntax-script', + severity: DiagnosticSeverity.Warning, + data: [ + { + title: `Remove ${stmt.rule} definition`, + kind: CodeActionKind.QuickFix, + edit: { + changes: { + [filePath]: [ + { + range: subRange(addRange(stmt.range, semiRange)), + newText: '' + } + ] + } + } + }, + { + title: `Remove ${otherRules.rule} definition`, + kind: CodeActionKind.QuickFix, + edit: { + changes: { + [filePath]: [ + { + range: subRange(addRange(otherRules.range, semiRange)), + newText: '' + } + ] + } + } + } + ] as CodeAction[] + }); + }); + } + }); + + return items; +} + +// Checks if same rule is defined twice +function sameRuleCheck(ast: ProgramStatement, filePath: string): Diagnostic[] { + const items: Diagnostic[] = []; + + ast.body.forEach(stmt => { + if (statementIsA(stmt, NodeType.Rule)) { + ast.body.filter(r => statementIsA(r, 
NodeType.Rule)).filter(r => r.range !== stmt.range).map(r => r as RuleStatement).forEach(otherRules => { + if (otherRules.rule === stmt.rule) items.push({ + message: `Rule '${stmt.rule}' is already defined.`, + range: subRange(stmt.range), + source: 'syntax-script', + severity: DiagnosticSeverity.Error, + data: [ + { + title: 'Remove this definition', + kind: CodeActionKind.QuickFix, + edit: { + changes: { + [filePath]: [ + { + range: subRange(addRange(stmt.range, semiRange)), + newText: '' + } + ] + } + } + } + ] as CodeAction[] + }); + }); + } + }); + + return items; +} + +// Checks if an import statements refers to an empty file +function importedExistentCheck(ast: ProgramStatement, filePath: string): Diagnostic[] { + const items: Diagnostic[] = []; + + ast.body.filter(r => statementIsA(r, NodeType.Import)).map(r => r as ImportStatement).forEach(stmt => { + + const filePathButPath = fileURLToPath(filePath); + const fullPath = join(filePathButPath, '../', stmt.path); + if (!existsSync(fullPath)) items.push({ + message: `Can't find file '${fullPath}' imported from '${filePathButPath}'`, + severity: DiagnosticSeverity.Error, + range: subRange(stmt.range), + source: 'syntax-script', + data: [ + { + title: 'Remove this import statement', + kind: CodeActionKind.QuickFix, + edit: { + changes: { + [filePath]: [ + { range: subRange(addRange(stmt.range, semiRange)), newText: '' } + ] + } + } + } + ] as CodeAction[] + }); + + if (existsSync(fullPath)) { + const status = statSync(fullPath); + + if (!status.isFile()) items.push({ + message: `'${fullPath}' imported from '${filePathButPath}' doesn't seem to be a file.`, + severity: DiagnosticSeverity.Error, + range: subRange(stmt.range), + source: 'syntax-script', + data: [ + { + title: 'Remove this import statement', + kind: CodeActionKind.QuickFix, + edit: { + changes: { + [filePath]: [ + { range: subRange(addRange(stmt.range, semiRange)), newText: '' } + ] + } + } + } + ] as CodeAction[] + }); + + if 
(!fullPath.endsWith('.syx')) items.push({ + message: `'${fullPath}' imported from '${filePathButPath}' cannot be imported.`, + severity: DiagnosticSeverity.Error, + range: subRange(stmt.range), + source: 'syntax-script', + data: [ + { + title: 'Remove this import statement', + kind: CodeActionKind.QuickFix, + edit: { + changes: { + [filePath]: [ + { range: subRange(addRange(stmt.range, semiRange)), newText: '' } + ] + } + } + } + ] as CodeAction[] + }); + } + + }); + + return items; +} + +// Checks if every exported statement it actually exportable +// TODO this doesnt work for some reason +function exportableCheck(ast: ProgramStatement, filePath: string): Diagnostic[] { + + const items: Diagnostic[] = []; + + ast.body.forEach(stmt => { + + items.push({ + message: `${stmt.modifiers.map(r => r.type).join(',')}l`, + range: subRange(stmt.range), + severity: DiagnosticSeverity.Error, + source: 'syntax-script', + data: [] + }); + + if (stmt.modifiers.some(t => t.type === TokenType.ExportKeyword) && !dictionary.ExportableNodeTypes.includes(stmt.type)) items.push({ + message: 'This statement cannot be exported.', + range: subRange(stmt.range), + severity: DiagnosticSeverity.Error, + source: 'syntax-script', + data: [ + { + title: 'Remove export keyword', + kind: CodeActionKind.QuickFix, + edit: { + changes: { + [filePath]: [ + { + newText: '', range: subRange(stmt.modifiers.find(r => r.type === TokenType.ExportKeyword).range) + } + ] + } + } + } + ] as CodeAction[] + }); + + // if (dictionary.ExportableNodeTypes.includes(stmt.type)) c((stmt as GlobalStatement).body); + }); + + return items; +} + /** * Modifies the given range to be zero-based. * @param {Range} r Any range. @@ -52,4 +272,8 @@ export function subRange(r: Range): Range { const d = r.end.line; return { start: { character: a === 0 ? 0 : a - 1, line: b === 0 ? 0 : b - 1 }, end: { character: c === 0 ? 0 : c - 1, line: d === 0 ? 
0 : d - 1 } }; +} + +function addRange(r: Range, r2: Range): Range { + return { end: { line: r.end.line + r2.end.line, character: r.end.character + r.end.character }, start: { character: r.start.character, line: r.start.line } }; } \ No newline at end of file From 9beac256c9c924aedffe0153fae06c56c7e7526e Mon Sep 17 00:00:00 2001 From: efekos Date: Sun, 21 Apr 2024 15:20:32 +0300 Subject: [PATCH 20/47] contain every character and skip comment tokens in strings --- src/lexer.ts | 62 ++++++++++++++++++++++++++++++++-------------------- 1 file changed, 38 insertions(+), 24 deletions(-) diff --git a/src/lexer.ts b/src/lexer.ts index fc934c5..a609031 100644 --- a/src/lexer.ts +++ b/src/lexer.ts @@ -69,35 +69,42 @@ function tpr(start: Position, end: Position): Range { * @param {string} source Source string. * @returns A list of tokens generated from source string. * @author efekos - * @version 1.0.7 - * @since 0.0.1-alpha - * @throws LexerError if an error occurs. + * @version 1.0.8 + * @since 0.0.2-alpha */ export function tokenizeSyx(source: string): Token[] { const tokens: Token[] = []; const src = source.split(''); + let lastString = 'n'; + let inString = false; + function t(s:string){ + if(lastString==='\''&&s==='\'') {lastString='n';inString = !inString;} + if(lastString==='"'&&s==='"') {lastString='n';inString = !inString;} + if(lastString==='n') {lastString=s;inString=!inString;} + } let curPos = 1; let curLine = 1; while (src.length > 0) { - if (src[0] === '/' && src[1] === '/') { + if (src[0] === '/' && src[1] === '/'&&!inString) { while (src.length > 0 && src[0] as string !== '\n') { src.shift(); + curPos++; } } - if (src[0] === '(') tokens.push({ type: TokenType.OpenParen, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === ')') tokens.push({ type: TokenType.CloseParen, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === '{') tokens.push({ type: TokenType.OpenBrace, value: src.shift(), range: opr(curLine, 
curPos++) }); - else if (src[0] === '}') tokens.push({ type: TokenType.CloseBrace, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === '[') tokens.push({ type: TokenType.OpenSquare, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === ']') tokens.push({ type: TokenType.CloseSquare, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === ',') tokens.push({ type: TokenType.Comma, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === ';') tokens.push({ type: TokenType.Semicolon, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === '<') tokens.push({ type: TokenType.OpenDiamond, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === '>') tokens.push({ type: TokenType.CloseDiamond, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === '\'') tokens.push({ type: TokenType.SingleQuote, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === '"') tokens.push({ type: TokenType.DoubleQuote, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === '|') tokens.push({ type: TokenType.VarSeperator, value: src.shift(), range: opr(curLine, curPos++) }); + if (src[0] === '(') tokens.push({ type: inString?20:TokenType.OpenParen, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === ')') tokens.push({ type: inString?20:TokenType.CloseParen, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === '{') tokens.push({ type: inString?20:TokenType.OpenBrace, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === '}') tokens.push({ type: inString?20:TokenType.CloseBrace, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === '[') tokens.push({ type: inString?20:TokenType.OpenSquare, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === ']') tokens.push({ type: inString?20:TokenType.CloseSquare, 
value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === ',') tokens.push({ type: inString?20:TokenType.Comma, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === ';') tokens.push({ type: inString?20:TokenType.Semicolon, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === '<') tokens.push({ type: inString?20:TokenType.OpenDiamond, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === '>') tokens.push({ type: inString?20:TokenType.CloseDiamond, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === '\'') {tokens.push({ type: TokenType.SingleQuote, value: src.shift(), range: opr(curLine, curPos++) });t('\'');} + else if (src[0] === '"') {tokens.push({ type: TokenType.DoubleQuote, value: src.shift(), range: opr(curLine, curPos++) });t('"');} + else if (src[0] === '|') tokens.push({ type: inString?20:TokenType.VarSeperator, value: src.shift(), range: opr(curLine, curPos++) }); else if (src[0] === '+' && chars.includes(src[1])) { if (src[1] === 's') tokens.push({ type: TokenType.WhitespaceIdentifier, value: '+s', range: tpr(pos(curLine, curPos), pos(curLine, curPos + 2)) }); curPos += 2; @@ -121,7 +128,7 @@ export function tokenizeSyx(source: string): Token[] { const reserved = keywords[ident]; tokens.push({ type: reserved ?? TokenType.Identifier, value: ident, range: tpr(pos(curLine, startPos), pos(curLine, curPos)) }); - } else if (isSkippable(src[0])) { + } else if (isSkippable(src[0])&&!inString) { src.shift(); curPos++; if (src[0] === '\n') { curLine++; curPos = 0; }; @@ -138,20 +145,27 @@ export function tokenizeSyx(source: string): Token[] { * @param {string} source Source string. * @returns A list of tokens generated from the source file. 
* @author efekos - * @version 1.0.4 - * @since 0.0.1-alpha + * @version 1.0.5 + * @since 0.0.2-alpha */ export function tokenizeSys(source: string): Token[] { const src = source.split(''); const tokens: Token[] = []; + let lastString = 'n'; + let inString = false; + function t(s:string){ + if(lastString==='\''&&s==='\'') {lastString='n';inString = !inString;} + if(lastString==='"'&&s==='"') {lastString='n';inString = !inString;} + if(lastString==='n') {lastString=s;inString=!inString;} + } let curPos = 0; let curLine = 1; while (src.length > 0 && `${src[0]}${src[1]}${src[2]}` !== ':::') { - if (src[0] === ';') tokens.push({ type: TokenType.Semicolon, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === '\'') tokens.push({ type: TokenType.SingleQuote, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === '"') tokens.push({ type: TokenType.DoubleQuote, value: src.shift(), range: opr(curLine, curPos++) }); + if (src[0] === ';') tokens.push({ type: inString?20:TokenType.Semicolon, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === '\'') {tokens.push({ type: TokenType.SingleQuote, value: src.shift(), range: opr(curLine, curPos++) });t('\'');} + else if (src[0] === '"') {tokens.push({ type: TokenType.DoubleQuote, value: src.shift(), range: opr(curLine, curPos++) });t('"');} else if (isAlphabetic(src[0])) { let ident = ''; const startPos = curPos; @@ -162,7 +176,7 @@ export function tokenizeSys(source: string): Token[] { const reserved = keywords[ident]; tokens.push({ type: reserved ?? 
TokenType.Identifier, value: ident, range: tpr(pos(curLine, startPos), pos(curLine, curPos)) }); - } else if (isSkippable(src[0])) { + } else if (isSkippable(src[0])&&!inString) { src.shift(); curPos++; if (src[0] === '\n') curLine++; From 535053eb38cce94211869a363a4e020ec7f98774 Mon Sep 17 00:00:00 2001 From: efekos Date: Sun, 21 Apr 2024 15:20:58 +0300 Subject: [PATCH 21/47] fix exportableCheck diagnostics --- src/diagnostic.ts | 24 ++++++++++-------------- src/dictionary/dictionary.ts | 2 +- 2 files changed, 11 insertions(+), 15 deletions(-) diff --git a/src/diagnostic.ts b/src/diagnostic.ts index f75671e..7f55fc7 100644 --- a/src/diagnostic.ts +++ b/src/diagnostic.ts @@ -1,5 +1,5 @@ import { CodeAction, CodeActionKind, Diagnostic, DiagnosticSeverity, DocumentDiagnosticReportKind, FullDocumentDiagnosticReport, Range } from 'lsp-types'; -import { ImportStatement, NodeType, ProgramStatement, RuleStatement, Statement, TokenType, isCompilerError, statementIsA } from './types.js'; +import { GlobalStatement, ImportStatement, NodeType, ProgramStatement, RuleStatement, Statement, TokenType, isCompilerError, statementIsA } from './types.js'; import { existsSync, readFileSync, statSync } from 'fs'; import { sysparser, syxparser } from './ast.js'; import { tokenizeSys, tokenizeSyx } from './lexer.js'; @@ -15,6 +15,9 @@ const semiRange: Range = { end: { line: 0, character: 1 }, start: { line: 0, cha * Creates a diagnostic report from the file path given. * @param {string} filePath Path of the file to create a report. * @param {string} fileContent Content of the file if it is already fetched. + * @author efekos + * @version 1.0.1 + * @since 0.0.2-alpha * @returns A diagnostic report language servers can use. */ export function createSyntaxScriptDiagnosticReport(filePath: string, fileContent?: string): FullDocumentDiagnosticReport { @@ -28,7 +31,7 @@ export function createSyntaxScriptDiagnosticReport(filePath: string, fileContent const tokens = (isSyx ? 
tokenizeSyx : tokenizeSys)(content); const ast = (isSyx ? syxparser : sysparser).parseTokens(tokens, filePath); - items.push(...exportableCheck(ast, filePath)); + items.push(...exportableCheck(ast.body, filePath)); items.push(...ruleConflictCheck(ast, filePath)); items.push(...sameRuleCheck(ast, filePath)); items.push(...importedExistentCheck(ast, filePath)); @@ -41,6 +44,8 @@ export function createSyntaxScriptDiagnosticReport(filePath: string, fileContent source: 'syntax-script', data: error.actions }); + } else { + items.push({message:`Parser Error: ${error.message}`,range:{end:{line:0,character:1},start:{line:0,character:0}},severity:DiagnosticSeverity.Warning}); } } finally { return { items, kind: DocumentDiagnosticReportKind.Full }; @@ -214,20 +219,11 @@ function importedExistentCheck(ast: ProgramStatement, filePath: string): Diagnos } // Checks if every exported statement it actually exportable -// TODO this doesnt work for some reason -function exportableCheck(ast: ProgramStatement, filePath: string): Diagnostic[] { +function exportableCheck(statements: Statement[], filePath: string): Diagnostic[] { const items: Diagnostic[] = []; - ast.body.forEach(stmt => { - - items.push({ - message: `${stmt.modifiers.map(r => r.type).join(',')}l`, - range: subRange(stmt.range), - severity: DiagnosticSeverity.Error, - source: 'syntax-script', - data: [] - }); + statements.forEach(stmt => { if (stmt.modifiers.some(t => t.type === TokenType.ExportKeyword) && !dictionary.ExportableNodeTypes.includes(stmt.type)) items.push({ message: 'This statement cannot be exported.', @@ -251,7 +247,7 @@ function exportableCheck(ast: ProgramStatement, filePath: string): Diagnostic[] ] as CodeAction[] }); - // if (dictionary.ExportableNodeTypes.includes(stmt.type)) c((stmt as GlobalStatement).body); + if (dictionary.StatementTypesWithBody.includes(stmt.type)) items.push(...exportableCheck((stmt as GlobalStatement).body,filePath)); }); return items; diff --git a/src/dictionary/dictionary.ts 
b/src/dictionary/dictionary.ts index bb43924..720acdb 100644 --- a/src/dictionary/dictionary.ts +++ b/src/dictionary/dictionary.ts @@ -54,7 +54,7 @@ export namespace dictionary { export const PrimitiveTypes: string[] = ['int', 'decimal', 'boolean', 'string']; export const Keywords: string[] = ['export', 'rule', 'keyword', 'import', 'operator', 'function', 'global']; export const Functionaries: Functionary[] = func; - export const ExportableNodeTypes: NodeType[] = [NodeType.Function,NodeType.Operator,NodeType.Keyword,NodeType.Rule]; + export const ExportableNodeTypes: NodeType[] = [NodeType.Function,NodeType.Operator,NodeType.Keyword,NodeType.Rule,NodeType.Global]; export const StatementTypesWithBody: NodeType[] = [NodeType.Operator,NodeType.Function,NodeType.Global]; } \ No newline at end of file From 17705d1fb6d61fbd21366c3d05311a5b6152f199 Mon Sep 17 00:00:00 2001 From: efekos Date: Sun, 21 Apr 2024 16:26:20 +0300 Subject: [PATCH 22/47] a few changes in 'Compiler module should provide correct tokenizaton' --- src/test/compiler.test.ts | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/src/test/compiler.test.ts b/src/test/compiler.test.ts index 22349b5..1617683 100644 --- a/src/test/compiler.test.ts +++ b/src/test/compiler.test.ts @@ -29,23 +29,21 @@ describe('Compiler module', () => { }); it('should provide correct tokenization', () => { - const t = tokenizeSyx('class } > ) ] , compile " export function global random import imports 1 keyword { < ( [ operator * rule ; \' | +s'); + const t = tokenizeSyx('class } > ) ] , compile "" export function global random import imports 1 keyword { < ( [ operator * rule ; \'\' | +s'); const tList = [ - TokenType.ClassKeyword, TokenType.CloseBrace, TokenType.CloseDiamond, TokenType.CloseParen, TokenType.CloseSquare, TokenType.Comma, TokenType.CompileKeyword, TokenType.DoubleQuote, + TokenType.ClassKeyword, TokenType.CloseBrace, TokenType.CloseDiamond, TokenType.CloseParen, TokenType.CloseSquare, 
TokenType.Comma, TokenType.CompileKeyword, TokenType.DoubleQuote,TokenType.DoubleQuote, TokenType.ExportKeyword, TokenType.FunctionKeyword, TokenType.GlobalKeyword, TokenType.Identifier, TokenType.ImportKeyword, TokenType.ImportsKeyword, TokenType.IntNumber, TokenType.KeywordKeyword, - TokenType.OpenBrace, TokenType.OpenDiamond, TokenType.OpenParen, TokenType.OpenSquare, TokenType.OperatorKeyword, TokenType.Raw, TokenType.RuleKeyword, TokenType.Semicolon, TokenType.SingleQuote, + TokenType.OpenBrace, TokenType.OpenDiamond, TokenType.OpenParen, TokenType.OpenSquare, TokenType.OperatorKeyword, TokenType.Raw, TokenType.RuleKeyword, TokenType.Semicolon, TokenType.SingleQuote,TokenType.SingleQuote, TokenType.VarSeperator, TokenType.WhitespaceIdentifier, TokenType.EndOfFile ]; expect(t).to.be.a('array'); - expect(t).to.have.lengthOf(tList.length); expect(t.map(tt => tt.type)).to.be.deep.equal(tList); - const sys = tokenizeSys('import \' " ; :::'); - const sysList = [TokenType.ImportKeyword, TokenType.SingleQuote, TokenType.DoubleQuote, TokenType.Semicolon, TokenType.EndOfFile]; + const sys = tokenizeSys('import "" \'\' ; :::'); + const sysList = [TokenType.ImportKeyword, TokenType.DoubleQuote,TokenType.DoubleQuote, TokenType.SingleQuote,TokenType.SingleQuote, TokenType.Semicolon, TokenType.EndOfFile]; expect(sys).to.be.a('array'); - expect(sys).to.have.lengthOf(sysList.length); expect(sys.map(tt => tt.type)).to.be.deep.equal(sysList); }); From 7fd29dfd18769f989ceb9e5d66b57cd208ab5457 Mon Sep 17 00:00:00 2001 From: efekos Date: Sun, 21 Apr 2024 16:26:37 +0300 Subject: [PATCH 23/47] fix string quotes breaking the whole code (not really, single quotes are broke) --- src/lexer.ts | 58 ++++++++++++++++++++++++++-------------------------- 1 file changed, 29 insertions(+), 29 deletions(-) diff --git a/src/lexer.ts b/src/lexer.ts index a609031..8b3b485 100644 --- a/src/lexer.ts +++ b/src/lexer.ts @@ -53,7 +53,7 @@ function isInt(src: string) { } function opr(line: number, 
character: number): Range { - return { end: { line, character:character+1 }, start: { line, character:character } }; + return { end: { line, character: character + 1 }, start: { line, character: character } }; } function pos(line: number, character: number): Position { @@ -69,7 +69,7 @@ function tpr(start: Position, end: Position): Range { * @param {string} source Source string. * @returns A list of tokens generated from source string. * @author efekos - * @version 1.0.8 + * @version 1.0.9 * @since 0.0.2-alpha */ export function tokenizeSyx(source: string): Token[] { @@ -77,34 +77,34 @@ export function tokenizeSyx(source: string): Token[] { const src = source.split(''); let lastString = 'n'; let inString = false; - function t(s:string){ - if(lastString==='\''&&s==='\'') {lastString='n';inString = !inString;} - if(lastString==='"'&&s==='"') {lastString='n';inString = !inString;} - if(lastString==='n') {lastString=s;inString=!inString;} + //TODO single quotes aren't working + function t(s: string) { + if (lastString === 'n') { lastString = s; inString = true; } + else if (lastString === '\'' && s === '\'' || (lastString === '"' && s === '"')) { lastString = 'n'; inString = false; }; } let curPos = 1; let curLine = 1; while (src.length > 0) { - if (src[0] === '/' && src[1] === '/'&&!inString) { + if (src[0] === '/' && src[1] === '/' && !inString) { while (src.length > 0 && src[0] as string !== '\n') { src.shift(); curPos++; } } - if (src[0] === '(') tokens.push({ type: inString?20:TokenType.OpenParen, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === ')') tokens.push({ type: inString?20:TokenType.CloseParen, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === '{') tokens.push({ type: inString?20:TokenType.OpenBrace, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === '}') tokens.push({ type: inString?20:TokenType.CloseBrace, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] 
=== '[') tokens.push({ type: inString?20:TokenType.OpenSquare, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === ']') tokens.push({ type: inString?20:TokenType.CloseSquare, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === ',') tokens.push({ type: inString?20:TokenType.Comma, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === ';') tokens.push({ type: inString?20:TokenType.Semicolon, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === '<') tokens.push({ type: inString?20:TokenType.OpenDiamond, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === '>') tokens.push({ type: inString?20:TokenType.CloseDiamond, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === '\'') {tokens.push({ type: TokenType.SingleQuote, value: src.shift(), range: opr(curLine, curPos++) });t('\'');} - else if (src[0] === '"') {tokens.push({ type: TokenType.DoubleQuote, value: src.shift(), range: opr(curLine, curPos++) });t('"');} - else if (src[0] === '|') tokens.push({ type: inString?20:TokenType.VarSeperator, value: src.shift(), range: opr(curLine, curPos++) }); + if (src[0] === '(') tokens.push({ type: TokenType.OpenParen, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === ')') tokens.push({ type: TokenType.CloseParen, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === '{') tokens.push({ type: TokenType.OpenBrace, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === '}') tokens.push({ type: TokenType.CloseBrace, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === '[') tokens.push({ type: TokenType.OpenSquare, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === ']') tokens.push({ type: TokenType.CloseSquare, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === ',') tokens.push({ type: TokenType.Comma, value: 
src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === ';') tokens.push({ type: TokenType.Semicolon, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === '<') tokens.push({ type: TokenType.OpenDiamond, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === '>') tokens.push({ type: TokenType.CloseDiamond, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === '\'') { tokens.push({ type: TokenType.SingleQuote, value: src.shift(), range: opr(curLine, curPos++) }); t('\''); } + else if (src[0] === '"') { tokens.push({ type: TokenType.DoubleQuote, value: src.shift(), range: opr(curLine, curPos++) }); t('"'); } + else if (src[0] === '|') tokens.push({ type: TokenType.VarSeperator, value: src.shift(), range: opr(curLine, curPos++) }); else if (src[0] === '+' && chars.includes(src[1])) { if (src[1] === 's') tokens.push({ type: TokenType.WhitespaceIdentifier, value: '+s', range: tpr(pos(curLine, curPos), pos(curLine, curPos + 2)) }); curPos += 2; @@ -128,7 +128,7 @@ export function tokenizeSyx(source: string): Token[] { const reserved = keywords[ident]; tokens.push({ type: reserved ?? 
TokenType.Identifier, value: ident, range: tpr(pos(curLine, startPos), pos(curLine, curPos)) }); - } else if (isSkippable(src[0])&&!inString) { + } else if (isSkippable(src[0]) && !inString) { src.shift(); curPos++; if (src[0] === '\n') { curLine++; curPos = 0; }; @@ -153,19 +153,19 @@ export function tokenizeSys(source: string): Token[] { const tokens: Token[] = []; let lastString = 'n'; let inString = false; - function t(s:string){ - if(lastString==='\''&&s==='\'') {lastString='n';inString = !inString;} - if(lastString==='"'&&s==='"') {lastString='n';inString = !inString;} - if(lastString==='n') {lastString=s;inString=!inString;} + function t(s: string) { + if (lastString === '\'' && s === '\'') { lastString = 'n'; inString = !inString; } + if (lastString === '"' && s === '"') { lastString = 'n'; inString = !inString; } + if (lastString === 'n') { lastString = s; inString = !inString; } } let curPos = 0; let curLine = 1; while (src.length > 0 && `${src[0]}${src[1]}${src[2]}` !== ':::') { - if (src[0] === ';') tokens.push({ type: inString?20:TokenType.Semicolon, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === '\'') {tokens.push({ type: TokenType.SingleQuote, value: src.shift(), range: opr(curLine, curPos++) });t('\'');} - else if (src[0] === '"') {tokens.push({ type: TokenType.DoubleQuote, value: src.shift(), range: opr(curLine, curPos++) });t('"');} + if (src[0] === ';') tokens.push({ type: TokenType.Semicolon, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === '\'') { tokens.push({ type: TokenType.SingleQuote, value: src.shift(), range: opr(curLine, curPos++) }); t('\''); } + else if (src[0] === '"') { tokens.push({ type: TokenType.DoubleQuote, value: src.shift(), range: opr(curLine, curPos++) }); t('"'); } else if (isAlphabetic(src[0])) { let ident = ''; const startPos = curPos; @@ -176,7 +176,7 @@ export function tokenizeSys(source: string): Token[] { const reserved = keywords[ident]; tokens.push({ type: 
reserved ?? TokenType.Identifier, value: ident, range: tpr(pos(curLine, startPos), pos(curLine, curPos)) }); - } else if (isSkippable(src[0])&&!inString) { + } else if (isSkippable(src[0]) && !inString) { src.shift(); curPos++; if (src[0] === '\n') curLine++; From c97603514d9c3acdceee8c1bb2e44db8f4551d84 Mon Sep 17 00:00:00 2001 From: efekos Date: Sun, 21 Apr 2024 16:31:21 +0300 Subject: [PATCH 24/47] update all packages to latest version --- package-lock.json | 96 +++++++++++++++++++++++------------------------ package.json | 8 ++-- 2 files changed, 52 insertions(+), 52 deletions(-) diff --git a/package-lock.json b/package-lock.json index 4f8b653..6b65755 100644 --- a/package-lock.json +++ b/package-lock.json @@ -12,12 +12,12 @@ "js-levenshtein": "^1.1.6" }, "devDependencies": { - "@efekos/es-test": "^1.0.2", + "@efekos/es-test": "^1.0.3", "@types/chai": "^4.3.14", "@types/js-levenshtein": "^1.1.3", "@types/node": "^20.12.7", - "@typescript-eslint/eslint-plugin": "^7.6.0", - "@typescript-eslint/parser": "^7.6.0", + "@typescript-eslint/eslint-plugin": "^7.7.0", + "@typescript-eslint/parser": "^7.7.0", "chai": "^5.1.0", "eslint": "^8.57.0", "eslint-plugin-jsdoc": "^48.2.3", @@ -34,9 +34,9 @@ } }, "node_modules/@efekos/es-test": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/@efekos/es-test/-/es-test-1.0.2.tgz", - "integrity": "sha512-0CmdQSyifFyuG8AWpO6nDnkvYXH2Q9kfCbXUl5RUZTcLWS7x0rj7cinG8cFEmneGxl3MGdvA4gzCF6goPkmVGA==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@efekos/es-test/-/es-test-1.0.3.tgz", + "integrity": "sha512-Fp4LTWCbn4izb7ySbQA1jX0WIzuWPV4b6ioh5MH7ehhOI661tKL/GzvHmUEMawhldE0tpTGR2rUFkHhrhtSTZQ==", "dev": true, "dependencies": { "chalk": "^5.3.0", @@ -274,16 +274,16 @@ "dev": true }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "7.6.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.6.0.tgz", - "integrity": 
"sha512-gKmTNwZnblUdnTIJu3e9kmeRRzV2j1a/LUO27KNNAnIC5zjy1aSvXSRp4rVNlmAoHlQ7HzX42NbKpcSr4jF80A==", + "version": "7.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.7.0.tgz", + "integrity": "sha512-GJWR0YnfrKnsRoluVO3PRb9r5aMZriiMMM/RHj5nnTrBy1/wIgk76XCtCKcnXGjpZQJQRFtGV9/0JJ6n30uwpQ==", "dev": true, "dependencies": { "@eslint-community/regexpp": "^4.10.0", - "@typescript-eslint/scope-manager": "7.6.0", - "@typescript-eslint/type-utils": "7.6.0", - "@typescript-eslint/utils": "7.6.0", - "@typescript-eslint/visitor-keys": "7.6.0", + "@typescript-eslint/scope-manager": "7.7.0", + "@typescript-eslint/type-utils": "7.7.0", + "@typescript-eslint/utils": "7.7.0", + "@typescript-eslint/visitor-keys": "7.7.0", "debug": "^4.3.4", "graphemer": "^1.4.0", "ignore": "^5.3.1", @@ -309,15 +309,15 @@ } }, "node_modules/@typescript-eslint/parser": { - "version": "7.6.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.6.0.tgz", - "integrity": "sha512-usPMPHcwX3ZoPWnBnhhorc14NJw9J4HpSXQX4urF2TPKG0au0XhJoZyX62fmvdHONUkmyUe74Hzm1//XA+BoYg==", + "version": "7.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.7.0.tgz", + "integrity": "sha512-fNcDm3wSwVM8QYL4HKVBggdIPAy9Q41vcvC/GtDobw3c4ndVT3K6cqudUmjHPw8EAp4ufax0o58/xvWaP2FmTg==", "dev": true, "dependencies": { - "@typescript-eslint/scope-manager": "7.6.0", - "@typescript-eslint/types": "7.6.0", - "@typescript-eslint/typescript-estree": "7.6.0", - "@typescript-eslint/visitor-keys": "7.6.0", + "@typescript-eslint/scope-manager": "7.7.0", + "@typescript-eslint/types": "7.7.0", + "@typescript-eslint/typescript-estree": "7.7.0", + "@typescript-eslint/visitor-keys": "7.7.0", "debug": "^4.3.4" }, "engines": { @@ -337,13 +337,13 @@ } }, "node_modules/@typescript-eslint/scope-manager": { - "version": "7.6.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.6.0.tgz", - "integrity": 
"sha512-ngttyfExA5PsHSx0rdFgnADMYQi+Zkeiv4/ZxGYUWd0nLs63Ha0ksmp8VMxAIC0wtCFxMos7Lt3PszJssG/E6w==", + "version": "7.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.7.0.tgz", + "integrity": "sha512-/8INDn0YLInbe9Wt7dK4cXLDYp0fNHP5xKLHvZl3mOT5X17rK/YShXaiNmorl+/U4VKCVIjJnx4Ri5b0y+HClw==", "dev": true, "dependencies": { - "@typescript-eslint/types": "7.6.0", - "@typescript-eslint/visitor-keys": "7.6.0" + "@typescript-eslint/types": "7.7.0", + "@typescript-eslint/visitor-keys": "7.7.0" }, "engines": { "node": "^18.18.0 || >=20.0.0" @@ -354,13 +354,13 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "7.6.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.6.0.tgz", - "integrity": "sha512-NxAfqAPNLG6LTmy7uZgpK8KcuiS2NZD/HlThPXQRGwz6u7MDBWRVliEEl1Gj6U7++kVJTpehkhZzCJLMK66Scw==", + "version": "7.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.7.0.tgz", + "integrity": "sha512-bOp3ejoRYrhAlnT/bozNQi3nio9tIgv3U5C0mVDdZC7cpcQEDZXvq8inrHYghLVwuNABRqrMW5tzAv88Vy77Sg==", "dev": true, "dependencies": { - "@typescript-eslint/typescript-estree": "7.6.0", - "@typescript-eslint/utils": "7.6.0", + "@typescript-eslint/typescript-estree": "7.7.0", + "@typescript-eslint/utils": "7.7.0", "debug": "^4.3.4", "ts-api-utils": "^1.3.0" }, @@ -381,9 +381,9 @@ } }, "node_modules/@typescript-eslint/types": { - "version": "7.6.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.6.0.tgz", - "integrity": "sha512-h02rYQn8J+MureCvHVVzhl69/GAfQGPQZmOMjG1KfCl7o3HtMSlPaPUAPu6lLctXI5ySRGIYk94clD/AUMCUgQ==", + "version": "7.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.7.0.tgz", + "integrity": "sha512-G01YPZ1Bd2hn+KPpIbrAhEWOn5lQBrjxkzHkWvP6NucMXFtfXoevK82hzQdpfuQYuhkvFDeQYbzXCjR1z9Z03w==", "dev": true, "engines": { "node": "^18.18.0 || >=20.0.0" @@ -394,13 +394,13 @@ } }, 
"node_modules/@typescript-eslint/typescript-estree": { - "version": "7.6.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.6.0.tgz", - "integrity": "sha512-+7Y/GP9VuYibecrCQWSKgl3GvUM5cILRttpWtnAu8GNL9j11e4tbuGZmZjJ8ejnKYyBRb2ddGQ3rEFCq3QjMJw==", + "version": "7.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.7.0.tgz", + "integrity": "sha512-8p71HQPE6CbxIBy2kWHqM1KGrC07pk6RJn40n0DSc6bMOBBREZxSDJ+BmRzc8B5OdaMh1ty3mkuWRg4sCFiDQQ==", "dev": true, "dependencies": { - "@typescript-eslint/types": "7.6.0", - "@typescript-eslint/visitor-keys": "7.6.0", + "@typescript-eslint/types": "7.7.0", + "@typescript-eslint/visitor-keys": "7.7.0", "debug": "^4.3.4", "globby": "^11.1.0", "is-glob": "^4.0.3", @@ -422,17 +422,17 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "7.6.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.6.0.tgz", - "integrity": "sha512-x54gaSsRRI+Nwz59TXpCsr6harB98qjXYzsRxGqvA5Ue3kQH+FxS7FYU81g/omn22ML2pZJkisy6Q+ElK8pBCA==", + "version": "7.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.7.0.tgz", + "integrity": "sha512-LKGAXMPQs8U/zMRFXDZOzmMKgFv3COlxUQ+2NMPhbqgVm6R1w+nU1i4836Pmxu9jZAuIeyySNrN/6Rc657ggig==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "@types/json-schema": "^7.0.15", "@types/semver": "^7.5.8", - "@typescript-eslint/scope-manager": "7.6.0", - "@typescript-eslint/types": "7.6.0", - "@typescript-eslint/typescript-estree": "7.6.0", + "@typescript-eslint/scope-manager": "7.7.0", + "@typescript-eslint/types": "7.7.0", + "@typescript-eslint/typescript-estree": "7.7.0", "semver": "^7.6.0" }, "engines": { @@ -447,12 +447,12 @@ } }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "7.6.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.6.0.tgz", - "integrity": 
"sha512-4eLB7t+LlNUmXzfOu1VAIAdkjbu5xNSerURS9X/S5TUKWFRpXRQZbmtPqgKmYx8bj3J0irtQXSiWAOY82v+cgw==", + "version": "7.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.7.0.tgz", + "integrity": "sha512-h0WHOj8MhdhY8YWkzIF30R379y0NqyOHExI9N9KCzvmu05EgG4FumeYa3ccfKUSphyWkWQE1ybVrgz/Pbam6YA==", "dev": true, "dependencies": { - "@typescript-eslint/types": "7.6.0", + "@typescript-eslint/types": "7.7.0", "eslint-visitor-keys": "^3.4.3" }, "engines": { diff --git a/package.json b/package.json index b3caa58..ee4489c 100644 --- a/package.json +++ b/package.json @@ -39,12 +39,12 @@ }, "homepage": "https://github.com/syntaxs/compiler#readme", "devDependencies": { - "@efekos/es-test": "^1.0.2", + "@efekos/es-test": "^1.0.3", "@types/chai": "^4.3.14", "@types/js-levenshtein": "^1.1.3", "@types/node": "^20.12.7", - "@typescript-eslint/eslint-plugin": "^7.6.0", - "@typescript-eslint/parser": "^7.6.0", + "@typescript-eslint/eslint-plugin": "^7.7.0", + "@typescript-eslint/parser": "^7.7.0", "chai": "^5.1.0", "eslint": "^8.57.0", "eslint-plugin-jsdoc": "^48.2.3", @@ -53,4 +53,4 @@ "dependencies": { "js-levenshtein": "^1.1.6" } -} +} \ No newline at end of file From 5245f8d5fd13c3a1038c7f6881bc02e0469b0b15 Mon Sep 17 00:00:00 2001 From: efekos Date: Sun, 21 Apr 2024 17:13:04 +0300 Subject: [PATCH 25/47] update es-test module --- package-lock.json | 8 ++++---- package.json | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/package-lock.json b/package-lock.json index 6b65755..9e681bf 100644 --- a/package-lock.json +++ b/package-lock.json @@ -12,7 +12,7 @@ "js-levenshtein": "^1.1.6" }, "devDependencies": { - "@efekos/es-test": "^1.0.3", + "@efekos/es-test": "^1.0.4", "@types/chai": "^4.3.14", "@types/js-levenshtein": "^1.1.3", "@types/node": "^20.12.7", @@ -34,9 +34,9 @@ } }, "node_modules/@efekos/es-test": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/@efekos/es-test/-/es-test-1.0.3.tgz", - "integrity": 
"sha512-Fp4LTWCbn4izb7ySbQA1jX0WIzuWPV4b6ioh5MH7ehhOI661tKL/GzvHmUEMawhldE0tpTGR2rUFkHhrhtSTZQ==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@efekos/es-test/-/es-test-1.0.4.tgz", + "integrity": "sha512-fdvqlwC7QpmMboB7it3jH7yHGZpykFbErPsWiKXi71qh0jbLtVy+8gQfuPguMHTsXz2fX6jOXI9jZoSgAKU7uA==", "dev": true, "dependencies": { "chalk": "^5.3.0", diff --git a/package.json b/package.json index ee4489c..9795943 100644 --- a/package.json +++ b/package.json @@ -39,7 +39,7 @@ }, "homepage": "https://github.com/syntaxs/compiler#readme", "devDependencies": { - "@efekos/es-test": "^1.0.3", + "@efekos/es-test": "^1.0.4", "@types/chai": "^4.3.14", "@types/js-levenshtein": "^1.1.3", "@types/node": "^20.12.7", From 0dc90e4a2e16d7de58f706e2d415e1fbd1791136 Mon Sep 17 00:00:00 2001 From: efekos Date: Sun, 21 Apr 2024 17:15:28 +0300 Subject: [PATCH 26/47] update 'Compiler module should provide correct ranges' test with 3 different cases --- src/test/compiler.test.ts | 71 +++++++++++++++++++++++++++++++-------- 1 file changed, 57 insertions(+), 14 deletions(-) diff --git a/src/test/compiler.test.ts b/src/test/compiler.test.ts index 1617683..ef2ecdb 100644 --- a/src/test/compiler.test.ts +++ b/src/test/compiler.test.ts @@ -1,6 +1,6 @@ import { CompileStatement, FunctionStatement, GlobalStatement, ImportStatement, ImportsStatement, KeywordStatement, NodeType, ProgramStatement, RuleStatement, TokenType, isCompilerError } from '../types.js'; import { Diagnostic, DiagnosticSeverity, DocumentDiagnosticReportKind, Range } from 'lsp-types'; -import { describe, it, onError } from '@efekos/es-test/bin/testRunner.js'; +import { describe, inst, it, onError } from '@efekos/es-test/bin/testRunner.js'; import { tokenizeSys, tokenizeSyx } from '../lexer.js'; import { createSyntaxScriptDiagnosticReport } from '../diagnostic.js'; import { expect } from 'chai'; @@ -19,21 +19,64 @@ describe('Compiler module', () => { it('should provide correct ranges', () => { - const tokens = 
tokenizeSyx('keyword hello;'); + inst(() => { - rangeExpectations(tokens[0].range); - expect(tokens[0].range).to.deep.equal({ end: { line: 1, character: 8 }, start: { line: 1, character: 1 } }); - expect(tokens[1].range).to.deep.equal({ end: { line: 1, character: 14 }, start: { line: 1, character: 9 } }); - expect(tokens[2].range).to.deep.equal({ end: { line: 1, character: 15 }, start: { line: 1, character: 14 } }); + const tokens = tokenizeSyx('keyword hello;'); - }); + tokens.map(r=>r.range).forEach(r=>rangeExpectations(r)); + expect(tokens[0].range).to.deep.equal({ end: { line: 1, character: 8 }, start: { line: 1, character: 1 } }); + expect(tokens[1].range).to.deep.equal({ end: { line: 1, character: 14 }, start: { line: 1, character: 9 } }); + expect(tokens[2].range).to.deep.equal({ end: { line: 1, character: 15 }, start: { line: 1, character: 14 } }); + + }); + + inst(() => { + + const tokens = tokenizeSyx('rule "imports-keyword": cray;'); + + expect(tokens).to.be.a('array').to.have.lengthOf(10); + tokens.map(r=>r.range).forEach(r=>rangeExpectations(r)); + expect(tokens[0].range).to.be.deep.equal({ end: { line: 1, character: 5 }, start: { line: 1, character: 1 } }); + expect(tokens[1].range).to.be.deep.equal({ end: { line: 1, character: 7 }, start: { line: 1, character: 6 } }); + expect(tokens[2].range).to.be.deep.equal({ end: { line: 1, character: 14 }, start: { line: 1, character: 7 } }); + expect(tokens[3].range).to.be.deep.equal({ end: { line: 1, character: 15 }, start: { line: 1, character: 14 } }); + expect(tokens[4].range).to.be.deep.equal({ end: { line: 1, character: 22 }, start: { line: 1, character: 15 } }); + expect(tokens[5].range).to.be.deep.equal({ end: { line: 1, character: 23 }, start: { line: 1, character: 22 } }); + expect(tokens[6].range).to.be.deep.equal({ end: { line: 1, character: 24 }, start: { line: 1, character: 23 } }); + expect(tokens[7].range).to.be.deep.equal({ end: { line: 1, character: 29 }, start: { line: 1, character: 25 } }); 
+ expect(tokens[8].range).to.be.deep.equal({ end: { line: 1, character: 30 }, start: { line: 1, character: 29 } }); + + }); + + inst(()=>{ + const tokens = tokenizeSyx('rule "return-function-value-enabled":true;'); + + expect(tokens).to.be.a('array').to.have.lengthOf(14); + tokens.map(r=>r.range).forEach(r=>rangeExpectations(r)); + expect(tokens[0].range).to.be.deep.equal({ end: { line: 1, character: 5 }, start: { line: 1, character: 1 } }); + expect(tokens[1].range).to.be.deep.equal({ end: { line: 1, character: 7 }, start: { line: 1, character: 6 } }); + expect(tokens[2].range).to.be.deep.equal({ end: { line: 1, character: 13 }, start: { line: 1, character: 7 } }); + expect(tokens[3].range).to.be.deep.equal({ end: { line: 1, character: 14 }, start: { line: 1, character: 13 } }); + expect(tokens[4].range).to.be.deep.equal({ end: { line: 1, character: 22 }, start: { line: 1, character: 14 } }); + expect(tokens[5].range).to.be.deep.equal({ end: { line: 1, character: 23 }, start: { line: 1, character: 22 } }); + expect(tokens[6].range).to.be.deep.equal({ end: { line: 1, character: 28 }, start: { line: 1, character: 23 } }); + expect(tokens[7].range).to.be.deep.equal({ end: { line: 1, character: 29 }, start: { line: 1, character: 28 } }); + expect(tokens[8].range).to.be.deep.equal({ end: { line: 1, character: 36 }, start: { line: 1, character: 29 } }); + expect(tokens[9].range).to.be.deep.equal({ end: { line: 1, character: 37 }, start: { line: 1, character: 36 } }); + expect(tokens[10].range).to.be.deep.equal({ end: { line: 1, character: 38 }, start: { line: 1, character: 37 } }); + expect(tokens[11].range).to.be.deep.equal({ end: { line: 1, character: 42 }, start: { line: 1, character: 38 } }); + expect(tokens[12].range).to.be.deep.equal({ end: { line: 1, character: 43 }, start: { line: 1, character: 42 } }); + + }); + + }, true); it('should provide correct tokenization', () => { const t = tokenizeSyx('class } > ) ] , compile "" export function global random import 
imports 1 keyword { < ( [ operator * rule ; \'\' | +s'); const tList = [ - TokenType.ClassKeyword, TokenType.CloseBrace, TokenType.CloseDiamond, TokenType.CloseParen, TokenType.CloseSquare, TokenType.Comma, TokenType.CompileKeyword, TokenType.DoubleQuote,TokenType.DoubleQuote, + TokenType.ClassKeyword, TokenType.CloseBrace, TokenType.CloseDiamond, TokenType.CloseParen, TokenType.CloseSquare, TokenType.Comma, TokenType.CompileKeyword, TokenType.DoubleQuote, TokenType.DoubleQuote, TokenType.ExportKeyword, TokenType.FunctionKeyword, TokenType.GlobalKeyword, TokenType.Identifier, TokenType.ImportKeyword, TokenType.ImportsKeyword, TokenType.IntNumber, TokenType.KeywordKeyword, - TokenType.OpenBrace, TokenType.OpenDiamond, TokenType.OpenParen, TokenType.OpenSquare, TokenType.OperatorKeyword, TokenType.Raw, TokenType.RuleKeyword, TokenType.Semicolon, TokenType.SingleQuote,TokenType.SingleQuote, + TokenType.OpenBrace, TokenType.OpenDiamond, TokenType.OpenParen, TokenType.OpenSquare, TokenType.OperatorKeyword, TokenType.Raw, TokenType.RuleKeyword, TokenType.Semicolon, TokenType.SingleQuote, TokenType.SingleQuote, TokenType.VarSeperator, TokenType.WhitespaceIdentifier, TokenType.EndOfFile ]; @@ -41,7 +84,7 @@ describe('Compiler module', () => { expect(t.map(tt => tt.type)).to.be.deep.equal(tList); const sys = tokenizeSys('import "" \'\' ; :::'); - const sysList = [TokenType.ImportKeyword, TokenType.DoubleQuote,TokenType.DoubleQuote, TokenType.SingleQuote,TokenType.SingleQuote, TokenType.Semicolon, TokenType.EndOfFile]; + const sysList = [TokenType.ImportKeyword, TokenType.DoubleQuote, TokenType.DoubleQuote, TokenType.SingleQuote, TokenType.SingleQuote, TokenType.Semicolon, TokenType.EndOfFile]; expect(sys).to.be.a('array'); expect(sys.map(tt => tt.type)).to.be.deep.equal(sysList); @@ -139,7 +182,7 @@ describe('Compiler module', () => { const tokens = tokenizeSyx('export keyword ruleish;'); const ast = syxparser.parseTokens(tokens, 'TEST_FILE'); - const stmt: KeywordStatement 
= { type: NodeType.Keyword, modifiers: [{range:{end:{line:1,character:7},start:{line:1,character:1}},type:TokenType.ExportKeyword,value:'export'}], range: { end: { line: 1, character: 23 }, start: { line: 1, character: 1 } }, word: 'ruleish' }; + const stmt: KeywordStatement = { type: NodeType.Keyword, modifiers: [{ range: { end: { line: 1, character: 7 }, start: { line: 1, character: 1 } }, type: TokenType.ExportKeyword, value: 'export' }], range: { end: { line: 1, character: 23 }, start: { line: 1, character: 1 } }, word: 'ruleish' }; astTypeExpectations(ast); expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); @@ -148,16 +191,16 @@ describe('Compiler module', () => { }); - it('should provide correct diagnostic reports',()=>{ + it('should provide correct diagnostic reports', () => { - const report = createSyntaxScriptDiagnosticReport('TEST_FILE.syx','keyword ruleis'); + const report = createSyntaxScriptDiagnosticReport('TEST_FILE.syx', 'keyword ruleis'); expect(report).to.be.a('object'); expect(report).to.have.property('items').to.be.a('array').to.have.lengthOf(1); expect(report).to.have.property('kind').to.be.a('string').to.be.equal(DocumentDiagnosticReportKind.Full); const diag = report.items[0]; - const item: Diagnostic = {message:'Expected \';\' after statement, found \'EOF\'.',range:{start:{line:0,character:0},end:{line:0,character:0}},severity:DiagnosticSeverity.Error,source:'syntax-script',data:[]}; + const item: Diagnostic = { message: 'Expected \';\' after statement, found \'EOF\'.', range: { start: { line: 0, character: 0 }, end: { line: 0, character: 0 } }, severity: DiagnosticSeverity.Error, source: 'syntax-script', data: [] }; expect(diag).to.have.property('message').to.be.a('string'); expect(diag).to.have.property('range'); From f03c6ffa30d8cf0550a6a69473679bdae5fc4614 Mon Sep 17 00:00:00 2001 From: efekos Date: Sun, 21 Apr 2024 18:30:46 +0300 Subject: [PATCH 27/47] format code --- src/compiler.ts | 2 +- src/diagnostic.ts | 4 ++-- 
src/dictionary/dictionary.ts | 14 +++++++------- src/index.ts | 6 +++--- src/test/compiler.test.ts | 10 +++++----- 5 files changed, 18 insertions(+), 18 deletions(-) diff --git a/src/compiler.ts b/src/compiler.ts index 4d4dda6..76486a5 100644 --- a/src/compiler.ts +++ b/src/compiler.ts @@ -160,7 +160,7 @@ export class SyntaxScriptCompiler { out.push(statementExport); } else if (statementIsA(statement, NodeType.Keyword)) { out.push({ type: ExportType.Keyword, word: statement.word }); - } else if (statementIsA(statement,NodeType.Global)) { + } else if (statementIsA(statement, NodeType.Global)) { //TODO } else throw new CompilerError(statement.range, `Unexpected \'${statement.type}\' statement after export statement.`, file); diff --git a/src/diagnostic.ts b/src/diagnostic.ts index 7f55fc7..313493a 100644 --- a/src/diagnostic.ts +++ b/src/diagnostic.ts @@ -45,7 +45,7 @@ export function createSyntaxScriptDiagnosticReport(filePath: string, fileContent data: error.actions }); } else { - items.push({message:`Parser Error: ${error.message}`,range:{end:{line:0,character:1},start:{line:0,character:0}},severity:DiagnosticSeverity.Warning}); + items.push({ message: `Parser Error: ${error.message}`, range: { end: { line: 0, character: 1 }, start: { line: 0, character: 0 } }, severity: DiagnosticSeverity.Warning }); } } finally { return { items, kind: DocumentDiagnosticReportKind.Full }; @@ -247,7 +247,7 @@ function exportableCheck(statements: Statement[], filePath: string): Diagnostic[ ] as CodeAction[] }); - if (dictionary.StatementTypesWithBody.includes(stmt.type)) items.push(...exportableCheck((stmt as GlobalStatement).body,filePath)); + if (dictionary.StatementTypesWithBody.includes(stmt.type)) items.push(...exportableCheck((stmt as GlobalStatement).body, filePath)); }); return items; diff --git a/src/dictionary/dictionary.ts b/src/dictionary/dictionary.ts index 720acdb..f9dd2a6 100644 --- a/src/dictionary/dictionary.ts +++ b/src/dictionary/dictionary.ts @@ -6,31 +6,31 @@ 
const rules: Rule[] = [ name: 'imports-keyword', type: 'keyword', default: 'import', - conflicts:[] + conflicts: [] }, { name: 'function-value-return-enabled', type: 'boolean', default: false, - conflicts:[] + conflicts: [] }, { name: 'function-value-return-keyword', type: 'keyword', default: 'return', - conflicts:[] + conflicts: [] }, { name: 'enforce-single-string-quotes', type: 'boolean', default: false, - conflicts:['enforge-double-string-quotes'] + conflicts: ['enforge-double-string-quotes'] }, { name: 'enforce-double-string-quotes', type: 'boolean', default: false, - conflicts:['enforce-single-string-quotes'] + conflicts: ['enforce-single-string-quotes'] } ]; @@ -54,7 +54,7 @@ export namespace dictionary { export const PrimitiveTypes: string[] = ['int', 'decimal', 'boolean', 'string']; export const Keywords: string[] = ['export', 'rule', 'keyword', 'import', 'operator', 'function', 'global']; export const Functionaries: Functionary[] = func; - export const ExportableNodeTypes: NodeType[] = [NodeType.Function,NodeType.Operator,NodeType.Keyword,NodeType.Rule,NodeType.Global]; - export const StatementTypesWithBody: NodeType[] = [NodeType.Operator,NodeType.Function,NodeType.Global]; + export const ExportableNodeTypes: NodeType[] = [NodeType.Function, NodeType.Operator, NodeType.Keyword, NodeType.Rule, NodeType.Global]; + export const StatementTypesWithBody: NodeType[] = [NodeType.Operator, NodeType.Function, NodeType.Global]; } \ No newline at end of file diff --git a/src/index.ts b/src/index.ts index cdffeba..7eaaa68 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,7 +1,7 @@ -import { AnyExportable, ExportType, Exported, ExportedFunction, ExportedKeyword, ExportedOperator, OneParameterMethod, ReturnerMethod, SyntaxScriptCompiler, escapeRegex } from './compiler.js'; +import { AnyExportable, ExportType, Exported, ExportedFunction, ExportedKeyword, ExportedOperator, OneParameterMethod, ReturnerMethod, SyntaxScriptCompiler, escapeRegex } from './compiler.js'; 
import { BaseRule, BooleanRule, Functionary, FunctionaryValueType, Rule, RuleType, StringRule, dictionary } from './dictionary/index.js'; -import { BraceExpression, CompileStatement, CompilerError, Expression, FunctionStatement, GlobalStatement, ImportStatement, ImportsStatement, KeywordStatement, Node, NodeType, OperatorStatement, ParenExpression, PrimitiveTypeExpression, ProgramStatement, RuleStatement, SquareExpression,Statement,StringExpression,SyxConfig,SyxConfigCompile,Token,TokenType,VariableExpression,WhitespaceIdentifierExpression,isCompilerError,statementIsA } from './types.js'; -import { createSyntaxScriptDiagnosticReport,subRange } from './diagnostic.js'; +import { BraceExpression, CompileStatement, CompilerError, Expression, FunctionStatement, GlobalStatement, ImportStatement, ImportsStatement, KeywordStatement, Node, NodeType, OperatorStatement, ParenExpression, PrimitiveTypeExpression, ProgramStatement, RuleStatement, SquareExpression, Statement, StringExpression, SyxConfig, SyxConfigCompile, Token, TokenType, VariableExpression, WhitespaceIdentifierExpression, isCompilerError, statementIsA } from './types.js'; +import { createSyntaxScriptDiagnosticReport, subRange } from './diagnostic.js'; import { sysparser, syxparser } from './ast.js'; import { tokenizeSys, tokenizeSyx } from './lexer.js'; diff --git a/src/test/compiler.test.ts b/src/test/compiler.test.ts index ef2ecdb..e459db9 100644 --- a/src/test/compiler.test.ts +++ b/src/test/compiler.test.ts @@ -1,6 +1,6 @@ import { CompileStatement, FunctionStatement, GlobalStatement, ImportStatement, ImportsStatement, KeywordStatement, NodeType, ProgramStatement, RuleStatement, TokenType, isCompilerError } from '../types.js'; import { Diagnostic, DiagnosticSeverity, DocumentDiagnosticReportKind, Range } from 'lsp-types'; -import { describe, inst, it, onError } from '@efekos/es-test/bin/testRunner.js'; +import { describe, inst, it } from '@efekos/es-test/bin/testRunner.js'; import { tokenizeSys, tokenizeSyx 
} from '../lexer.js'; import { createSyntaxScriptDiagnosticReport } from '../diagnostic.js'; import { expect } from 'chai'; @@ -23,7 +23,7 @@ describe('Compiler module', () => { const tokens = tokenizeSyx('keyword hello;'); - tokens.map(r=>r.range).forEach(r=>rangeExpectations(r)); + tokens.map(r => r.range).forEach(r => rangeExpectations(r)); expect(tokens[0].range).to.deep.equal({ end: { line: 1, character: 8 }, start: { line: 1, character: 1 } }); expect(tokens[1].range).to.deep.equal({ end: { line: 1, character: 14 }, start: { line: 1, character: 9 } }); expect(tokens[2].range).to.deep.equal({ end: { line: 1, character: 15 }, start: { line: 1, character: 14 } }); @@ -35,7 +35,7 @@ describe('Compiler module', () => { const tokens = tokenizeSyx('rule "imports-keyword": cray;'); expect(tokens).to.be.a('array').to.have.lengthOf(10); - tokens.map(r=>r.range).forEach(r=>rangeExpectations(r)); + tokens.map(r => r.range).forEach(r => rangeExpectations(r)); expect(tokens[0].range).to.be.deep.equal({ end: { line: 1, character: 5 }, start: { line: 1, character: 1 } }); expect(tokens[1].range).to.be.deep.equal({ end: { line: 1, character: 7 }, start: { line: 1, character: 6 } }); expect(tokens[2].range).to.be.deep.equal({ end: { line: 1, character: 14 }, start: { line: 1, character: 7 } }); @@ -48,11 +48,11 @@ describe('Compiler module', () => { }); - inst(()=>{ + inst(() => { const tokens = tokenizeSyx('rule "return-function-value-enabled":true;'); expect(tokens).to.be.a('array').to.have.lengthOf(14); - tokens.map(r=>r.range).forEach(r=>rangeExpectations(r)); + tokens.map(r => r.range).forEach(r => rangeExpectations(r)); expect(tokens[0].range).to.be.deep.equal({ end: { line: 1, character: 5 }, start: { line: 1, character: 1 } }); expect(tokens[1].range).to.be.deep.equal({ end: { line: 1, character: 7 }, start: { line: 1, character: 6 } }); expect(tokens[2].range).to.be.deep.equal({ end: { line: 1, character: 13 }, start: { line: 1, character: 7 } }); From 
9dae5f20c0b3e398290ba1eae1b06448f068fd75 Mon Sep 17 00:00:00 2001 From: efekos Date: Sun, 21 Apr 2024 18:47:16 +0300 Subject: [PATCH 28/47] fix tokenizeSys not tokenizing strings correctly --- src/lexer.ts | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/src/lexer.ts b/src/lexer.ts index 8b3b485..e69ecd8 100644 --- a/src/lexer.ts +++ b/src/lexer.ts @@ -77,7 +77,6 @@ export function tokenizeSyx(source: string): Token[] { const src = source.split(''); let lastString = 'n'; let inString = false; - //TODO single quotes aren't working function t(s: string) { if (lastString === 'n') { lastString = s; inString = true; } else if (lastString === '\'' && s === '\'' || (lastString === '"' && s === '"')) { lastString = 'n'; inString = false; }; @@ -145,7 +144,7 @@ export function tokenizeSyx(source: string): Token[] { * @param {string} source Source string. * @returns A list of tokens generated from the source file. * @author efekos - * @version 1.0.5 + * @version 1.0.6 * @since 0.0.2-alpha */ export function tokenizeSys(source: string): Token[] { @@ -154,9 +153,8 @@ export function tokenizeSys(source: string): Token[] { let lastString = 'n'; let inString = false; function t(s: string) { - if (lastString === '\'' && s === '\'') { lastString = 'n'; inString = !inString; } - if (lastString === '"' && s === '"') { lastString = 'n'; inString = !inString; } - if (lastString === 'n') { lastString = s; inString = !inString; } + if (lastString === 'n') { lastString = s; inString = true; } + else if (lastString === '\'' && s === '\'' || (lastString === '"' && s === '"')) { lastString = 'n'; inString = false; }; } let curPos = 0; From d70b852496db483fcbce8c6ddd95da80f7c6a560 Mon Sep 17 00:00:00 2001 From: efekos Date: Sun, 21 Apr 2024 19:05:49 +0300 Subject: [PATCH 29/47] extract generating operator regex to a namespace to make it public --- src/compiler.ts | 49 ++++++++++++++++++++++++++++++++++--------------- 1 file changed, 34 insertions(+), 15 deletions(-) 
diff --git a/src/compiler.ts b/src/compiler.ts index 76486a5..fbdfaf9 100644 --- a/src/compiler.ts +++ b/src/compiler.ts @@ -1,4 +1,4 @@ -import { CompileStatement, CompilerError, ImportStatement, ImportsStatement, NodeType, PrimitiveTypeExpression, StringExpression, TokenType, VariableExpression, statementIsA } from './types.js'; +import { CompileStatement, CompilerError, ImportStatement, ImportsStatement, NodeType, OperatorStatement, PrimitiveTypeExpression, StringExpression, TokenType, VariableExpression, statementIsA } from './types.js'; import { dirname, join } from 'path'; import { existsSync, readFileSync, readdirSync, statSync, writeFileSync } from 'fs'; import { sysparser, syxparser } from './ast.js'; @@ -80,20 +80,7 @@ export class SyntaxScriptCompiler { //# Generate regexMatcher - let regexMatcher: RegExp = new RegExp(''); - statement.regex.forEach(regexStatement => { - - if (regexStatement.type === NodeType.PrimitiveType) { - regexMatcher = new RegExp(regexMatcher.source + regexes[(regexStatement as PrimitiveTypeExpression).value].source); - } - if (regexStatement.type === NodeType.WhitespaceIdentifier) { - regexMatcher = new RegExp(regexMatcher.source + regexes['+s'].source); - } - if (regexStatement.type === NodeType.String) { - regexMatcher = new RegExp(regexMatcher.source + escapeRegex((regexStatement as StringExpression).value)); - } - - }); + const regexMatcher: RegExp = CompilerFunctions.generateRegexMatcher(statement); const operatorStmtExport: ExportedOperator = { imports: {}, outputGenerators: {}, regexMatcher, type: ExportType.Operator }; @@ -397,4 +384,36 @@ export const regexes: Record = { */ export function escapeRegex(src: string): string { return src.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); +} + +export namespace CompilerFunctions { + + + /** + * Generates {@link RegExp} of the given operator statement. + * @param statement An operator statement. + * @returns A regular expression generated from regex of the operator statement. 
+ * @author efekos + * @version 1.0.0 + * @since 0.0.2-alpha + */ + export function generateRegexMatcher(statement:OperatorStatement):RegExp{ + let regexMatcher = new RegExp(''); + statement.regex.forEach(regexStatement => { + + if (regexStatement.type === NodeType.PrimitiveType) { + regexMatcher = new RegExp(regexMatcher.source + regexes[(regexStatement as PrimitiveTypeExpression).value].source); + } + if (regexStatement.type === NodeType.WhitespaceIdentifier) { + regexMatcher = new RegExp(regexMatcher.source + regexes['+s'].source); + } + if (regexStatement.type === NodeType.String) { + regexMatcher = new RegExp(regexMatcher.source + escapeRegex((regexStatement as StringExpression).value)); + } + + }); + + return regexMatcher; + } + } \ No newline at end of file From 28ecf90066648b1d616b6cb54c926f98fae5704f Mon Sep 17 00:00:00 2001 From: efekos Date: Sun, 21 Apr 2024 19:06:06 +0300 Subject: [PATCH 30/47] add checks for same regex on operators --- src/diagnostic.ts | 43 ++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 42 insertions(+), 1 deletion(-) diff --git a/src/diagnostic.ts b/src/diagnostic.ts index 313493a..6d341d3 100644 --- a/src/diagnostic.ts +++ b/src/diagnostic.ts @@ -1,8 +1,9 @@ import { CodeAction, CodeActionKind, Diagnostic, DiagnosticSeverity, DocumentDiagnosticReportKind, FullDocumentDiagnosticReport, Range } from 'lsp-types'; -import { GlobalStatement, ImportStatement, NodeType, ProgramStatement, RuleStatement, Statement, TokenType, isCompilerError, statementIsA } from './types.js'; +import { GlobalStatement, ImportStatement, NodeType, OperatorStatement, ProgramStatement, RuleStatement, Statement, TokenType, isCompilerError, statementIsA } from './types.js'; import { existsSync, readFileSync, statSync } from 'fs'; import { sysparser, syxparser } from './ast.js'; import { tokenizeSys, tokenizeSyx } from './lexer.js'; +import { CompilerFunctions } from './compiler.js'; import { dictionary } from './dictionary/index.js'; import { 
fileURLToPath } from 'url'; import { join } from 'path'; @@ -35,6 +36,7 @@ export function createSyntaxScriptDiagnosticReport(filePath: string, fileContent items.push(...ruleConflictCheck(ast, filePath)); items.push(...sameRuleCheck(ast, filePath)); items.push(...importedExistentCheck(ast, filePath)); + items.push(...sameRegexCheck(ast,filePath)); } catch (error) { if (isCompilerError(error)) { items.push({ @@ -218,6 +220,45 @@ function importedExistentCheck(ast: ProgramStatement, filePath: string): Diagnos return items; } +// Checks if there are multiple operators with the same regex +function sameRegexCheck(ast:ProgramStatement, filePath:string): Diagnostic[] { + const items:Diagnostic[] = []; + + const encounteredRegexes:RegExp[] = []; + + ast.body.filter(r=>statementIsA(r,NodeType.Operator)).map(r => r as OperatorStatement).forEach(stmt=>{ + + const regex = new RegExp(CompilerFunctions.generateRegexMatcher(stmt)); + + if(encounteredRegexes.some(r=>r.source===regex.source)) items.push({ + message:'Regex of this operator is same with another operator.', + range:subRange(syxparser.combineTwo(stmt.regex[0].range,stmt.regex[stmt.regex.length-1].range)), + severity:DiagnosticSeverity.Error, + source:'syntax-script', + data:[ + { + title:'Remove this operator', + kind:CodeActionKind.QuickFix, + edit:{ + changes:{ + [filePath]:[ + { + newText:'', + range:subRange(stmt.range) + } + ] + } + } + } + ] as CodeAction[] + }); + else encounteredRegexes.push(regex); + + }); + + return items; +} + // Checks if every exported statement it actually exportable function exportableCheck(statements: Statement[], filePath: string): Diagnostic[] { From c36cce5094f94a798f6e40bbfbe015d30bba8763 Mon Sep 17 00:00:00 2001 From: efekos Date: Sun, 21 Apr 2024 19:18:20 +0300 Subject: [PATCH 31/47] add sameNameCheck (works for function,keywords and globals) --- src/diagnostic.ts | 38 +++++++++++++++++++++++++++++++++++++- 1 file changed, 37 insertions(+), 1 deletion(-) diff --git 
a/src/diagnostic.ts b/src/diagnostic.ts index 6d341d3..962a716 100644 --- a/src/diagnostic.ts +++ b/src/diagnostic.ts @@ -1,5 +1,5 @@ import { CodeAction, CodeActionKind, Diagnostic, DiagnosticSeverity, DocumentDiagnosticReportKind, FullDocumentDiagnosticReport, Range } from 'lsp-types'; -import { GlobalStatement, ImportStatement, NodeType, OperatorStatement, ProgramStatement, RuleStatement, Statement, TokenType, isCompilerError, statementIsA } from './types.js'; +import { FunctionStatement, GlobalStatement, ImportStatement, KeywordStatement, NodeType, OperatorStatement, ProgramStatement, RuleStatement, Statement, TokenType, isCompilerError, statementIsA } from './types.js'; import { existsSync, readFileSync, statSync } from 'fs'; import { sysparser, syxparser } from './ast.js'; import { tokenizeSys, tokenizeSyx } from './lexer.js'; @@ -37,6 +37,7 @@ export function createSyntaxScriptDiagnosticReport(filePath: string, fileContent items.push(...sameRuleCheck(ast, filePath)); items.push(...importedExistentCheck(ast, filePath)); items.push(...sameRegexCheck(ast,filePath)); + items.push(...sameNameCheck(ast.body,filePath)); } catch (error) { if (isCompilerError(error)) { items.push({ @@ -294,6 +295,41 @@ function exportableCheck(statements: Statement[], filePath: string): Diagnostic[ return items; } +// Check if everything has a unique name +function sameNameCheck(statements: Statement[], filePath: string): Diagnostic[] { + const items:Diagnostic[] = []; + + function c(s:Statement[]){ + const encounteredNames = []; + + s + .filter(r=>statementIsA(r,NodeType.Function)||statementIsA(r,NodeType.Global)||statementIsA(r,NodeType.Keyword)) + .map(r=>{ + if(statementIsA(r,NodeType.Function))return r as FunctionStatement; + if(statementIsA(r,NodeType.Global))return r as GlobalStatement; + if(statementIsA(r,NodeType.Keyword))return r as KeywordStatement; + }).forEach(stmt=>{ + + const n = stmt[statementIsA(stmt,NodeType.Keyword)?'word':'name']; + + 
if(encounteredNames.includes(n)) items.push({ + message:`Name '${n}' is already seen before.`, + range:subRange(stmt.range), + source:'syntax-script', + severity:DiagnosticSeverity.Error + }); + else encounteredNames.push(n); + + if(statementIsA(stmt,NodeType.Global)) c(stmt.body); + }); + + } + + c(statements); + + return items; +} + /** * Modifies the given range to be zero-based. * @param {Range} r Any range. From 8eebda802ed00ce145e909925df056cbc52c20fc Mon Sep 17 00:00:00 2001 From: efekos Date: Sun, 21 Apr 2024 19:20:35 +0300 Subject: [PATCH 32/47] map items with souce instead of defining it every time --- src/diagnostic.ts | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/src/diagnostic.ts b/src/diagnostic.ts index 962a716..1fda389 100644 --- a/src/diagnostic.ts +++ b/src/diagnostic.ts @@ -44,14 +44,13 @@ export function createSyntaxScriptDiagnosticReport(filePath: string, fileContent message: error.message, range: subRange(error.range), severity: DiagnosticSeverity.Error, - source: 'syntax-script', data: error.actions }); } else { items.push({ message: `Parser Error: ${error.message}`, range: { end: { line: 0, character: 1 }, start: { line: 0, character: 0 } }, severity: DiagnosticSeverity.Warning }); } } finally { - return { items, kind: DocumentDiagnosticReportKind.Full }; + return { items:items.map(r=>{return {...r,source:'syntax-script'};}), kind: DocumentDiagnosticReportKind.Full }; } } @@ -68,7 +67,6 @@ function ruleConflictCheck(ast: ProgramStatement, filePath: string): Diagnostic[ if (dictRule.conflicts.includes(otherRules.rule)) items.push({ message: `Rule '${otherRules.rule}' conflicts with '${stmt.rule}', Both of them should not be defined.`, range: subRange(otherRules.range), - source: 'syntax-script', severity: DiagnosticSeverity.Warning, data: [ { @@ -118,7 +116,6 @@ function sameRuleCheck(ast: ProgramStatement, filePath: string): Diagnostic[] { if (otherRules.rule === stmt.rule) items.push({ message: `Rule 
'${stmt.rule}' is already defined.`, range: subRange(stmt.range), - source: 'syntax-script', severity: DiagnosticSeverity.Error, data: [ { @@ -156,7 +153,6 @@ function importedExistentCheck(ast: ProgramStatement, filePath: string): Diagnos message: `Can't find file '${fullPath}' imported from '${filePathButPath}'`, severity: DiagnosticSeverity.Error, range: subRange(stmt.range), - source: 'syntax-script', data: [ { title: 'Remove this import statement', @@ -179,7 +175,6 @@ function importedExistentCheck(ast: ProgramStatement, filePath: string): Diagnos message: `'${fullPath}' imported from '${filePathButPath}' doesn't seem to be a file.`, severity: DiagnosticSeverity.Error, range: subRange(stmt.range), - source: 'syntax-script', data: [ { title: 'Remove this import statement', @@ -199,7 +194,6 @@ function importedExistentCheck(ast: ProgramStatement, filePath: string): Diagnos message: `'${fullPath}' imported from '${filePathButPath}' cannot be imported.`, severity: DiagnosticSeverity.Error, range: subRange(stmt.range), - source: 'syntax-script', data: [ { title: 'Remove this import statement', @@ -235,7 +229,6 @@ function sameRegexCheck(ast:ProgramStatement, filePath:string): Diagnostic[] { message:'Regex of this operator is same with another operator.', range:subRange(syxparser.combineTwo(stmt.regex[0].range,stmt.regex[stmt.regex.length-1].range)), severity:DiagnosticSeverity.Error, - source:'syntax-script', data:[ { title:'Remove this operator', @@ -271,7 +264,6 @@ function exportableCheck(statements: Statement[], filePath: string): Diagnostic[ message: 'This statement cannot be exported.', range: subRange(stmt.range), severity: DiagnosticSeverity.Error, - source: 'syntax-script', data: [ { title: 'Remove export keyword', @@ -315,7 +307,6 @@ function sameNameCheck(statements: Statement[], filePath: string): Diagnostic[] if(encounteredNames.includes(n)) items.push({ message:`Name '${n}' is already seen before.`, range:subRange(stmt.range), - 
source:'syntax-script', severity:DiagnosticSeverity.Error }); else encounteredNames.push(n); From 2cd3a34c2676fb749be6a78ad059d3f0f587f577 Mon Sep 17 00:00:00 2001 From: efekos Date: Sun, 21 Apr 2024 19:20:57 +0300 Subject: [PATCH 33/47] format code --- src/compiler.ts | 2 +- src/diagnostic.ts | 78 +++++++++++++++++++++++------------------------ 2 files changed, 40 insertions(+), 40 deletions(-) diff --git a/src/compiler.ts b/src/compiler.ts index fbdfaf9..b7b3d3d 100644 --- a/src/compiler.ts +++ b/src/compiler.ts @@ -397,7 +397,7 @@ export namespace CompilerFunctions { * @version 1.0.0 * @since 0.0.2-alpha */ - export function generateRegexMatcher(statement:OperatorStatement):RegExp{ + export function generateRegexMatcher(statement: OperatorStatement): RegExp { let regexMatcher = new RegExp(''); statement.regex.forEach(regexStatement => { diff --git a/src/diagnostic.ts b/src/diagnostic.ts index 1fda389..4895722 100644 --- a/src/diagnostic.ts +++ b/src/diagnostic.ts @@ -36,8 +36,8 @@ export function createSyntaxScriptDiagnosticReport(filePath: string, fileContent items.push(...ruleConflictCheck(ast, filePath)); items.push(...sameRuleCheck(ast, filePath)); items.push(...importedExistentCheck(ast, filePath)); - items.push(...sameRegexCheck(ast,filePath)); - items.push(...sameNameCheck(ast.body,filePath)); + items.push(...sameRegexCheck(ast, filePath)); + items.push(...sameNameCheck(ast.body, filePath)); } catch (error) { if (isCompilerError(error)) { items.push({ @@ -50,7 +50,7 @@ export function createSyntaxScriptDiagnosticReport(filePath: string, fileContent items.push({ message: `Parser Error: ${error.message}`, range: { end: { line: 0, character: 1 }, start: { line: 0, character: 0 } }, severity: DiagnosticSeverity.Warning }); } } finally { - return { items:items.map(r=>{return {...r,source:'syntax-script'};}), kind: DocumentDiagnosticReportKind.Full }; + return { items: items.map(r => { return { ...r, source: 'syntax-script' }; }), kind: 
DocumentDiagnosticReportKind.Full }; } } @@ -216,29 +216,29 @@ function importedExistentCheck(ast: ProgramStatement, filePath: string): Diagnos } // Checks if there are multiple operators with the same regex -function sameRegexCheck(ast:ProgramStatement, filePath:string): Diagnostic[] { - const items:Diagnostic[] = []; +function sameRegexCheck(ast: ProgramStatement, filePath: string): Diagnostic[] { + const items: Diagnostic[] = []; - const encounteredRegexes:RegExp[] = []; + const encounteredRegexes: RegExp[] = []; - ast.body.filter(r=>statementIsA(r,NodeType.Operator)).map(r => r as OperatorStatement).forEach(stmt=>{ + ast.body.filter(r => statementIsA(r, NodeType.Operator)).map(r => r as OperatorStatement).forEach(stmt => { const regex = new RegExp(CompilerFunctions.generateRegexMatcher(stmt)); - if(encounteredRegexes.some(r=>r.source===regex.source)) items.push({ - message:'Regex of this operator is same with another operator.', - range:subRange(syxparser.combineTwo(stmt.regex[0].range,stmt.regex[stmt.regex.length-1].range)), - severity:DiagnosticSeverity.Error, - data:[ + if (encounteredRegexes.some(r => r.source === regex.source)) items.push({ + message: 'Regex of this operator is same with another operator.', + range: subRange(syxparser.combineTwo(stmt.regex[0].range, stmt.regex[stmt.regex.length - 1].range)), + severity: DiagnosticSeverity.Error, + data: [ { - title:'Remove this operator', - kind:CodeActionKind.QuickFix, - edit:{ - changes:{ - [filePath]:[ + title: 'Remove this operator', + kind: CodeActionKind.QuickFix, + edit: { + changes: { + [filePath]: [ { - newText:'', - range:subRange(stmt.range) + newText: '', + range: subRange(stmt.range) } ] } @@ -289,30 +289,30 @@ function exportableCheck(statements: Statement[], filePath: string): Diagnostic[ // Check if everything has a unique name function sameNameCheck(statements: Statement[], filePath: string): Diagnostic[] { - const items:Diagnostic[] = []; - - function c(s:Statement[]){ + const items: 
Diagnostic[] = []; + + function c(s: Statement[]) { const encounteredNames = []; s - .filter(r=>statementIsA(r,NodeType.Function)||statementIsA(r,NodeType.Global)||statementIsA(r,NodeType.Keyword)) - .map(r=>{ - if(statementIsA(r,NodeType.Function))return r as FunctionStatement; - if(statementIsA(r,NodeType.Global))return r as GlobalStatement; - if(statementIsA(r,NodeType.Keyword))return r as KeywordStatement; - }).forEach(stmt=>{ - - const n = stmt[statementIsA(stmt,NodeType.Keyword)?'word':'name']; - - if(encounteredNames.includes(n)) items.push({ - message:`Name '${n}' is already seen before.`, - range:subRange(stmt.range), - severity:DiagnosticSeverity.Error - }); - else encounteredNames.push(n); + .filter(r => statementIsA(r, NodeType.Function) || statementIsA(r, NodeType.Global) || statementIsA(r, NodeType.Keyword)) + .map(r => { + if (statementIsA(r, NodeType.Function)) return r as FunctionStatement; + if (statementIsA(r, NodeType.Global)) return r as GlobalStatement; + if (statementIsA(r, NodeType.Keyword)) return r as KeywordStatement; + }).forEach(stmt => { - if(statementIsA(stmt,NodeType.Global)) c(stmt.body); - }); + const n = stmt[statementIsA(stmt, NodeType.Keyword) ? 
'word' : 'name']; + + if (encounteredNames.includes(n)) items.push({ + message: `Name '${n}' is already seen before.`, + range: subRange(stmt.range), + severity: DiagnosticSeverity.Error + }); + else encounteredNames.push(n); + + if (statementIsA(stmt, NodeType.Global)) c(stmt.body); + }); } From e2b06d04f502e3c2619b57805210ca563dd97ea6 Mon Sep 17 00:00:00 2001 From: efekos Date: Mon, 22 Apr 2024 08:02:59 +0300 Subject: [PATCH 34/47] make everyhing inside strings raw to make sure strings are correctly tokenized --- src/lexer.ts | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/src/lexer.ts b/src/lexer.ts index e69ecd8..6748d54 100644 --- a/src/lexer.ts +++ b/src/lexer.ts @@ -91,21 +91,21 @@ export function tokenizeSyx(source: string): Token[] { curPos++; } } - if (src[0] === '(') tokens.push({ type: TokenType.OpenParen, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === ')') tokens.push({ type: TokenType.CloseParen, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === '{') tokens.push({ type: TokenType.OpenBrace, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === '}') tokens.push({ type: TokenType.CloseBrace, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === '[') tokens.push({ type: TokenType.OpenSquare, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === ']') tokens.push({ type: TokenType.CloseSquare, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === ',') tokens.push({ type: TokenType.Comma, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === ';') tokens.push({ type: TokenType.Semicolon, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === '<') tokens.push({ type: TokenType.OpenDiamond, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === '>') tokens.push({ type: TokenType.CloseDiamond, value: 
src.shift(), range: opr(curLine, curPos++) }); + if (src[0] === '(') tokens.push({ type: inString?20:TokenType.OpenParen, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === ')') tokens.push({ type: inString?20:TokenType.CloseParen, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === '{') tokens.push({ type: inString?20:TokenType.OpenBrace, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === '}') tokens.push({ type: inString?20:TokenType.CloseBrace, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === '[') tokens.push({ type: inString?20:TokenType.OpenSquare, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === ']') tokens.push({ type: inString?20:TokenType.CloseSquare, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === ',') tokens.push({ type: inString?20:TokenType.Comma, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === ';') tokens.push({ type: inString?20:TokenType.Semicolon, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === '<') tokens.push({ type: inString?20:TokenType.OpenDiamond, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === '>') tokens.push({ type: inString?20:TokenType.CloseDiamond, value: src.shift(), range: opr(curLine, curPos++) }); else if (src[0] === '\'') { tokens.push({ type: TokenType.SingleQuote, value: src.shift(), range: opr(curLine, curPos++) }); t('\''); } else if (src[0] === '"') { tokens.push({ type: TokenType.DoubleQuote, value: src.shift(), range: opr(curLine, curPos++) }); t('"'); } - else if (src[0] === '|') tokens.push({ type: TokenType.VarSeperator, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === '|') tokens.push({ type: inString?20:TokenType.VarSeperator, value: src.shift(), range: opr(curLine, curPos++) }); else if (src[0] === '+' && chars.includes(src[1])) { - if (src[1] === 's') 
tokens.push({ type: TokenType.WhitespaceIdentifier, value: '+s', range: tpr(pos(curLine, curPos), pos(curLine, curPos + 2)) }); + if (src[1] === 's') tokens.push({ type: inString?20:TokenType.WhitespaceIdentifier, value: '+s', range: tpr(pos(curLine, curPos), pos(curLine, curPos + 2)) }); curPos += 2; src.shift(); src.shift(); } else if (isInt(src[0])) { @@ -116,7 +116,7 @@ export function tokenizeSyx(source: string): Token[] { } curPos += ident.length; - tokens.push({ type: TokenType.IntNumber, value: ident, range: tpr(pos(curLine, startPos), pos(curLine, curPos)) }); + tokens.push({ type: inString?20:TokenType.IntNumber, value: ident, range: tpr(pos(curLine, startPos), pos(curLine, curPos)) }); } else if (isAlphabetic(src[0])) { let ident = ''; const startPos = curPos; @@ -126,7 +126,7 @@ export function tokenizeSyx(source: string): Token[] { } const reserved = keywords[ident]; - tokens.push({ type: reserved ?? TokenType.Identifier, value: ident, range: tpr(pos(curLine, startPos), pos(curLine, curPos)) }); + tokens.push({ type: inString?20:reserved ?? 
TokenType.Identifier, value: ident, range: tpr(pos(curLine, startPos), pos(curLine, curPos)) }); } else if (isSkippable(src[0]) && !inString) { src.shift(); curPos++; @@ -161,7 +161,7 @@ export function tokenizeSys(source: string): Token[] { let curLine = 1; while (src.length > 0 && `${src[0]}${src[1]}${src[2]}` !== ':::') { - if (src[0] === ';') tokens.push({ type: TokenType.Semicolon, value: src.shift(), range: opr(curLine, curPos++) }); + if (src[0] === ';') tokens.push({ type: inString?20:TokenType.Semicolon, value: src.shift(), range: opr(curLine, curPos++) }); else if (src[0] === '\'') { tokens.push({ type: TokenType.SingleQuote, value: src.shift(), range: opr(curLine, curPos++) }); t('\''); } else if (src[0] === '"') { tokens.push({ type: TokenType.DoubleQuote, value: src.shift(), range: opr(curLine, curPos++) }); t('"'); } else if (isAlphabetic(src[0])) { From 92639f32b4e4e28554cb7b2c1fc1e1eb08e6ca0b Mon Sep 17 00:00:00 2001 From: efekos Date: Mon, 22 Apr 2024 08:17:40 +0300 Subject: [PATCH 35/47] add 12 cases to 'Compiler module should provide correct tokenization' (all passed) --- src/test/compiler.test.ts | 131 ++++++++++++++++++++++++++++++++------ 1 file changed, 113 insertions(+), 18 deletions(-) diff --git a/src/test/compiler.test.ts b/src/test/compiler.test.ts index e459db9..03f3619 100644 --- a/src/test/compiler.test.ts +++ b/src/test/compiler.test.ts @@ -1,7 +1,8 @@ -import { CompileStatement, FunctionStatement, GlobalStatement, ImportStatement, ImportsStatement, KeywordStatement, NodeType, ProgramStatement, RuleStatement, TokenType, isCompilerError } from '../types.js'; +import { CompileStatement, FunctionStatement, GlobalStatement, ImportStatement, ImportsStatement, KeywordStatement, NodeType, ProgramStatement, RuleStatement, Token, TokenType, isCompilerError } from '../types.js'; import { Diagnostic, DiagnosticSeverity, DocumentDiagnosticReportKind, Range } from 'lsp-types'; import { describe, inst, it } from 
'@efekos/es-test/bin/testRunner.js'; import { tokenizeSys, tokenizeSyx } from '../lexer.js'; +import { HandlerFn } from '@efekos/es-test/bin/types.js'; import { createSyntaxScriptDiagnosticReport } from '../diagnostic.js'; import { expect } from 'chai'; import { syxparser } from '../ast.js'; @@ -17,6 +18,14 @@ describe('Compiler module', () => { expect(r.end).to.have.property('line').to.be.a('number').to.be.greaterThanOrEqual(0); } + function tokenExpectations(t: Token) { + expect(t).to.have.property('range').to.be.a('object'); + rangeExpectations(t.range); + + expect(t).to.have.property('type').to.be.a('number').to.be.greaterThanOrEqual(0); + expect(t).to.have.property('value').to.be.a('string').to.be.not.equal(undefined); + } + it('should provide correct ranges', () => { inst(() => { @@ -72,23 +81,109 @@ describe('Compiler module', () => { }, true); it('should provide correct tokenization', () => { - const t = tokenizeSyx('class } > ) ] , compile "" export function global random import imports 1 keyword { < ( [ operator * rule ; \'\' | +s'); - const tList = [ - TokenType.ClassKeyword, TokenType.CloseBrace, TokenType.CloseDiamond, TokenType.CloseParen, TokenType.CloseSquare, TokenType.Comma, TokenType.CompileKeyword, TokenType.DoubleQuote, TokenType.DoubleQuote, - TokenType.ExportKeyword, TokenType.FunctionKeyword, TokenType.GlobalKeyword, TokenType.Identifier, TokenType.ImportKeyword, TokenType.ImportsKeyword, TokenType.IntNumber, TokenType.KeywordKeyword, - TokenType.OpenBrace, TokenType.OpenDiamond, TokenType.OpenParen, TokenType.OpenSquare, TokenType.OperatorKeyword, TokenType.Raw, TokenType.RuleKeyword, TokenType.Semicolon, TokenType.SingleQuote, TokenType.SingleQuote, - TokenType.VarSeperator, TokenType.WhitespaceIdentifier, TokenType.EndOfFile - ]; - - expect(t).to.be.a('array'); - expect(t.map(tt => tt.type)).to.be.deep.equal(tList); - - const sys = tokenizeSys('import "" \'\' ; :::'); - const sysList = [TokenType.ImportKeyword, TokenType.DoubleQuote, 
TokenType.DoubleQuote, TokenType.SingleQuote, TokenType.SingleQuote, TokenType.Semicolon, TokenType.EndOfFile]; - - expect(sys).to.be.a('array'); - expect(sys.map(tt => tt.type)).to.be.deep.equal(sysList); - }); + + function _case(src: string, types: TokenType[]): HandlerFn { + return () => { + const ts = tokenizeSyx(src); + + expect(ts).to.be.a('array'); + ts.forEach(t => tokenExpectations(t)); + expect(ts.map(t => t.type)).to.be.deep.equal(types); + }; + } + + inst( + _case('class } > ) ] , compile "" export function global random import imports 1 keyword { < ( [ operator * rule ; \'\' | +s', [ + TokenType.ClassKeyword, TokenType.CloseBrace, TokenType.CloseDiamond, TokenType.CloseParen, TokenType.CloseSquare, TokenType.Comma, TokenType.CompileKeyword, TokenType.DoubleQuote, TokenType.DoubleQuote, + TokenType.ExportKeyword, TokenType.FunctionKeyword, TokenType.GlobalKeyword, TokenType.Identifier, TokenType.ImportKeyword, TokenType.ImportsKeyword, TokenType.IntNumber, TokenType.KeywordKeyword, + TokenType.OpenBrace, TokenType.OpenDiamond, TokenType.OpenParen, TokenType.OpenSquare, TokenType.OperatorKeyword, TokenType.Raw, TokenType.RuleKeyword, TokenType.Semicolon, TokenType.SingleQuote, TokenType.SingleQuote, + TokenType.VarSeperator, TokenType.WhitespaceIdentifier, TokenType.EndOfFile + ]) + ); + + inst( + _case('class}>)],compile""exportfunctionglobalrandomimportimports1keyword{<([operator*rule;\'\'|+s', [ + TokenType.ClassKeyword, TokenType.CloseBrace, TokenType.CloseDiamond, TokenType.CloseParen, TokenType.CloseSquare, TokenType.Comma, TokenType.CompileKeyword, TokenType.DoubleQuote, TokenType.DoubleQuote, + TokenType.Identifier, TokenType.IntNumber, TokenType.KeywordKeyword, TokenType.OpenBrace, TokenType.OpenDiamond, TokenType.OpenParen, TokenType.OpenSquare, TokenType.OperatorKeyword, TokenType.Raw, + TokenType.RuleKeyword, TokenType.Semicolon, TokenType.SingleQuote, TokenType.SingleQuote, TokenType.VarSeperator, TokenType.WhitespaceIdentifier, 
TokenType.EndOfFile + ]) + ); + + inst( + _case( + '+s+s+s+s+s+s+s', + [TokenType.WhitespaceIdentifier, TokenType.WhitespaceIdentifier, TokenType.WhitespaceIdentifier, TokenType.WhitespaceIdentifier, TokenType.WhitespaceIdentifier, TokenType.WhitespaceIdentifier, TokenType.WhitespaceIdentifier, TokenType.EndOfFile] + ) + ); + + inst( + _case( + 'operator "+" {', + [TokenType.OperatorKeyword,TokenType.OpenDiamond,TokenType.Identifier,TokenType.CloseDiamond,TokenType.DoubleQuote,TokenType.Raw,TokenType.DoubleQuote,TokenType.OpenDiamond,TokenType.Identifier,TokenType.CloseDiamond,TokenType.OpenBrace,TokenType.EndOfFile] + ) + ); + + inst( + _case( + 'o-+?', + [TokenType.Identifier,TokenType.Raw,TokenType.Raw,TokenType.Raw,TokenType.EndOfFile] + ) + ); + + inst( + _case( + 'rmh09345kg9', + [TokenType.Identifier,TokenType.IntNumber,TokenType.Identifier,TokenType.IntNumber, TokenType.EndOfFile] + ) + ); + + inst( + _case( + 'rule \'custom-random-rule?\';', + [TokenType.RuleKeyword,TokenType.SingleQuote,20,20,20,20,20,20,TokenType.SingleQuote,TokenType.Semicolon, TokenType.EndOfFile] + ) + ); + + inst( + _case( + 'keyword pray;rule\'imports-keyword\': pray;', + [TokenType.KeywordKeyword,TokenType.Identifier,TokenType.Semicolon,TokenType.RuleKeyword,TokenType.SingleQuote,20,20,20,TokenType.SingleQuote,TokenType.Raw,TokenType.Identifier,TokenType.Semicolon, TokenType.EndOfFile] + ) + ); + + inst( + _case( + 'çş', + [TokenType.Raw,TokenType.Raw, TokenType.EndOfFile] + ) + ); + + inst( + _case( + 'keyword altınasıçĞ;', + [TokenType.KeywordKeyword,TokenType.Identifier,20,TokenType.Identifier,20,20,20,TokenType.Semicolon, TokenType.EndOfFile] + ) + ); + + inst( + _case( + 'keyword imsodonewiththistest12casesisenough', + [TokenType.KeywordKeyword,TokenType.Identifier,TokenType.IntNumber,TokenType.Identifier, TokenType.EndOfFile] + ) + ); + + inst(() => { + + const sys = tokenizeSys('import "" \'\' ; :::'); + const sysList = [TokenType.ImportKeyword, TokenType.DoubleQuote, 
TokenType.DoubleQuote, TokenType.SingleQuote, TokenType.SingleQuote, TokenType.Semicolon, TokenType.EndOfFile]; + + sys.forEach(t => tokenExpectations(t)); + expect(sys).to.be.a('array'); + expect(sys.map(tt => tt.type)).to.be.deep.equal(sysList); + + }); + + }, true); describe('should provide correct parsing', () => { From 7138d1aecc3a5ed6390b1a0b1a824a9d027a96b7 Mon Sep 17 00:00:00 2001 From: efekos Date: Mon, 22 Apr 2024 08:22:09 +0300 Subject: [PATCH 36/47] add descriptions to rules --- src/dictionary/dictionary.ts | 15 ++++++++++----- src/dictionary/rules.ts | 5 +++-- 2 files changed, 13 insertions(+), 7 deletions(-) diff --git a/src/dictionary/dictionary.ts b/src/dictionary/dictionary.ts index f9dd2a6..2fd07b0 100644 --- a/src/dictionary/dictionary.ts +++ b/src/dictionary/dictionary.ts @@ -6,31 +6,36 @@ const rules: Rule[] = [ name: 'imports-keyword', type: 'keyword', default: 'import', - conflicts: [] + conflicts: [], + description:'Determines which keyword should be used to import modules using defined in an imports statement.' }, { name: 'function-value-return-enabled', type: 'boolean', default: false, - conflicts: [] + conflicts: [], + description: 'Determines whether is it possible to return a value from a function using a keyword.' }, { name: 'function-value-return-keyword', type: 'keyword', default: 'return', - conflicts: [] + conflicts: [], + description: 'Determines the keyword used to return a function from a keyword. Must be used with `function-value-return-enabled` set to true to make a difference.' }, { name: 'enforce-single-string-quotes', type: 'boolean', default: false, - conflicts: ['enforge-double-string-quotes'] + conflicts: ['enforge-double-string-quotes'], + description: 'Enforces string values to have single quotes in output. Useful for languages like Java where quote type matters.' 
}, { name: 'enforce-double-string-quotes', type: 'boolean', default: false, - conflicts: ['enforce-single-string-quotes'] + conflicts: ['enforce-single-string-quotes'], + description: 'Enforces string values to have double quotes in output. Useful for languages like Java where quote type matters.' } ]; diff --git a/src/dictionary/rules.ts b/src/dictionary/rules.ts index d5fa1bd..001ac36 100644 --- a/src/dictionary/rules.ts +++ b/src/dictionary/rules.ts @@ -6,13 +6,14 @@ export type RuleType = 'keyword' | 'boolean'; /** * Base interface for rules. Represents a rule that can be modified by any file using `rule` modifier. * @author efekos - * @version 1.0.0 - * @since 0.0.1-alpha + * @version 1.0.1 + * @since 0.0.2-alpha */ export interface BaseRule { name: string; type: RuleType; conflicts: string[]; + description: string; } /** From ff71a5a35645ac3409a9192e128b77f398f1eeee Mon Sep 17 00:00:00 2001 From: efekos Date: Mon, 22 Apr 2024 16:11:05 +0300 Subject: [PATCH 37/47] add a few rules --- src/dictionary/dictionary.ts | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/src/dictionary/dictionary.ts b/src/dictionary/dictionary.ts index 2fd07b0..8717078 100644 --- a/src/dictionary/dictionary.ts +++ b/src/dictionary/dictionary.ts @@ -9,6 +9,13 @@ const rules: Rule[] = [ conflicts: [], description:'Determines which keyword should be used to import modules using defined in an imports statement.' }, + { + name:'imports-with-quotes', + type: 'boolean', + default: true, + conflicts: [], + description: 'Determines whether to import modules defined with import statements with quotes or not.' + }, { name: 'function-value-return-enabled', type: 'boolean', @@ -36,6 +43,27 @@ const rules: Rule[] = [ default: false, conflicts: ['enforce-single-string-quotes'], description: 'Enforces string values to have double quotes in output. Useful for languages like Java where quote type matters.' 
+ }, + { + name: 'export-required', + type: 'boolean', + default : false, + conflicts: [], + description: 'Determines whether is it required to export a definable in order to reach it from another file.' + }, + { + name: 'export-keyword', + type: 'keyword', + default: 'export', + conflicts: [], + description: 'Determines the keyword used to export a definable.' + }, + { + name: 'export-enabled', + type: 'boolean', + default: true, + conflicts: [], + description : 'Determines whether is it possible to export a definable with a keyword.' } ]; From 25db5992ec7ac58ee12d80ace05fddbe70ef1dea Mon Sep 17 00:00:00 2001 From: efekos Date: Mon, 22 Apr 2024 16:53:12 +0300 Subject: [PATCH 38/47] update es-test --- package-lock.json | 8 ++++---- package.json | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/package-lock.json b/package-lock.json index 9e681bf..24e6b52 100644 --- a/package-lock.json +++ b/package-lock.json @@ -12,7 +12,7 @@ "js-levenshtein": "^1.1.6" }, "devDependencies": { - "@efekos/es-test": "^1.0.4", + "@efekos/es-test": "^1.0.5", "@types/chai": "^4.3.14", "@types/js-levenshtein": "^1.1.3", "@types/node": "^20.12.7", @@ -34,9 +34,9 @@ } }, "node_modules/@efekos/es-test": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@efekos/es-test/-/es-test-1.0.4.tgz", - "integrity": "sha512-fdvqlwC7QpmMboB7it3jH7yHGZpykFbErPsWiKXi71qh0jbLtVy+8gQfuPguMHTsXz2fX6jOXI9jZoSgAKU7uA==", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@efekos/es-test/-/es-test-1.0.5.tgz", + "integrity": "sha512-7aT2Q/9Cs7rvPUxOzFvrGcRtr1Wrnf5eMpwzEf2u3f/zUPUV/sdoCf7OIV1yHfck1/tvfq5ix2ytJI18wXz4eg==", "dev": true, "dependencies": { "chalk": "^5.3.0", diff --git a/package.json b/package.json index 9795943..3f37b19 100644 --- a/package.json +++ b/package.json @@ -39,7 +39,7 @@ }, "homepage": "https://github.com/syntaxs/compiler#readme", "devDependencies": { - "@efekos/es-test": "^1.0.4", + "@efekos/es-test": "^1.0.5", "@types/chai": "^4.3.14", 
"@types/js-levenshtein": "^1.1.3", "@types/node": "^20.12.7", From c3363e70449de4d06a093d051168927e9c85534a Mon Sep 17 00:00:00 2001 From: efekos Date: Mon, 22 Apr 2024 23:09:20 +0300 Subject: [PATCH 39/47] format code --- src/dictionary/dictionary.ts | 8 ++++---- src/lexer.ts | 30 +++++++++++++++--------------- src/test/compiler.test.ts | 16 ++++++++-------- 3 files changed, 27 insertions(+), 27 deletions(-) diff --git a/src/dictionary/dictionary.ts b/src/dictionary/dictionary.ts index 8717078..86cd804 100644 --- a/src/dictionary/dictionary.ts +++ b/src/dictionary/dictionary.ts @@ -7,10 +7,10 @@ const rules: Rule[] = [ type: 'keyword', default: 'import', conflicts: [], - description:'Determines which keyword should be used to import modules using defined in an imports statement.' + description: 'Determines which keyword should be used to import modules using defined in an imports statement.' }, { - name:'imports-with-quotes', + name: 'imports-with-quotes', type: 'boolean', default: true, conflicts: [], @@ -47,7 +47,7 @@ const rules: Rule[] = [ { name: 'export-required', type: 'boolean', - default : false, + default: false, conflicts: [], description: 'Determines whether is it required to export a definable in order to reach it from another file.' }, @@ -63,7 +63,7 @@ const rules: Rule[] = [ type: 'boolean', default: true, conflicts: [], - description : 'Determines whether is it possible to export a definable with a keyword.' + description: 'Determines whether is it possible to export a definable with a keyword.' 
} ]; diff --git a/src/lexer.ts b/src/lexer.ts index 6748d54..167103e 100644 --- a/src/lexer.ts +++ b/src/lexer.ts @@ -91,21 +91,21 @@ export function tokenizeSyx(source: string): Token[] { curPos++; } } - if (src[0] === '(') tokens.push({ type: inString?20:TokenType.OpenParen, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === ')') tokens.push({ type: inString?20:TokenType.CloseParen, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === '{') tokens.push({ type: inString?20:TokenType.OpenBrace, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === '}') tokens.push({ type: inString?20:TokenType.CloseBrace, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === '[') tokens.push({ type: inString?20:TokenType.OpenSquare, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === ']') tokens.push({ type: inString?20:TokenType.CloseSquare, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === ',') tokens.push({ type: inString?20:TokenType.Comma, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === ';') tokens.push({ type: inString?20:TokenType.Semicolon, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === '<') tokens.push({ type: inString?20:TokenType.OpenDiamond, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === '>') tokens.push({ type: inString?20:TokenType.CloseDiamond, value: src.shift(), range: opr(curLine, curPos++) }); + if (src[0] === '(') tokens.push({ type: inString ? 20 : TokenType.OpenParen, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === ')') tokens.push({ type: inString ? 20 : TokenType.CloseParen, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === '{') tokens.push({ type: inString ? 
20 : TokenType.OpenBrace, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === '}') tokens.push({ type: inString ? 20 : TokenType.CloseBrace, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === '[') tokens.push({ type: inString ? 20 : TokenType.OpenSquare, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === ']') tokens.push({ type: inString ? 20 : TokenType.CloseSquare, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === ',') tokens.push({ type: inString ? 20 : TokenType.Comma, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === ';') tokens.push({ type: inString ? 20 : TokenType.Semicolon, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === '<') tokens.push({ type: inString ? 20 : TokenType.OpenDiamond, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === '>') tokens.push({ type: inString ? 20 : TokenType.CloseDiamond, value: src.shift(), range: opr(curLine, curPos++) }); else if (src[0] === '\'') { tokens.push({ type: TokenType.SingleQuote, value: src.shift(), range: opr(curLine, curPos++) }); t('\''); } else if (src[0] === '"') { tokens.push({ type: TokenType.DoubleQuote, value: src.shift(), range: opr(curLine, curPos++) }); t('"'); } - else if (src[0] === '|') tokens.push({ type: inString?20:TokenType.VarSeperator, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === '|') tokens.push({ type: inString ? 20 : TokenType.VarSeperator, value: src.shift(), range: opr(curLine, curPos++) }); else if (src[0] === '+' && chars.includes(src[1])) { - if (src[1] === 's') tokens.push({ type: inString?20:TokenType.WhitespaceIdentifier, value: '+s', range: tpr(pos(curLine, curPos), pos(curLine, curPos + 2)) }); + if (src[1] === 's') tokens.push({ type: inString ? 
20 : TokenType.WhitespaceIdentifier, value: '+s', range: tpr(pos(curLine, curPos), pos(curLine, curPos + 2)) }); curPos += 2; src.shift(); src.shift(); } else if (isInt(src[0])) { @@ -116,7 +116,7 @@ export function tokenizeSyx(source: string): Token[] { } curPos += ident.length; - tokens.push({ type: inString?20:TokenType.IntNumber, value: ident, range: tpr(pos(curLine, startPos), pos(curLine, curPos)) }); + tokens.push({ type: inString ? 20 : TokenType.IntNumber, value: ident, range: tpr(pos(curLine, startPos), pos(curLine, curPos)) }); } else if (isAlphabetic(src[0])) { let ident = ''; const startPos = curPos; @@ -126,7 +126,7 @@ export function tokenizeSyx(source: string): Token[] { } const reserved = keywords[ident]; - tokens.push({ type: inString?20:reserved ?? TokenType.Identifier, value: ident, range: tpr(pos(curLine, startPos), pos(curLine, curPos)) }); + tokens.push({ type: inString ? 20 : reserved ?? TokenType.Identifier, value: ident, range: tpr(pos(curLine, startPos), pos(curLine, curPos)) }); } else if (isSkippable(src[0]) && !inString) { src.shift(); curPos++; @@ -161,7 +161,7 @@ export function tokenizeSys(source: string): Token[] { let curLine = 1; while (src.length > 0 && `${src[0]}${src[1]}${src[2]}` !== ':::') { - if (src[0] === ';') tokens.push({ type: inString?20:TokenType.Semicolon, value: src.shift(), range: opr(curLine, curPos++) }); + if (src[0] === ';') tokens.push({ type: inString ? 
20 : TokenType.Semicolon, value: src.shift(), range: opr(curLine, curPos++) }); else if (src[0] === '\'') { tokens.push({ type: TokenType.SingleQuote, value: src.shift(), range: opr(curLine, curPos++) }); t('\''); } else if (src[0] === '"') { tokens.push({ type: TokenType.DoubleQuote, value: src.shift(), range: opr(curLine, curPos++) }); t('"'); } else if (isAlphabetic(src[0])) { diff --git a/src/test/compiler.test.ts b/src/test/compiler.test.ts index 03f3619..5ceb332 100644 --- a/src/test/compiler.test.ts +++ b/src/test/compiler.test.ts @@ -119,56 +119,56 @@ describe('Compiler module', () => { inst( _case( 'operator "+" {', - [TokenType.OperatorKeyword,TokenType.OpenDiamond,TokenType.Identifier,TokenType.CloseDiamond,TokenType.DoubleQuote,TokenType.Raw,TokenType.DoubleQuote,TokenType.OpenDiamond,TokenType.Identifier,TokenType.CloseDiamond,TokenType.OpenBrace,TokenType.EndOfFile] + [TokenType.OperatorKeyword, TokenType.OpenDiamond, TokenType.Identifier, TokenType.CloseDiamond, TokenType.DoubleQuote, TokenType.Raw, TokenType.DoubleQuote, TokenType.OpenDiamond, TokenType.Identifier, TokenType.CloseDiamond, TokenType.OpenBrace, TokenType.EndOfFile] ) ); inst( _case( 'o-+?', - [TokenType.Identifier,TokenType.Raw,TokenType.Raw,TokenType.Raw,TokenType.EndOfFile] + [TokenType.Identifier, TokenType.Raw, TokenType.Raw, TokenType.Raw, TokenType.EndOfFile] ) ); inst( _case( 'rmh09345kg9', - [TokenType.Identifier,TokenType.IntNumber,TokenType.Identifier,TokenType.IntNumber, TokenType.EndOfFile] + [TokenType.Identifier, TokenType.IntNumber, TokenType.Identifier, TokenType.IntNumber, TokenType.EndOfFile] ) ); inst( _case( 'rule \'custom-random-rule?\';', - [TokenType.RuleKeyword,TokenType.SingleQuote,20,20,20,20,20,20,TokenType.SingleQuote,TokenType.Semicolon, TokenType.EndOfFile] + [TokenType.RuleKeyword, TokenType.SingleQuote, 20, 20, 20, 20, 20, 20, TokenType.SingleQuote, TokenType.Semicolon, TokenType.EndOfFile] ) ); inst( _case( 'keyword pray;rule\'imports-keyword\': 
pray;', - [TokenType.KeywordKeyword,TokenType.Identifier,TokenType.Semicolon,TokenType.RuleKeyword,TokenType.SingleQuote,20,20,20,TokenType.SingleQuote,TokenType.Raw,TokenType.Identifier,TokenType.Semicolon, TokenType.EndOfFile] + [TokenType.KeywordKeyword, TokenType.Identifier, TokenType.Semicolon, TokenType.RuleKeyword, TokenType.SingleQuote, 20, 20, 20, TokenType.SingleQuote, TokenType.Raw, TokenType.Identifier, TokenType.Semicolon, TokenType.EndOfFile] ) ); inst( _case( 'çş', - [TokenType.Raw,TokenType.Raw, TokenType.EndOfFile] + [TokenType.Raw, TokenType.Raw, TokenType.EndOfFile] ) ); inst( _case( 'keyword altınasıçĞ;', - [TokenType.KeywordKeyword,TokenType.Identifier,20,TokenType.Identifier,20,20,20,TokenType.Semicolon, TokenType.EndOfFile] + [TokenType.KeywordKeyword, TokenType.Identifier, 20, TokenType.Identifier, 20, 20, 20, TokenType.Semicolon, TokenType.EndOfFile] ) ); inst( _case( 'keyword imsodonewiththistest12casesisenough', - [TokenType.KeywordKeyword,TokenType.Identifier,TokenType.IntNumber,TokenType.Identifier, TokenType.EndOfFile] + [TokenType.KeywordKeyword, TokenType.Identifier, TokenType.IntNumber, TokenType.Identifier, TokenType.EndOfFile] ) ); From 967390f7c4ef31ef9798282554322b54838c2179 Mon Sep 17 00:00:00 2001 From: efekos Date: Mon, 22 Apr 2024 23:17:49 +0300 Subject: [PATCH 40/47] convert every string into an expression --- src/types.ts | 32 ++++++++++++++++++++++++-------- 1 file changed, 24 insertions(+), 8 deletions(-) diff --git a/src/types.ts b/src/types.ts index 8edd771..f05a739 100644 --- a/src/types.ts +++ b/src/types.ts @@ -259,7 +259,12 @@ export enum NodeType { /** * {@link SquareExpression}. */ - Square + Square, + + /** + * {@link IdentifierExpression}. + */ + Identifier } /** @@ -341,6 +346,17 @@ export interface StringExpression extends Expression { value: string; } +/** + * An expression that represents an alphabetical identifier. Uses type {@link NodeType.Identifier}. Contains the name of something. 
+ * @author efekos + * @version 1.0.0 + * @since 0.0.2-alpha + */ +export interface IdentifierExpression extends Expression { + type: NodeType.Identifier; + name: string; +} + /** * An expression that represents multiple statements inside braces (`{}`). Uses type {@link NodeType.Brace}. * @author efekos @@ -395,7 +411,7 @@ export interface OperatorStatement extends Statement { * @since 0.0.1-alpha */ export interface KeywordStatement extends Statement { - word: string; + word: IdentifierExpression; type: NodeType.Keyword; } @@ -408,7 +424,7 @@ export interface KeywordStatement extends Statement { */ export interface ImportsStatement extends Statement { type: NodeType.Imports, - formats: string[]; + formats: IdentifierExpression[]; module: string; } @@ -421,7 +437,7 @@ export interface ImportsStatement extends Statement { */ export interface CompileStatement extends Statement { type: NodeType.Compile, - formats: string[], + formats: IdentifierExpression[], body: Expression[]; } @@ -434,7 +450,7 @@ export interface CompileStatement extends Statement { */ export interface RuleStatement extends Statement { type: NodeType.Rule; - rule: string; + rule: StringExpression; value: unknown; } @@ -447,7 +463,7 @@ export interface RuleStatement extends Statement { */ export interface ImportStatement extends Statement { type: NodeType.Import, - path: string; + path: StringExpression; } /** @@ -459,7 +475,7 @@ export interface ImportStatement extends Statement { */ export interface FunctionStatement extends Statement { type: NodeType.Function, - name: string, + name: IdentifierExpression, arguments: string[]; body: Statement[]; } @@ -473,7 +489,7 @@ export interface FunctionStatement extends Statement { */ export interface GlobalStatement extends Statement { body: Statement[]; - name: string; + name: IdentifierExpression; } From 89a6076945063647056ab4ee532eed6654d807fd Mon Sep 17 00:00:00 2001 From: efekos Date: Mon, 22 Apr 2024 23:20:21 +0300 Subject: [PATCH 41/47] update 
versions --- src/types.ts | 25 +++++++++++++------------ 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/src/types.ts b/src/types.ts index f05a739..2823a13 100644 --- a/src/types.ts +++ b/src/types.ts @@ -173,7 +173,7 @@ export interface Token { * Every node type a syntax script declaration file can contain. * @author efekos * @since 0.0.1-alpha - * @version 1.0.0 + * @version 1.0.1 */ export enum NodeType { @@ -407,7 +407,7 @@ export interface OperatorStatement extends Statement { /** * Keyword statement that registers an identifier as a keyword. This keyword can be used in several places. Uses type {@link NodeType.Keyword}. * @author efekos - * @version 1.0.0 + * @version 1.0.1 * @since 0.0.1-alpha */ export interface KeywordStatement extends Statement { @@ -419,20 +419,20 @@ export interface KeywordStatement extends Statement { * Imports statements indicate that a certain module should be imported to the file if the parent statement is used in .sys file. * Uses type {@link NodeType.Imports}. * @author efekos - * @version 1.0.0 + * @version 1.0.2 * @since 0.0.1-alpha */ export interface ImportsStatement extends Statement { type: NodeType.Imports, formats: IdentifierExpression[]; - module: string; + module: StringExpression; } /** * Compile statements determine what should be the result of an operator or a function when compiling to certain languages. * Uses typq {@link NodeType.Compile}. * @author efekos - * @version 1.0.0 + * @version 1.0.1 * @since 0.0.1-alpha */ export interface CompileStatement extends Statement { @@ -445,7 +445,7 @@ export interface CompileStatement extends Statement { * Rule statements define a specific rule about the source language, such as keyword usages or enabling/disabling certain * features of the language. Uses type {@link NodeType.Rule}. 
* @author efekos - * @version 1.0.0 + * @version 1.0.1 * @since 0.0.1-alpha */ export interface RuleStatement extends Statement { @@ -458,7 +458,7 @@ export interface RuleStatement extends Statement { * Import statements are used to import a .syx file from a .sys file. They can be used to import other .syx files from a * .syx file as well. Uses type {@link NodeType.Import} * @author efekos - * @version 1.0.0 + * @version 1.0.1 * @since 0.0.1-alpha */ export interface ImportStatement extends Statement { @@ -470,13 +470,13 @@ export interface ImportStatement extends Statement { * Function statements are used to define possible function calls. How the function is called depends on the place this statement is * used. Uses type {@link NodeType.Function}. * @author efekos - * @version 1.0.0 + * @version 1.0.2 * @since 0.0.1-alpha */ export interface FunctionStatement extends Statement { type: NodeType.Function, name: IdentifierExpression, - arguments: string[]; + arguments: PrimitiveTypeExpression[]; body: Statement[]; } @@ -484,7 +484,7 @@ export interface FunctionStatement extends Statement { * Global statements are used to define values that are global. They can be global classes, interfaces, or just global methods depending on * the language. But the only thing that matters here is that they are global, and can be used from anywhere. * @author efekos - * @version 1.0.0 + * @version 1.0.1 * @since 0.0.2-alpha */ export interface GlobalStatement extends Statement { @@ -496,12 +496,12 @@ export interface GlobalStatement extends Statement { /** * Represents any interface that is a node. 
* @author efekos - * @version 1.0.0 + * @version 1.0.3 * @since 0.0.1-alpha */ export type Node = ProgramStatement | OperatorStatement | CompileStatement | ImportStatement | ImportsStatement | FunctionStatement | KeywordStatement | RuleStatement | GlobalStatement | - StringExpression | PrimitiveTypeExpression | VariableExpression | WhitespaceIdentifierExpression | BraceExpression | SquareExpression | ParenExpression; + StringExpression | PrimitiveTypeExpression | VariableExpression | WhitespaceIdentifierExpression | BraceExpression | SquareExpression | ParenExpression | IdentifierExpression; /** * Represents a syxconfig.json file. This file contains a few properties for the compiler. @@ -584,6 +584,7 @@ interface NodeTypes { [NodeType.Variable]: VariableExpression; [NodeType.WhitespaceIdentifier]: WhitespaceIdentifierExpression; [NodeType.Global]: GlobalStatement; + [NodeType.Identifier]: IdentifierExpression; } /** From 4be46c0f6696401f9c371c0b27bd02957e232e4a Mon Sep 17 00:00:00 2001 From: efekos Date: Mon, 22 Apr 2024 23:32:47 +0300 Subject: [PATCH 42/47] change ast and compiler to use expressions instead of string values --- src/ast.ts | 41 +++++++++++++++++++++-------------------- src/compiler.ts | 24 ++++++++++++------------ src/types.ts | 1 - 3 files changed, 33 insertions(+), 33 deletions(-) diff --git a/src/ast.ts b/src/ast.ts index 2b002d0..959561d 100644 --- a/src/ast.ts +++ b/src/ast.ts @@ -10,7 +10,7 @@ const caf = { .filter(r => statementIsA(r, NodeType.Keyword)) .map(r => r as KeywordStatement) .map(r => r.word) - .sort(a => levenshtein(keyword, a)); + .sort(a => levenshtein(keyword, a.value)); return existingKeywords.map(word => { return { @@ -20,7 +20,7 @@ const caf = { changes: { [filePath]: [{ range: subRange(range), - newText: word + newText: word.value }] } } @@ -44,7 +44,7 @@ export namespace syxparser { if (!statementIsA(ex, NodeType.String)) throw new CompilerError(ex.range, 'Expected file path after import statement.', filePath); if 
(at().type !== TokenType.Semicolon) throw new CompilerError(at().range, `Expected ';' after import statement, found '${at().value}'.`, filePath); tokens.shift(); - return node({ type: NodeType.Import, path: ex.value, range: combineTwo(token, ex.range), modifiers: [] }, put); + return node({ type: NodeType.Import, path: ex, range: combineTwo(token, ex.range), modifiers: [] }, put); } /** @@ -65,15 +65,15 @@ export namespace syxparser { if (at().type !== TokenType.Semicolon) throw new CompilerError(at().range, `Expected semicolon after rule statement, found '${at().value}'.`, filePath); tokens.shift(); - return node({ type: NodeType.Rule, rule: ruleExpr.value, value: boolEx.value, range: combineTwo(token, boolEx.range), modifiers: [] }, put); + return node({ type: NodeType.Rule, rule: ruleExpr, value: boolEx.value, range: combineTwo(token, boolEx.range), modifiers: [] }, put); } else if (rule.type === 'keyword') { const keyEx = parseExpression(false, false, true); if (!statementIsA(keyEx, NodeType.String)) throw new CompilerError(keyEx.range, 'Excepted keyword.', filePath); - if (!program.body.some(s => statementIsA(s, NodeType.Keyword) && s.word === keyEx.value)) throw new CompilerError(keyEx.range, `Can't find keyword '${keyEx.value}'.`, filePath, caf.mk(keyEx.value, program, keyEx.range, filePath)); + if (!program.body.some(s => statementIsA(s, NodeType.Keyword) && s.word.value === keyEx.value)) throw new CompilerError(keyEx.range, `Can't find keyword '${keyEx.value}'.`, filePath, caf.mk(keyEx.value, program, keyEx.range, filePath)); if (at().type !== TokenType.Semicolon) throw new CompilerError(at().range, `Expected semicolon after rule statement, found ${at().value}.`, filePath); tokens.shift(); - return node({ type: NodeType.Rule, rule: ruleExpr.value, value: keyEx.value, range: combineTwo(token, keyEx.range), modifiers: [] }, put); + return node({ type: NodeType.Rule, rule: ruleExpr, value: keyEx.value, range: combineTwo(token, keyEx.range), modifiers: [] }, 
put); } } @@ -83,12 +83,12 @@ export namespace syxparser { */ export function parseKeywordStatement(put: boolean, token: Token): Node { const ex = parseExpression(false, false, true); - if (!statementIsA(ex, NodeType.String)) throw new CompilerError(ex.range, 'Expected identifier after keyword statement.', filePath); + if (!statementIsA(ex, NodeType.Identifier)) throw new CompilerError(ex.range, 'Expected identifier after keyword statement.', filePath); if (at().type !== TokenType.Semicolon) throw new CompilerError(at().range, `Expected ';' after statement, found '${at().value}'.`, filePath); tokens.shift(); // skip semicolon - return node({ type: NodeType.Keyword, word: ex.value, range: combineTwo(token, ex.range), modifiers: [] }, put); + return node({ type: NodeType.Keyword, word: ex, range: combineTwo(token, ex.range), modifiers: [] }, put); } /** @@ -107,16 +107,16 @@ export namespace syxparser { * @returns Parsed node. */ export function parseFunctionStatement(token: Token, put: boolean): Node { - const statement: FunctionStatement = { type: NodeType.Function, arguments: [], name: '', body: [], range: defaultRange, modifiers: [] }; + const statement: FunctionStatement = { type: NodeType.Function, arguments: [], name: {type:NodeType.Identifier,modifiers:[],value:'',range:defaultRange}, body: [], range: defaultRange, modifiers: [] }; if (at().type !== TokenType.Identifier) throw new CompilerError(at().range, `Expected identifier after function statement, found '${at().value}'.`, filePath); - statement.name = at().value; + statement.name = {type:NodeType.Identifier,modifiers:[],range:at().range,value:at().value}; tokens.shift(); while (at().type !== TokenType.OpenBrace) { const expr = parseExpression(false, false) as Expression; if (!statementIsA(expr, NodeType.PrimitiveType)) throw new CompilerError(expr.range, `Expected argument types after function name, found ${expr.value}.`, filePath); - statement.arguments.push(expr.value); + 
statement.arguments.push(expr); } const braceExpr = parseExpression(false); @@ -134,7 +134,7 @@ export namespace syxparser { * @returns Parsed node. */ export function parseImportsStatement(token: Token, put: boolean) { - const statement: ImportsStatement = { type: NodeType.Imports, formats: [], module: '', range: defaultRange, modifiers: [] }; + const statement: ImportsStatement = { type: NodeType.Imports, formats: [], module: {type:NodeType.String,modifiers:[],range:defaultRange,value:''}, range: defaultRange, modifiers: [] }; if (at().type !== TokenType.OpenParen) throw new CompilerError(at().range, 'Imports statement require parens.', filePath); @@ -145,7 +145,7 @@ export namespace syxparser { if (t.type === TokenType.Comma && at().type !== TokenType.Identifier) throw new CompilerError(t.range, 'Expected identifier after comma.', filePath); else if (t.type === TokenType.Comma && statement.formats.length === 0) throw new CompilerError(t.range, 'Can\'t start with comma.', filePath); else if (t.type === TokenType.Comma) { } - else if (t.type === TokenType.Identifier) statement.formats.push(t.value); + else if (t.type === TokenType.Identifier) statement.formats.push({type:NodeType.Identifier,modifiers:[],range:t.range,value:t.value}); else throw new CompilerError(t.range, `Expected comma or identifier, found '${t.value}'.`, filePath); } tokens.shift(); // skip CloseParen @@ -157,7 +157,7 @@ export namespace syxparser { if (!statementIsA(moduleExpr, NodeType.String)) throw new CompilerError(moduleExpr.range, `Expected string after parens of imports statement, found '${moduleExpr.value}'.`, filePath); - statement.module = moduleExpr.value; + statement.module = moduleExpr; statement.range = combineTwo(token, moduleExpr.range); if (at().type !== TokenType.Semicolon) throw new CompilerError(at().range, `Expected ';' after imports statement, found '${at().value}'.`, filePath); @@ -182,7 +182,7 @@ export namespace syxparser { if (t.type === TokenType.Comma && at().type 
!== TokenType.Identifier) throw new CompilerError(t.range, 'Expected identifier after comma.', filePath); else if (t.type === TokenType.Comma && statement.formats.length === 0) throw new CompilerError(t.range, 'Can\'t start with comma.', filePath); else if (t.type === TokenType.Comma) { } - else if (t.type === TokenType.Identifier) statement.formats.push(t.value); + else if (t.type === TokenType.Identifier) statement.formats.push({type:NodeType.Identifier,modifiers:[],range:t.range,value:t.value}); else throw new CompilerError(t.range, `Expected comma or identifier, found '${t.value}'.`, filePath); } tokens.shift(); // skip CloseParen @@ -227,10 +227,11 @@ export namespace syxparser { * @returns Parsed node. */ export function parseGlobalStatement(token: Token, put: boolean) { - const stmt: GlobalStatement = { type: NodeType.Global, range: token.range, body: [], modifiers: [], name: '' }; + const stmt: GlobalStatement = { type: NodeType.Global, range: token.range, body: [], modifiers: [], name: {type:NodeType.Identifier,modifiers:[],range:defaultRange,value:''} }; if (at().type !== TokenType.Identifier) throw new CompilerError(at().range, `Expected identifier after function statement, found '${at().value}'.`, filePath); - stmt.name = tokens.shift().value; + const {range,value} = tokens.shift(); + stmt.name = {modifiers:[],type:NodeType.Identifier,range,value}; const braceExpr = parseExpression(false, false, false); if (!statementIsA(braceExpr, NodeType.Brace)) throw new CompilerError(braceExpr.range, 'Expected braces after global name.', filePath); @@ -493,7 +494,7 @@ export namespace syxparser { * @param {boolean} expectIdentifier Whether identifiers should be allowed. Unknown identifiers will stop the function with this value set to `false`, returning the identifier as a {@link StringExpression} otherwise. * @returns The parsed node. 
* @author efekos - * @version 1.0.9 + * @version 1.1.0 * @since 0.0.2-alpha */ export function parseExpression(put: boolean = true, statements: boolean = true, expectIdentifier: boolean = false): Node { @@ -514,7 +515,7 @@ export namespace syxparser { return parseStatement(); } else if (expectIdentifier) { const { value, range } = tokens.shift(); - return node({ type: NodeType.String, value, range, modifiers: [] }, put); + return node({ type: NodeType.Identifier, value, range, modifiers: [] }, put); } } @@ -546,7 +547,7 @@ export namespace sysparser { if (!statementIsA(ex, NodeType.String)) throw new CompilerError(ex.range, 'Expected file path after import statement.', filePath); if (at().type !== TokenType.Semicolon) throw new CompilerError(at().range, `Expected ';' after import statement, found '${at().value}'.`, filePath); tokens.shift(); - return node({ type: NodeType.Import, path: (ex as Expression).value, range: combineTwo(token, ex.range), modifiers: [] }, put); + return node({ type: NodeType.Import, path: ex, range: combineTwo(token, ex.range), modifiers: [] }, put); } //# diff --git a/src/compiler.ts b/src/compiler.ts index b7b3d3d..abf0687 100644 --- a/src/compiler.ts +++ b/src/compiler.ts @@ -65,7 +65,7 @@ export class SyntaxScriptCompiler { * Compiles one .syx file from the path given. * @param {string} file Path to a file to compile. 
* @author efekos - * @version 1.0.5 + * @version 1.0.6 * @since 0.0.2-alpha */ public compileSyx(file: string) { @@ -90,9 +90,9 @@ export class SyntaxScriptCompiler { const compileStmt = stmt as CompileStatement; compileStmt.formats.forEach(frmt => { - if (operatorStmtExport.outputGenerators[frmt] !== undefined) throw new CompilerError(compileStmt.range, `Duplicate file format at compile statement \'${frmt}\'`); + if (operatorStmtExport.outputGenerators[frmt.value] !== undefined) throw new CompilerError(compileStmt.range, `Duplicate file format at compile statement \'${frmt}\'`); - operatorStmtExport.outputGenerators[frmt] = (src) => { + operatorStmtExport.outputGenerators[frmt.value] = (src) => { let out = ''; compileStmt.body.forEach(e => { @@ -115,8 +115,8 @@ export class SyntaxScriptCompiler { const importStmt = stmt as ImportsStatement; importStmt.formats.forEach(frmt => { - if (operatorStmtExport.imports[frmt] !== undefined) throw new CompilerError(importStmt.range, `Duplicate file format at imports statement \'${frmt}\'`); - operatorStmtExport.imports[frmt] = importStmt.module; + if (operatorStmtExport.imports[frmt.value] !== undefined) throw new CompilerError(importStmt.range, `Duplicate file format at imports statement \'${frmt}\'`); + operatorStmtExport.imports[frmt.value] = importStmt.module.value; }); } else throw new CompilerError(stmt.range, `Unexpected \'${stmt.type}\' statement insdie operator statement.`); @@ -124,20 +124,20 @@ export class SyntaxScriptCompiler { out.push(operatorStmtExport); } else if (statementIsA(statement, NodeType.Function)) { - const statementExport: ExportedFunction = { type: ExportType.Function, args: statement.arguments.map(s => regexes[s]), name: statement.name, formatNames: {}, imports: {} }; + const statementExport: ExportedFunction = { type: ExportType.Function, args: statement.arguments.map(s => regexes[s.value]), name: statement.name.value, formatNames: {}, imports: {} }; statement.body.forEach(stmt => { if 
(statementIsA(stmt, NodeType.Compile)) { if (stmt.body[0].type !== NodeType.String) throw new CompilerError(stmt.range, 'Expected a string after compile statement parens'); stmt.formats.forEach(each => { - if (statementExport.formatNames[each] !== undefined) throw new CompilerError(stmt.range, `Encountered multiple compile statements for target language '${each}'`); - statementExport.formatNames[each] = stmt.body[0].value; + if (statementExport.formatNames[each.value] !== undefined) throw new CompilerError(stmt.range, `Encountered multiple compile statements for target language '${each}'`); + statementExport.formatNames[each.value] = stmt.body[0].value; }); } else if (statementIsA(stmt, NodeType.Imports)) { stmt.formats.forEach(each => { - if (statementExport.imports[each] !== undefined) throw new CompilerError(stmt.range, `Encountered multiple import statements for target language '${each}'`); - statementExport.imports[each] = stmt.module; + if (statementExport.imports[each.value] !== undefined) throw new CompilerError(stmt.range, `Encountered multiple import statements for target language '${each}'`); + statementExport.imports[each.value] = stmt.module.value; }); } @@ -146,7 +146,7 @@ export class SyntaxScriptCompiler { out.push(statementExport); } else if (statementIsA(statement, NodeType.Keyword)) { - out.push({ type: ExportType.Keyword, word: statement.word }); + out.push({ type: ExportType.Keyword, word: statement.word.value }); } else if (statementIsA(statement, NodeType.Global)) { //TODO } else throw new CompilerError(statement.range, `Unexpected \'${statement.type}\' statement after export statement.`, file); @@ -189,7 +189,7 @@ export class SyntaxScriptCompiler { if (stmt.type === NodeType.Import) { const importStmt = stmt as ImportStatement; - const pathToImport = join(dirname(file), importStmt.path.endsWith('.syx') ? importStmt.path : importStmt.path + '.syx'); + const pathToImport = join(dirname(file), importStmt.path.value.endsWith('.syx') ? 
importStmt.path.value : importStmt.path.value + '.syx'); if (!existsSync(pathToImport)) throw new CompilerError(importStmt.range, `File \'${pathToImport}\' imported from \'${file}\' does not exist.`); this.exportData[pathToImport].forEach(exported => { if (exported.type === ExportType.Operator) diff --git a/src/types.ts b/src/types.ts index 2823a13..3298165 100644 --- a/src/types.ts +++ b/src/types.ts @@ -354,7 +354,6 @@ export interface StringExpression extends Expression { */ export interface IdentifierExpression extends Expression { type: NodeType.Identifier; - name: string; } /** From d9099fb9cc45dcf07102617afd2f1681d1b9de28 Mon Sep 17 00:00:00 2001 From: efekos Date: Mon, 22 Apr 2024 23:40:32 +0300 Subject: [PATCH 43/47] fix errors in tests --- src/test/compiler.test.ts | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/src/test/compiler.test.ts b/src/test/compiler.test.ts index 5ceb332..427b56d 100644 --- a/src/test/compiler.test.ts +++ b/src/test/compiler.test.ts @@ -7,6 +7,10 @@ import { createSyntaxScriptDiagnosticReport } from '../diagnostic.js'; import { expect } from 'chai'; import { syxparser } from '../ast.js'; +function r(sc:number,ec:number):Range { + return {start:{line:1,character:sc},end:{line:1,character:ec}}; +} + describe('Compiler module', () => { function rangeExpectations(r: Range) { @@ -200,7 +204,7 @@ describe('Compiler module', () => { const tokens = tokenizeSyx('keyword ruleish;'); const ast = syxparser.parseTokens(tokens, 'TEST_FILE'); - const stmt: KeywordStatement = { type: NodeType.Keyword, modifiers: [], range: { end: { line: 1, character: 16 }, start: { line: 1, character: 1 } }, word: 'ruleish' }; + const stmt: KeywordStatement = { type: NodeType.Keyword, modifiers: [], range: { end: { line: 1, character: 16 }, start: { line: 1, character: 1 } }, word: {value:'ruleish',type:NodeType.Identifier,modifiers:[],range:{start:{line:1,character:9},end:{line:1,character:16}}} }; 
astTypeExpectations(ast); expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); @@ -211,7 +215,7 @@ describe('Compiler module', () => { const tokens = tokenizeSyx('rule \'function-value-return-enabled\': true;'); const ast = syxparser.parseTokens(tokens, 'TEST_FILE'); - const stmt: RuleStatement = { range: { start: { line: 1, character: 1 }, end: { line: 1, character: 43 } }, modifiers: [], rule: 'function-value-return-enabled', value: 'true', type: NodeType.Rule }; + const stmt: RuleStatement = { range: { start: { line: 1, character: 1 }, end: { line: 1, character: 43 } }, modifiers: [], rule: {range:r(6,38),type:NodeType.String,value:'function-value-return-enabled',modifiers:[]}, value: 'true', type: NodeType.Rule }; astTypeExpectations(ast); expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); @@ -222,7 +226,7 @@ describe('Compiler module', () => { const tokens = tokenizeSyx('compile(ts,js) \'test\';'); const ast = syxparser.parseTokens(tokens, 'TEST_FILE'); - const stmt: CompileStatement = { range: { start: { line: 1, character: 1 }, end: { line: 1, character: 22 } }, formats: ['ts', 'js'], type: NodeType.Compile, modifiers: [], body: [{ type: NodeType.String, modifiers: [], range: { start: { line: 1, character: 16 }, end: { line: 1, character: 22 } }, value: 'test' }] }; + const stmt: CompileStatement = { range: { start: { line: 1, character: 1 }, end: { line: 1, character: 22 } }, formats: [{modifiers:[],type:NodeType.Identifier,range:r(9,11),value:'ts'},{modifiers:[],type:NodeType.Identifier,range:r(12,14),value:'js'}], type: NodeType.Compile, modifiers: [], body: [{ type: NodeType.String, modifiers: [], range: { start: { line: 1, character: 16 }, end: { line: 1, character: 22 } }, value: 'test' }] }; astTypeExpectations(ast); expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); @@ -233,7 +237,7 @@ describe('Compiler module', () => { const tokens = tokenizeSyx('imports(ts,js) \'math\';'); const ast = syxparser.parseTokens(tokens, 
'TEST_FILE'); - const stmt: ImportsStatement = { range: { start: { line: 1, character: 1 }, end: { line: 1, character: 22 } }, formats: ['ts', 'js'], type: NodeType.Imports, modifiers: [], module: 'math' }; + const stmt: ImportsStatement = { range: { start: { line: 1, character: 1 }, end: { line: 1, character: 22 } }, formats: [{modifiers:[],type:NodeType.Identifier,range:r(9,11),value:'ts'},{modifiers:[],type:NodeType.Identifier,range:r(12,14),value:'js'}], type: NodeType.Imports, modifiers: [], module: {range:r(16,22),modifiers:[],type:NodeType.String,value:'math'} }; astTypeExpectations(ast); expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); @@ -244,7 +248,7 @@ describe('Compiler module', () => { const tokens = tokenizeSyx('global randomizer {}'); const ast = syxparser.parseTokens(tokens, 'TEST_FILE'); - const stmt: GlobalStatement = { range: { start: { line: 1, character: 1 }, end: { line: 1, character: 21 } }, name: 'randomizer', type: NodeType.Global, modifiers: [], body: [] }; + const stmt: GlobalStatement = { range: { start: { line: 1, character: 1 }, end: { line: 1, character: 21 } }, name: {type:NodeType.Identifier,modifiers:[],range:r(8,18),value:'randomizer'}, type: NodeType.Global, modifiers: [], body: [] }; astTypeExpectations(ast); expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); @@ -255,7 +259,7 @@ describe('Compiler module', () => { const tokens = tokenizeSyx('function randomizer {}'); const ast = syxparser.parseTokens(tokens, 'TEST_FILE'); - const stmt: FunctionStatement = { range: { start: { line: 1, character: 1 }, end: { line: 1, character: 29 } }, name: 'randomizer', type: NodeType.Function, modifiers: [], body: [], arguments: ['int'] }; + const stmt: FunctionStatement = { range: { start: { line: 1, character: 1 }, end: { line: 1, character: 29 } }, name: {type:NodeType.Identifier,modifiers:[],range:r(10,20),value:'randomizer'}, type: NodeType.Function, modifiers: [], body: [], arguments: 
[{modifiers:[],range:r(21,26),type:NodeType.PrimitiveType,value:'int'}] }; astTypeExpectations(ast); expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); @@ -266,7 +270,7 @@ describe('Compiler module', () => { const tokens = tokenizeSyx('import \'./math\';'); const ast = syxparser.parseTokens(tokens, 'TEST_FILE'); - const stmt: ImportStatement = { range: { start: { line: 1, character: 1 }, end: { line: 1, character: 16 } }, type: NodeType.Import, modifiers: [], path: './math' }; + const stmt: ImportStatement = { range: { start: { line: 1, character: 1 }, end: { line: 1, character: 16 } }, type: NodeType.Import, modifiers: [], path: {range:r(8,16),value:'./math',modifiers:[],type:NodeType.String} }; astTypeExpectations(ast); expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); @@ -277,7 +281,7 @@ describe('Compiler module', () => { const tokens = tokenizeSyx('export keyword ruleish;'); const ast = syxparser.parseTokens(tokens, 'TEST_FILE'); - const stmt: KeywordStatement = { type: NodeType.Keyword, modifiers: [{ range: { end: { line: 1, character: 7 }, start: { line: 1, character: 1 } }, type: TokenType.ExportKeyword, value: 'export' }], range: { end: { line: 1, character: 23 }, start: { line: 1, character: 1 } }, word: 'ruleish' }; + const stmt: KeywordStatement = { type: NodeType.Keyword, modifiers: [{ range: { end: { line: 1, character: 7 }, start: { line: 1, character: 1 } }, type: TokenType.ExportKeyword, value: 'export' }], range: { end: { line: 1, character: 23 }, start: { line: 1, character: 1 } }, word: {range:r(16,23),modifiers:[],type:NodeType.Identifier,value:'ruleish'} }; astTypeExpectations(ast); expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); From d03396d15f58d9c270ba450cf6e70f05195e57d2 Mon Sep 17 00:00:00 2001 From: efekos Date: Mon, 22 Apr 2024 23:41:07 +0300 Subject: [PATCH 44/47] fix errors in diagnostics --- src/diagnostic.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/diagnostic.ts 
b/src/diagnostic.ts index 4895722..97a9218 100644 --- a/src/diagnostic.ts +++ b/src/diagnostic.ts @@ -61,10 +61,10 @@ function ruleConflictCheck(ast: ProgramStatement, filePath: string): Diagnostic[ ast.body.forEach(stmt => { if (statementIsA(stmt, NodeType.Rule)) { - const dictRule = dictionary.Rules.find(r => r.name === stmt.rule); + const dictRule = dictionary.Rules.find(r => r.name === stmt.rule.value); ast.body.filter(r => statementIsA(r, NodeType.Rule)).filter(r => r.range !== stmt.range).map(r => r as RuleStatement).forEach(otherRules => { - if (dictRule.conflicts.includes(otherRules.rule)) items.push({ + if (dictRule.conflicts.includes(otherRules.rule.value)) items.push({ message: `Rule '${otherRules.rule}' conflicts with '${stmt.rule}', Both of them should not be defined.`, range: subRange(otherRules.range), severity: DiagnosticSeverity.Warning, @@ -148,7 +148,7 @@ function importedExistentCheck(ast: ProgramStatement, filePath: string): Diagnos ast.body.filter(r => statementIsA(r, NodeType.Import)).map(r => r as ImportStatement).forEach(stmt => { const filePathButPath = fileURLToPath(filePath); - const fullPath = join(filePathButPath, '../', stmt.path); + const fullPath = join(filePathButPath, '../', stmt.path.value); if (!existsSync(fullPath)) items.push({ message: `Can't find file '${fullPath}' imported from '${filePathButPath}'`, severity: DiagnosticSeverity.Error, From 0a17de8838c0d4e8ac7cd93b255a466628ff838f Mon Sep 17 00:00:00 2001 From: efekos Date: Mon, 22 Apr 2024 23:44:08 +0300 Subject: [PATCH 45/47] pass 'Compiler module should provide correct parsing for rule statements' --- src/ast.ts | 2 +- src/test/compiler.test.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/ast.ts b/src/ast.ts index 959561d..5a1221d 100644 --- a/src/ast.ts +++ b/src/ast.ts @@ -61,7 +61,7 @@ export namespace syxparser { if (rule.type === 'boolean') { const boolEx = parseExpression(false, false, true) as Expression; - if (!(statementIsA(boolEx, 
NodeType.String) && dictionary.RuleTypeRegexes.boolean.test(boolEx.value))) throw new CompilerError(boolEx.range, `Rule '${rule.name}' requires a boolean value, found '${boolEx.value}'.`, filePath); + if (!(statementIsA(boolEx, NodeType.Identifier) && dictionary.RuleTypeRegexes.boolean.test(boolEx.value))) throw new CompilerError(boolEx.range, `Rule '${rule.name}' requires a boolean value, found '${boolEx.value}'.`, filePath); if (at().type !== TokenType.Semicolon) throw new CompilerError(at().range, `Expected semicolon after rule statement, found '${at().value}'.`, filePath); tokens.shift(); diff --git a/src/test/compiler.test.ts b/src/test/compiler.test.ts index 427b56d..ac0c79b 100644 --- a/src/test/compiler.test.ts +++ b/src/test/compiler.test.ts @@ -215,7 +215,7 @@ describe('Compiler module', () => { const tokens = tokenizeSyx('rule \'function-value-return-enabled\': true;'); const ast = syxparser.parseTokens(tokens, 'TEST_FILE'); - const stmt: RuleStatement = { range: { start: { line: 1, character: 1 }, end: { line: 1, character: 43 } }, modifiers: [], rule: {range:r(6,38),type:NodeType.String,value:'function-value-return-enabled',modifiers:[]}, value: 'true', type: NodeType.Rule }; + const stmt: RuleStatement = { range: { start: { line: 1, character: 1 }, end: { line: 1, character: 43 } }, modifiers: [], rule: {range:r(6,37),type:NodeType.String,value:'function-value-return-enabled',modifiers:[]}, value: 'true', type: NodeType.Rule }; astTypeExpectations(ast); expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); From 92faf269d2699a1b87730e2fa44dd7e5932d6222 Mon Sep 17 00:00:00 2001 From: efekos Date: Mon, 22 Apr 2024 23:49:07 +0300 Subject: [PATCH 46/47] provide better ranges and fix messages --- src/diagnostic.ts | 34 +++++++++++++++++----------------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/src/diagnostic.ts b/src/diagnostic.ts index 97a9218..329fe2c 100644 --- a/src/diagnostic.ts +++ b/src/diagnostic.ts @@ -1,5 +1,5 @@ 
import { CodeAction, CodeActionKind, Diagnostic, DiagnosticSeverity, DocumentDiagnosticReportKind, FullDocumentDiagnosticReport, Range } from 'lsp-types'; -import { FunctionStatement, GlobalStatement, ImportStatement, KeywordStatement, NodeType, OperatorStatement, ProgramStatement, RuleStatement, Statement, TokenType, isCompilerError, statementIsA } from './types.js'; +import { FunctionStatement, GlobalStatement, IdentifierExpression, ImportStatement, KeywordStatement, NodeType, OperatorStatement, ProgramStatement, RuleStatement, Statement, TokenType, isCompilerError, statementIsA } from './types.js'; import { existsSync, readFileSync, statSync } from 'fs'; import { sysparser, syxparser } from './ast.js'; import { tokenizeSys, tokenizeSyx } from './lexer.js'; @@ -65,12 +65,12 @@ function ruleConflictCheck(ast: ProgramStatement, filePath: string): Diagnostic[ ast.body.filter(r => statementIsA(r, NodeType.Rule)).filter(r => r.range !== stmt.range).map(r => r as RuleStatement).forEach(otherRules => { if (dictRule.conflicts.includes(otherRules.rule.value)) items.push({ - message: `Rule '${otherRules.rule}' conflicts with '${stmt.rule}', Both of them should not be defined.`, - range: subRange(otherRules.range), + message: `Rule '${otherRules.rule.value}' conflicts with '${stmt.rule.value}', Both of them should not be defined.`, + range: subRange(otherRules.rule.range), severity: DiagnosticSeverity.Warning, data: [ { - title: `Remove ${stmt.rule} definition`, + title: `Remove ${stmt.rule.value} definition`, kind: CodeActionKind.QuickFix, edit: { changes: { @@ -84,7 +84,7 @@ function ruleConflictCheck(ast: ProgramStatement, filePath: string): Diagnostic[ } }, { - title: `Remove ${otherRules.rule} definition`, + title: `Remove ${otherRules.rule.value} definition`, kind: CodeActionKind.QuickFix, edit: { changes: { @@ -114,8 +114,8 @@ function sameRuleCheck(ast: ProgramStatement, filePath: string): Diagnostic[] { if (statementIsA(stmt, NodeType.Rule)) { ast.body.filter(r => 
statementIsA(r, NodeType.Rule)).filter(r => r.range !== stmt.range).map(r => r as RuleStatement).forEach(otherRules => { if (otherRules.rule === stmt.rule) items.push({ - message: `Rule '${stmt.rule}' is already defined.`, - range: subRange(stmt.range), + message: `Rule '${stmt.rule.value}' is already defined.`, + range: subRange(stmt.rule.range), severity: DiagnosticSeverity.Error, data: [ { @@ -152,7 +152,7 @@ function importedExistentCheck(ast: ProgramStatement, filePath: string): Diagnos if (!existsSync(fullPath)) items.push({ message: `Can't find file '${fullPath}' imported from '${filePathButPath}'`, severity: DiagnosticSeverity.Error, - range: subRange(stmt.range), + range: subRange(stmt.path.range), data: [ { title: 'Remove this import statement', @@ -174,7 +174,7 @@ function importedExistentCheck(ast: ProgramStatement, filePath: string): Diagnos if (!status.isFile()) items.push({ message: `'${fullPath}' imported from '${filePathButPath}' doesn't seem to be a file.`, severity: DiagnosticSeverity.Error, - range: subRange(stmt.range), + range: subRange(stmt.path.range), data: [ { title: 'Remove this import statement', @@ -193,7 +193,7 @@ function importedExistentCheck(ast: ProgramStatement, filePath: string): Diagnos if (!fullPath.endsWith('.syx')) items.push({ message: `'${fullPath}' imported from '${filePathButPath}' cannot be imported.`, severity: DiagnosticSeverity.Error, - range: subRange(stmt.range), + range: subRange(stmt.path.range), data: [ { title: 'Remove this import statement', @@ -262,7 +262,7 @@ function exportableCheck(statements: Statement[], filePath: string): Diagnostic[ if (stmt.modifiers.some(t => t.type === TokenType.ExportKeyword) && !dictionary.ExportableNodeTypes.includes(stmt.type)) items.push({ message: 'This statement cannot be exported.', - range: subRange(stmt.range), + range: subRange(stmt.modifiers.find(r=>r.type===TokenType.ExportKeyword).range), severity: DiagnosticSeverity.Error, data: [ { @@ -302,14 +302,14 @@ function 
sameNameCheck(statements: Statement[], filePath: string): Diagnostic[] if (statementIsA(r, NodeType.Keyword)) return r as KeywordStatement; }).forEach(stmt => { - const n = stmt[statementIsA(stmt, NodeType.Keyword) ? 'word' : 'name']; - - if (encounteredNames.includes(n)) items.push({ - message: `Name '${n}' is already seen before.`, - range: subRange(stmt.range), + const n:IdentifierExpression = stmt[statementIsA(stmt, NodeType.Keyword) ? 'word' : 'name']; + + if (encounteredNames.includes(n.value)) items.push({ + message: `Name '${n.value}' is already seen before.`, + range: subRange(n.range), severity: DiagnosticSeverity.Error }); - else encounteredNames.push(n); + else encounteredNames.push(n.value); if (statementIsA(stmt, NodeType.Global)) c(stmt.body); }); From f4e05b4fb89eea0129694ca1d762b88d0e52cc1e Mon Sep 17 00:00:00 2001 From: efekos Date: Mon, 22 Apr 2024 23:50:00 +0300 Subject: [PATCH 47/47] update readme for 0.0.2-alpha --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 57be9ea..b25a7b7 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# @syntaxs/compiler `v0.0.1-alpha` +# @syntaxs/compiler `v0.0.2-alpha` ![Stars](https://badgen.net/github/stars/syntax-script/compiler) ![Releases](https://badgen.net/github/release/syntax-script/compiler) ![Version](https://badgen.net/npm/v/@syntaxs/compiler) pFad - Phonifier reborn
