diff --git a/.npmignore b/.npmignore index b4e4e00..2cb1551 100644 --- a/.npmignore +++ b/.npmignore @@ -1,6 +1,7 @@ # Folders /src /package +/dist/test # Dev-only or unrelated files .gitignore diff --git a/README.md b/README.md index 57be9ea..b25a7b7 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# @syntaxs/compiler `v0.0.1-alpha` +# @syntaxs/compiler `v0.0.2-alpha` ![Stars](https://badgen.net/github/stars/syntax-script/compiler) ![Releases](https://badgen.net/github/release/syntax-script/compiler) ![Version](https://badgen.net/npm/v/@syntaxs/compiler) diff --git a/package-lock.json b/package-lock.json index 76d750e..24e6b52 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,21 +1,24 @@ { "name": "@syntaxs/compiler", - "version": "0.0.1-alpha", + "version": "0.0.2-alpha", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@syntaxs/compiler", - "version": "0.0.1-alpha", + "version": "0.0.2-alpha", "license": "MIT", "dependencies": { "js-levenshtein": "^1.1.6" }, "devDependencies": { + "@efekos/es-test": "^1.0.5", + "@types/chai": "^4.3.14", "@types/js-levenshtein": "^1.1.3", - "@types/node": "^20.12.5", - "@typescript-eslint/eslint-plugin": "^7.4.0", - "@typescript-eslint/parser": "^7.4.0", + "@types/node": "^20.12.7", + "@typescript-eslint/eslint-plugin": "^7.7.0", + "@typescript-eslint/parser": "^7.7.0", + "chai": "^5.1.0", "eslint": "^8.57.0", "eslint-plugin-jsdoc": "^48.2.3", "lsp-types": "^3.17.0-f3" @@ -30,6 +33,31 @@ "node": ">=0.10.0" } }, + "node_modules/@efekos/es-test": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@efekos/es-test/-/es-test-1.0.5.tgz", + "integrity": "sha512-7aT2Q/9Cs7rvPUxOzFvrGcRtr1Wrnf5eMpwzEf2u3f/zUPUV/sdoCf7OIV1yHfck1/tvfq5ix2ytJI18wXz4eg==", + "dev": true, + "dependencies": { + "chalk": "^5.3.0", + "log-update": "^6.0.0" + }, + "bin": { + "estest": "bin/index.js" + } + }, + "node_modules/@efekos/es-test/node_modules/chalk": { + "version": "5.3.0", + "resolved": 
"https://registry.npmjs.org/chalk/-/chalk-5.3.0.tgz", + "integrity": "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==", + "dev": true, + "engines": { + "node": "^12.17.0 || ^14.13 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, "node_modules/@es-joy/jsdoccomment": { "version": "0.42.0", "resolved": "https://registry.npmjs.org/@es-joy/jsdoccomment/-/jsdoccomment-0.42.0.tgz", @@ -212,6 +240,12 @@ "node": ">= 8" } }, + "node_modules/@types/chai": { + "version": "4.3.14", + "resolved": "https://registry.npmjs.org/@types/chai/-/chai-4.3.14.tgz", + "integrity": "sha512-Wj71sXE4Q4AkGdG9Tvq1u/fquNz9EdG4LIJMwVVII7ashjD/8cf8fyIfJAjRr6YcsXnSE8cOGQPq1gqeR8z+3w==", + "dev": true + }, "node_modules/@types/js-levenshtein": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/@types/js-levenshtein/-/js-levenshtein-1.1.3.tgz", @@ -240,16 +274,16 @@ "dev": true }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "7.6.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.6.0.tgz", - "integrity": "sha512-gKmTNwZnblUdnTIJu3e9kmeRRzV2j1a/LUO27KNNAnIC5zjy1aSvXSRp4rVNlmAoHlQ7HzX42NbKpcSr4jF80A==", + "version": "7.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.7.0.tgz", + "integrity": "sha512-GJWR0YnfrKnsRoluVO3PRb9r5aMZriiMMM/RHj5nnTrBy1/wIgk76XCtCKcnXGjpZQJQRFtGV9/0JJ6n30uwpQ==", "dev": true, "dependencies": { "@eslint-community/regexpp": "^4.10.0", - "@typescript-eslint/scope-manager": "7.6.0", - "@typescript-eslint/type-utils": "7.6.0", - "@typescript-eslint/utils": "7.6.0", - "@typescript-eslint/visitor-keys": "7.6.0", + "@typescript-eslint/scope-manager": "7.7.0", + "@typescript-eslint/type-utils": "7.7.0", + "@typescript-eslint/utils": "7.7.0", + "@typescript-eslint/visitor-keys": "7.7.0", "debug": "^4.3.4", "graphemer": "^1.4.0", "ignore": "^5.3.1", @@ -275,15 +309,15 @@ } 
}, "node_modules/@typescript-eslint/parser": { - "version": "7.6.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.6.0.tgz", - "integrity": "sha512-usPMPHcwX3ZoPWnBnhhorc14NJw9J4HpSXQX4urF2TPKG0au0XhJoZyX62fmvdHONUkmyUe74Hzm1//XA+BoYg==", + "version": "7.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.7.0.tgz", + "integrity": "sha512-fNcDm3wSwVM8QYL4HKVBggdIPAy9Q41vcvC/GtDobw3c4ndVT3K6cqudUmjHPw8EAp4ufax0o58/xvWaP2FmTg==", "dev": true, "dependencies": { - "@typescript-eslint/scope-manager": "7.6.0", - "@typescript-eslint/types": "7.6.0", - "@typescript-eslint/typescript-estree": "7.6.0", - "@typescript-eslint/visitor-keys": "7.6.0", + "@typescript-eslint/scope-manager": "7.7.0", + "@typescript-eslint/types": "7.7.0", + "@typescript-eslint/typescript-estree": "7.7.0", + "@typescript-eslint/visitor-keys": "7.7.0", "debug": "^4.3.4" }, "engines": { @@ -303,13 +337,13 @@ } }, "node_modules/@typescript-eslint/scope-manager": { - "version": "7.6.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.6.0.tgz", - "integrity": "sha512-ngttyfExA5PsHSx0rdFgnADMYQi+Zkeiv4/ZxGYUWd0nLs63Ha0ksmp8VMxAIC0wtCFxMos7Lt3PszJssG/E6w==", + "version": "7.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.7.0.tgz", + "integrity": "sha512-/8INDn0YLInbe9Wt7dK4cXLDYp0fNHP5xKLHvZl3mOT5X17rK/YShXaiNmorl+/U4VKCVIjJnx4Ri5b0y+HClw==", "dev": true, "dependencies": { - "@typescript-eslint/types": "7.6.0", - "@typescript-eslint/visitor-keys": "7.6.0" + "@typescript-eslint/types": "7.7.0", + "@typescript-eslint/visitor-keys": "7.7.0" }, "engines": { "node": "^18.18.0 || >=20.0.0" @@ -320,13 +354,13 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "7.6.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.6.0.tgz", - "integrity": 
"sha512-NxAfqAPNLG6LTmy7uZgpK8KcuiS2NZD/HlThPXQRGwz6u7MDBWRVliEEl1Gj6U7++kVJTpehkhZzCJLMK66Scw==", + "version": "7.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.7.0.tgz", + "integrity": "sha512-bOp3ejoRYrhAlnT/bozNQi3nio9tIgv3U5C0mVDdZC7cpcQEDZXvq8inrHYghLVwuNABRqrMW5tzAv88Vy77Sg==", "dev": true, "dependencies": { - "@typescript-eslint/typescript-estree": "7.6.0", - "@typescript-eslint/utils": "7.6.0", + "@typescript-eslint/typescript-estree": "7.7.0", + "@typescript-eslint/utils": "7.7.0", "debug": "^4.3.4", "ts-api-utils": "^1.3.0" }, @@ -347,9 +381,9 @@ } }, "node_modules/@typescript-eslint/types": { - "version": "7.6.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.6.0.tgz", - "integrity": "sha512-h02rYQn8J+MureCvHVVzhl69/GAfQGPQZmOMjG1KfCl7o3HtMSlPaPUAPu6lLctXI5ySRGIYk94clD/AUMCUgQ==", + "version": "7.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.7.0.tgz", + "integrity": "sha512-G01YPZ1Bd2hn+KPpIbrAhEWOn5lQBrjxkzHkWvP6NucMXFtfXoevK82hzQdpfuQYuhkvFDeQYbzXCjR1z9Z03w==", "dev": true, "engines": { "node": "^18.18.0 || >=20.0.0" @@ -360,13 +394,13 @@ } }, "node_modules/@typescript-eslint/typescript-estree": { - "version": "7.6.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.6.0.tgz", - "integrity": "sha512-+7Y/GP9VuYibecrCQWSKgl3GvUM5cILRttpWtnAu8GNL9j11e4tbuGZmZjJ8ejnKYyBRb2ddGQ3rEFCq3QjMJw==", + "version": "7.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.7.0.tgz", + "integrity": "sha512-8p71HQPE6CbxIBy2kWHqM1KGrC07pk6RJn40n0DSc6bMOBBREZxSDJ+BmRzc8B5OdaMh1ty3mkuWRg4sCFiDQQ==", "dev": true, "dependencies": { - "@typescript-eslint/types": "7.6.0", - "@typescript-eslint/visitor-keys": "7.6.0", + "@typescript-eslint/types": "7.7.0", + "@typescript-eslint/visitor-keys": "7.7.0", "debug": "^4.3.4", "globby": "^11.1.0", "is-glob": "^4.0.3", 
@@ -388,17 +422,17 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "7.6.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.6.0.tgz", - "integrity": "sha512-x54gaSsRRI+Nwz59TXpCsr6harB98qjXYzsRxGqvA5Ue3kQH+FxS7FYU81g/omn22ML2pZJkisy6Q+ElK8pBCA==", + "version": "7.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.7.0.tgz", + "integrity": "sha512-LKGAXMPQs8U/zMRFXDZOzmMKgFv3COlxUQ+2NMPhbqgVm6R1w+nU1i4836Pmxu9jZAuIeyySNrN/6Rc657ggig==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "@types/json-schema": "^7.0.15", "@types/semver": "^7.5.8", - "@typescript-eslint/scope-manager": "7.6.0", - "@typescript-eslint/types": "7.6.0", - "@typescript-eslint/typescript-estree": "7.6.0", + "@typescript-eslint/scope-manager": "7.7.0", + "@typescript-eslint/types": "7.7.0", + "@typescript-eslint/typescript-estree": "7.7.0", "semver": "^7.6.0" }, "engines": { @@ -413,12 +447,12 @@ } }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "7.6.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.6.0.tgz", - "integrity": "sha512-4eLB7t+LlNUmXzfOu1VAIAdkjbu5xNSerURS9X/S5TUKWFRpXRQZbmtPqgKmYx8bj3J0irtQXSiWAOY82v+cgw==", + "version": "7.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.7.0.tgz", + "integrity": "sha512-h0WHOj8MhdhY8YWkzIF30R379y0NqyOHExI9N9KCzvmu05EgG4FumeYa3ccfKUSphyWkWQE1ybVrgz/Pbam6YA==", "dev": true, "dependencies": { - "@typescript-eslint/types": "7.6.0", + "@typescript-eslint/types": "7.7.0", "eslint-visitor-keys": "^3.4.3" }, "engines": { @@ -472,6 +506,18 @@ "url": "https://github.com/sponsors/epoberezkin" } }, + "node_modules/ansi-escapes": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-6.2.1.tgz", + "integrity": "sha512-4nJ3yixlEthEJ9Rk4vPcdBRkZvQZlYyu8j4/Mqz5sgIkddmEnH2Yj2ZrnP9S3tQOvSNRUIgVNF/1yPpRAGNRig==", + 
"dev": true, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/ansi-regex": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", @@ -520,6 +566,15 @@ "node": ">=8" } }, + "node_modules/assertion-error": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-2.0.1.tgz", + "integrity": "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==", + "dev": true, + "engines": { + "node": ">=12" + } + }, "node_modules/balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", @@ -568,6 +623,22 @@ "node": ">=6" } }, + "node_modules/chai": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/chai/-/chai-5.1.0.tgz", + "integrity": "sha512-kDZ7MZyM6Q1DhR9jy7dalKohXQ2yrlXkk59CR52aRKxJrobmlBNqnFQxX9xOX8w+4mz8SYlKJa/7D7ddltFXCw==", + "dev": true, + "dependencies": { + "assertion-error": "^2.0.1", + "check-error": "^2.0.0", + "deep-eql": "^5.0.1", + "loupe": "^3.1.0", + "pathval": "^2.0.0" + }, + "engines": { + "node": ">=12" + } + }, "node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -584,6 +655,30 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, + "node_modules/check-error": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-2.0.0.tgz", + "integrity": "sha512-tjLAOBHKVxtPoHe/SA7kNOMvhCRdCJ3vETdeY0RuAc9popf+hyaSV6ZEg9hr4cpWF7jmo/JSWEnLDrnijS9Tog==", + "dev": true, + "engines": { + "node": ">= 16" + } + }, + "node_modules/cli-cursor": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-4.0.0.tgz", + "integrity": "sha512-VGtlMu3x/4DOtIUwEkRezxUZ2lBacNJCHash0N0WeZDBS+7Ux1dm3XWAgWYxLJFMMdOeXMHXorshEFhbMSGelg==", + "dev": true, + "dependencies": { + 
"restore-cursor": "^4.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", @@ -648,6 +743,15 @@ } } }, + "node_modules/deep-eql": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-5.0.1.tgz", + "integrity": "sha512-nwQCf6ne2gez3o1MxWifqkciwt0zhl0LO1/UwVu4uMBuPmflWM4oQ70XMqHqnBJA+nhzncaqL9HVL6KkHJ28lw==", + "dev": true, + "engines": { + "node": ">=6" + } + }, "node_modules/deep-is": { "version": "0.1.4", "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", @@ -678,6 +782,12 @@ "node": ">=6.0.0" } }, + "node_modules/emoji-regex": { + "version": "10.3.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.3.0.tgz", + "integrity": "sha512-QpLs9D9v9kArv4lfDEgg1X/gN5XLnf/A6l9cs8SPZLRZR3ZkY9+kwIQTxm+fsSej5UMYGE8fdoaZVIBlqG0XTw==", + "dev": true + }, "node_modules/escape-string-regexp": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", @@ -998,6 +1108,27 @@ "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", "dev": true }, + "node_modules/get-east-asian-width": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.2.0.tgz", + "integrity": "sha512-2nk+7SIVb14QrgXFHcm84tD4bKQz0RxPuMT8Ag5KPOq7J5fEmAg0UbXdTOSHqNuHSU28k55qnceesxXRZGzKWA==", + "dev": true, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/get-func-name": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.2.tgz", + "integrity": 
"sha512-8vXOvuE167CtIc3OyItco7N/dpRtBbYOsPsXCz7X/PMnlGjYjSGuZJgM1Y7mmew7BKf9BqvLX2tnOVy1BBUsxQ==", + "dev": true, + "engines": { + "node": "*" + } + }, "node_modules/glob": { "version": "7.2.3", "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", @@ -1176,6 +1307,21 @@ "node": ">=0.10.0" } }, + "node_modules/is-fullwidth-code-point": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-5.0.0.tgz", + "integrity": "sha512-OVa3u9kkBbw7b8Xw5F9P+D/T9X+Z4+JruYVNapTjPYZYUznQ5YfWeFkOj606XYYW8yugTfC8Pj0hYqvi4ryAhA==", + "dev": true, + "dependencies": { + "get-east-asian-width": "^1.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/is-glob": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", @@ -1302,6 +1448,61 @@ "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", "dev": true }, + "node_modules/log-update": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/log-update/-/log-update-6.0.0.tgz", + "integrity": "sha512-niTvB4gqvtof056rRIrTZvjNYE4rCUzO6X/X+kYjd7WFxXeJ0NwEFnRxX6ehkvv3jTwrXnNdtAak5XYZuIyPFw==", + "dev": true, + "dependencies": { + "ansi-escapes": "^6.2.0", + "cli-cursor": "^4.0.0", + "slice-ansi": "^7.0.0", + "strip-ansi": "^7.1.0", + "wrap-ansi": "^9.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/log-update/node_modules/ansi-regex": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + 
"node_modules/log-update/node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/loupe": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-3.1.0.tgz", + "integrity": "sha512-qKl+FrLXUhFuHUoDJG7f8P8gEMHq9NFS0c6ghXG1J0rldmZFQZoNVv/vyirE9qwCIhWZDsvEFd1sbFu3GvRQFg==", + "dev": true, + "dependencies": { + "get-func-name": "^2.0.1" + } + }, "node_modules/lru-cache": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", @@ -1342,6 +1543,15 @@ "node": ">=8.6" } }, + "node_modules/mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, "node_modules/minimatch": { "version": "9.0.4", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.4.tgz", @@ -1378,6 +1588,21 @@ "wrappy": "1" } }, + "node_modules/onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/optionator": { "version": "0.9.3", "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.3.tgz", @@ -1473,6 +1698,15 @@ "node": ">=8" } }, + "node_modules/pathval": { + "version": 
"2.0.0", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-2.0.0.tgz", + "integrity": "sha512-vE7JKRyES09KiunauX7nd2Q9/L7lhok4smP9RZTDeD4MVs72Dp2qNFVz39Nz5a0FVEW0BJR6C0DYrq6unoziZA==", + "dev": true, + "engines": { + "node": ">= 14.16" + } + }, "node_modules/picomatch": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", @@ -1532,6 +1766,22 @@ "node": ">=4" } }, + "node_modules/restore-cursor": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-4.0.0.tgz", + "integrity": "sha512-I9fPXU9geO9bHOt9pHHOhOkYerIMsmVaWB0rA2AI9ERh/+x/i7MV5HKBNrg+ljO5eoPVgCcnFuRjJ9uH6I/3eg==", + "dev": true, + "dependencies": { + "onetime": "^5.1.0", + "signal-exit": "^3.0.2" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/reusify": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", @@ -1616,6 +1866,12 @@ "node": ">=8" } }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true + }, "node_modules/slash": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", @@ -1625,6 +1881,34 @@ "node": ">=8" } }, + "node_modules/slice-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-7.1.0.tgz", + "integrity": "sha512-bSiSngZ/jWeX93BqeIAbImyTbEihizcwNjFoRUIY/T1wWQsfsm2Vw1agPKylXvQTU7iASGdHhyqRlqQzfz+Htg==", + "dev": true, + "dependencies": { + "ansi-styles": "^6.2.1", + "is-fullwidth-code-point": "^5.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/chalk/slice-ansi?sponsor=1" + } + }, + 
"node_modules/slice-ansi/node_modules/ansi-styles": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, "node_modules/spdx-exceptions": { "version": "2.5.0", "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz", @@ -1647,6 +1931,50 @@ "integrity": "sha512-sh8PWc/ftMqAAdFiBu6Fy6JUOYjqDJBJvIhpfDMyHrr0Rbp5liZqd4TjtQ/RgfLjKFZb+LMx5hpml5qOWy0qvg==", "dev": true }, + "node_modules/string-width": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-7.1.0.tgz", + "integrity": "sha512-SEIJCWiX7Kg4c129n48aDRwLbFb2LJmXXFrWBG4NGaRtMQ3myKPKbwrD1BKqQn74oCoNMBVrfDEr5M9YxCsrkw==", + "dev": true, + "dependencies": { + "emoji-regex": "^10.3.0", + "get-east-asian-width": "^1.0.0", + "strip-ansi": "^7.1.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/string-width/node_modules/ansi-regex": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/string-width/node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": 
"https://github.com/chalk/strip-ansi?sponsor=1" + } + }, "node_modules/strip-ansi": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", @@ -1781,6 +2109,62 @@ "node": ">= 8" } }, + "node_modules/wrap-ansi": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.0.tgz", + "integrity": "sha512-G8ura3S+3Z2G+mkgNRq8dqaFZAuxfsxpBB8OCTGRTCtp+l/v9nbFNmCUP1BZMts3G1142MsZfn6eeUKrr4PD1Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^6.2.1", + "string-width": "^7.0.0", + "strip-ansi": "^7.1.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi/node_modules/ansi-regex": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/wrap-ansi/node_modules/ansi-styles": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/wrap-ansi/node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, "node_modules/wrappy": { "version": 
"1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", diff --git a/package.json b/package.json index bb98339..3f37b19 100644 --- a/package.json +++ b/package.json @@ -1,15 +1,19 @@ { "name": "@syntaxs/compiler", - "version": "0.0.1-alpha", + "version": "0.0.2-alpha", "description": "Compiler used to compile Syntax Script projects.", "main": "dist/index.js", "types": "dist/index.d.ts", + "type": "module", "scripts": { "lint": "eslint ./src/**/*.ts --fix", "prebuild": "npm run lint && node distDeletor.mjs", "build": "tsc", "postbuild": "cls && echo Builded", - "prepack": "npm run build" + "prepack": "npm run build", + "test": "estest", + "pretest": "tsc", + "postpack": "cd ..&&cd ..&&cd .\\LanguageServer\\syntax-script\\server\\&&npm update @syntaxs/compiler" }, "repository": { "type": "git", @@ -35,10 +39,13 @@ }, "homepage": "https://github.com/syntaxs/compiler#readme", "devDependencies": { + "@efekos/es-test": "^1.0.5", + "@types/chai": "^4.3.14", "@types/js-levenshtein": "^1.1.3", - "@types/node": "^20.12.5", - "@typescript-eslint/eslint-plugin": "^7.4.0", - "@typescript-eslint/parser": "^7.4.0", + "@types/node": "^20.12.7", + "@typescript-eslint/eslint-plugin": "^7.7.0", + "@typescript-eslint/parser": "^7.7.0", + "chai": "^5.1.0", "eslint": "^8.57.0", "eslint-plugin-jsdoc": "^48.2.3", "lsp-types": "^3.17.0-f3" @@ -46,4 +53,4 @@ "dependencies": { "js-levenshtein": "^1.1.6" } -} +} \ No newline at end of file diff --git a/src/ast.ts b/src/ast.ts index 23ced50..5a1221d 100644 --- a/src/ast.ts +++ b/src/ast.ts @@ -1,4 +1,4 @@ -import { BraceExpression, CompileStatement, CompilerError, ExportStatement, Expression, FunctionStatement, ImportsStatement, KeywordStatement, Node, NodeType, OperatorStatement, ParenExpression, PrimitiveTypeExpression, ProgramStatement, SquareExpression, StringExpression, Token, TokenType, VariableExpression } from './types.js'; +import { BraceExpression, CompileStatement, CompilerError, Expression, FunctionStatement, 
GlobalStatement, ImportsStatement, KeywordStatement, Node, NodeType, OperatorStatement, ParenExpression, ProgramStatement, SquareExpression, StringExpression, Token, TokenType, VariableExpression, statementIsA } from './types.js'; import { CodeAction, CodeActionKind, Range } from 'lsp-types'; import { dictionary } from './dictionary/dictionary.js'; import levenshtein from 'js-levenshtein'; @@ -7,9 +7,10 @@ import { subRange } from './diagnostic.js'; const caf = { mk: (keyword: string, program: ProgramStatement, range: Range, filePath: string): CodeAction[] => { const existingKeywords = program.body - .filter(r => r.type === NodeType.Keyword || (r.type === NodeType.Export && (r as ExportStatement).body.type === NodeType.Keyword)) - .map(stmt => stmt.type === NodeType.Export ? ((stmt as ExportStatement).body as KeywordStatement).word : (stmt as KeywordStatement).word) - .filter(a => levenshtein(a, keyword)); + .filter(r => statementIsA(r, NodeType.Keyword)) + .map(r => r as KeywordStatement) + .map(r => r.word) + .sort(a => levenshtein(keyword, a.value)); return existingKeywords.map(word => { return { @@ -19,7 +20,7 @@ const caf = { changes: { [filePath]: [{ range: subRange(range), - newText: word + newText: word.value }] } } @@ -40,10 +41,10 @@ export namespace syxparser { */ export function parseImportStatement(put: boolean, token: Token): Node { const ex = parseExpression(false, false); - if (ex.type !== NodeType.String) throw new CompilerError(ex.range, 'Expected file path after import statement.', filePath); + if (!statementIsA(ex, NodeType.String)) throw new CompilerError(ex.range, 'Expected file path after import statement.', filePath); if (at().type !== TokenType.Semicolon) throw new CompilerError(at().range, `Expected ';' after import statement, found '${at().value}'.`, filePath); tokens.shift(); - return node({ type: NodeType.Import, path: (ex as Expression).value, range: combineTwo(token, ex.range) }, put); + return node({ type: NodeType.Import, path: ex, 
range: combineTwo(token, ex.range), modifiers: [] }, put); } /** @@ -52,31 +53,27 @@ export namespace syxparser { */ export function parseRuleStatement(token: Token, put: boolean): Node { const ruleExpr = parseExpression(false, false) as Expression; - if (ruleExpr.type !== NodeType.String) { throw new CompilerError(ruleExpr.range, `Expected rule name as string after 'rule', found ${ruleExpr.value}.`, filePath); } + if (!statementIsA(ruleExpr, NodeType.String)) throw new CompilerError(ruleExpr.range, `Expected rule name as string after 'rule', found ${ruleExpr.value}.`, filePath); if (at().value !== ':') throw new CompilerError(at().range, `Expected \':\' after rule name, found ${at().value}.`, filePath); tokens.shift(); - if (!dictionary.Rules.find(r => r.name === ruleExpr.value)) throw new CompilerError(ruleExpr.range, `Unknown rule '${ruleExpr.value}'.`, filePath); + if (!dictionary.Rules.some(r => r.name === ruleExpr.value)) throw new CompilerError(ruleExpr.range, `Unknown rule '${ruleExpr.value}'.`, filePath); const rule = dictionary.Rules.find(r => r.name === ruleExpr.value); if (rule.type === 'boolean') { const boolEx = parseExpression(false, false, true) as Expression; - if (!(boolEx.type === NodeType.String && dictionary.RuleTypeRegexes.boolean.test(boolEx.value))) { throw new CompilerError(boolEx.range, `Rule '${rule.name}' requires a boolean value, found '${boolEx.value}'.`, filePath); } - + if (!(statementIsA(boolEx, NodeType.Identifier) && dictionary.RuleTypeRegexes.boolean.test(boolEx.value))) throw new CompilerError(boolEx.range, `Rule '${rule.name}' requires a boolean value, found '${boolEx.value}'.`, filePath); if (at().type !== TokenType.Semicolon) throw new CompilerError(at().range, `Expected semicolon after rule statement, found '${at().value}'.`, filePath); - return node({ type: NodeType.Rule, rule: ruleExpr.value, value: boolEx.value, range: combineTwo(token, tokens.shift()) }, put); + tokens.shift(); + return node({ type: NodeType.Rule, rule: 
ruleExpr, value: boolEx.value, range: combineTwo(token, boolEx.range), modifiers: [] }, put); } else if (rule.type === 'keyword') { - const keyEx = parseExpression(false, false, true) as Expression; - if (!( - keyEx.type === NodeType.String && - program.body.some(s => - (s.type === NodeType.Keyword && (s as KeywordStatement).word === keyEx.value) || - (s.type === NodeType.Export && (s as ExportStatement).body.type === NodeType.Keyword && ((s as ExportStatement).body as KeywordStatement).word === keyEx.value) - ) - )) throw new CompilerError(keyEx.range, `Can't find keyword '${keyEx.value}'.`, filePath, caf.mk(keyEx.value, program, keyEx.range, filePath)); + const keyEx = parseExpression(false, false, true); + if (!statementIsA(keyEx, NodeType.String)) throw new CompilerError(keyEx.range, 'Excepted keyword.', filePath); + if (!program.body.some(s => statementIsA(s, NodeType.Keyword) && s.word.value === keyEx.value)) throw new CompilerError(keyEx.range, `Can't find keyword '${keyEx.value}'.`, filePath, caf.mk(keyEx.value, program, keyEx.range, filePath)); if (at().type !== TokenType.Semicolon) throw new CompilerError(at().range, `Expected semicolon after rule statement, found ${at().value}.`, filePath); - return node({ type: NodeType.Rule, rule: ruleExpr.value, value: keyEx.value, range: combineTwo(token, tokens.shift()) }, put); + tokens.shift(); + return node({ type: NodeType.Rule, rule: ruleExpr, value: keyEx.value, range: combineTwo(token, keyEx.range), modifiers: [] }, put); } } @@ -85,11 +82,13 @@ export namespace syxparser { * @returns Parsed node. 
*/ export function parseKeywordStatement(put: boolean, token: Token): Node { - const ex = parseExpression(false, false, true) as Expression; - if (ex.type !== NodeType.String) throw new CompilerError(ex.range, `Expected identifier after keyword statement, found '${ex.value}'.`, filePath); + const ex = parseExpression(false, false, true); + if (!statementIsA(ex, NodeType.Identifier)) throw new CompilerError(ex.range, 'Expected identifier after keyword statement.', filePath); + if (at().type !== TokenType.Semicolon) throw new CompilerError(at().range, `Expected ';' after statement, found '${at().value}'.`, filePath); tokens.shift(); // skip semicolon - return node({ type: NodeType.Keyword, word: ex.value, range: combineTwo(token, ex.range) }, put); + + return node({ type: NodeType.Keyword, word: ex, range: combineTwo(token, ex.range), modifiers: [] }, put); } /** @@ -98,8 +97,9 @@ export namespace syxparser { */ export function parseExportStatement(token: Token, put: boolean): Node { const stmt = parseStatement(false); - if (!exportable.includes(stmt.type)) throw new CompilerError(stmt.range, 'Expected exportable statement after \'export\'.', filePath); - return node({ type: NodeType.Export, body: stmt, range: combineTwo(token, stmt.range) }, put); + stmt.range = combineTwo(token, stmt.range); + stmt.modifiers.push(token); + return node(stmt, put); } /** @@ -107,20 +107,20 @@ export namespace syxparser { * @returns Parsed node. 
*/ export function parseFunctionStatement(token: Token, put: boolean): Node { - const statement: FunctionStatement = { type: NodeType.Function, arguments: [], name: '', body: [], range: defaultRange }; + const statement: FunctionStatement = { type: NodeType.Function, arguments: [], name: {type:NodeType.Identifier,modifiers:[],value:'',range:defaultRange}, body: [], range: defaultRange, modifiers: [] }; if (at().type !== TokenType.Identifier) throw new CompilerError(at().range, `Expected identifier after function statement, found '${at().value}'.`, filePath); - statement.name = at().value; + statement.name = {type:NodeType.Identifier,modifiers:[],range:at().range,value:at().value}; tokens.shift(); while (at().type !== TokenType.OpenBrace) { const expr = parseExpression(false, false) as Expression; - if (expr.type !== NodeType.PrimitiveType) throw new CompilerError(expr.range, `Expected argument types after function name, found ${expr.value}.`, filePath); - statement.arguments.push((expr as PrimitiveTypeExpression).value); + if (!statementIsA(expr, NodeType.PrimitiveType)) throw new CompilerError(expr.range, `Expected argument types after function name, found ${expr.value}.`, filePath); + statement.arguments.push(expr); } const braceExpr = parseExpression(false); - if (braceExpr.type !== NodeType.Brace) throw new CompilerError(braceExpr.range, 'Function statement requires braces.', filePath); + if (!statementIsA(braceExpr, NodeType.Brace)) throw new CompilerError(braceExpr.range, 'Function statement requires braces.', filePath); braceExpr.body.forEach(s => { if (!([NodeType.Compile, NodeType.Imports].includes(s.type))) throw new CompilerError(s.range, 'Statement not allowed inside a function statement.', filePath); }); statement.body = braceExpr.body; @@ -134,7 +134,7 @@ export namespace syxparser { * @returns Parsed node. 
*/ export function parseImportsStatement(token: Token, put: boolean) { - const statement: ImportsStatement = { type: NodeType.Imports, formats: [], module: '', range: defaultRange }; + const statement: ImportsStatement = { type: NodeType.Imports, formats: [], module: {type:NodeType.String,modifiers:[],range:defaultRange,value:''}, range: defaultRange, modifiers: [] }; if (at().type !== TokenType.OpenParen) throw new CompilerError(at().range, 'Imports statement require parens.', filePath); @@ -145,7 +145,7 @@ export namespace syxparser { if (t.type === TokenType.Comma && at().type !== TokenType.Identifier) throw new CompilerError(t.range, 'Expected identifier after comma.', filePath); else if (t.type === TokenType.Comma && statement.formats.length === 0) throw new CompilerError(t.range, 'Can\'t start with comma.', filePath); else if (t.type === TokenType.Comma) { } - else if (t.type === TokenType.Identifier) statement.formats.push(t.value); + else if (t.type === TokenType.Identifier) statement.formats.push({type:NodeType.Identifier,modifiers:[],range:t.range,value:t.value}); else throw new CompilerError(t.range, `Expected comma or identifier, found '${t.value}'.`, filePath); } tokens.shift(); // skip CloseParen @@ -155,9 +155,9 @@ export namespace syxparser { const moduleExpr = parseExpression(false, false) as Expression; - if (moduleExpr.type !== NodeType.String) throw new CompilerError(moduleExpr.range, `Expected string after parens of imports statement, found '${moduleExpr.value}'.`, filePath); + if (!statementIsA(moduleExpr, NodeType.String)) throw new CompilerError(moduleExpr.range, `Expected string after parens of imports statement, found '${moduleExpr.value}'.`, filePath); - statement.module = moduleExpr.value; + statement.module = moduleExpr; statement.range = combineTwo(token, moduleExpr.range); if (at().type !== TokenType.Semicolon) throw new CompilerError(at().range, `Expected ';' after imports statement, found '${at().value}'.`, filePath); @@ -171,7 
+171,7 @@ export namespace syxparser { * @returns Parsed node. */ export function parseCompileStatement(token: Token, put: boolean): Node { - const statement: CompileStatement = { type: NodeType.Compile, formats: [], body: [], range: defaultRange }; + const statement: CompileStatement = { type: NodeType.Compile, formats: [], body: [], range: defaultRange, modifiers: [] }; if (at().type !== TokenType.OpenParen) throw new CompilerError(at().range, 'Compile statement require parens.', filePath); @@ -182,7 +182,7 @@ export namespace syxparser { if (t.type === TokenType.Comma && at().type !== TokenType.Identifier) throw new CompilerError(t.range, 'Expected identifier after comma.', filePath); else if (t.type === TokenType.Comma && statement.formats.length === 0) throw new CompilerError(t.range, 'Can\'t start with comma.', filePath); else if (t.type === TokenType.Comma) { } - else if (t.type === TokenType.Identifier) statement.formats.push(t.value); + else if (t.type === TokenType.Identifier) statement.formats.push({type:NodeType.Identifier,modifiers:[],range:t.range,value:t.value}); else throw new CompilerError(t.range, `Expected comma or identifier, found '${t.value}'.`, filePath); } tokens.shift(); // skip CloseParen @@ -192,8 +192,9 @@ export namespace syxparser { while (at().type !== TokenType.Semicolon) { const expr = parseExpression(false, false); statement.body.push(expr as Expression); + statement.range = combineTwo(token, expr.range); } - statement.range = combineTwo(token, tokens.shift()); // Skip semicolon and make it the end of the range. + tokens.shift(); return node(statement, put); } @@ -203,7 +204,7 @@ export namespace syxparser { * @returns Parsed node. 
*/ export function parseOperatorStatement(token: Token, put: boolean) { - const statement: OperatorStatement = { type: NodeType.Operator, regex: [], body: [], range: defaultRange }; + const statement: OperatorStatement = { type: NodeType.Operator, regex: [], body: [], range: defaultRange, modifiers: [] }; while (at().type !== TokenType.OpenBrace) { @@ -212,7 +213,7 @@ export namespace syxparser { } const braceExpr = parseExpression(false); - if (braceExpr.type !== NodeType.Brace) throw new CompilerError(braceExpr.range, 'Expected braces after operator regex.', filePath); + if (!statementIsA(braceExpr, NodeType.Brace)) throw new CompilerError(braceExpr.range, 'Expected braces after operator regex.', filePath); braceExpr.body.forEach(s => { if (!([NodeType.Compile, NodeType.Imports].includes(s.type))) throw new CompilerError(s.range, 'Statement not allowed inside of operator statement.'); }, filePath); statement.body = braceExpr.body; @@ -221,6 +222,25 @@ export namespace syxparser { return node(statement, put); } + /** + * Parses an operator statement. Parameters are related to the environment of {@link syxparser.parseStatement} or {@link sysparser.parseStatement}. + * @returns Parsed node. 
+ */ + export function parseGlobalStatement(token: Token, put: boolean) { + const stmt: GlobalStatement = { type: NodeType.Global, range: token.range, body: [], modifiers: [], name: {type:NodeType.Identifier,modifiers:[],range:defaultRange,value:''} }; + + if (at().type !== TokenType.Identifier) throw new CompilerError(at().range, `Expected identifier after function statement, found '${at().value}'.`, filePath); + const {range,value} = tokens.shift(); + stmt.name = {modifiers:[],type:NodeType.Identifier,range,value}; + + const braceExpr = parseExpression(false, false, false); + if (!statementIsA(braceExpr, NodeType.Brace)) throw new CompilerError(braceExpr.range, 'Expected braces after global name.', filePath); + + stmt.body = braceExpr.body; + stmt.range = combineTwo(token, braceExpr.range); + return node(stmt, put); + } + //# //# EXPRESSION PARSERS @@ -241,7 +261,7 @@ export namespace syxparser { s += _t.value; } - return node({ type: NodeType.String, value: s, range: combineTwo(range, tokens.shift()) }, put); + return node({ type: NodeType.String, value: s, range: combineTwo(range, tokens.shift()), modifiers: [] }, put); } /** @@ -260,7 +280,7 @@ export namespace syxparser { s += _t.value; } - return node({ type: NodeType.String, value: s, range: combineTwo(range, tokens.shift()) }, put); + return node({ type: NodeType.String, value: s, range: combineTwo(range, tokens.shift()), modifiers: [] }, put); } /** @@ -274,7 +294,7 @@ export namespace syxparser { if (at(2).type !== TokenType.CloseDiamond) throw new CompilerError(at(2).range, `Expected '>' after primitive type identifier, found '${at(2).value}'`, filePath); const t = tokens.shift(); tokens.shift(); - return node({ type: NodeType.PrimitiveType, value: newToken.value, range: combineTwo(t, tokens.shift()) }, put); + return node({ type: NodeType.PrimitiveType, value: newToken.value, range: combineTwo(t, tokens.shift()), modifiers: [] }, put); } /** @@ -283,7 +303,7 @@ export namespace syxparser { */ export 
function parseWhitespaceIdentifier(put: boolean): Node { const { range } = tokens.shift(); - return node({ type: NodeType.WhitespaceIdentifier, value: '+s', range }, put); + return node({ type: NodeType.WhitespaceIdentifier, value: '+s', range, modifiers: [] }, put); } /** @@ -293,7 +313,7 @@ export namespace syxparser { export function parseBraceExpression(put: boolean, dr: Range) { const { range } = tokens.shift(); - const expr: BraceExpression = { type: NodeType.Brace, body: [], value: '{', range: dr }; + const expr: BraceExpression = { type: NodeType.Brace, body: [], value: '{', range: dr, modifiers: [] }; while (at().type !== TokenType.CloseBrace) { const stmt = parseStatement(false); @@ -310,7 +330,7 @@ export namespace syxparser { export function parseSquareExpression(put: boolean, dr: Range) { const { range } = tokens.shift(); - const expr: SquareExpression = { type: NodeType.Square, body: [], value: '[', range: dr }; + const expr: SquareExpression = { type: NodeType.Square, body: [], value: '[', range: dr, modifiers: [] }; while (at().type !== TokenType.CloseSquare) { const stmt = parseStatement(false); @@ -327,7 +347,7 @@ export namespace syxparser { export function parseParenExpression(put: boolean, dr: Range) { const { range } = tokens.shift(); - const expr: ParenExpression = { type: NodeType.Paren, body: [], value: '(', range: dr }; + const expr: ParenExpression = { type: NodeType.Paren, body: [], value: '(', range: dr, modifiers: [] }; while (at().type !== TokenType.CloseParen) { const stmt = parseStatement(false); @@ -348,7 +368,7 @@ export namespace syxparser { const id = tokens.shift(); // id tokens.shift(); // sep const index = tokens.shift(); // index - const expr: VariableExpression = { index: parseInt(index.value), type: NodeType.Variable, value: id.value, range: combineTwo(id, index) }; + const expr: VariableExpression = { index: parseInt(index.value), type: NodeType.Variable, value: id.value, range: combineTwo(id, index), modifiers: [] }; 
return node(expr, put); } @@ -379,14 +399,14 @@ export namespace syxparser { * @param {string} _filePath Path of the file that is being parsed. * @returns Main {@link ProgramStatement} containing all other statements. * @author efekos - * @version 1.0.3 - * @since 0.0.1-alpha + * @version 1.0.4 + * @since 0.0.2-alpha */ export function parseTokens(t: Token[], _filePath: string): ProgramStatement { tokens = t; const eof = t.find(r => r.type === TokenType.EndOfFile); - program = { body: [], type: NodeType.Program, range: { end: eof.range.end, start: { line: 0, character: 0 } } }; + program = { body: [], type: NodeType.Program, range: { end: eof.range.end, start: { line: 0, character: 0 } }, modifiers: [] }; filePath = _filePath; while (canGo()) { @@ -409,7 +429,6 @@ export namespace syxparser { return tokens[i]; } - const exportable = [NodeType.Operator, NodeType.Function, NodeType.Keyword]; const defaultRange: Range = { end: { line: 0, character: 0 }, start: { character: 0, line: 0 } }; @@ -430,22 +449,25 @@ export namespace syxparser { * @param {boolean} put Whether the result should be added to the program statement. * @returns A node that is either a statement or an expression if a statement wasn't present. 
* @author efekos - * @version 1.0.9 - * @since 0.0.1-alpha + * @version 1.1.0 + * @since 0.0.2-alpha */ export function parseStatement(put: boolean = true): Node { if (keywords.includes(at().type)) { const token = at(); tokens.shift(); - if (token.type === TokenType.ImportKeyword) return parseImportStatement(put, token); - else if (token.type === TokenType.OperatorKeyword) return parseOperatorStatement(token, put); - else if (token.type === TokenType.CompileKeyword) return parseCompileStatement(token, put); - else if (token.type === TokenType.ExportKeyword) return parseExportStatement(token, put); - else if (token.type === TokenType.ImportsKeyword) return parseImportsStatement(token, put); - else if (token.type === TokenType.FunctionKeyword) return parseFunctionStatement(token, put); - else if (token.type === TokenType.KeywordKeyword) return parseKeywordStatement(put, token); - else if (token.type === TokenType.RuleKeyword) return parseRuleStatement(token, put); + switch (token.type) { + case TokenType.ImportKeyword: return parseImportStatement(put, token); + case TokenType.OperatorKeyword: return parseOperatorStatement(token, put); + case TokenType.CompileKeyword: return parseCompileStatement(token, put); + case TokenType.ExportKeyword: return parseExportStatement(token, put); + case TokenType.ImportsKeyword: return parseImportsStatement(token, put); + case TokenType.FunctionKeyword: return parseFunctionStatement(token, put); + case TokenType.KeywordKeyword: return parseKeywordStatement(put, token); + case TokenType.RuleKeyword: return parseRuleStatement(token, put); + case TokenType.GlobalKeyword: return parseGlobalStatement(token, put); + } } else parseExpression(); @@ -472,28 +494,32 @@ export namespace syxparser { * @param {boolean} expectIdentifier Whether identifiers should be allowed. Unknown identifiers will stop the function with this value set to `false`, returning the identifier as a {@link StringExpression} otherwise. * @returns The parsed node. 
* @author efekos - * @version 1.0.8 - * @since 0.0.1-alpha + * @version 1.1.0 + * @since 0.0.2-alpha */ export function parseExpression(put: boolean = true, statements: boolean = true, expectIdentifier: boolean = false): Node { const tt = at().type; - if (tt === TokenType.SingleQuote) return parseSingleQuotedString(put); - else if (tt === TokenType.DoubleQuote) return parseDoubleQuotedString(put); - else if (tt === TokenType.OpenDiamond) return parsePrimitiveType(primitiveTypes, put); - else if (tt === TokenType.WhitespaceIdentifier) return parseWhitespaceIdentifier(put); - else if (tt === TokenType.OpenBrace) return parseBraceExpression(put, defaultRange); - else if (tt === TokenType.OpenSquare) return parseSquareExpression(put, defaultRange); - else if (tt === TokenType.OpenParen) return parseParenExpression(put, defaultRange); - else if (tt === TokenType.Identifier && at(1).type === TokenType.VarSeperator) return parsePrimitiveVariable(put); - else if (keywords.includes(tt)) { - if (!statements) throw new CompilerError(at().range, 'Statement not allowed here.', filePath); - return parseStatement(); - } else if (tt === TokenType.Identifier && expectIdentifier) { - const { value, range } = tokens.shift(); - return node({ type: NodeType.String, value, range }, put); + switch (tt) { + case TokenType.SingleQuote: return parseSingleQuotedString(put); + case TokenType.DoubleQuote: return parseDoubleQuotedString(put); + case TokenType.OpenDiamond: return parsePrimitiveType(primitiveTypes, put); + case TokenType.WhitespaceIdentifier: return parseWhitespaceIdentifier(put); + case TokenType.OpenBrace: return parseBraceExpression(put, defaultRange); + case TokenType.OpenSquare: return parseSquareExpression(put, defaultRange); + case TokenType.OpenParen: return parseParenExpression(put, defaultRange); + case TokenType.Identifier: + if (at(1).type === TokenType.VarSeperator) return parsePrimitiveVariable(put); + else if (keywords.includes(tt)) { + if (!statements) throw new 
CompilerError(at().range, 'Statement not allowed here.', filePath); + return parseStatement(); + } else if (expectIdentifier) { + const { value, range } = tokens.shift(); + return node({ type: NodeType.Identifier, value, range, modifiers: [] }, put); + } } - else throw new CompilerError(at().range, `Unexpected expression: '${at().value}'`, filePath); + + throw new CompilerError(at().range, `Unexpected expression: '${at().value}'`, filePath); } @@ -518,10 +544,10 @@ export namespace sysparser { */ export function parseImportStatement(put: boolean, token: Token): Node { const ex = parseExpression(false, false); - if (ex.type !== NodeType.String) throw new CompilerError(ex.range, 'Expected file path after import statement.', filePath); + if (!statementIsA(ex, NodeType.String)) throw new CompilerError(ex.range, 'Expected file path after import statement.', filePath); if (at().type !== TokenType.Semicolon) throw new CompilerError(at().range, `Expected ';' after import statement, found '${at().value}'.`, filePath); tokens.shift(); - return node({ type: NodeType.Import, path: (ex as Expression).value, range: combineTwo(token, ex.range) }, put); + return node({ type: NodeType.Import, path: ex, range: combineTwo(token, ex.range), modifiers: [] }, put); } //# @@ -543,7 +569,7 @@ export namespace sysparser { s += _t.value; } - return node({ type: NodeType.String, value: s, range: combineTwo(range, tokens.shift()) }, put); + return node({ type: NodeType.String, value: s, range: combineTwo(range, tokens.shift()), modifiers: [] }, put); } /** @@ -562,7 +588,7 @@ export namespace sysparser { s += _t.value; } - return node({ type: NodeType.String, value: s, range: combineTwo(range, tokens.shift()) }, put); + return node({ type: NodeType.String, value: s, range: combineTwo(range, tokens.shift()), modifiers: [] }, put); } @@ -600,14 +626,14 @@ export namespace sysparser { * @param {Token[]} t Token list to parse. 
* @returns Main {@link ProgramStatement} containing all other statements. * @author efekos - * @version 1.0.2 - * @since 0.0.1-alpha + * @version 1.0.3 + * @since 0.0.2-alpha */ export function parseTokens(t: Token[], _filePath: string): ProgramStatement { tokens = t; const eof = t.find(r => r.type === TokenType.EndOfFile); - program = { body: [], type: NodeType.Program, range: { start: { character: 0, line: 0 }, end: eof.range.end } }; + program = { body: [], type: NodeType.Program, range: { start: { character: 0, line: 0 }, end: eof.range.end }, modifiers: [] }; filePath = _filePath; while (canGo()) { diff --git a/src/compiler.ts b/src/compiler.ts index 967de98..abf0687 100644 --- a/src/compiler.ts +++ b/src/compiler.ts @@ -1,4 +1,4 @@ -import { CompileStatement, CompilerError, ExportStatement, FunctionStatement, ImportStatement, ImportsStatement, KeywordStatement, NodeType, OperatorStatement, PrimitiveTypeExpression, StringExpression, VariableExpression } from './types.js'; +import { CompileStatement, CompilerError, ImportStatement, ImportsStatement, NodeType, OperatorStatement, PrimitiveTypeExpression, StringExpression, TokenType, VariableExpression, statementIsA } from './types.js'; import { dirname, join } from 'path'; import { existsSync, readFileSync, readdirSync, statSync, writeFileSync } from 'fs'; import { sysparser, syxparser } from './ast.js'; @@ -65,47 +65,34 @@ export class SyntaxScriptCompiler { * Compiles one .syx file from the path given. * @param {string} file Path to a file to compile. 
* @author efekos - * @version 1.0.4 - * @since 0.0.1-alpha + * @version 1.0.6 + * @since 0.0.2-alpha */ public compileSyx(file: string) { const ast = syxparser.parseTokens(tokenizeSyx(readFileSync(file).toString()), file); const out: AnyExportable[] = []; ast.body.forEach(statement => { - if (statement.type !== NodeType.Export) return; - const exported = (statement as ExportStatement).body; + if (!statement.modifiers.some(token => token.type === TokenType.ExportKeyword)) return; - if (exported.type === NodeType.Operator) { - const operatorStmt = exported as OperatorStatement; - //# Generate regexMatcher - let regexMatcher: RegExp = new RegExp(''); - operatorStmt.regex.forEach(regexStatement => { + if (statementIsA(statement, NodeType.Operator)) { - if (regexStatement.type === NodeType.PrimitiveType) { - regexMatcher = new RegExp(regexMatcher.source + regexes[(regexStatement as PrimitiveTypeExpression).value].source); - } - if (regexStatement.type === NodeType.WhitespaceIdentifier) { - regexMatcher = new RegExp(regexMatcher.source + regexes['+s'].source); - } - if (regexStatement.type === NodeType.String) { - regexMatcher = new RegExp(regexMatcher.source + escapeRegex((regexStatement as StringExpression).value)); - } - }); + //# Generate regexMatcher + const regexMatcher: RegExp = CompilerFunctions.generateRegexMatcher(statement); - const operatorStmtExport: Operator = { imports: {}, outputGenerators: {}, regexMatcher, type: ExportType.Operator }; + const operatorStmtExport: ExportedOperator = { imports: {}, outputGenerators: {}, regexMatcher, type: ExportType.Operator }; //# Handle statements - operatorStmt.body.forEach(stmt => { + statement.body.forEach(stmt => { if (stmt.type === NodeType.Compile) { const compileStmt = stmt as CompileStatement; compileStmt.formats.forEach(frmt => { - if (operatorStmtExport.outputGenerators[frmt] !== undefined) throw new CompilerError(compileStmt.range, `Duplicate file format at compile statement \'${frmt}\'`); + if 
(operatorStmtExport.outputGenerators[frmt.value] !== undefined) throw new CompilerError(compileStmt.range, `Duplicate file format at compile statement \'${frmt}\'`); - operatorStmtExport.outputGenerators[frmt] = (src) => { + operatorStmtExport.outputGenerators[frmt.value] = (src) => { let out = ''; compileStmt.body.forEach(e => { @@ -128,32 +115,29 @@ export class SyntaxScriptCompiler { const importStmt = stmt as ImportsStatement; importStmt.formats.forEach(frmt => { - if (operatorStmtExport.imports[frmt] !== undefined) throw new CompilerError(importStmt.range, `Duplicate file format at imports statement \'${frmt}\'`); - operatorStmtExport.imports[frmt] = importStmt.module; + if (operatorStmtExport.imports[frmt.value] !== undefined) throw new CompilerError(importStmt.range, `Duplicate file format at imports statement \'${frmt}\'`); + operatorStmtExport.imports[frmt.value] = importStmt.module.value; }); } else throw new CompilerError(stmt.range, `Unexpected \'${stmt.type}\' statement insdie operator statement.`); }); out.push(operatorStmtExport); - } else if (exported.type === NodeType.Function) { - const stmt = exported as FunctionStatement; - const statementExport: Function = { type: ExportType.Function, args: stmt.arguments.map(s => regexes[s]), name: stmt.name, formatNames: {}, imports: {} }; - - stmt.body.forEach(statement => { - - if (statement.type === NodeType.Compile) { - const compileStatement = statement as CompileStatement; - if (compileStatement.body[0].type !== NodeType.String) throw new CompilerError(compileStatement.range, 'Expected a string after compile statement parens'); - compileStatement.formats.forEach(each => { - if (statementExport.formatNames[each] !== undefined) throw new CompilerError(compileStatement.range, `Encountered multiple compile statements for target language '${each}'`); - statementExport.formatNames[each] = compileStatement.body[0].value; + } else if (statementIsA(statement, NodeType.Function)) { + const statementExport: 
ExportedFunction = { type: ExportType.Function, args: statement.arguments.map(s => regexes[s.value]), name: statement.name.value, formatNames: {}, imports: {} }; + + statement.body.forEach(stmt => { + + if (statementIsA(stmt, NodeType.Compile)) { + if (stmt.body[0].type !== NodeType.String) throw new CompilerError(stmt.range, 'Expected a string after compile statement parens'); + stmt.formats.forEach(each => { + if (statementExport.formatNames[each.value] !== undefined) throw new CompilerError(stmt.range, `Encountered multiple compile statements for target language '${each}'`); + statementExport.formatNames[each.value] = stmt.body[0].value; }); - } else if (statement.type === NodeType.Imports) { - const importsStatement = statement as ImportsStatement; - importsStatement.formats.forEach(each => { - if (statementExport.imports[each] !== undefined) throw new CompilerError(importsStatement.range, `Encountered multiple import statements for target language '${each}'`); - statementExport.imports[each] = importsStatement.module; + } else if (statementIsA(stmt, NodeType.Imports)) { + stmt.formats.forEach(each => { + if (statementExport.imports[each.value] !== undefined) throw new CompilerError(stmt.range, `Encountered multiple import statements for target language '${each}'`); + statementExport.imports[each.value] = stmt.module.value; }); } @@ -161,11 +145,11 @@ export class SyntaxScriptCompiler { out.push(statementExport); - } else if (exported.type === NodeType.Keyword) { - const stmt = exported as KeywordStatement; - - out.push({ type: ExportType.Keyword, word: stmt.word }); - } else throw new CompilerError(statement.range, `Unexpected \'${statement.type}\' statement after export statement.`); + } else if (statementIsA(statement, NodeType.Keyword)) { + out.push({ type: ExportType.Keyword, word: statement.word.value }); + } else if (statementIsA(statement, NodeType.Global)) { + //TODO + } else throw new CompilerError(statement.range, `Unexpected \'${statement.type}\' 
statement after export statement.`, file); }); @@ -205,11 +189,11 @@ export class SyntaxScriptCompiler { if (stmt.type === NodeType.Import) { const importStmt = stmt as ImportStatement; - const pathToImport = join(dirname(file), importStmt.path.endsWith('.syx') ? importStmt.path : importStmt.path + '.syx'); + const pathToImport = join(dirname(file), importStmt.path.value.endsWith('.syx') ? importStmt.path.value : importStmt.path.value + '.syx'); if (!existsSync(pathToImport)) throw new CompilerError(importStmt.range, `File \'${pathToImport}\' imported from \'${file}\' does not exist.`); this.exportData[pathToImport].forEach(exported => { if (exported.type === ExportType.Operator) - if (imported.filter(r => r.type === ExportType.Operator).some(i => exported.regexMatcher === (i as Operator).regexMatcher)) throw new CompilerError(importStmt.range, `There are more than one operators with the same syntax imported to \'${file}\'.`); + if (imported.filter(r => r.type === ExportType.Operator).some(i => exported.regexMatcher === (i as ExportedOperator).regexMatcher)) throw new CompilerError(importStmt.range, `There are more than one operators with the same syntax imported to \'${file}\'.`); imported.push(exported); }); } @@ -250,46 +234,51 @@ export class SyntaxScriptCompiler { /** * Type of something that can be exported. - * @version 1.0.0 + * @version 1.0.1 * @since 0.0.1-alpha * @author efekos */ export enum ExportType { /** - * {@link Operator}. + * {@link ExportedOperator}. */ Operator, /** - * {@link Function}. + * {@link ExportedFunction}. */ Function, /** - * {@link Keyword}. + * {@link ExportedKeyword}. + */ + Keyword, + + /** + * {@link ExportedGlobal}. */ - Keyword + Global } /** * Base exportable interface. * @author efekos - * @version 1.0.0 + * @version 1.0.1 * @since 0.0.1-alpha */ -export interface Export { +export interface Exported { type: ExportType; } /** * Represents an exported operator. Uses type {@link ExportType.Operator}. 
* @author efekos - * @version 1.0.0 + * @version 1.0.1 * @since 0.0.1-alpha */ -export interface Operator extends Export { +export interface ExportedOperator extends Exported { type: ExportType.Operator, regexMatcher: RegExp; outputGenerators: Record>; @@ -299,10 +288,10 @@ export interface Operator extends Export { /** * Represents an exported function. Uses type {@link ExportType.Function}. * @author efekos - * @version 1.0.0 + * @version 1.0.1 * @since 0.0.1-alpha */ -export interface Function extends Export { +export interface ExportedFunction extends Exported { type: ExportType.Function; name: string; args: RegExp[]; @@ -313,10 +302,10 @@ export interface Function extends Export { /** * Represents an exported keyword. Uses type {@link ExportType.Keyword}. * @author efekos - * @version 1.0.0 + * @version 1.0.1 * @since 0.0.1-alpha */ -export interface Keyword extends Export { +export interface ExportedKeyword extends Exported { type: ExportType.Keyword; word: string; } @@ -340,7 +329,7 @@ export type ReturnerMethod = () => R; /** * Any interface that represents something exportable. */ -export type AnyExportable = Operator | Function | Keyword; +export type AnyExportable = ExportedOperator | ExportedFunction | ExportedKeyword; export const regexes: Record = { /** @@ -395,4 +384,36 @@ export const regexes: Record = { */ export function escapeRegex(src: string): string { return src.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); +} + +export namespace CompilerFunctions { + + + /** + * Generates {@link RegExp} of the given operator statement. + * @param statement An operator statement. + * @returns A regular expression generated from regex of the operator statement. 
+ * @author efekos + * @version 1.0.0 + * @since 0.0.2-alpha + */ + export function generateRegexMatcher(statement: OperatorStatement): RegExp { + let regexMatcher = new RegExp(''); + statement.regex.forEach(regexStatement => { + + if (regexStatement.type === NodeType.PrimitiveType) { + regexMatcher = new RegExp(regexMatcher.source + regexes[(regexStatement as PrimitiveTypeExpression).value].source); + } + if (regexStatement.type === NodeType.WhitespaceIdentifier) { + regexMatcher = new RegExp(regexMatcher.source + regexes['+s'].source); + } + if (regexStatement.type === NodeType.String) { + regexMatcher = new RegExp(regexMatcher.source + escapeRegex((regexStatement as StringExpression).value)); + } + + }); + + return regexMatcher; + } + } \ No newline at end of file diff --git a/src/diagnostic.ts b/src/diagnostic.ts index 6d08d00..329fe2c 100644 --- a/src/diagnostic.ts +++ b/src/diagnostic.ts @@ -1,15 +1,24 @@ import { CodeAction, CodeActionKind, Diagnostic, DiagnosticSeverity, DocumentDiagnosticReportKind, FullDocumentDiagnosticReport, Range } from 'lsp-types'; +import { FunctionStatement, GlobalStatement, IdentifierExpression, ImportStatement, KeywordStatement, NodeType, OperatorStatement, ProgramStatement, RuleStatement, Statement, TokenType, isCompilerError, statementIsA } from './types.js'; +import { existsSync, readFileSync, statSync } from 'fs'; import { sysparser, syxparser } from './ast.js'; import { tokenizeSys, tokenizeSyx } from './lexer.js'; -import { isCompilerError } from './types.js'; -import { readFileSync } from 'fs'; +import { CompilerFunctions } from './compiler.js'; +import { dictionary } from './dictionary/index.js'; +import { fileURLToPath } from 'url'; +import { join } from 'path'; +// Use with addRange to include semicolons +const semiRange: Range = { end: { line: 0, character: 1 }, start: { line: 0, character: 0 } }; /** * Creates a diagnostic report from the file path given. * @param {string} filePath Path of the file to create a report. 
* @param {string} fileContent Content of the file if it is already fetched. + * @author efekos + * @version 1.0.1 + * @since 0.0.2-alpha * @returns A diagnostic report language servers can use. */ export function createSyntaxScriptDiagnosticReport(filePath: string, fileContent?: string): FullDocumentDiagnosticReport { @@ -21,24 +30,296 @@ export function createSyntaxScriptDiagnosticReport(filePath: string, fileContent const content = fileContent ?? readFileSync(filePath).toString(); const tokens = (isSyx ? tokenizeSyx : tokenizeSys)(content); - (isSyx ? syxparser : sysparser).parseTokens(tokens, filePath); + const ast = (isSyx ? syxparser : sysparser).parseTokens(tokens, filePath); + items.push(...exportableCheck(ast.body, filePath)); + items.push(...ruleConflictCheck(ast, filePath)); + items.push(...sameRuleCheck(ast, filePath)); + items.push(...importedExistentCheck(ast, filePath)); + items.push(...sameRegexCheck(ast, filePath)); + items.push(...sameNameCheck(ast.body, filePath)); } catch (error) { if (isCompilerError(error)) { items.push({ message: error.message, range: subRange(error.range), severity: DiagnosticSeverity.Error, - source: 'syntax-script', data: error.actions }); + } else { + items.push({ message: `Parser Error: ${error.message}`, range: { end: { line: 0, character: 1 }, start: { line: 0, character: 0 } }, severity: DiagnosticSeverity.Warning }); } } finally { - return { items, kind: DocumentDiagnosticReportKind.Full }; + return { items: items.map(r => { return { ...r, source: 'syntax-script' }; }), kind: DocumentDiagnosticReportKind.Full }; } } +// Checks rule conflicts and adds warnings when there is two defined rules that conflict each other +function ruleConflictCheck(ast: ProgramStatement, filePath: string): Diagnostic[] { + const items: Diagnostic[] = []; + + ast.body.forEach(stmt => { + if (statementIsA(stmt, NodeType.Rule)) { + const dictRule = dictionary.Rules.find(r => r.name === stmt.rule.value); + + ast.body.filter(r => statementIsA(r, 
NodeType.Rule)).filter(r => r.range !== stmt.range).map(r => r as RuleStatement).forEach(otherRules => { + if (dictRule.conflicts.includes(otherRules.rule.value)) items.push({ + message: `Rule '${otherRules.rule.value}' conflicts with '${stmt.rule.value}'. Both of them should not be defined.`, + range: subRange(otherRules.rule.range), + severity: DiagnosticSeverity.Warning, + data: [ + { + title: `Remove ${stmt.rule.value} definition`, + kind: CodeActionKind.QuickFix, + edit: { + changes: { + [filePath]: [ + { + range: subRange(addRange(stmt.range, semiRange)), + newText: '' + } + ] + } + } + }, + { + title: `Remove ${otherRules.rule.value} definition`, + kind: CodeActionKind.QuickFix, + edit: { + changes: { + [filePath]: [ + { + range: subRange(addRange(otherRules.range, semiRange)), + newText: '' + } + ] + } + } + } + ] as CodeAction[] + }); + }); + } + }); + + return items; +} + +// Checks if same rule is defined twice +function sameRuleCheck(ast: ProgramStatement, filePath: string): Diagnostic[] { + const items: Diagnostic[] = []; + + ast.body.forEach(stmt => { + if (statementIsA(stmt, NodeType.Rule)) { + ast.body.filter(r => statementIsA(r, NodeType.Rule)).filter(r => r.range !== stmt.range).map(r => r as RuleStatement).forEach(otherRules => { + if (otherRules.rule.value === stmt.rule.value) items.push({ + message: `Rule '${stmt.rule.value}' is already defined.`, + range: subRange(stmt.rule.range), + severity: DiagnosticSeverity.Error, + data: [ + { + title: 'Remove this definition', + kind: CodeActionKind.QuickFix, + edit: { + changes: { + [filePath]: [ + { + range: subRange(addRange(stmt.range, semiRange)), + newText: '' + } + ] + } + } + } + ] as CodeAction[] + }); + }); + } + }); + + return items; +} + +// Checks if an import statement refers to a file that doesn't exist +function importedExistentCheck(ast: ProgramStatement, filePath: string): Diagnostic[] { + const items: Diagnostic[] = []; + + ast.body.filter(r => statementIsA(r, NodeType.Import)).map(r => r as 
ImportStatement).forEach(stmt => { + + const filePathButPath = fileURLToPath(filePath); + const fullPath = join(filePathButPath, '../', stmt.path.value); + if (!existsSync(fullPath)) items.push({ + message: `Can't find file '${fullPath}' imported from '${filePathButPath}'`, + severity: DiagnosticSeverity.Error, + range: subRange(stmt.path.range), + data: [ + { + title: 'Remove this import statement', + kind: CodeActionKind.QuickFix, + edit: { + changes: { + [filePath]: [ + { range: subRange(addRange(stmt.range, semiRange)), newText: '' } + ] + } + } + } + ] as CodeAction[] + }); + + if (existsSync(fullPath)) { + const status = statSync(fullPath); + + if (!status.isFile()) items.push({ + message: `'${fullPath}' imported from '${filePathButPath}' doesn't seem to be a file.`, + severity: DiagnosticSeverity.Error, + range: subRange(stmt.path.range), + data: [ + { + title: 'Remove this import statement', + kind: CodeActionKind.QuickFix, + edit: { + changes: { + [filePath]: [ + { range: subRange(addRange(stmt.range, semiRange)), newText: '' } + ] + } + } + } + ] as CodeAction[] + }); + + if (!fullPath.endsWith('.syx')) items.push({ + message: `'${fullPath}' imported from '${filePathButPath}' cannot be imported.`, + severity: DiagnosticSeverity.Error, + range: subRange(stmt.path.range), + data: [ + { + title: 'Remove this import statement', + kind: CodeActionKind.QuickFix, + edit: { + changes: { + [filePath]: [ + { range: subRange(addRange(stmt.range, semiRange)), newText: '' } + ] + } + } + } + ] as CodeAction[] + }); + } + + }); + + return items; +} + +// Checks if there are multiple operators with the same regex +function sameRegexCheck(ast: ProgramStatement, filePath: string): Diagnostic[] { + const items: Diagnostic[] = []; + + const encounteredRegexes: RegExp[] = []; + + ast.body.filter(r => statementIsA(r, NodeType.Operator)).map(r => r as OperatorStatement).forEach(stmt => { + + const regex = new RegExp(CompilerFunctions.generateRegexMatcher(stmt)); + + if 
(encounteredRegexes.some(r => r.source === regex.source)) items.push({ + message: 'Regex of this operator is same with another operator.', + range: subRange(syxparser.combineTwo(stmt.regex[0].range, stmt.regex[stmt.regex.length - 1].range)), + severity: DiagnosticSeverity.Error, + data: [ + { + title: 'Remove this operator', + kind: CodeActionKind.QuickFix, + edit: { + changes: { + [filePath]: [ + { + newText: '', + range: subRange(stmt.range) + } + ] + } + } + } + ] as CodeAction[] + }); + else encounteredRegexes.push(regex); + + }); + + return items; +} + +// Checks if every exported statement it actually exportable +function exportableCheck(statements: Statement[], filePath: string): Diagnostic[] { + + const items: Diagnostic[] = []; + + statements.forEach(stmt => { + + if (stmt.modifiers.some(t => t.type === TokenType.ExportKeyword) && !dictionary.ExportableNodeTypes.includes(stmt.type)) items.push({ + message: 'This statement cannot be exported.', + range: subRange(stmt.modifiers.find(r=>r.type===TokenType.ExportKeyword).range), + severity: DiagnosticSeverity.Error, + data: [ + { + title: 'Remove export keyword', + kind: CodeActionKind.QuickFix, + edit: { + changes: { + [filePath]: [ + { + newText: '', range: subRange(stmt.modifiers.find(r => r.type === TokenType.ExportKeyword).range) + } + ] + } + } + } + ] as CodeAction[] + }); + + if (dictionary.StatementTypesWithBody.includes(stmt.type)) items.push(...exportableCheck((stmt as GlobalStatement).body, filePath)); + }); + + return items; +} + +// Check if everything has a unique name +function sameNameCheck(statements: Statement[], filePath: string): Diagnostic[] { + const items: Diagnostic[] = []; + + function c(s: Statement[]) { + const encounteredNames = []; + + s + .filter(r => statementIsA(r, NodeType.Function) || statementIsA(r, NodeType.Global) || statementIsA(r, NodeType.Keyword)) + .map(r => { + if (statementIsA(r, NodeType.Function)) return r as FunctionStatement; + if (statementIsA(r, 
NodeType.Global)) return r as GlobalStatement; + if (statementIsA(r, NodeType.Keyword)) return r as KeywordStatement; + }).forEach(stmt => { + + const n:IdentifierExpression = stmt[statementIsA(stmt, NodeType.Keyword) ? 'word' : 'name']; + + if (encounteredNames.includes(n.value)) items.push({ + message: `Name '${n.value}' is already seen before.`, + range: subRange(n.range), + severity: DiagnosticSeverity.Error + }); + else encounteredNames.push(n.value); + + if (statementIsA(stmt, NodeType.Global)) c(stmt.body); + }); + + } + + c(statements); + + return items; +} /** * Modifies the given range to be zero-based. @@ -55,4 +336,8 @@ export function subRange(r: Range): Range { const d = r.end.line; return { start: { character: a === 0 ? 0 : a - 1, line: b === 0 ? 0 : b - 1 }, end: { character: c === 0 ? 0 : c - 1, line: d === 0 ? 0 : d - 1 } }; +} + +function addRange(r: Range, r2: Range): Range { + return { end: { line: r.end.line + r2.end.line, character: r.end.character + r2.end.character }, start: { character: r.start.character, line: r.start.line } }; } \ No newline at end of file diff --git a/src/dictionary/dictionary.ts b/src/dictionary/dictionary.ts index ff2ab68..86cd804 100644 --- a/src/dictionary/dictionary.ts +++ b/src/dictionary/dictionary.ts @@ -1,20 +1,69 @@ -import { Functionary, Rule, RuleType } from './index'; +import { Functionary, Rule, RuleType } from './index.js'; +import { NodeType } from '../types.js'; const rules: Rule[] = [ { name: 'imports-keyword', type: 'keyword', - default: 'import' + default: 'import', + conflicts: [], + description: 'Determines which keyword should be used to import modules defined in an imports statement.' + }, + { + name: 'imports-with-quotes', + type: 'boolean', + default: true, + conflicts: [], + description: 'Determines whether to import modules defined with import statements with quotes or not.' 
}, { name: 'function-value-return-enabled', type: 'boolean', - default: false + default: false, + conflicts: [], + description: 'Determines whether it is possible to return a value from a function using a keyword.' }, { name: 'function-value-return-keyword', type: 'keyword', - default: 'return' + default: 'return', + conflicts: [], + description: 'Determines the keyword used to return a value from a function. Must be used with `function-value-return-enabled` set to true to make a difference.' + }, + { + name: 'enforce-single-string-quotes', + type: 'boolean', + default: false, + conflicts: ['enforce-double-string-quotes'], + description: 'Enforces string values to have single quotes in output. Useful for languages like Java where quote type matters.' + }, + { + name: 'enforce-double-string-quotes', + type: 'boolean', + default: false, + conflicts: ['enforce-single-string-quotes'], + description: 'Enforces string values to have double quotes in output. Useful for languages like Java where quote type matters.' + }, + { + name: 'export-required', + type: 'boolean', + default: false, + conflicts: [], + description: 'Determines whether it is required to export a definable in order to reach it from another file.' + }, + { + name: 'export-keyword', + type: 'keyword', + default: 'export', + conflicts: [], + description: 'Determines the keyword used to export a definable.' + }, + { + name: 'export-enabled', + type: 'boolean', + default: true, + conflicts: [], + description: 'Determines whether it is possible to export a definable with a keyword.' 
} ]; @@ -38,5 +87,7 @@ export namespace dictionary { export const PrimitiveTypes: string[] = ['int', 'decimal', 'boolean', 'string']; export const Keywords: string[] = ['export', 'rule', 'keyword', 'import', 'operator', 'function', 'global']; export const Functionaries: Functionary[] = func; + export const ExportableNodeTypes: NodeType[] = [NodeType.Function, NodeType.Operator, NodeType.Keyword, NodeType.Rule, NodeType.Global]; + export const StatementTypesWithBody: NodeType[] = [NodeType.Operator, NodeType.Function, NodeType.Global]; } \ No newline at end of file diff --git a/src/dictionary/index.ts b/src/dictionary/index.ts index 998507e..02349ec 100644 --- a/src/dictionary/index.ts +++ b/src/dictionary/index.ts @@ -1,6 +1,6 @@ -import { BaseRule, BooleanRule, Rule, RuleType, StringRule } from './rules'; -import { Functionary, FunctionaryValueType } from './functionaries'; -import { dictionary } from './dictionary'; +import { BaseRule, BooleanRule, Rule, RuleType, StringRule } from './rules.js'; +import { Functionary, FunctionaryValueType } from './functionaries.js'; +import { dictionary } from './dictionary.js'; export { dictionary }; export { BaseRule, RuleType, Functionary, FunctionaryValueType, BooleanRule, Rule, StringRule }; \ No newline at end of file diff --git a/src/dictionary/rules.ts b/src/dictionary/rules.ts index 7b98648..001ac36 100644 --- a/src/dictionary/rules.ts +++ b/src/dictionary/rules.ts @@ -6,12 +6,14 @@ export type RuleType = 'keyword' | 'boolean'; /** * Base interface for rules. Represents a rule that can be modified by any file using `rule` modifier. 
* @author efekos - * @version 1.0.0 - * @since 0.0.1-alpha + * @version 1.0.1 + * @since 0.0.2-alpha */ export interface BaseRule { name: string; type: RuleType; + conflicts: string[]; + description: string; } /** diff --git a/src/index.ts b/src/index.ts index 6e9d7ca..7eaaa68 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,26 +1,21 @@ -import { AnyExportable, Export, ExportType, Function, Keyword, OneParameterMethod, Operator, ReturnerMethod, SyntaxScriptCompiler, escapeRegex } from './compiler.js'; +import { AnyExportable, ExportType, Exported, ExportedFunction, ExportedKeyword, ExportedOperator, OneParameterMethod, ReturnerMethod, SyntaxScriptCompiler, escapeRegex } from './compiler.js'; import { BaseRule, BooleanRule, Functionary, FunctionaryValueType, Rule, RuleType, StringRule, dictionary } from './dictionary/index.js'; -import { BraceExpression, CompileStatement, ExportStatement, Expression, FunctionStatement, ImportStatement, ImportsStatement, KeywordStatement, OperatorStatement, ParenExpression, PrimitiveTypeExpression, ProgramStatement, RuleStatement, SquareExpression, Statement, StringExpression, VariableExpression, WhitespaceIdentifierExpression } from './types.js'; -import { CompilerError, isCompilerError } from './types.js'; -import { Node, NodeType, Token, TokenType } from './types.js'; -import { SyxConfig, SyxConfigCompile } from './types.js'; +import { BraceExpression, CompileStatement, CompilerError, Expression, FunctionStatement, GlobalStatement, ImportStatement, ImportsStatement, KeywordStatement, Node, NodeType, OperatorStatement, ParenExpression, PrimitiveTypeExpression, ProgramStatement, RuleStatement, SquareExpression, Statement, StringExpression, SyxConfig, SyxConfigCompile, Token, TokenType, VariableExpression, WhitespaceIdentifierExpression, isCompilerError, statementIsA } from './types.js'; +import { createSyntaxScriptDiagnosticReport, subRange } from './diagnostic.js'; import { sysparser, syxparser } from './ast.js'; import { 
tokenizeSys, tokenizeSyx } from './lexer.js'; -import { createSyntaxScriptDiagnosticReport } from './diagnostic.js'; - export { sysparser, syxparser, dictionary }; -export { escapeRegex, createSyntaxScriptDiagnosticReport, tokenizeSys, tokenizeSyx, isCompilerError }; +export { escapeRegex, createSyntaxScriptDiagnosticReport, tokenizeSys, tokenizeSyx, isCompilerError, statementIsA, subRange }; export { BaseRule, BooleanRule, Rule, RuleType, StringRule }; export { Functionary, FunctionaryValueType }; - export { SyntaxScriptCompiler, ExportType }; -export { AnyExportable, Export, Function, Keyword, OneParameterMethod, Operator, ReturnerMethod }; +export { AnyExportable, Exported, ExportedFunction, ExportedKeyword, OneParameterMethod, ExportedOperator, ReturnerMethod }; export { CompilerError }; export { Token, TokenType, Node, NodeType }; export { Expression, BraceExpression, ParenExpression, SquareExpression, StringExpression, VariableExpression, PrimitiveTypeExpression, WhitespaceIdentifierExpression }; -export { Statement, RuleStatement, ExportStatement, ImportStatement, ImportsStatement, CompileStatement, OperatorStatement, FunctionStatement, KeywordStatement, ProgramStatement }; +export { Statement, RuleStatement, ImportStatement, ImportsStatement, CompileStatement, OperatorStatement, FunctionStatement, KeywordStatement, ProgramStatement, GlobalStatement }; export { SyxConfig, SyxConfigCompile }; \ No newline at end of file diff --git a/src/lexer.ts b/src/lexer.ts index 90b3b19..167103e 100644 --- a/src/lexer.ts +++ b/src/lexer.ts @@ -1,5 +1,5 @@ -import { CompilerError, Token, TokenType } from './types.js'; import { Position, Range } from 'lsp-types'; +import { Token, TokenType } from './types.js'; const keywords: Record = { operator: TokenType.OperatorKeyword, @@ -53,7 +53,7 @@ function isInt(src: string) { } function opr(line: number, character: number): Range { - return { end: { line, character }, start: { line, character } }; + return { end: { line, 
character: character + 1 }, start: { line, character: character } }; } function pos(line: number, character: number): Position { @@ -69,37 +69,43 @@ function tpr(start: Position, end: Position): Range { * @param {string} source Source string. * @returns A list of tokens generated from source string. * @author efekos - * @version 1.0.7 - * @since 0.0.1-alpha - * @throws LexerError if an error occurs. + * @version 1.0.9 + * @since 0.0.2-alpha */ export function tokenizeSyx(source: string): Token[] { const tokens: Token[] = []; const src = source.split(''); - let curPos = 0; + let lastString = 'n'; + let inString = false; + function t(s: string) { + if (lastString === 'n') { lastString = s; inString = true; } + else if (lastString === '\'' && s === '\'' || (lastString === '"' && s === '"')) { lastString = 'n'; inString = false; }; + } + let curPos = 1; let curLine = 1; while (src.length > 0) { - if (src[0] === '/' && src[1] === '/') { + if (src[0] === '/' && src[1] === '/' && !inString) { while (src.length > 0 && src[0] as string !== '\n') { src.shift(); + curPos++; } } - if (src[0] === '(') tokens.push({ type: TokenType.OpenParen, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === ')') tokens.push({ type: TokenType.CloseParen, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === '{') tokens.push({ type: TokenType.OpenBrace, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === '}') tokens.push({ type: TokenType.CloseBrace, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === '[') tokens.push({ type: TokenType.OpenSquare, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === ']') tokens.push({ type: TokenType.CloseSquare, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === ',') tokens.push({ type: TokenType.Comma, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === ';') tokens.push({ type: 
TokenType.Semicolon, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === '<') tokens.push({ type: TokenType.OpenDiamond, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === '>') tokens.push({ type: TokenType.CloseDiamond, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === '\'') tokens.push({ type: TokenType.SingleQuote, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === '"') tokens.push({ type: TokenType.DoubleQuote, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === '|') tokens.push({ type: TokenType.VarSeperator, value: src.shift(), range: opr(curLine, curPos++) }); + if (src[0] === '(') tokens.push({ type: inString ? 20 : TokenType.OpenParen, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === ')') tokens.push({ type: inString ? 20 : TokenType.CloseParen, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === '{') tokens.push({ type: inString ? 20 : TokenType.OpenBrace, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === '}') tokens.push({ type: inString ? 20 : TokenType.CloseBrace, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === '[') tokens.push({ type: inString ? 20 : TokenType.OpenSquare, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === ']') tokens.push({ type: inString ? 20 : TokenType.CloseSquare, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === ',') tokens.push({ type: inString ? 20 : TokenType.Comma, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === ';') tokens.push({ type: inString ? 20 : TokenType.Semicolon, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === '<') tokens.push({ type: inString ? 20 : TokenType.OpenDiamond, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === '>') tokens.push({ type: inString ? 
20 : TokenType.CloseDiamond, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === '\'') { tokens.push({ type: TokenType.SingleQuote, value: src.shift(), range: opr(curLine, curPos++) }); t('\''); } + else if (src[0] === '"') { tokens.push({ type: TokenType.DoubleQuote, value: src.shift(), range: opr(curLine, curPos++) }); t('"'); } + else if (src[0] === '|') tokens.push({ type: inString ? 20 : TokenType.VarSeperator, value: src.shift(), range: opr(curLine, curPos++) }); else if (src[0] === '+' && chars.includes(src[1])) { - if (src[1] === 's') tokens.push({ type: TokenType.WhitespaceIdentifier, value: '+s', range: tpr(pos(curLine, curPos), pos(curLine, curPos + 2)) }); + if (src[1] === 's') tokens.push({ type: inString ? 20 : TokenType.WhitespaceIdentifier, value: '+s', range: tpr(pos(curLine, curPos), pos(curLine, curPos + 2)) }); curPos += 2; src.shift(); src.shift(); } else if (isInt(src[0])) { @@ -110,7 +116,7 @@ export function tokenizeSyx(source: string): Token[] { } curPos += ident.length; - tokens.push({ type: TokenType.IntNumber, value: ident, range: tpr(pos(curLine, startPos), pos(curLine, curPos)) }); + tokens.push({ type: inString ? 20 : TokenType.IntNumber, value: ident, range: tpr(pos(curLine, startPos), pos(curLine, curPos)) }); } else if (isAlphabetic(src[0])) { let ident = ''; const startPos = curPos; @@ -120,8 +126,8 @@ export function tokenizeSyx(source: string): Token[] { } const reserved = keywords[ident]; - tokens.push({ type: reserved ?? TokenType.Identifier, value: ident, range: tpr(pos(curLine, startPos), pos(curLine, curPos)) }); - } else if (isSkippable(src[0])) { + tokens.push({ type: inString ? 20 : reserved ?? 
TokenType.Identifier, value: ident, range: tpr(pos(curLine, startPos), pos(curLine, curPos)) }); + } else if (isSkippable(src[0]) && !inString) { src.shift(); curPos++; if (src[0] === '\n') { curLine++; curPos = 0; }; @@ -138,20 +144,26 @@ export function tokenizeSyx(source: string): Token[] { * @param {string} source Source string. * @returns A list of tokens generated from the source file. * @author efekos - * @version 1.0.4 - * @since 0.0.1-alpha + * @version 1.0.6 + * @since 0.0.2-alpha */ export function tokenizeSys(source: string): Token[] { const src = source.split(''); const tokens: Token[] = []; + let lastString = 'n'; + let inString = false; + function t(s: string) { + if (lastString === 'n') { lastString = s; inString = true; } + else if (lastString === '\'' && s === '\'' || (lastString === '"' && s === '"')) { lastString = 'n'; inString = false; }; + } let curPos = 0; let curLine = 1; while (src.length > 0 && `${src[0]}${src[1]}${src[2]}` !== ':::') { - if (src[0] === ';') tokens.push({ type: TokenType.Semicolon, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === '\'') tokens.push({ type: TokenType.SingleQuote, value: src.shift(), range: opr(curLine, curPos++) }); - else if (src[0] === '"') tokens.push({ type: TokenType.DoubleQuote, value: src.shift(), range: opr(curLine, curPos++) }); + if (src[0] === ';') tokens.push({ type: inString ? 20 : TokenType.Semicolon, value: src.shift(), range: opr(curLine, curPos++) }); + else if (src[0] === '\'') { tokens.push({ type: TokenType.SingleQuote, value: src.shift(), range: opr(curLine, curPos++) }); t('\''); } + else if (src[0] === '"') { tokens.push({ type: TokenType.DoubleQuote, value: src.shift(), range: opr(curLine, curPos++) }); t('"'); } else if (isAlphabetic(src[0])) { let ident = ''; const startPos = curPos; @@ -162,7 +174,7 @@ export function tokenizeSys(source: string): Token[] { const reserved = keywords[ident]; tokens.push({ type: reserved ?? 
TokenType.Identifier, value: ident, range: tpr(pos(curLine, startPos), pos(curLine, curPos)) }); - } else if (isSkippable(src[0])) { + } else if (isSkippable(src[0]) && !inString) { src.shift(); curPos++; if (src[0] === '\n') curLine++; diff --git a/src/test/compiler.test.ts b/src/test/compiler.test.ts new file mode 100644 index 0000000..ac0c79b --- /dev/null +++ b/src/test/compiler.test.ts @@ -0,0 +1,313 @@ +import { CompileStatement, FunctionStatement, GlobalStatement, ImportStatement, ImportsStatement, KeywordStatement, NodeType, ProgramStatement, RuleStatement, Token, TokenType, isCompilerError } from '../types.js'; +import { Diagnostic, DiagnosticSeverity, DocumentDiagnosticReportKind, Range } from 'lsp-types'; +import { describe, inst, it } from '@efekos/es-test/bin/testRunner.js'; +import { tokenizeSys, tokenizeSyx } from '../lexer.js'; +import { HandlerFn } from '@efekos/es-test/bin/types.js'; +import { createSyntaxScriptDiagnosticReport } from '../diagnostic.js'; +import { expect } from 'chai'; +import { syxparser } from '../ast.js'; + +function r(sc:number,ec:number):Range { + return {start:{line:1,character:sc},end:{line:1,character:ec}}; +} + +describe('Compiler module', () => { + + function rangeExpectations(r: Range) { + expect(r).to.have.property('start').to.be.a('object'); + expect(r).to.have.property('end').to.be.a('object'); + expect(r.start).to.have.property('line').to.be.a('number').to.be.greaterThanOrEqual(0); + expect(r.start).to.have.property('character').to.be.a('number').to.be.greaterThanOrEqual(0); + expect(r.end).to.have.property('character').to.be.a('number').to.be.greaterThanOrEqual(0); + expect(r.end).to.have.property('line').to.be.a('number').to.be.greaterThanOrEqual(0); + } + + function tokenExpectations(t: Token) { + expect(t).to.have.property('range').to.be.a('object'); + rangeExpectations(t.range); + + expect(t).to.have.property('type').to.be.a('number').to.be.greaterThanOrEqual(0); + 
expect(t).to.have.property('value').to.be.a('string').to.be.not.equal(undefined); + } + + it('should provide correct ranges', () => { + + inst(() => { + + const tokens = tokenizeSyx('keyword hello;'); + + tokens.map(r => r.range).forEach(r => rangeExpectations(r)); + expect(tokens[0].range).to.deep.equal({ end: { line: 1, character: 8 }, start: { line: 1, character: 1 } }); + expect(tokens[1].range).to.deep.equal({ end: { line: 1, character: 14 }, start: { line: 1, character: 9 } }); + expect(tokens[2].range).to.deep.equal({ end: { line: 1, character: 15 }, start: { line: 1, character: 14 } }); + + }); + + inst(() => { + + const tokens = tokenizeSyx('rule "imports-keyword": cray;'); + + expect(tokens).to.be.a('array').to.have.lengthOf(10); + tokens.map(r => r.range).forEach(r => rangeExpectations(r)); + expect(tokens[0].range).to.be.deep.equal({ end: { line: 1, character: 5 }, start: { line: 1, character: 1 } }); + expect(tokens[1].range).to.be.deep.equal({ end: { line: 1, character: 7 }, start: { line: 1, character: 6 } }); + expect(tokens[2].range).to.be.deep.equal({ end: { line: 1, character: 14 }, start: { line: 1, character: 7 } }); + expect(tokens[3].range).to.be.deep.equal({ end: { line: 1, character: 15 }, start: { line: 1, character: 14 } }); + expect(tokens[4].range).to.be.deep.equal({ end: { line: 1, character: 22 }, start: { line: 1, character: 15 } }); + expect(tokens[5].range).to.be.deep.equal({ end: { line: 1, character: 23 }, start: { line: 1, character: 22 } }); + expect(tokens[6].range).to.be.deep.equal({ end: { line: 1, character: 24 }, start: { line: 1, character: 23 } }); + expect(tokens[7].range).to.be.deep.equal({ end: { line: 1, character: 29 }, start: { line: 1, character: 25 } }); + expect(tokens[8].range).to.be.deep.equal({ end: { line: 1, character: 30 }, start: { line: 1, character: 29 } }); + + }); + + inst(() => { + const tokens = tokenizeSyx('rule "return-function-value-enabled":true;'); + + 
expect(tokens).to.be.a('array').to.have.lengthOf(14); + tokens.map(r => r.range).forEach(r => rangeExpectations(r)); + expect(tokens[0].range).to.be.deep.equal({ end: { line: 1, character: 5 }, start: { line: 1, character: 1 } }); + expect(tokens[1].range).to.be.deep.equal({ end: { line: 1, character: 7 }, start: { line: 1, character: 6 } }); + expect(tokens[2].range).to.be.deep.equal({ end: { line: 1, character: 13 }, start: { line: 1, character: 7 } }); + expect(tokens[3].range).to.be.deep.equal({ end: { line: 1, character: 14 }, start: { line: 1, character: 13 } }); + expect(tokens[4].range).to.be.deep.equal({ end: { line: 1, character: 22 }, start: { line: 1, character: 14 } }); + expect(tokens[5].range).to.be.deep.equal({ end: { line: 1, character: 23 }, start: { line: 1, character: 22 } }); + expect(tokens[6].range).to.be.deep.equal({ end: { line: 1, character: 28 }, start: { line: 1, character: 23 } }); + expect(tokens[7].range).to.be.deep.equal({ end: { line: 1, character: 29 }, start: { line: 1, character: 28 } }); + expect(tokens[8].range).to.be.deep.equal({ end: { line: 1, character: 36 }, start: { line: 1, character: 29 } }); + expect(tokens[9].range).to.be.deep.equal({ end: { line: 1, character: 37 }, start: { line: 1, character: 36 } }); + expect(tokens[10].range).to.be.deep.equal({ end: { line: 1, character: 38 }, start: { line: 1, character: 37 } }); + expect(tokens[11].range).to.be.deep.equal({ end: { line: 1, character: 42 }, start: { line: 1, character: 38 } }); + expect(tokens[12].range).to.be.deep.equal({ end: { line: 1, character: 43 }, start: { line: 1, character: 42 } }); + + }); + + }, true); + + it('should provide correct tokenization', () => { + + function _case(src: string, types: TokenType[]): HandlerFn { + return () => { + const ts = tokenizeSyx(src); + + expect(ts).to.be.a('array'); + ts.forEach(t => tokenExpectations(t)); + expect(ts.map(t => t.type)).to.be.deep.equal(types); + }; + } + + inst( + _case('class } > ) ] , compile "" 
export function global random import imports 1 keyword { < ( [ operator * rule ; \'\' | +s', [ + TokenType.ClassKeyword, TokenType.CloseBrace, TokenType.CloseDiamond, TokenType.CloseParen, TokenType.CloseSquare, TokenType.Comma, TokenType.CompileKeyword, TokenType.DoubleQuote, TokenType.DoubleQuote, + TokenType.ExportKeyword, TokenType.FunctionKeyword, TokenType.GlobalKeyword, TokenType.Identifier, TokenType.ImportKeyword, TokenType.ImportsKeyword, TokenType.IntNumber, TokenType.KeywordKeyword, + TokenType.OpenBrace, TokenType.OpenDiamond, TokenType.OpenParen, TokenType.OpenSquare, TokenType.OperatorKeyword, TokenType.Raw, TokenType.RuleKeyword, TokenType.Semicolon, TokenType.SingleQuote, TokenType.SingleQuote, + TokenType.VarSeperator, TokenType.WhitespaceIdentifier, TokenType.EndOfFile + ]) + ); + + inst( + _case('class}>)],compile""exportfunctionglobalrandomimportimports1keyword{<([operator*rule;\'\'|+s', [ + TokenType.ClassKeyword, TokenType.CloseBrace, TokenType.CloseDiamond, TokenType.CloseParen, TokenType.CloseSquare, TokenType.Comma, TokenType.CompileKeyword, TokenType.DoubleQuote, TokenType.DoubleQuote, + TokenType.Identifier, TokenType.IntNumber, TokenType.KeywordKeyword, TokenType.OpenBrace, TokenType.OpenDiamond, TokenType.OpenParen, TokenType.OpenSquare, TokenType.OperatorKeyword, TokenType.Raw, + TokenType.RuleKeyword, TokenType.Semicolon, TokenType.SingleQuote, TokenType.SingleQuote, TokenType.VarSeperator, TokenType.WhitespaceIdentifier, TokenType.EndOfFile + ]) + ); + + inst( + _case( + '+s+s+s+s+s+s+s', + [TokenType.WhitespaceIdentifier, TokenType.WhitespaceIdentifier, TokenType.WhitespaceIdentifier, TokenType.WhitespaceIdentifier, TokenType.WhitespaceIdentifier, TokenType.WhitespaceIdentifier, TokenType.WhitespaceIdentifier, TokenType.EndOfFile] + ) + ); + + inst( + _case( + 'operator "+" {', + [TokenType.OperatorKeyword, TokenType.OpenDiamond, TokenType.Identifier, TokenType.CloseDiamond, TokenType.DoubleQuote, TokenType.Raw, 
TokenType.DoubleQuote, TokenType.OpenDiamond, TokenType.Identifier, TokenType.CloseDiamond, TokenType.OpenBrace, TokenType.EndOfFile] + ) + ); + + inst( + _case( + 'o-+?', + [TokenType.Identifier, TokenType.Raw, TokenType.Raw, TokenType.Raw, TokenType.EndOfFile] + ) + ); + + inst( + _case( + 'rmh09345kg9', + [TokenType.Identifier, TokenType.IntNumber, TokenType.Identifier, TokenType.IntNumber, TokenType.EndOfFile] + ) + ); + + inst( + _case( + 'rule \'custom-random-rule?\';', + [TokenType.RuleKeyword, TokenType.SingleQuote, 20, 20, 20, 20, 20, 20, TokenType.SingleQuote, TokenType.Semicolon, TokenType.EndOfFile] + ) + ); + + inst( + _case( + 'keyword pray;rule\'imports-keyword\': pray;', + [TokenType.KeywordKeyword, TokenType.Identifier, TokenType.Semicolon, TokenType.RuleKeyword, TokenType.SingleQuote, 20, 20, 20, TokenType.SingleQuote, TokenType.Raw, TokenType.Identifier, TokenType.Semicolon, TokenType.EndOfFile] + ) + ); + + inst( + _case( + 'çş', + [TokenType.Raw, TokenType.Raw, TokenType.EndOfFile] + ) + ); + + inst( + _case( + 'keyword altınasıçĞ;', + [TokenType.KeywordKeyword, TokenType.Identifier, 20, TokenType.Identifier, 20, 20, 20, TokenType.Semicolon, TokenType.EndOfFile] + ) + ); + + inst( + _case( + 'keyword imsodonewiththistest12casesisenough', + [TokenType.KeywordKeyword, TokenType.Identifier, TokenType.IntNumber, TokenType.Identifier, TokenType.EndOfFile] + ) + ); + + inst(() => { + + const sys = tokenizeSys('import "" \'\' ; :::'); + const sysList = [TokenType.ImportKeyword, TokenType.DoubleQuote, TokenType.DoubleQuote, TokenType.SingleQuote, TokenType.SingleQuote, TokenType.Semicolon, TokenType.EndOfFile]; + + sys.forEach(t => tokenExpectations(t)); + expect(sys).to.be.a('array'); + expect(sys.map(tt => tt.type)).to.be.deep.equal(sysList); + + }); + + }, true); + + describe('should provide correct parsing', () => { + + function astTypeExpectations(ast: ProgramStatement) { + expect(ast).to.be.a('object'); + 
expect(ast).to.have.property('type').to.be.a('number').to.be.equal(NodeType.Program); + expect(ast).to.have.property('modifiers').to.be.a('array').to.have.lengthOf(0); + expect(ast).to.have.property('body').to.be.a('array').to.have.lengthOf(1); + expect(ast).to.have.property('range').to.be.a('object'); + rangeExpectations(ast.range); + } + + it('for keyword statements', () => { + + const tokens = tokenizeSyx('keyword ruleish;'); + const ast = syxparser.parseTokens(tokens, 'TEST_FILE'); + const stmt: KeywordStatement = { type: NodeType.Keyword, modifiers: [], range: { end: { line: 1, character: 16 }, start: { line: 1, character: 1 } }, word: {value:'ruleish',type:NodeType.Identifier,modifiers:[],range:{start:{line:1,character:9},end:{line:1,character:16}}} }; + + astTypeExpectations(ast); + expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); + + }); + + it('for rule statements', () => { + + const tokens = tokenizeSyx('rule \'function-value-return-enabled\': true;'); + const ast = syxparser.parseTokens(tokens, 'TEST_FILE'); + const stmt: RuleStatement = { range: { start: { line: 1, character: 1 }, end: { line: 1, character: 43 } }, modifiers: [], rule: {range:r(6,37),type:NodeType.String,value:'function-value-return-enabled',modifiers:[]}, value: 'true', type: NodeType.Rule }; + + astTypeExpectations(ast); + expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); + + }); + + it('for compile statements', () => { + + const tokens = tokenizeSyx('compile(ts,js) \'test\';'); + const ast = syxparser.parseTokens(tokens, 'TEST_FILE'); + const stmt: CompileStatement = { range: { start: { line: 1, character: 1 }, end: { line: 1, character: 22 } }, formats: [{modifiers:[],type:NodeType.Identifier,range:r(9,11),value:'ts'},{modifiers:[],type:NodeType.Identifier,range:r(12,14),value:'js'}], type: NodeType.Compile, modifiers: [], body: [{ type: NodeType.String, modifiers: [], range: { start: { line: 1, character: 16 }, end: { line: 1, character: 22 } }, value: 
'test' }] }; + + astTypeExpectations(ast); + expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); + + }); + + it('for imports statements', () => { + + const tokens = tokenizeSyx('imports(ts,js) \'math\';'); + const ast = syxparser.parseTokens(tokens, 'TEST_FILE'); + const stmt: ImportsStatement = { range: { start: { line: 1, character: 1 }, end: { line: 1, character: 22 } }, formats: [{modifiers:[],type:NodeType.Identifier,range:r(9,11),value:'ts'},{modifiers:[],type:NodeType.Identifier,range:r(12,14),value:'js'}], type: NodeType.Imports, modifiers: [], module: {range:r(16,22),modifiers:[],type:NodeType.String,value:'math'} }; + + astTypeExpectations(ast); + expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); + + }); + + it('for global statements', () => { + + const tokens = tokenizeSyx('global randomizer {}'); + const ast = syxparser.parseTokens(tokens, 'TEST_FILE'); + const stmt: GlobalStatement = { range: { start: { line: 1, character: 1 }, end: { line: 1, character: 21 } }, name: {type:NodeType.Identifier,modifiers:[],range:r(8,18),value:'randomizer'}, type: NodeType.Global, modifiers: [], body: [] }; + + astTypeExpectations(ast); + expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); + + }); + + it('for function statements', () => { + + const tokens = tokenizeSyx('function randomizer {}'); + const ast = syxparser.parseTokens(tokens, 'TEST_FILE'); + const stmt: FunctionStatement = { range: { start: { line: 1, character: 1 }, end: { line: 1, character: 29 } }, name: {type:NodeType.Identifier,modifiers:[],range:r(10,20),value:'randomizer'}, type: NodeType.Function, modifiers: [], body: [], arguments: [{modifiers:[],range:r(21,26),type:NodeType.PrimitiveType,value:'int'}] }; + + astTypeExpectations(ast); + expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); + + }); + + it('for import statements', () => { + + const tokens = tokenizeSyx('import \'./math\';'); + const ast = syxparser.parseTokens(tokens, 'TEST_FILE'); + const 
stmt: ImportStatement = { range: { start: { line: 1, character: 1 }, end: { line: 1, character: 16 } }, type: NodeType.Import, modifiers: [], path: {range:r(8,16),value:'./math',modifiers:[],type:NodeType.String} }; + + astTypeExpectations(ast); + expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); + + }); + + it('for export statements', () => { + + const tokens = tokenizeSyx('export keyword ruleish;'); + const ast = syxparser.parseTokens(tokens, 'TEST_FILE'); + const stmt: KeywordStatement = { type: NodeType.Keyword, modifiers: [{ range: { end: { line: 1, character: 7 }, start: { line: 1, character: 1 } }, type: TokenType.ExportKeyword, value: 'export' }], range: { end: { line: 1, character: 23 }, start: { line: 1, character: 1 } }, word: {range:r(16,23),modifiers:[],type:NodeType.Identifier,value:'ruleish'} }; + + astTypeExpectations(ast); + expect(ast.body[0]).to.be.a('object').to.be.deep.equal(stmt); + + }); + + }); + + it('should provide correct diagnostic reports', () => { + + const report = createSyntaxScriptDiagnosticReport('TEST_FILE.syx', 'keyword ruleis'); + + expect(report).to.be.a('object'); + expect(report).to.have.property('items').to.be.a('array').to.have.lengthOf(1); + expect(report).to.have.property('kind').to.be.a('string').to.be.equal(DocumentDiagnosticReportKind.Full); + + const diag = report.items[0]; + const item: Diagnostic = { message: 'Expected \';\' after statement, found \'EOF\'.', range: { start: { line: 0, character: 0 }, end: { line: 0, character: 0 } }, severity: DiagnosticSeverity.Error, source: 'syntax-script', data: [] }; + + expect(diag).to.have.property('message').to.be.a('string'); + expect(diag).to.have.property('range'); + expect(diag).to.have.property('severity').to.be.a('number').to.be.equal(DiagnosticSeverity.Error); + rangeExpectations(diag.range); + expect(diag).to.have.property('source').to.be.a('string').to.be.equal('syntax-script'); + expect(diag).to.be.a('object').to.be.deep.equal(item); + + }); + +}); \ No 
newline at end of file diff --git a/src/types.ts b/src/types.ts index e7ac27a..3298165 100644 --- a/src/types.ts +++ b/src/types.ts @@ -1,4 +1,4 @@ -import { CodeAction, CodeActionKind, Range } from 'lsp-types'; +import { CodeAction, Range } from 'lsp-types'; /** * Every token type a syntax script declaration file can contain. If something can't be recognized as a token, @@ -173,7 +173,7 @@ export interface Token { * Every node type a syntax script declaration file can contain. * @author efekos * @since 0.0.1-alpha - * @version 1.0.0 + * @version 1.0.1 */ export enum NodeType { @@ -204,11 +204,6 @@ export enum NodeType { */ Imports, // imports() method - /** - * {@link ExportStatement}. - */ - Export, - /** * {@link FunctionStatement}. */ @@ -264,7 +259,12 @@ export enum NodeType { /** * {@link SquareExpression}. */ - Square + Square, + + /** + * {@link IdentifierExpression}. + */ + Identifier } /** @@ -281,12 +281,13 @@ export interface ProgramStatement extends Statement { /** * Base statement interface. * @author efekos - * @version 1.0.3 + * @version 1.0.4 * @since 0.0.1-alpha */ export interface Statement { type: NodeType; range: Range; + modifiers: Token[]; } /** @@ -345,6 +346,16 @@ export interface StringExpression extends Expression { value: string; } +/** + * An expression that represents an alphabetical identifier. Uses type {@link NodeType.Identifier}. Contains the name of something. + * @author efekos + * @version 1.0.0 + * @since 0.0.2-alpha + */ +export interface IdentifierExpression extends Expression { + type: NodeType.Identifier; +} + /** * An expression that represents multiple statements inside braces (`{}`). Uses type {@link NodeType.Brace}. * @author efekos @@ -395,11 +406,11 @@ export interface OperatorStatement extends Statement { /** * Keyword statement that registers an identifier as a keyword. This keyword can be used in several places. Uses type {@link NodeType.Keyword}. 
* @author efekos - * @version 1.0.0 + * @version 1.0.1 * @since 0.0.1-alpha */ export interface KeywordStatement extends Statement { - word: string; + word: IdentifierExpression; type: NodeType.Keyword; } @@ -407,25 +418,25 @@ export interface KeywordStatement extends Statement { * Imports statements indicate that a certain module should be imported to the file if the parent statement is used in .sys file. * Uses type {@link NodeType.Imports}. * @author efekos - * @version 1.0.0 + * @version 1.0.2 * @since 0.0.1-alpha */ export interface ImportsStatement extends Statement { type: NodeType.Imports, - formats: string[]; - module: string; + formats: IdentifierExpression[]; + module: StringExpression; } /** * Compile statements determine what should be the result of an operator or a function when compiling to certain languages. * Uses typq {@link NodeType.Compile}. * @author efekos - * @version 1.0.0 + * @version 1.0.1 * @since 0.0.1-alpha */ export interface CompileStatement extends Statement { type: NodeType.Compile, - formats: string[], + formats: IdentifierExpression[], body: Expression[]; } @@ -433,12 +444,12 @@ export interface CompileStatement extends Statement { * Rule statements define a specific rule about the source language, such as keyword usages or enabling/disabling certain * features of the language. Uses type {@link NodeType.Rule}. * @author efekos - * @version 1.0.0 + * @version 1.0.1 * @since 0.0.1-alpha */ export interface RuleStatement extends Statement { type: NodeType.Rule; - rule: string; + rule: StringExpression; value: unknown; } @@ -446,49 +457,50 @@ export interface RuleStatement extends Statement { * Import statements are used to import a .syx file from a .sys file. They can be used to import other .syx files from a * .syx file as well. 
Uses type {@link NodeType.Import} * @author efekos - * @version 1.0.0 + * @version 1.0.1 * @since 0.0.1-alpha */ export interface ImportStatement extends Statement { type: NodeType.Import, - path: string; + path: StringExpression; } /** - * Export statements are used to export a certain statement, such as an operator or a keyword. Uses type {@link NodeType.Export} + * Function statements are used to define possible function calls. How the function is called depends on the place this statement is + * used. Uses type {@link NodeType.Function}. * @author efekos - * @version 1.0.0 + * @version 1.0.2 * @since 0.0.1-alpha */ -export interface ExportStatement extends Statement { - type: NodeType.Export, - body: Statement; +export interface FunctionStatement extends Statement { + type: NodeType.Function, + name: IdentifierExpression, + arguments: PrimitiveTypeExpression[]; + body: Statement[]; } /** - * Function statements are used to define possible function calls. How the function is called depends on the place this statement is - * used. Uses type {@link NodeType.Function}. + * Global statements are used to define values that are global. They can be global classes, interfaces, or just global methods depending on + * the language. But the only thing that matters here is that they are global, and can be used from anywhere. * @author efekos - * @version 1.0.0 - * @since 0.0.1-alpha + * @version 1.0.1 + * @since 0.0.2-alpha */ -export interface FunctionStatement extends Statement { - type: NodeType.Function, - name: string, - arguments: string[]; +export interface GlobalStatement extends Statement { body: Statement[]; + name: IdentifierExpression; } /** * Represents any interface that is a node. 
* @author efekos - * @version 1.0.0 + * @version 1.0.3 * @since 0.0.1-alpha */ export type Node = - ProgramStatement | OperatorStatement | CompileStatement | ImportStatement | ExportStatement | ImportsStatement | FunctionStatement | KeywordStatement | RuleStatement | - StringExpression | PrimitiveTypeExpression | VariableExpression | WhitespaceIdentifierExpression | BraceExpression | SquareExpression | ParenExpression; + ProgramStatement | OperatorStatement | CompileStatement | ImportStatement | ImportsStatement | FunctionStatement | KeywordStatement | RuleStatement | GlobalStatement | + StringExpression | PrimitiveTypeExpression | VariableExpression | WhitespaceIdentifierExpression | BraceExpression | SquareExpression | ParenExpression | IdentifierExpression; /** * Represents a syxconfig.json file. This file contains a few properties for the compiler. @@ -516,7 +528,6 @@ export interface SyxConfigCompile { } - /** * An error that occured while tokenizing, parsing or compiling a file. * @author efekos @@ -553,4 +564,37 @@ export class CompilerError extends Error { */ export function isCompilerError(error: Error): error is CompilerError { return error.name === 'CompilerError'; +} + +interface NodeTypes { + [NodeType.Brace]: BraceExpression; + [NodeType.Compile]: CompileStatement; + [NodeType.Function]: FunctionStatement; + [NodeType.Import]: ImportStatement; + [NodeType.Imports]: ImportsStatement; + [NodeType.Keyword]: KeywordStatement; + [NodeType.Operator]: OperatorStatement; + [NodeType.Paren]: ParenExpression; + [NodeType.PrimitiveType]: PrimitiveTypeExpression; + [NodeType.Program]: ProgramStatement; + [NodeType.Rule]: RuleStatement; + [NodeType.Square]: SquareExpression; + [NodeType.String]: StringExpression; + [NodeType.Variable]: VariableExpression; + [NodeType.WhitespaceIdentifier]: WhitespaceIdentifierExpression; + [NodeType.Global]: GlobalStatement; + [NodeType.Identifier]: IdentifierExpression; +} + +/** + * Determines whether the given node matches the 
expected node type. + * @param {Node} node Any node. + * @param {NodeType} nodeType Expected node type. + * @returns {boolean} True if the given node is of the expected node type, otherwise false. + * @author efekos + * @since 0.0.2-alpha + * @version 1.0.0 + */ +export function statementIsA(node: Statement, nodeType: T): node is NodeTypes[T] { + return node.type === nodeType; } \ No newline at end of file pFad - Phonifier reborn

Pfad - The Proxy pFad of © 2024 Garber Painting. All rights reserved.

Note: This service is not intended for secure transactions such as banking, social media, email, or purchasing. Use at your own risk. We assume no liability whatsoever for broken pages.


Alternative Proxies:

Alternative Proxy

pFad Proxy

pFad v3 Proxy

pFad v4 Proxy