diff --git a/package.json b/package.json index 07a42080..dcc5b818 100644 --- a/package.json +++ b/package.json @@ -58,28 +58,30 @@ "parse:grammars:tolk": "cd ./server/src/languages/tolk/tree-sitter-tolk/ && tree-sitter parse -D --open-log main.tolk", "test:e2e": "yarn test:e2e:tolk", "test:e2e:compile": "tsc -p ./server/src/e2e/tsconfig.json", - "test:e2e:tolk": "yarn test:e2e:compile && ts-node server/src/e2e/runTolkTest.ts", - "test:e2e:tolk:update": "yarn test:e2e:compile && ts-node server/src/e2e/runTolkTest.ts --update-snapshots", - "test:e2e:tolk:resolving:update": "yarn test:e2e:compile && ts-node server/src/e2e/runTolkTest.ts --suite resolving --update-snapshots", - "test:e2e:tolk:references:update": "yarn test:e2e:compile && ts-node server/src/e2e/runTolkTest.ts --suite references --update-snapshots", - "test:e2e:tolk:rename:update": "yarn test:e2e:compile && ts-node server/src/e2e/runTolkTest.ts --suite rename --update-snapshots", - "test:e2e:tolk:inspections:update": "yarn test:e2e:compile && ts-node server/src/e2e/runTolkTest.ts --suite inspections --update-snapshots", - "test:e2e:tolk:foldings:update": "yarn test:e2e:compile && ts-node server/src/e2e/runTolkTest.ts --suite foldings --update-snapshots", - "test:e2e:tolk:completion:update": "yarn test:e2e:compile && ts-node server/src/e2e/runTolkTest.ts --suite completion --update-snapshots", - "test:e2e:tolk:completion-select:update": "yarn test:e2e:compile && ts-node server/src/e2e/runTolkTest.ts --suite completion-select --update-snapshots", - "test:e2e:tolk:document-symbols:update": "yarn test:e2e:compile && ts-node server/src/e2e/runTolkTest.ts --suite document-symbols --update-snapshots", - "test:e2e:tolk:inlay-hints:update": "yarn test:e2e:compile && ts-node server/src/e2e/runTolkTest.ts --suite inlay-hints --update-snapshots", - "test:e2e:tolk:signature-help:update": "yarn test:e2e:compile && ts-node server/src/e2e/runTolkTest.ts --suite signature-help --update-snapshots", - "test:e2e:tolk:intentions:update": "yarn test:e2e:compile && ts-node server/src/e2e/runTolkTest.ts --suite intentions --update-snapshots", - "test:e2e:tolk:types:update": "yarn test:e2e:compile && ts-node server/src/e2e/runTolkTest.ts --suite types --update-snapshots", - "test:e2e:tolk:types-2:update": "yarn test:e2e:compile && ts-node server/src/e2e/runTolkTest.ts --suite types-2 --update-snapshots", - "test:e2e:tolk:documentation:update": "yarn test:e2e:compile && ts-node server/src/e2e/runTolkTest.ts --suite documentation --update-snapshots", - "test:e2e:tolk:type-resolving:update": "yarn test:e2e:compile && ts-node server/src/e2e/runTolkTest.ts --suite type-resolving --update-snapshots", - "test:e2e:tolk:multifile-resolving:update": "yarn test:e2e:compile && ts-node server/src/e2e/runTolkTest.ts --suite multifile-resolving --update-snapshots", - "test:e2e:tolk:multifile-intentions:update": "yarn test:e2e:compile && ts-node server/src/e2e/runTolkTest.ts --suite multifile-intentions --update-snapshots", - "test:e2e:tolk:multifile-completion-select:update": "yarn test:e2e:compile && ts-node server/src/e2e/runTolkTest.ts --suite multifile-completion-select --update-snapshots", - "test:e2e:tolk:multifile-inspections:update": "yarn test:e2e:compile && ts-node server/src/e2e/runTolkTest.ts --suite multifile-inspections --update-snapshots", - "test:e2e:tolk:compiler-tests:update": "yarn test:e2e:compile && ts-node server/src/e2e/runTolkTest.ts --suite compiler-tests --update-snapshots", + "test:e2e:tolk": "yarn test:e2e:compile && ts-node 
server/src/e2e/runLangTest.ts", + "test:e2e:tolk:update": "yarn test:e2e:compile && ts-node server/src/e2e/runLangTest.ts --update-snapshots", + "test:e2e:tolk:resolving:update": "yarn test:e2e:compile && ts-node server/src/e2e/runLangTest.ts --suite resolving --update-snapshots", + "test:e2e:tolk:references:update": "yarn test:e2e:compile && ts-node server/src/e2e/runLangTest.ts --suite references --update-snapshots", + "test:e2e:tolk:rename:update": "yarn test:e2e:compile && ts-node server/src/e2e/runLangTest.ts --suite rename --update-snapshots", + "test:e2e:tolk:inspections:update": "yarn test:e2e:compile && ts-node server/src/e2e/runLangTest.ts --suite inspections --update-snapshots", + "test:e2e:tolk:foldings:update": "yarn test:e2e:compile && ts-node server/src/e2e/runLangTest.ts --suite foldings --update-snapshots", + "test:e2e:tolk:completion:update": "yarn test:e2e:compile && ts-node server/src/e2e/runLangTest.ts --suite completion --update-snapshots", + "test:e2e:tolk:completion-select:update": "yarn test:e2e:compile && ts-node server/src/e2e/runLangTest.ts --suite completion-select --update-snapshots", + "test:e2e:tolk:document-symbols:update": "yarn test:e2e:compile && ts-node server/src/e2e/runLangTest.ts --suite document-symbols --update-snapshots", + "test:e2e:tolk:inlay-hints:update": "yarn test:e2e:compile && ts-node server/src/e2e/runLangTest.ts --suite inlay-hints --update-snapshots", + "test:e2e:tolk:signature-help:update": "yarn test:e2e:compile && ts-node server/src/e2e/runLangTest.ts --suite signature-help --update-snapshots", + "test:e2e:tolk:intentions:update": "yarn test:e2e:compile && ts-node server/src/e2e/runLangTest.ts --suite intentions --update-snapshots", + "test:e2e:tolk:types:update": "yarn test:e2e:compile && ts-node server/src/e2e/runLangTest.ts --suite types --update-snapshots", + "test:e2e:tolk:types-2:update": "yarn test:e2e:compile && ts-node server/src/e2e/runLangTest.ts --suite types-2 --update-snapshots", + "test:e2e:tolk:documentation:update": "yarn test:e2e:compile && ts-node server/src/e2e/runLangTest.ts --suite documentation --update-snapshots", + "test:e2e:tolk:type-resolving:update": "yarn test:e2e:compile && ts-node server/src/e2e/runLangTest.ts --suite type-resolving --update-snapshots", + "test:e2e:tolk:multifile-resolving:update": "yarn test:e2e:compile && ts-node server/src/e2e/runLangTest.ts --suite multifile-resolving --update-snapshots", + "test:e2e:tolk:multifile-intentions:update": "yarn test:e2e:compile && ts-node server/src/e2e/runLangTest.ts --suite multifile-intentions --update-snapshots", + "test:e2e:tolk:multifile-completion-select:update": "yarn test:e2e:compile && ts-node server/src/e2e/runLangTest.ts --suite multifile-completion-select --update-snapshots", + "test:e2e:tolk:multifile-inspections:update": "yarn test:e2e:compile && ts-node server/src/e2e/runLangTest.ts --suite multifile-inspections --update-snapshots", + "test:e2e:tolk:compiler-tests:update": "yarn test:e2e:compile && ts-node server/src/e2e/runLangTest.ts --suite compiler-tests --verbose --update-snapshots", + "test:e2e:func": "yarn test:e2e:compile && ts-node server/src/e2e/runLangTest.ts --lang FunC --update-snapshots", + "test:e2e:func:impure-inspection": "yarn test:e2e:compile && ts-node server/src/e2e/runLangTest.ts --lang FunC --suite ImpureInspection --update-snapshots", "pack:ls": "cd dist && npm pack", "publish:ls": "cd dist && npm publish", "build-server-package-and-publish": "yarn build && yarn pack:ls && yarn publish:ls", @@ -652,7 +654,8 @@ 
"unused-parameter", "unused-type-parameter", "unused-variable", - "unused-import" + "unused-import", + "unused-impure" ] }, "default": [], diff --git a/server/src/e2e/FunC/ImpureInspection.test.ts b/server/src/e2e/FunC/ImpureInspection.test.ts new file mode 100644 index 00000000..e0dbd348 --- /dev/null +++ b/server/src/e2e/FunC/ImpureInspection.test.ts @@ -0,0 +1,54 @@ +import * as assert from "node:assert" + +import * as vscode from "vscode" + +import {TestCase} from "../common/TestParser" +import {BaseTestSuite} from "../common/BaseTestSuite" + +suite("Impure inspection test suite", () => { + const testSuite = new (class extends BaseTestSuite { + public async getInspections(): Promise { + await new Promise(resolve => setTimeout(resolve, 200)) + + const diagnostics = vscode.languages.getDiagnostics(this.document.uri) + + const impureDiag = diagnostics + .filter(d => d.code == "unused-impure") + .sort((a, b) => { + if (a.range.start.line !== b.range.start.line) { + return a.range.start.line - b.range.start.line + } + return a.range.start.character - b.range.start.character + }) + + if (impureDiag.length === 0) { + return "no issues" + } + + return impureDiag + .map( + d => + `${d.range.start.line}:${d.range.start.character} to ${d.range.end.line}:${d.range.end.character}`, + ) + .join("\n") + } + + protected runTest(testFile: string, testCase: TestCase): void { + test(`Case: ${testCase.name}`, async () => { + await this.replaceDocumentText(testCase.input) + const inspectionRes = await this.getInspections() + assert.strictEqual(inspectionRes, testCase.expected) + }) + } + })() + suiteSetup(async function () { + this.timeout(10_000) + await testSuite.suiteSetup() + }) + + setup(async () => testSuite.setup()) + teardown(async () => testSuite.teardown()) + suiteTeardown(() => testSuite.suiteTeardown()) + + testSuite.runTestsFromDirectory("impure-inspection") +}) diff --git a/server/src/e2e/FunC/index.ts b/server/src/e2e/FunC/index.ts new file mode 100644 index 00000000..a77215e7 --- /dev/null +++ b/server/src/e2e/FunC/index.ts @@ -0,0 +1,153 @@ +// SPDX-License-Identifier: MIT +// Copyright © 2025 TON Studio +import * as path from "node:path" + +import * as Mocha from "mocha" +import {glob} from "glob" +import {Suite, Test} from "mocha" + +// node.js 20+ builtin +const globSync = (globs: string[], options: {cwd: string}): string[] => { + return globs.flatMap(g => glob.sync(g, options)) +} + +interface TestFilterOptions { + readonly suite?: string + readonly testPattern?: string + readonly verbose?: boolean +} + +function getFilterOptions(): TestFilterOptions { + return { + suite: process.env["TON_TEST_SUITE"], + testPattern: process.env["TON_TEST_PATTERN"], + verbose: process.env["TON_TEST_VERBOSE"] === "true", + } +} + +function getTestFilePattern(options: TestFilterOptions): string { + if (options.suite) { + return `${options.suite}.test.js` + } + return "*.test.js" +} + +function shouldIncludeTest(testName: string, options: TestFilterOptions): boolean { + if (options.testPattern) { + return testName.toLowerCase().includes(options.testPattern.toLowerCase()) + } + return true +} + +export async function run(): Promise { + const options = getFilterOptions() + + if (options.verbose) { + console.log("Test filter options:", options) + } + + const mocha = new Mocha({ + ui: "tdd", + color: true, + timeout: 20_000, + }) + + process.env["TON_TESTS"] = "true" + process.env["TEST_FUNC_STDLIB_PATH"] = "../server/src/e2e/FunC/stdlib" + + const testsRoot = path.resolve(__dirname, ".") + const 
testFilePattern = getTestFilePattern(options) + + if (options.verbose) { + console.log(`Looking for test files matching: ${testFilePattern}`) + console.log(`In directory: ${testsRoot}`) + } + + return new Promise((resolve, reject) => { + glob(testFilePattern, { + cwd: testsRoot, + }) + .then(files => { + files.sort((a, b) => { + if (a.includes("multifile-") && b.includes("multifile-")) { + return Number(a < b) + } + if (a.includes("multifile-") && !b.includes("multifile-")) { + return 1 + } + if (!a.includes("multifile-") && b.includes("multifile-")) { + return -1 + } + return Number(a < b) + }) + + if (files.length === 0) { + if (options.suite) { + console.error(`No test suite found matching: ${options.suite}`) + console.log("Available test suites:") + const allFiles = globSync(["*.test.js"], {cwd: testsRoot}) + for (const file of allFiles) { + const suiteName = path.basename(file, ".test.js") + console.log(` - ${suiteName}`) + } + reject(new Error(`Test suite '${options.suite}' not found`)) + } else { + reject(new Error("No test files found")) + return + } + } + + if (options.verbose) { + console.log(`Found ${files.length} test file(s):`) + for (const file of files) { + console.log(` - ${file}`) + } + } + + for (const f of files) { + mocha.addFile(path.resolve(testsRoot, f)) + } + + if (options.testPattern) { + const originalRun = mocha.run.bind(mocha) + mocha.run = function (callback: (failures: number) => void) { + const suite = this.suite + filterTestsRecursively(suite, options) + return originalRun(callback) + } + } + + try { + mocha.run(failures => { + if (failures > 0) { + reject(new Error(`${failures} tests failed.`)) + } else { + resolve() + } + }) + } catch (error) { + reject(error instanceof Error ? error : new Error(String(error))) + } + }) + .catch((error: unknown) => { + reject(error instanceof Error ? error : new Error(String(error))) + }) + }) +} + +function filterTestsRecursively(suite: Suite, options: TestFilterOptions): void { + if (!options.testPattern) return + + suite.tests = suite.tests.filter((test: Test) => shouldIncludeTest(test.title, options)) + for (const childSuite of suite.suites) { + filterTestsRecursively(childSuite, options) + } + suite.suites = suite.suites.filter((childSuite: Suite) => hasTests(childSuite)) +} + +function hasTests(suite: Suite): boolean { + if (suite.tests.length > 0) { + return true + } + return suite.suites.some(childSuite => hasTests(childSuite)) +} diff --git a/server/src/e2e/FunC/stdlib/stdlib.fc b/server/src/e2e/FunC/stdlib/stdlib.fc new file mode 100644 index 00000000..8fb27a7e --- /dev/null +++ b/server/src/e2e/FunC/stdlib/stdlib.fc @@ -0,0 +1,639 @@ +;; Standard library for funC +;; + +{- + This file is part of TON FunC Standard Library. + + FunC Standard Library is free software: you can redistribute it and/or modify + it under the terms of the GNU Lesser General Public License as published by + the Free Software Foundation, either version 2 of the License, or + (at your option) any later version. + + FunC Standard Library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Lesser General Public License for more details. + +-} + +{- + # Tuple manipulation primitives + The names and the types are mostly self-explaining. + See [polymorhism with forall](https://ton.org/docs/#/func/functions?id=polymorphism-with-forall) + for more info on the polymorphic functions. 
+ + Note that currently values of atomic type `tuple` can't be cast to composite tuple type (e.g. `[int, cell]`) + and vise versa. +-} + +{- + # Lisp-style lists + + Lists can be represented as nested 2-elements tuples. + Empty list is conventionally represented as TVM `null` value (it can be obtained by calling [null()]). + For example, tuple `(1, (2, (3, null)))` represents list `[1, 2, 3]`. Elements of a list can be of different types. +-} + +;;; Adds an element to the beginning of lisp-style list. +forall X -> tuple cons(X head, tuple tail) asm "CONS"; + +;;; Extracts the head and the tail of lisp-style list. +forall X -> (X, tuple) uncons(tuple list) asm "UNCONS"; + +;;; Extracts the tail and the head of lisp-style list. +forall X -> (tuple, X) list_next(tuple list) asm( -> 1 0) "UNCONS"; + +;;; Returns the head of lisp-style list. +forall X -> X car(tuple list) asm "CAR"; + +;;; Returns the tail of lisp-style list. +tuple cdr(tuple list) asm "CDR"; + +;;; Creates tuple with zero elements. +tuple empty_tuple() asm "NIL"; + +;;; Appends a value `x` to a `Tuple t = (x1, ..., xn)`, but only if the resulting `Tuple t' = (x1, ..., xn, x)` +;;; is of length at most 255. Otherwise throws a type check exception. +forall X -> tuple tpush(tuple t, X value) asm "TPUSH"; +forall X -> (tuple, ()) ~tpush(tuple t, X value) asm "TPUSH"; + +;;; Creates a tuple of length one with given argument as element. +forall X -> [X] single(X x) asm "SINGLE"; + +;;; Unpacks a tuple of length one +forall X -> X unsingle([X] t) asm "UNSINGLE"; + +;;; Creates a tuple of length two with given arguments as elements. +forall X, Y -> [X, Y] pair(X x, Y y) asm "PAIR"; + +;;; Unpacks a tuple of length two +forall X, Y -> (X, Y) unpair([X, Y] t) asm "UNPAIR"; + +;;; Creates a tuple of length three with given arguments as elements. +forall X, Y, Z -> [X, Y, Z] triple(X x, Y y, Z z) asm "TRIPLE"; + +;;; Unpacks a tuple of length three +forall X, Y, Z -> (X, Y, Z) untriple([X, Y, Z] t) asm "UNTRIPLE"; + +;;; Creates a tuple of length four with given arguments as elements. +forall X, Y, Z, W -> [X, Y, Z, W] tuple4(X x, Y y, Z z, W w) asm "4 TUPLE"; + +;;; Unpacks a tuple of length four +forall X, Y, Z, W -> (X, Y, Z, W) untuple4([X, Y, Z, W] t) asm "4 UNTUPLE"; + +;;; Returns the first element of a tuple (with unknown element types). +forall X -> X first(tuple t) asm "FIRST"; + +;;; Returns the second element of a tuple (with unknown element types). +forall X -> X second(tuple t) asm "SECOND"; + +;;; Returns the third element of a tuple (with unknown element types). +forall X -> X third(tuple t) asm "THIRD"; + +;;; Returns the fourth element of a tuple (with unknown element types). +forall X -> X fourth(tuple t) asm "3 INDEX"; + +;;; Returns the first element of a pair tuple. +forall X, Y -> X pair_first([X, Y] p) asm "FIRST"; + +;;; Returns the second element of a pair tuple. +forall X, Y -> Y pair_second([X, Y] p) asm "SECOND"; + +;;; Returns the first element of a triple tuple. +forall X, Y, Z -> X triple_first([X, Y, Z] p) asm "FIRST"; + +;;; Returns the second element of a triple tuple. +forall X, Y, Z -> Y triple_second([X, Y, Z] p) asm "SECOND"; + +;;; Returns the third element of a triple tuple. +forall X, Y, Z -> Z triple_third([X, Y, Z] p) asm "THIRD"; + + +;;; Push null element (casted to given type) +;;; By the TVM type `Null` FunC represents absence of a value of some atomic type. +;;; So `null` can actually have any atomic type. 
+forall X -> X null() asm "PUSHNULL"; + +;;; Moves a variable [x] to the top of the stack +forall X -> (X, ()) ~impure_touch(X x) impure asm "NOP"; + + + +;;; Returns the current Unix time as an Integer +int now() asm "NOW"; + +;;; Returns the internal address of the current smart contract as a Slice with a `MsgAddressInt`. +;;; If necessary, it can be parsed further using primitives such as [parse_std_addr]. +slice my_address() asm "MYADDR"; + +;;; Returns the balance of the smart contract as a tuple consisting of an int +;;; (balance in nanotoncoins) and a `cell` +;;; (a dictionary with 32-bit keys representing the balance of "extra currencies") +;;; at the start of Computation Phase. +;;; Note that RAW primitives such as [send_raw_message] do not update this field. +[int, cell] get_balance() asm "BALANCE"; + +;;; Returns the logical time of the current transaction. +int cur_lt() asm "LTIME"; + +;;; Returns the starting logical time of the current block. +int block_lt() asm "BLOCKLT"; + +;;; Computes the representation hash of a `cell` [c] and returns it as a 256-bit unsigned integer `x`. +;;; Useful for signing and checking signatures of arbitrary entities represented by a tree of cells. +int cell_hash(cell c) asm "HASHCU"; + +;;; Computes the hash of a `slice s` and returns it as a 256-bit unsigned integer `x`. +;;; The result is the same as if an ordinary cell containing only data and references from `s` had been created +;;; and its hash computed by [cell_hash]. +int slice_hash(slice s) asm "HASHSU"; + +;;; Computes sha256 of the data bits of `slice` [s]. If the bit length of `s` is not divisible by eight, +;;; throws a cell underflow exception. The hash value is returned as a 256-bit unsigned integer `x`. +int string_hash(slice s) asm "SHA256U"; + +{- + # Signature checks +-} + +;;; Checks the Ed25519-`signature` of a `hash` (a 256-bit unsigned integer, usually computed as the hash of some data) +;;; using [public_key] (also represented by a 256-bit unsigned integer). +;;; The signature must contain at least 512 data bits; only the first 512 bits are used. +;;; The result is `−1` if the signature is valid, `0` otherwise. +;;; Note that `CHKSIGNU` creates a 256-bit slice with the hash and calls `CHKSIGNS`. +;;; That is, if [hash] is computed as the hash of some data, these data are hashed twice, +;;; the second hashing occurring inside `CHKSIGNS`. +int check_signature(int hash, slice signature, int public_key) asm "CHKSIGNU"; + +;;; Checks whether [signature] is a valid Ed25519-signature of the data portion of `slice data` using `public_key`, +;;; similarly to [check_signature]. +;;; If the bit length of [data] is not divisible by eight, throws a cell underflow exception. +;;; The verification of Ed25519 signatures is the standard one, +;;; with sha256 used to reduce [data] to the 256-bit number that is actually signed. +int check_data_signature(slice data, slice signature, int public_key) asm "CHKSIGNS"; + +{--- + # Computation of boc size + The primitives below may be useful for computing storage fees of user-provided data. +-} + +;;; Returns `(x, y, z, -1)` or `(null, null, null, 0)`. +;;; Recursively computes the count of distinct cells `x`, data bits `y`, and cell references `z` +;;; in the DAG rooted at `cell` [c], effectively returning the total storage used by this DAG taking into account +;;; the identification of equal cells. 
+;;; The values of `x`, `y`, and `z` are computed by a depth-first traversal of this DAG, +;;; with a hash table of visited cell hashes used to prevent visits of already-visited cells. +;;; The total count of visited cells `x` cannot exceed non-negative [max_cells]; +;;; otherwise the computation is aborted before visiting the `(max_cells + 1)`-st cell and +;;; a zero flag is returned to indicate failure. If [c] is `null`, returns `x = y = z = 0`. +(int, int, int) compute_data_size(cell c, int max_cells) impure asm "CDATASIZE"; + +;;; Similar to [compute_data_size?], but accepting a `slice` [s] instead of a `cell`. +;;; The returned value of `x` does not take into account the cell that contains the `slice` [s] itself; +;;; however, the data bits and the cell references of [s] are accounted for in `y` and `z`. +(int, int, int) slice_compute_data_size(slice s, int max_cells) impure asm "SDATASIZE"; + +;;; A non-quiet version of [compute_data_size?] that throws a cell overflow exception (`8`) on failure. +(int, int, int, int) compute_data_size?(cell c, int max_cells) asm "CDATASIZEQ NULLSWAPIFNOT2 NULLSWAPIFNOT"; + +;;; A non-quiet version of [slice_compute_data_size?] that throws a cell overflow exception (8) on failure. +(int, int, int, int) slice_compute_data_size?(cell c, int max_cells) asm "SDATASIZEQ NULLSWAPIFNOT2 NULLSWAPIFNOT"; + +;;; Throws an exception with exit_code excno if cond is not 0 (commented since implemented in compilator) +;; () throw_if(int excno, int cond) impure asm "THROWARGIF"; + +{-- + # Debug primitives + Only works for local TVM execution with debug level verbosity +-} +;;; Dumps the stack (at most the top 255 values) and shows the total stack depth. +() dump_stack() impure asm "DUMPSTK"; + +{- + # Persistent storage save and load +-} + +;;; Returns the persistent contract storage cell. It can be parsed or modified with slice and builder primitives later. +cell get_data() asm "c4 PUSH"; + +;;; Sets `cell` [c] as persistent contract data. You can update persistent contract storage with this primitive. +() set_data(cell c) impure asm "c4 POP"; + +{- + # Continuation primitives +-} +;;; Usually `c3` has a continuation initialized by the whole code of the contract. It is used for function calls. +;;; The primitive returns the current value of `c3`. +cont get_c3() impure asm "c3 PUSH"; + +;;; Updates the current value of `c3`. Usually, it is used for updating smart contract code in run-time. +;;; Note that after execution of this primitive the current code +;;; (and the stack of recursive function calls) won't change, +;;; but any other function call will use a function from the new code. +() set_c3(cont c) impure asm "c3 POP"; + +;;; Transforms a `slice` [s] into a simple ordinary continuation `c`, with `c.code = s` and an empty stack and savelist. +cont bless(slice s) impure asm "BLESS"; + +{--- + # Gas related primitives +-} + +;;; Sets current gas limit `gl` to its maximal allowed value `gm`, and resets the gas credit `gc` to zero, +;;; decreasing the value of `gr` by `gc` in the process. +;;; In other words, the current smart contract agrees to buy some gas to finish the current transaction. +;;; This action is required to process external messages, which bring no value (hence no gas) with themselves. +;;; +;;; For more details check [accept_message effects](https://docs.ton.org/develop/smart-contracts/guidelines/accept). 
+() accept_message() impure asm "ACCEPT"; + +;;; Sets current gas limit `gl` to the minimum of limit and `gm`, and resets the gas credit `gc` to zero. +;;; If the gas consumed so far (including the present instruction) exceeds the resulting value of `gl`, +;;; an (unhandled) out of gas exception is thrown before setting new gas limits. +;;; Notice that [set_gas_limit] with an argument `limit ≥ 2^63 − 1` is equivalent to [accept_message]. +() set_gas_limit(int limit) impure asm "SETGASLIMIT"; + +;;; Commits the current state of registers `c4` (“persistent data”) and `c5` (“actions”) +;;; so that the current execution is considered “successful” with the saved values even if an exception +;;; in Computation Phase is thrown later. +() commit() impure asm "COMMIT"; + +;;; Not implemented +;;() buy_gas(int gram) impure asm "BUYGAS"; + +;;; Computes the amount of gas that can be bought for `amount` nanoTONs, +;;; and sets `gl` accordingly in the same way as [set_gas_limit]. +() buy_gas(int amount) impure asm "BUYGAS"; + +;;; Computes the minimum of two integers [x] and [y]. +int min(int x, int y) asm "MIN"; + +;;; Computes the maximum of two integers [x] and [y]. +int max(int x, int y) asm "MAX"; + +;;; Sorts two integers. +(int, int) minmax(int x, int y) asm "MINMAX"; + +;;; Computes the absolute value of an integer [x]. +int abs(int x) asm "ABS"; + +{- + # Slice primitives + + It is said that a primitive _loads_ some data, + if it returns the data and the remainder of the slice + (so it can also be used as [modifying method](https://docs.ton.org/develop/func/statements#modifying-methods)). + + It is said that a primitive _preloads_ some data, if it returns only the data + (it can be used as [non-modifying method](https://docs.ton.org/develop/func/statements#non-modifying-methods)). + + Unless otherwise stated, loading and preloading primitives read the data from a prefix of the slice. +-} + + +;;; Converts a `cell` [c] into a `slice`. Notice that [c] must be either an ordinary cell, +;;; or an exotic cell (see [TVM.pdf](https://ton-blockchain.github.io/docs/tvm.pdf), 3.1.2) +;;; which is automatically loaded to yield an ordinary cell `c'`, converted into a `slice` afterwards. +slice begin_parse(cell c) asm "CTOS"; + +;;; Checks if [s] is empty. If not, throws an exception. +() end_parse(slice s) impure asm "ENDS"; + +;;; Loads the first reference from the slice. +(slice, cell) load_ref(slice s) asm( -> 1 0) "LDREF"; + +;;; Preloads the first reference from the slice. +cell preload_ref(slice s) asm "PLDREF"; + + {- Functions below are commented because are implemented on compilator level for optimisation -} + +;;; Loads a signed [len]-bit integer from a slice [s]. +;; (slice, int) ~load_int(slice s, int len) asm(s len -> 1 0) "LDIX"; + +;;; Loads an unsigned [len]-bit integer from a slice [s]. +;; (slice, int) ~load_uint(slice s, int len) asm( -> 1 0) "LDUX"; + +;;; Preloads a signed [len]-bit integer from a slice [s]. +;; int preload_int(slice s, int len) asm "PLDIX"; + +;;; Preloads an unsigned [len]-bit integer from a slice [s]. +;; int preload_uint(slice s, int len) asm "PLDUX"; + +;;; Loads the first `0 ≤ len ≤ 1023` bits from slice [s] into a separate `slice s''`. +;; (slice, slice) load_bits(slice s, int len) asm(s len -> 1 0) "LDSLICEX"; + +;;; Preloads the first `0 ≤ len ≤ 1023` bits from slice [s] into a separate `slice s''`. +;; slice preload_bits(slice s, int len) asm "PLDSLICEX"; + +;;; Loads serialized amount of TonCoins (any unsigned integer up to `2^120 - 1`). 
+(slice, int) load_grams(slice s) asm( -> 1 0) "LDGRAMS"; +(slice, int) load_coins(slice s) asm( -> 1 0) "LDGRAMS"; + +;;; Returns all but the first `0 ≤ len ≤ 1023` bits of `slice` [s]. +slice skip_bits(slice s, int len) asm "SDSKIPFIRST"; +(slice, ()) ~skip_bits(slice s, int len) asm "SDSKIPFIRST"; + +;;; Returns the first `0 ≤ len ≤ 1023` bits of `slice` [s]. +slice first_bits(slice s, int len) asm "SDCUTFIRST"; + +;;; Returns all but the last `0 ≤ len ≤ 1023` bits of `slice` [s]. +slice skip_last_bits(slice s, int len) asm "SDSKIPLAST"; +(slice, ()) ~skip_last_bits(slice s, int len) asm "SDSKIPLAST"; + +;;; Returns the last `0 ≤ len ≤ 1023` bits of `slice` [s]. +slice slice_last(slice s, int len) asm "SDCUTLAST"; + +;;; Loads a dictionary `D` (HashMapE) from `slice` [s]. +;;; (returns `null` if `nothing` constructor is used). +(slice, cell) load_dict(slice s) asm( -> 1 0) "LDDICT"; + +;;; Preloads a dictionary `D` from `slice` [s]. +cell preload_dict(slice s) asm "PLDDICT"; + +;;; Loads a dictionary as [load_dict], but returns only the remainder of the slice. +slice skip_dict(slice s) asm "SKIPDICT"; + +;;; Loads (Maybe ^Cell) from `slice` [s]. +;;; In other words loads 1 bit and if it is true +;;; loads first ref and return it with slice remainder +;;; otherwise returns `null` and slice remainder +(slice, cell) load_maybe_ref(slice s) asm( -> 1 0) "LDOPTREF"; + +;;; Preloads (Maybe ^Cell) from `slice` [s]. +cell preload_maybe_ref(slice s) asm "PLDOPTREF"; + + +;;; Returns the depth of `cell` [c]. +;;; If [c] has no references, then return `0`; +;;; otherwise the returned value is one plus the maximum of depths of cells referred to from [c]. +;;; If [c] is a `null` instead of a cell, returns zero. +int cell_depth(cell c) asm "CDEPTH"; + + +{- + # Slice size primitives +-} + +;;; Returns the number of references in `slice` [s]. +int slice_refs(slice s) asm "SREFS"; + +;;; Returns the number of data bits in `slice` [s]. +int slice_bits(slice s) asm "SBITS"; + +;;; Returns both the number of data bits and the number of references in `slice` [s]. +(int, int) slice_bits_refs(slice s) asm "SBITREFS"; + +;;; Checks whether a `slice` [s] is empty (i.e., contains no bits of data and no cell references). +int slice_empty?(slice s) asm "SEMPTY"; + +;;; Checks whether `slice` [s] has no bits of data. +int slice_data_empty?(slice s) asm "SDEMPTY"; + +;;; Checks whether `slice` [s] has no references. +int slice_refs_empty?(slice s) asm "SREMPTY"; + +;;; Returns the depth of `slice` [s]. +;;; If [s] has no references, then returns `0`; +;;; otherwise the returned value is one plus the maximum of depths of cells referred to from [s]. +int slice_depth(slice s) asm "SDEPTH"; + +{- + # Builder size primitives +-} + +;;; Returns the number of cell references already stored in `builder` [b] +int builder_refs(builder b) asm "BREFS"; + +;;; Returns the number of data bits already stored in `builder` [b]. +int builder_bits(builder b) asm "BBITS"; + +;;; Returns the depth of `builder` [b]. +;;; If no cell references are stored in [b], then returns 0; +;;; otherwise the returned value is one plus the maximum of depths of cells referred to from [b]. +int builder_depth(builder b) asm "BDEPTH"; + +{- + # Builder primitives + It is said that a primitive _stores_ a value `x` into a builder `b` + if it returns a modified version of the builder `b'` with the value `x` stored at the end of it. + It can be used as [non-modifying method](https://docs.ton.org/develop/func/statements#non-modifying-methods). 
+ + All the primitives below first check whether there is enough space in the `builder`, + and only then check the range of the value being serialized. +-} + +;;; Creates a new empty `builder`. +builder begin_cell() asm "NEWC"; + +;;; Converts a `builder` into an ordinary `cell`. +cell end_cell(builder b) asm "ENDC"; + +;;; Stores a reference to `cell` [c] into `builder` [b]. +builder store_ref(builder b, cell c) asm(c b) "STREF"; + +;;; Stores an unsigned [len]-bit integer `x` into `b` for `0 ≤ len ≤ 256`. +;; builder store_uint(builder b, int x, int len) asm(x b len) "STUX"; + +;;; Stores a signed [len]-bit integer `x` into `b` for` 0 ≤ len ≤ 257`. +;; builder store_int(builder b, int x, int len) asm(x b len) "STIX"; + + +;;; Stores `slice` [s] into `builder` [b] +builder store_slice(builder b, slice s) asm "STSLICER"; + +;;; Stores (serializes) an integer [x] in the range `0..2^120 − 1` into `builder` [b]. +;;; The serialization of [x] consists of a 4-bit unsigned big-endian integer `l`, +;;; which is the smallest integer `l ≥ 0`, such that `x < 2^8l`, +;;; followed by an `8l`-bit unsigned big-endian representation of [x]. +;;; If [x] does not belong to the supported range, a range check exception is thrown. +;;; +;;; Store amounts of TonCoins to the builder as VarUInteger 16 +builder store_grams(builder b, int x) asm "STGRAMS"; +builder store_coins(builder b, int x) asm "STGRAMS"; + +;;; Stores dictionary `D` represented by `cell` [c] or `null` into `builder` [b]. +;;; In other words, stores a `1`-bit and a reference to [c] if [c] is not `null` and `0`-bit otherwise. +builder store_dict(builder b, cell c) asm(c b) "STDICT"; + +;;; Stores (Maybe ^Cell) to builder: +;;; if cell is null store 1 zero bit +;;; otherwise store 1 true bit and ref to cell +builder store_maybe_ref(builder b, cell c) asm(c b) "STOPTREF"; + + +{- + # Address manipulation primitives + The address manipulation primitives listed below serialize and deserialize values according to the following TL-B scheme: + ```TL-B + addr_none$00 = MsgAddressExt; + addr_extern$01 len:(## 8) external_address:(bits len) + = MsgAddressExt; + anycast_info$_ depth:(#<= 30) { depth >= 1 } + rewrite_pfx:(bits depth) = Anycast; + addr_std$10 anycast:(Maybe Anycast) + workchain_id:int8 address:bits256 = MsgAddressInt; + addr_var$11 anycast:(Maybe Anycast) addr_len:(## 9) + workchain_id:int32 address:(bits addr_len) = MsgAddressInt; + _ _:MsgAddressInt = MsgAddress; + _ _:MsgAddressExt = MsgAddress; + + int_msg_info$0 ihr_disabled:Bool bounce:Bool bounced:Bool + src:MsgAddress dest:MsgAddressInt + value:CurrencyCollection ihr_fee:Grams fwd_fee:Grams + created_lt:uint64 created_at:uint32 = CommonMsgInfoRelaxed; + ext_out_msg_info$11 src:MsgAddress dest:MsgAddressExt + created_lt:uint64 created_at:uint32 = CommonMsgInfoRelaxed; + ``` + A deserialized `MsgAddress` is represented by a tuple `t` as follows: + + - `addr_none` is represented by `t = (0)`, + i.e., a tuple containing exactly one integer equal to zero. + - `addr_extern` is represented by `t = (1, s)`, + where slice `s` contains the field `external_address`. In other words, ` + t` is a pair (a tuple consisting of two entries), containing an integer equal to one and slice `s`. + - `addr_std` is represented by `t = (2, u, x, s)`, + where `u` is either a `null` (if `anycast` is absent) or a slice `s'` containing `rewrite_pfx` (if anycast is present). + Next, integer `x` is the `workchain_id`, and slice `s` contains the address. 
+ - `addr_var` is represented by `t = (3, u, x, s)`, + where `u`, `x`, and `s` have the same meaning as for `addr_std`. +-} + +;;; Loads from slice [s] the only prefix that is a valid `MsgAddress`, +;;; and returns both this prefix `s'` and the remainder `s''` of [s] as slices. +(slice, slice) load_msg_addr(slice s) asm( -> 1 0) "LDMSGADDR"; + +;;; Decomposes slice [s] containing a valid `MsgAddress` into a `tuple t` with separate fields of this `MsgAddress`. +;;; If [s] is not a valid `MsgAddress`, a cell deserialization exception is thrown. +tuple parse_addr(slice s) asm "PARSEMSGADDR"; + +;;; Parses slice [s] containing a valid `MsgAddressInt` (usually a `msg_addr_std`), +;;; applies rewriting from the anycast (if present) to the same-length prefix of the address, +;;; and returns both the workchain and the 256-bit address as integers. +;;; If the address is not 256-bit, or if [s] is not a valid serialization of `MsgAddressInt`, +;;; throws a cell deserialization exception. +(int, int) parse_std_addr(slice s) asm "REWRITESTDADDR"; + +;;; A variant of [parse_std_addr] that returns the (rewritten) address as a slice [s], +;;; even if it is not exactly 256 bit long (represented by a `msg_addr_var`). +(int, slice) parse_var_addr(slice s) asm "REWRITEVARADDR"; + +{- + # Dictionary primitives +-} + + +;;; Sets the value associated with [key_len]-bit key signed index in dictionary [dict] to [value] (cell), +;;; and returns the resulting dictionary. +cell idict_set_ref(cell dict, int key_len, int index, cell value) asm(value index dict key_len) "DICTISETREF"; +(cell, ()) ~idict_set_ref(cell dict, int key_len, int index, cell value) asm(value index dict key_len) "DICTISETREF"; + +;;; Sets the value associated with [key_len]-bit key unsigned index in dictionary [dict] to [value] (cell), +;;; and returns the resulting dictionary. 
+cell udict_set_ref(cell dict, int key_len, int index, cell value) asm(value index dict key_len) "DICTUSETREF"; +(cell, ()) ~udict_set_ref(cell dict, int key_len, int index, cell value) asm(value index dict key_len) "DICTUSETREF"; + +cell idict_get_ref(cell dict, int key_len, int index) asm(index dict key_len) "DICTIGETOPTREF"; +(cell, int) idict_get_ref?(cell dict, int key_len, int index) asm(index dict key_len) "DICTIGETREF" "NULLSWAPIFNOT"; +(cell, int) udict_get_ref?(cell dict, int key_len, int index) asm(index dict key_len) "DICTUGETREF" "NULLSWAPIFNOT"; +(cell, cell) idict_set_get_ref(cell dict, int key_len, int index, cell value) asm(value index dict key_len) "DICTISETGETOPTREF"; +(cell, cell) udict_set_get_ref(cell dict, int key_len, int index, cell value) asm(value index dict key_len) "DICTUSETGETOPTREF"; +(cell, int) idict_delete?(cell dict, int key_len, int index) asm(index dict key_len) "DICTIDEL"; +(cell, int) udict_delete?(cell dict, int key_len, int index) asm(index dict key_len) "DICTUDEL"; +(slice, int) idict_get?(cell dict, int key_len, int index) asm(index dict key_len) "DICTIGET" "NULLSWAPIFNOT"; +(slice, int) udict_get?(cell dict, int key_len, int index) asm(index dict key_len) "DICTUGET" "NULLSWAPIFNOT"; +(cell, slice, int) idict_delete_get?(cell dict, int key_len, int index) asm(index dict key_len) "DICTIDELGET" "NULLSWAPIFNOT"; +(cell, slice, int) udict_delete_get?(cell dict, int key_len, int index) asm(index dict key_len) "DICTUDELGET" "NULLSWAPIFNOT"; +(cell, (slice, int)) ~idict_delete_get?(cell dict, int key_len, int index) asm(index dict key_len) "DICTIDELGET" "NULLSWAPIFNOT"; +(cell, (slice, int)) ~udict_delete_get?(cell dict, int key_len, int index) asm(index dict key_len) "DICTUDELGET" "NULLSWAPIFNOT"; +cell udict_set(cell dict, int key_len, int index, slice value) asm(value index dict key_len) "DICTUSET"; +(cell, ()) ~udict_set(cell dict, int key_len, int index, slice value) asm(value index dict key_len) "DICTUSET"; +cell idict_set(cell dict, int key_len, int index, slice value) asm(value index dict key_len) "DICTISET"; +(cell, ()) ~idict_set(cell dict, int key_len, int index, slice value) asm(value index dict key_len) "DICTISET"; +cell dict_set(cell dict, int key_len, slice index, slice value) asm(value index dict key_len) "DICTSET"; +(cell, ()) ~dict_set(cell dict, int key_len, slice index, slice value) asm(value index dict key_len) "DICTSET"; +(cell, int) udict_add?(cell dict, int key_len, int index, slice value) asm(value index dict key_len) "DICTUADD"; +(cell, int) udict_replace?(cell dict, int key_len, int index, slice value) asm(value index dict key_len) "DICTUREPLACE"; +(cell, int) idict_add?(cell dict, int key_len, int index, slice value) asm(value index dict key_len) "DICTIADD"; +(cell, int) idict_replace?(cell dict, int key_len, int index, slice value) asm(value index dict key_len) "DICTIREPLACE"; +cell udict_set_builder(cell dict, int key_len, int index, builder value) asm(value index dict key_len) "DICTUSETB"; +(cell, ()) ~udict_set_builder(cell dict, int key_len, int index, builder value) asm(value index dict key_len) "DICTUSETB"; +cell idict_set_builder(cell dict, int key_len, int index, builder value) asm(value index dict key_len) "DICTISETB"; +(cell, ()) ~idict_set_builder(cell dict, int key_len, int index, builder value) asm(value index dict key_len) "DICTISETB"; +cell dict_set_builder(cell dict, int key_len, slice index, builder value) asm(value index dict key_len) "DICTSETB"; +(cell, ()) ~dict_set_builder(cell dict, int key_len, slice 
index, builder value) asm(value index dict key_len) "DICTSETB"; +(cell, int) udict_add_builder?(cell dict, int key_len, int index, builder value) asm(value index dict key_len) "DICTUADDB"; +(cell, int) udict_replace_builder?(cell dict, int key_len, int index, builder value) asm(value index dict key_len) "DICTUREPLACEB"; +(cell, int) idict_add_builder?(cell dict, int key_len, int index, builder value) asm(value index dict key_len) "DICTIADDB"; +(cell, int) idict_replace_builder?(cell dict, int key_len, int index, builder value) asm(value index dict key_len) "DICTIREPLACEB"; +(cell, int, slice, int) udict_delete_get_min(cell dict, int key_len) asm(-> 0 2 1 3) "DICTUREMMIN" "NULLSWAPIFNOT2"; +(cell, (int, slice, int)) ~udict::delete_get_min(cell dict, int key_len) asm(-> 0 2 1 3) "DICTUREMMIN" "NULLSWAPIFNOT2"; +(cell, int, slice, int) idict_delete_get_min(cell dict, int key_len) asm(-> 0 2 1 3) "DICTIREMMIN" "NULLSWAPIFNOT2"; +(cell, (int, slice, int)) ~idict::delete_get_min(cell dict, int key_len) asm(-> 0 2 1 3) "DICTIREMMIN" "NULLSWAPIFNOT2"; +(cell, slice, slice, int) dict_delete_get_min(cell dict, int key_len) asm(-> 0 2 1 3) "DICTREMMIN" "NULLSWAPIFNOT2"; +(cell, (slice, slice, int)) ~dict::delete_get_min(cell dict, int key_len) asm(-> 0 2 1 3) "DICTREMMIN" "NULLSWAPIFNOT2"; +(cell, int, slice, int) udict_delete_get_max(cell dict, int key_len) asm(-> 0 2 1 3) "DICTUREMMAX" "NULLSWAPIFNOT2"; +(cell, (int, slice, int)) ~udict::delete_get_max(cell dict, int key_len) asm(-> 0 2 1 3) "DICTUREMMAX" "NULLSWAPIFNOT2"; +(cell, int, slice, int) idict_delete_get_max(cell dict, int key_len) asm(-> 0 2 1 3) "DICTIREMMAX" "NULLSWAPIFNOT2"; +(cell, (int, slice, int)) ~idict::delete_get_max(cell dict, int key_len) asm(-> 0 2 1 3) "DICTIREMMAX" "NULLSWAPIFNOT2"; +(cell, slice, slice, int) dict_delete_get_max(cell dict, int key_len) asm(-> 0 2 1 3) "DICTREMMAX" "NULLSWAPIFNOT2"; +(cell, (slice, slice, int)) ~dict::delete_get_max(cell dict, int key_len) asm(-> 0 2 1 3) "DICTREMMAX" "NULLSWAPIFNOT2"; +(int, slice, int) udict_get_min?(cell dict, int key_len) asm (-> 1 0 2) "DICTUMIN" "NULLSWAPIFNOT2"; +(int, slice, int) udict_get_max?(cell dict, int key_len) asm (-> 1 0 2) "DICTUMAX" "NULLSWAPIFNOT2"; +(int, cell, int) udict_get_min_ref?(cell dict, int key_len) asm (-> 1 0 2) "DICTUMINREF" "NULLSWAPIFNOT2"; +(int, cell, int) udict_get_max_ref?(cell dict, int key_len) asm (-> 1 0 2) "DICTUMAXREF" "NULLSWAPIFNOT2"; +(int, slice, int) idict_get_min?(cell dict, int key_len) asm (-> 1 0 2) "DICTIMIN" "NULLSWAPIFNOT2"; +(int, slice, int) idict_get_max?(cell dict, int key_len) asm (-> 1 0 2) "DICTIMAX" "NULLSWAPIFNOT2"; +(int, cell, int) idict_get_min_ref?(cell dict, int key_len) asm (-> 1 0 2) "DICTIMINREF" "NULLSWAPIFNOT2"; +(int, cell, int) idict_get_max_ref?(cell dict, int key_len) asm (-> 1 0 2) "DICTIMAXREF" "NULLSWAPIFNOT2"; +(int, slice, int) udict_get_next?(cell dict, int key_len, int pivot) asm(pivot dict key_len -> 1 0 2) "DICTUGETNEXT" "NULLSWAPIFNOT2"; +(int, slice, int) udict_get_nexteq?(cell dict, int key_len, int pivot) asm(pivot dict key_len -> 1 0 2) "DICTUGETNEXTEQ" "NULLSWAPIFNOT2"; +(int, slice, int) udict_get_prev?(cell dict, int key_len, int pivot) asm(pivot dict key_len -> 1 0 2) "DICTUGETPREV" "NULLSWAPIFNOT2"; +(int, slice, int) udict_get_preveq?(cell dict, int key_len, int pivot) asm(pivot dict key_len -> 1 0 2) "DICTUGETPREVEQ" "NULLSWAPIFNOT2"; +(int, slice, int) idict_get_next?(cell dict, int key_len, int pivot) asm(pivot dict key_len -> 1 0 2) "DICTIGETNEXT" "NULLSWAPIFNOT2"; +(int, 
slice, int) idict_get_nexteq?(cell dict, int key_len, int pivot) asm(pivot dict key_len -> 1 0 2) "DICTIGETNEXTEQ" "NULLSWAPIFNOT2"; +(int, slice, int) idict_get_prev?(cell dict, int key_len, int pivot) asm(pivot dict key_len -> 1 0 2) "DICTIGETPREV" "NULLSWAPIFNOT2"; +(int, slice, int) idict_get_preveq?(cell dict, int key_len, int pivot) asm(pivot dict key_len -> 1 0 2) "DICTIGETPREVEQ" "NULLSWAPIFNOT2"; + +;;; Creates an empty dictionary, which is actually a null value. Equivalent to PUSHNULL +cell new_dict() asm "NEWDICT"; +;;; Checks whether a dictionary is empty. Equivalent to cell_null?. +int dict_empty?(cell c) asm "DICTEMPTY"; + + +{- Prefix dictionary primitives -} +(slice, slice, slice, int) pfxdict_get?(cell dict, int key_len, slice key) asm(key dict key_len) "PFXDICTGETQ" "NULLSWAPIFNOT2"; +(cell, int) pfxdict_set?(cell dict, int key_len, slice key, slice value) asm(value key dict key_len) "PFXDICTSET"; +(cell, int) pfxdict_delete?(cell dict, int key_len, slice key) asm(key dict key_len) "PFXDICTDEL"; + +;;; Returns the value of the global configuration parameter with integer index `i` as a `cell` or `null` value. +cell config_param(int x) asm "CONFIGOPTPARAM"; +;;; Checks whether c is a null. Note, that FunC also has polymorphic null? built-in. +int cell_null?(cell c) asm "ISNULL"; + +;;; Creates an output action which would reserve exactly amount nanotoncoins (if mode = 0), at most amount nanotoncoins (if mode = 2), or all but amount nanotoncoins (if mode = 1 or mode = 3), from the remaining balance of the account. It is roughly equivalent to creating an outbound message carrying amount nanotoncoins (or b − amount nanotoncoins, where b is the remaining balance) to oneself, so that the subsequent output actions would not be able to spend more money than the remainder. Bit +2 in mode means that the external action does not fail if the specified amount cannot be reserved; instead, all remaining balance is reserved. Bit +8 in mode means `amount <- -amount` before performing any further actions. Bit +4 in mode means that amount is increased by the original balance of the current account (before the compute phase), including all extra currencies, before performing any other checks and actions. Currently, amount must be a non-negative integer, and mode must be in the range 0..15. +() raw_reserve(int amount, int mode) impure asm "RAWRESERVE"; +;;; Similar to raw_reserve, but also accepts a dictionary extra_amount (represented by a cell or null) with extra currencies. In this way currencies other than TonCoin can be reserved. +() raw_reserve_extra(int amount, cell extra_amount, int mode) impure asm "RAWRESERVEX"; +;;; Sends a raw message contained in msg, which should contain a correctly serialized object Message X, with the only exception that the source address is allowed to have dummy value addr_none (to be automatically replaced with the current smart contract address), and ihr_fee, fwd_fee, created_lt and created_at fields can have arbitrary values (to be rewritten with correct values during the action phase of the current transaction). Integer parameter mode contains the flags. 
Currently mode = 0 is used for ordinary messages; mode = 128 is used for messages that are to carry all the remaining balance of the current smart contract (instead of the value originally indicated in the message); mode = 64 is used for messages that carry all the remaining value of the inbound message in addition to the value initially indicated in the new message (if bit 0 is not set, the gas fees are deducted from this amount); mode' = mode + 1 means that the sender wants to pay transfer fees separately; mode' = mode + 2 means that any errors arising while processing this message during the action phase should be ignored. Finally, mode' = mode + 32 means that the current account must be destroyed if its resulting balance is zero. This flag is usually employed together with +128. +() send_raw_message(cell msg, int mode) impure asm "SENDRAWMSG"; +;;; Creates an output action that would change this smart contract code to that given by cell new_code. Notice that this change will take effect only after the successful termination of the current run of the smart contract +() set_code(cell new_code) impure asm "SETCODE"; + +;;; Generates a new pseudo-random unsigned 256-bit integer x. The algorithm is as follows: if r is the old value of the random seed, considered as a 32-byte array (by constructing the big-endian representation of an unsigned 256-bit integer), then its sha512(r) is computed; the first 32 bytes of this hash are stored as the new value r' of the random seed, and the remaining 32 bytes are returned as the next random value x. +int random() impure asm "RANDU256"; +;;; Generates a new pseudo-random integer z in the range 0..range−1 (or range..−1, if range < 0). More precisely, an unsigned random value x is generated as in random; then z := x * range / 2^256 is computed. +int rand(int range) impure asm "RAND"; +;;; Returns the current random seed as an unsigned 256-bit Integer. +int get_seed() impure asm "RANDSEED"; +;;; Sets the random seed to unsigned 256-bit seed. +() set_seed(int) impure asm "SETRAND"; +;;; Mixes unsigned 256-bit integer x into the random seed r by setting the random seed to sha256 of the concatenation of two 32-byte strings: the first with the big-endian representation of the old seed r, and the second with the big-endian representation of x. +() randomize(int x) impure asm "ADDRAND"; +;;; Equivalent to randomize(cur_lt());. 
+() randomize_lt() impure asm "LTIME" "ADDRAND"; + +;;; Checks whether the data parts of two slices coinside +int equal_slice_bits (slice a, slice b) asm "SDEQ"; + +;;; Concatenates two builders +builder store_builder(builder to, builder from) asm "STBR"; diff --git a/server/src/e2e/FunC/testcases/impure-inspection/Inspection.test b/server/src/e2e/FunC/testcases/impure-inspection/Inspection.test new file mode 100644 index 00000000..ccbb1c50 --- /dev/null +++ b/server/src/e2e/FunC/testcases/impure-inspection/Inspection.test @@ -0,0 +1,473 @@ +======================================================================== +Should drop non-impure call if no result is returned +======================================================================== +() bar(int a) { + throw(a); +} + +(int) foo() { + bar(42); + return 123; +} +------------------------------------------------------------------------ +5:4 to 5:11 + + +======================================================================== +Should not drop explicitly impure call +======================================================================== +() bar(int a) impure { + throw(a); +} + +(int) foo() { + bar(42); + return 123; +} +------------------------------------------------------------------------ +no issues + +======================================================================== +If function has return value, but it is not used, should drop call +======================================================================== +(int) bar(int a) { + return a * a; +} + +(int) foo() impure { + bar(42); + return 43; +} +------------------------------------------------------------------------ +5:4 to 5:11 + +======================================================================== +If return value is used in impure constructs, should not drop call +======================================================================== +(int) bar(int a) { + return a * a; +} + +() used_in_if() { + var a = bar(100); + if(a) { + } +} + +() used_in_while() { + var a = bar(100); + while(a > 0) { + a = a - 1; + } +} + +() used_in_do_while() { + var a = bar(100); + do { + a = a - 1; + } until(a > 0); +} + +() used_in_repeat() { + var b = 2; + var a = bar(100); + repeat(a){ + b = b * b; + } +} +(int) used_in_return() { + var a = bar(100); + return a; +} +------------------------------------------------------------------------ +no issues + +======================================================================== +If result is used only in block expr of conditional construct, shouldn't be exampt from impure +======================================================================== +(int) bar(int a) { + return a * a; +} + +() used_in_if() { + var b = true; + var a = bar(100); + if(b) { + a = a + 1; + } +} + +() used_in_while() { + var b = true; + var a = bar(100); + + while(b) { + a = a + 1; + } +} + +() used_in_do_while() { + var b = true; + var a = bar(100); + do { + a = a + 1; + } until(b); +} + +() used_in_repeat() { + var b = 2; + var a = bar(100); + repeat(b){ + a = a * a; + } +} +------------------------------------------------------------------------ +6:12 to 6:20 +14:12 to 14:20 +23:12 to 23:20 +31:12 to 31:20 + +======================================================================== +Should drop if non-impure call to stdlib function is made and result is not used +======================================================================== + +(int) not_bound(slice addr) { + parse_std_addr(addr); +} + +() bound_not_used(slice addr) { + var (wc, hash) = parse_std_addr(addr); +} 
+ +() bound_and_used(slice addr) { + var (wc, hash) = parse_std_addr(addr); + throw_if(42, wc == -1); +} +------------------------------------------------------------------------ +1:4 to 1:24 +5:21 to 5:41 + +======================================================================== +Should not drop call if result is referenced in impure call +======================================================================== +() check_wc(int a) impure { + throw_unless(42, a == 0); +} + +() test_func_call(slice test_addr) { + var (wc, hash) = parse_std_addr(test_addr); + check_wc(wc); +} +() test_method_call(slice test_addr) { + var (wc, hash) = parse_std_addr(test_addr); + wc.check_wc(); +} +() test_mod_method_call(slice test_addr) { + var (wc, hash) = parse_std_addr(test_addr); + wc~check_wc(); +} +------------------------------------------------------------------------ +no issues + +======================================================================== +Should drop call if only result reference is in non-impure call +======================================================================== +() check_wc(int a) { + throw_unless(42, a == 0); +} +() test_func_call(slice test_addr) { + var (wc, hash) = parse_std_addr(test_addr); + check_wc(wc); +} +() test_method_call(slice test_addr) { + var (wc, hash) = parse_std_addr(test_addr); + wc.check_wc(); +} +() test_mod_method_call(slice test_addr) { + var (wc, hash) = parse_std_addr(test_addr); + wc~check_wc(); +} +------------------------------------------------------------------------ +4:21 to 4:46 +5:4 to 5:16 +8:21 to 8:46 +9:6 to 9:17 +12:21 to 12:46 +13:6 to 13:17 + + +======================================================================== +Should not drop result if it is used in impure built-in +======================================================================== +(int) bar(int a) { + return a * a; +} + +(int) throw_case() { + int foo = bar(); + throw(foo); +} + +(int) throw_if_case() { + int foo = bar(); + throw_if(42, foo); +} + +(int) throw_unless_case() { + int foo = bar(); + throw_unless(42, foo); +} +(int) dump_case() { + int foo = bar(); + foo~dump(); +} + +------------------------------------------------------------------------ +no issues + + + + +======================================================================== +Should correctly identify bindings in nested tensor var declaration +======================================================================== +(int, int) bar() { + return (42, 84); +} + +(int) foo() { + return (42) +} +(int) drop_foo() { + var ((a, b), c) = (bar(), foo()); + return a; +} +(int) drop_bar() { + var ((a, b), c) = (bar(), foo()); + return c; +} +------------------------------------------------------------------------ +8:30 to 8:35 +12:23 to 12:28 + +======================================================================== +Should correctly identify bindings in nested tensor explicit type declaration +======================================================================== +(int, int) bar() { + return (42, 84); +} + +(int) foo() { + return (42) +} +(int) drop_foo() { + ((int a, int b), int c) = (bar(), foo()); + return b; +} +(int) drop_bar() { + ((int a, int b), int c) = (bar(), foo()); + return c; +} +------------------------------------------------------------------------ +8:38 to 8:43 +12:31 to 12:36 + +======================================================================== +Should correctly identify bindings in nested tensor expression assignment 
+======================================================================== +(int, int) bar() { + return (42, 84); +} + +(int) foo() { + return (42) +} + +(int) drop_foo(int a, int b, int c) { + ((a, b), c) = (bar(), foo()); + return b; +} + +(int) drop_bar(int a, int b, int c) { + ((a, b), c) = (bar(), foo()); + return c; +} +------------------------------------------------------------------------ +9:26 to 9:31 +14:19 to 14:24 + +======================================================================== +Should correctly identify bindings in generic (forall) declarations +======================================================================== +forall X, Y -> (X, Y) bar() { + return (42, 84); +} + +forall X -> (X) foo() { + return (42) +} + +(int) drop_foo(int a, int b, int c) { + ((a, b), c) = (bar(), foo()); + return b; +} + +(int) drop_bar(int a, int b, int c) { + ((a, b), c) = (bar(), foo()); + return c; +} +------------------------------------------------------------------------ +9:26 to 9:31 +14:19 to 14:24 + + +======================================================================== +Should drop call if result is dropped via _ +======================================================================== +(int, int) bar() { + return (42, 84); +} +(int) foo() { + return (42) +} + +(int) drop_foo(int a, int b, int c) { + ((a, b), _) = (bar(), foo()); + return b; +} + +(int) drop_bar(int a, int b, int c) { + ((_, _), c) = (bar(), foo()); + return c; +} +------------------------------------------------------------------------ +8:26 to 8:31 +13:19 to 13:24 + +======================================================================== +Should correctly identify bindings in nested tuple assignment +======================================================================== +[int, int] bar(int a) { + return [a * 42, a * 84]; +} + +[int, int] foo() { + return 42 +} +(int) drop_foo() { + var ([a, b], [c, d]) = [bar(), foo()]; + return b; +} +(int) drop_bar() { + var ([a, b], [c, d]) = [bar(), foo()]; + return c; +} +------------------------------------------------------------------------ +8:35 to 8:40 +12:28 to 12:33 + +======================================================================== +Should not drop if any element of complex tensor is referenced +======================================================================== +() foo(int a, int, b) impure { + throw(a * b); +} + +(int, [int, cell, (int, int)]) bar(int a) { + return (a * 42, begin_cell().store_uint(a, 32).end_cell(), (a * 84, a *a)); +} + +() main() { + var (a, [b, c, d]) = bar(42); + ;; d should bound to (int, int) tensor + foo(d); +} +------------------------------------------------------------------------ +no issues + +======================================================================== +Should correctly bind return in modifying method call +======================================================================== +(int, (int, int)) ~powa(int a) { + var b = a * a; + return (b, (b * a, b * b)) +} + +(int) foo() { + return (42) +} + +(int) used_ret() { + var a = foo(); + (int c, int d) = a~powa(); + return d; ;; Used, shouldn't drop +} + +(int) used_modifier() { + var a = foo(); + (int c, int d) =a~powa(); + return a; ;; Used, shouldn't drop +} +------------------------------------------------------------------------ +no issues + +======================================================================== +Should drop if none of modifyint method resulst are used +======================================================================== 
+(int, (int, int)) ~powa(int a) { + var b = a * a; + return (b, (b * a, b * b)) +} + +(int) foo() { + return (42) +} + +(int) not_used() { + var a = 42; + (int c, int d) = a~powa(); + return foo(); +} + +------------------------------------------------------------------------ +11:22 to 11:29 + +======================================================================== +Should not drop if chain of non-modifying method calls result is used +======================================================================== +;; Note store ops are impure +(cell) build_payload(int a) { + cell res = begin_cell().store_ref( + begin_cell().store_uint(12345, 32).store_uint(a, 64).end_cell() + ).end_cell(); + return res; +} + +------------------------------------------------------------------------ +no issues + +======================================================================== +Should not drop if result is used in multi-assignment logic +======================================================================== + +(int) foo(int a) { + return a * a; +} + +(cell) bar(int a, int b) { + return begin_cell().store_uint(a, 32).store_uint(b, 64).end_cell(); +} + +(cell) main() { + int c; + var a = foo(42); + var b = a + 100; + if (b > 10) { + c = bar(a, b); + } + return c; +} + +------------------------------------------------------------------------ +no issues + diff --git a/server/src/e2e/tolk/BaseTestSuite.ts b/server/src/e2e/common/BaseTestSuite.ts similarity index 95% rename from server/src/e2e/tolk/BaseTestSuite.ts rename to server/src/e2e/common/BaseTestSuite.ts index 0057fb5c..05a0b2ae 100644 --- a/server/src/e2e/tolk/BaseTestSuite.ts +++ b/server/src/e2e/common/BaseTestSuite.ts @@ -24,6 +24,7 @@ export abstract class BaseTestSuite { protected document!: vscode.TextDocument protected editor!: vscode.TextEditor protected testFilePath!: string + protected language!: string protected testDir: string = "" protected updates: TestUpdate[] = [] protected additionalFiles: TextDocument[] = [] @@ -41,13 +42,15 @@ export abstract class BaseTestSuite { } public async setup(): Promise<void> { - this.testFilePath = path.join(this.workingDir(), "test.tolk") + const lang = process.env["TON_LANGUAGE"] ?? "tolk" + this.testFilePath = path.join(this.workingDir(), `test.${lang.toLowerCase()}`) this.testDir = path.dirname(this.testFilePath) + this.language = lang await fs.promises.mkdir(this.testDir, {recursive: true}) await fs.promises.writeFile(this.testFilePath, "") this.document = await vscode.workspace.openTextDocument(this.testFilePath) - await vscode.languages.setTextDocumentLanguage(this.document, "tolk") + await vscode.languages.setTextDocumentLanguage(this.document, this.language.toLowerCase()) await this.openMainFile() } @@ -74,7 +77,7 @@ export abstract class BaseTestSuite { await fs.promises.writeFile(filePath, content) const additionalFile = await vscode.workspace.openTextDocument(filePath) - await vscode.languages.setTextDocumentLanguage(additionalFile, "tolk") + await vscode.languages.setTextDocumentLanguage(additionalFile, this.language) await vscode.window.showTextDocument(additionalFile, { preview: true, @@ -227,11 +230,12 @@ export abstract class BaseTestSuite { } public runTestsFromDirectory(directory: string): void { + const lang = process.env["TON_LANGUAGE"] ??
"tolk" const testCasesPath = path.join( __dirname, "..", "..", - "tolk", + lang, "testcases", directory, "*.test", @@ -310,8 +314,9 @@ async function activate(): Promise { console.log("Waiting for language server initialization...") await new Promise(resolve => setTimeout(resolve, 1000)) + const targetLang = process.env["TON_LANGUAGE"] ?? "tolk" const languages = await vscode.languages.getLanguages() - if (!languages.includes("tolk")) { + if (!languages.includes(targetLang.toLowerCase())) { throw new Error("Tolk language not registered. Check package.json configuration.") } diff --git a/server/src/e2e/runTolkTest.ts b/server/src/e2e/runLangTest.ts similarity index 87% rename from server/src/e2e/runTolkTest.ts rename to server/src/e2e/runLangTest.ts index b8dcfee8..9eeee918 100644 --- a/server/src/e2e/runTolkTest.ts +++ b/server/src/e2e/runLangTest.ts @@ -9,6 +9,7 @@ interface TestRunOptions { suite?: string test?: string file?: string + lang?: string updateSnapshots?: boolean verbose?: boolean } @@ -21,6 +22,11 @@ function parseArgs(): TestRunOptions { const arg = args[i] switch (arg) { + case "--lang": + case "-s": { + options.lang = args[++i] + break + } case "--suite": case "-s": { options.suite = args[++i] @@ -104,14 +110,19 @@ async function main(): Promise { if (options.file) { process.env["TON_TEST_FILE"] = options.file } + // If not set to known languages, fall back to tolk + if (!options.lang || !(options.lang == "tolk" || options.lang == "FunC")) { + options.lang = "tolk" + } if (options.verbose) { console.log("Starting e2e tests with options:", options) } const extensionDevelopmentPath = path.resolve(__dirname, "../../../") - const extensionTestsPath = path.resolve(__dirname, "./out/tolk/index.js") + const extensionTestsPath = path.resolve(__dirname, `./out/${options.lang}/index.js`) const testWorkspace = path.resolve(__dirname, "../../../test-workspace") + process.env["TON_LANGUAGE"] = options.lang await runTests({ extensionDevelopmentPath, diff --git a/server/src/e2e/tolk/compiler-tests.test.ts b/server/src/e2e/tolk/compiler-tests.test.ts index d3bd0759..77e82148 100644 --- a/server/src/e2e/tolk/compiler-tests.test.ts +++ b/server/src/e2e/tolk/compiler-tests.test.ts @@ -7,7 +7,7 @@ import * as path from "node:path" import * as vscode from "vscode" import {glob} from "glob" -import {BaseTestSuite} from "./BaseTestSuite" +import {BaseTestSuite} from "../common/BaseTestSuite" interface UnresolvedIdentifier { readonly name: string diff --git a/server/src/e2e/tolk/completion-select.test.ts b/server/src/e2e/tolk/completion-select.test.ts index 93dcb06e..f259853a 100644 --- a/server/src/e2e/tolk/completion-select.test.ts +++ b/server/src/e2e/tolk/completion-select.test.ts @@ -7,7 +7,7 @@ import {CompletionItem, Position} from "vscode" import type {TestCase} from "../common/TestParser" -import {BaseTestSuite} from "./BaseTestSuite" +import {BaseTestSuite} from "../common/BaseTestSuite" suite("Completion Test Suite", () => { const testSuite = new (class extends BaseTestSuite { diff --git a/server/src/e2e/tolk/completion.test.ts b/server/src/e2e/tolk/completion.test.ts index ade95955..c3d60760 100644 --- a/server/src/e2e/tolk/completion.test.ts +++ b/server/src/e2e/tolk/completion.test.ts @@ -8,7 +8,7 @@ import {CompletionItem} from "vscode" import type {TestCase} from "../common/TestParser" -import {BaseTestSuite} from "./BaseTestSuite" +import {BaseTestSuite} from "../common/BaseTestSuite" suite("Completion Test Suite", () => { const testSuite = new (class extends BaseTestSuite { diff 
--git a/server/src/e2e/tolk/document-symbols.test.ts b/server/src/e2e/tolk/document-symbols.test.ts index 977fa596..8e67f7c7 100644 --- a/server/src/e2e/tolk/document-symbols.test.ts +++ b/server/src/e2e/tolk/document-symbols.test.ts @@ -6,7 +6,7 @@ import * as vscode from "vscode" import type {TestCase} from "../common/TestParser" -import {BaseTestSuite} from "./BaseTestSuite" +import {BaseTestSuite} from "../common/BaseTestSuite" suite("Document Symbols Test Suite", () => { const testSuite = new (class extends BaseTestSuite { diff --git a/server/src/e2e/tolk/documentation.test.ts b/server/src/e2e/tolk/documentation.test.ts index ebfaa643..2f680050 100644 --- a/server/src/e2e/tolk/documentation.test.ts +++ b/server/src/e2e/tolk/documentation.test.ts @@ -10,7 +10,7 @@ import type * as lsp from "vscode-languageserver" import type {TestCase} from "../common/TestParser" -import {BaseTestSuite} from "./BaseTestSuite" +import {BaseTestSuite} from "../common/BaseTestSuite" import type {GetTypeAtPositionParams} from "./types.test" diff --git a/server/src/e2e/tolk/foldings.test.ts b/server/src/e2e/tolk/foldings.test.ts index b673dd6a..2cab6528 100644 --- a/server/src/e2e/tolk/foldings.test.ts +++ b/server/src/e2e/tolk/foldings.test.ts @@ -6,7 +6,7 @@ import * as vscode from "vscode" import type {TestCase} from "../common/TestParser" -import {BaseTestSuite} from "./BaseTestSuite" +import {BaseTestSuite} from "../common/BaseTestSuite" suite("Folding Test Suite", () => { const testSuite = new (class extends BaseTestSuite { diff --git a/server/src/e2e/tolk/inlay-hints.test.ts b/server/src/e2e/tolk/inlay-hints.test.ts index 0966eae6..75fc5463 100644 --- a/server/src/e2e/tolk/inlay-hints.test.ts +++ b/server/src/e2e/tolk/inlay-hints.test.ts @@ -6,7 +6,7 @@ import * as vscode from "vscode" import type {TestCase} from "../common/TestParser" -import {BaseTestSuite} from "./BaseTestSuite" +import {BaseTestSuite} from "../common/BaseTestSuite" suite("Inlay Hints Test Suite", () => { const testSuite = new (class extends BaseTestSuite { diff --git a/server/src/e2e/tolk/inspections.test.ts b/server/src/e2e/tolk/inspections.test.ts index 390c8d38..7129443f 100644 --- a/server/src/e2e/tolk/inspections.test.ts +++ b/server/src/e2e/tolk/inspections.test.ts @@ -6,7 +6,7 @@ import * as vscode from "vscode" import type {TestCase} from "../common/TestParser" -import {BaseTestSuite} from "./BaseTestSuite" +import {BaseTestSuite} from "../common/BaseTestSuite" suite("Inspection Test Suite", () => { const testSuite = new (class extends BaseTestSuite { diff --git a/server/src/e2e/tolk/intentions.test.ts b/server/src/e2e/tolk/intentions.test.ts index c4249c02..1428b465 100644 --- a/server/src/e2e/tolk/intentions.test.ts +++ b/server/src/e2e/tolk/intentions.test.ts @@ -6,7 +6,7 @@ import * as vscode from "vscode" import type {TestCase} from "../common/TestParser" -import {BaseTestSuite} from "./BaseTestSuite" +import {BaseTestSuite} from "../common/BaseTestSuite" suite("Intentions Test Suite", () => { const testSuite = new (class extends BaseTestSuite { diff --git a/server/src/e2e/tolk/multifile-completion-select.test.ts b/server/src/e2e/tolk/multifile-completion-select.test.ts index e6b9050d..f4551f46 100644 --- a/server/src/e2e/tolk/multifile-completion-select.test.ts +++ b/server/src/e2e/tolk/multifile-completion-select.test.ts @@ -7,7 +7,7 @@ import {CompletionItem, Position} from "vscode" import type {TestCase} from "../common/TestParser" -import {BaseTestSuite} from "./BaseTestSuite" +import {BaseTestSuite} from 
"../common/BaseTestSuite" suite("Multi file Completion Test Suite", () => { const testSuite = new (class extends BaseTestSuite { diff --git a/server/src/e2e/tolk/multifile-inspections.test.ts b/server/src/e2e/tolk/multifile-inspections.test.ts index 059b117f..fb61605b 100644 --- a/server/src/e2e/tolk/multifile-inspections.test.ts +++ b/server/src/e2e/tolk/multifile-inspections.test.ts @@ -6,7 +6,7 @@ import * as vscode from "vscode" import type {TestCase} from "../common/TestParser" -import {BaseTestSuite} from "./BaseTestSuite" +import {BaseTestSuite} from "../common/BaseTestSuite" suite("Multi file Inspection Test Suite", () => { const testSuite = new (class extends BaseTestSuite { diff --git a/server/src/e2e/tolk/multifile-intentions.test.ts b/server/src/e2e/tolk/multifile-intentions.test.ts index 29a9428c..ee0bd124 100644 --- a/server/src/e2e/tolk/multifile-intentions.test.ts +++ b/server/src/e2e/tolk/multifile-intentions.test.ts @@ -6,7 +6,7 @@ import * as vscode from "vscode" import type {TestCase} from "../common/TestParser" -import {BaseTestSuite} from "./BaseTestSuite" +import {BaseTestSuite} from "../common/BaseTestSuite" suite("Multi file Intentions Test Suite", () => { const testSuite = new (class extends BaseTestSuite { diff --git a/server/src/e2e/tolk/multifile-resolving.test.ts b/server/src/e2e/tolk/multifile-resolving.test.ts index afc136d6..29965c48 100644 --- a/server/src/e2e/tolk/multifile-resolving.test.ts +++ b/server/src/e2e/tolk/multifile-resolving.test.ts @@ -8,7 +8,7 @@ import * as vscode from "vscode" import type {TestCase} from "../common/TestParser" -import {BaseTestSuite} from "./BaseTestSuite" +import {BaseTestSuite} from "../common/BaseTestSuite" suite("Multi file Resolve Test Suite", () => { const testSuite = new (class extends BaseTestSuite { diff --git a/server/src/e2e/tolk/references.test.ts b/server/src/e2e/tolk/references.test.ts index 29f858c5..d5d0ca7a 100644 --- a/server/src/e2e/tolk/references.test.ts +++ b/server/src/e2e/tolk/references.test.ts @@ -8,7 +8,7 @@ import type {TextDocumentPositionParams} from "vscode-languageserver" import type {TestCase} from "../common/TestParser" -import {BaseTestSuite} from "./BaseTestSuite" +import {BaseTestSuite} from "../common/BaseTestSuite" suite("References Test Suite", () => { const testSuite = new (class extends BaseTestSuite { diff --git a/server/src/e2e/tolk/rename.test.ts b/server/src/e2e/tolk/rename.test.ts index 10e6caa2..62150ee7 100644 --- a/server/src/e2e/tolk/rename.test.ts +++ b/server/src/e2e/tolk/rename.test.ts @@ -6,7 +6,7 @@ import * as vscode from "vscode" import type {TestCase} from "../common/TestParser" -import {BaseTestSuite} from "./BaseTestSuite" +import {BaseTestSuite} from "../common/BaseTestSuite" interface RenamePosition { readonly line: number diff --git a/server/src/e2e/tolk/resolving.test.ts b/server/src/e2e/tolk/resolving.test.ts index 9ae775c3..bfb1a596 100644 --- a/server/src/e2e/tolk/resolving.test.ts +++ b/server/src/e2e/tolk/resolving.test.ts @@ -6,7 +6,7 @@ import * as vscode from "vscode" import type {TestCase} from "../common/TestParser" -import {BaseTestSuite} from "./BaseTestSuite" +import {BaseTestSuite} from "../common/BaseTestSuite" suite("Resolve Test Suite", () => { const testSuite = new (class extends BaseTestSuite { diff --git a/server/src/e2e/tolk/signature-help.test.ts b/server/src/e2e/tolk/signature-help.test.ts index bb0ae309..5808bc45 100644 --- a/server/src/e2e/tolk/signature-help.test.ts +++ b/server/src/e2e/tolk/signature-help.test.ts @@ -6,7 +6,7 @@ 
import * as vscode from "vscode" import type {TestCase} from "../common/TestParser" -import {BaseTestSuite} from "./BaseTestSuite" +import {BaseTestSuite} from "../common/BaseTestSuite" suite("Signatures Test Suite", () => { const testSuite = new (class extends BaseTestSuite { diff --git a/server/src/e2e/tolk/type-resolving.test.ts b/server/src/e2e/tolk/type-resolving.test.ts index fa10e24e..acff2bdc 100644 --- a/server/src/e2e/tolk/type-resolving.test.ts +++ b/server/src/e2e/tolk/type-resolving.test.ts @@ -6,7 +6,7 @@ import * as vscode from "vscode" import type {TestCase} from "../common/TestParser" -import {BaseTestSuite} from "./BaseTestSuite" +import {BaseTestSuite} from "../common/BaseTestSuite" suite("Type Resolve Test Suite", () => { const testSuite = new (class extends BaseTestSuite { diff --git a/server/src/e2e/tolk/types-2.test.ts b/server/src/e2e/tolk/types-2.test.ts index 162ad9ac..b89d89d5 100644 --- a/server/src/e2e/tolk/types-2.test.ts +++ b/server/src/e2e/tolk/types-2.test.ts @@ -6,7 +6,7 @@ import * as vscode from "vscode" import type {TestCase} from "../common/TestParser" -import {BaseTestSuite} from "./BaseTestSuite" +import {BaseTestSuite} from "../common/BaseTestSuite" export interface GetTypeAtPositionParams { readonly textDocument: { diff --git a/server/src/e2e/tolk/types.test.ts b/server/src/e2e/tolk/types.test.ts index 525bd8dc..d16ff4da 100644 --- a/server/src/e2e/tolk/types.test.ts +++ b/server/src/e2e/tolk/types.test.ts @@ -6,7 +6,7 @@ import * as vscode from "vscode" import type {TestCase} from "../common/TestParser" -import {BaseTestSuite} from "./BaseTestSuite" +import {BaseTestSuite} from "../common/BaseTestSuite" export interface GetTypeAtPositionParams { readonly textDocument: { diff --git a/server/src/languages/func/inspections/Inspection.ts b/server/src/languages/func/inspections/Inspection.ts index 4df63b45..77baf4fd 100644 --- a/server/src/languages/func/inspections/Inspection.ts +++ b/server/src/languages/func/inspections/Inspection.ts @@ -9,6 +9,7 @@ export const InspectionIds = { UNUSED_TYPE_PARAMETER: "unused-type-parameter", UNUSED_VARIABLE: "unused-variable", UNUSED_IMPORT: "unused-import", + UNUSED_IMPURE: "unused-impure", } as const export type InspectionId = (typeof InspectionIds)[keyof typeof InspectionIds] diff --git a/server/src/languages/func/inspections/UnusedImpure.ts b/server/src/languages/func/inspections/UnusedImpure.ts new file mode 100644 index 00000000..50c6fc98 --- /dev/null +++ b/server/src/languages/func/inspections/UnusedImpure.ts @@ -0,0 +1,283 @@ +// SPDX-License-Identifier: MIT +// Copyright © 2025 TON Core + +import * as lsp from "vscode-languageserver" + +import {Node} from "web-tree-sitter" + +import type {FuncFile} from "@server/languages/func/psi/FuncFile" + +import {RecursiveVisitor} from "@server/visitor/visitor" +import {Func} from "@server/languages/func/psi/Decls" +import {asLspRange} from "@server/utils/position" +import {closestNamedSibling, parentOfType, parentOfTypeWithCb} from "@server/psi/utils" +import {Referent} from "@server/languages/func/psi/Referent" +import {FunCBindingResolver} from "@server/languages/func/psi/BindingResolver" +import {FUNC_PARSED_FILES_CACHE} from "@server/files" + +import {UnusedInspection} from "./UnusedInspection" +import {Inspection, InspectionIds} from "./Inspection" + +export class UnusedImpureInspection extends UnusedInspection implements Inspection { + public readonly id: "unused-impure" = InspectionIds.UNUSED_IMPURE + + private readonly impureMap: Map + private readonly 
dropableMap: Map + private readonly resultsCache: Map + private readonly impureBuiltins: Set + + public constructor() { + super() + this.resultsCache = new Map() + this.impureMap = new Map() + this.dropableMap = new Map() + this.impureBuiltins = new Set([ + "throw", + "throw_if", + "throw_unless", + "throw_arg", + "throw_arg_op", + "throw_arg_unless", + "~dump", + "~strdump", + ]) + } + + private getCallDef(call: Node, mode: "dropable" | "impure" = "dropable"): Func | undefined { + let callDef: Func | undefined + const lookupMap = mode == "dropable" ? this.dropableMap : this.impureMap + const callType = call.type + if (callType == "function_application") { + const funcIdentifier = call.childForFieldName("callee") + if (funcIdentifier) { + callDef = lookupMap.get(funcIdentifier.text) + } + } else if (callType == "method_call") { + const funcIdentifier = call.childForFieldName("method_name") + if (funcIdentifier) { + const methodName = funcIdentifier.text + callDef = lookupMap.get(methodName) + callDef ??= lookupMap.get("~" + methodName) + } + } else { + throw new Error(`Unsupported call type ${call.toString()}`) + } + + return callDef + } + private isImpureBuiltIn(call: Node): boolean { + let calleeName: Node | null + switch (call.type) { + case "function_application": { + calleeName = call.childForFieldName("callee") + if (calleeName) { + return this.impureBuiltins.has(calleeName.text) + } + break + } + case "method_call": { + calleeName = call.childForFieldName("method_name") + if (calleeName) { + return this.impureBuiltins.has(calleeName.text) + } + } + } + return false + } + + private isCall(call: Node): boolean { + return call.type == "function_application" || call.type == "method_call" + } + + private setCache(node: Node, result: boolean): void { + const cacheKey = [ + node.startPosition.row, + node.startPosition.column, + node.endPosition.row, + node.endPosition.column, + ].join(":") + this.resultsCache.set(cacheKey, result) + } + private getCache(node: Node): boolean | undefined { + const cacheKey = [ + node.startPosition.row, + node.startPosition.column, + node.endPosition.row, + node.endPosition.column, + ].join(":") + return this.resultsCache.get(cacheKey) + } + + protected checkFile(file: FuncFile, diagnostics: lsp.Diagnostic[]): void { + // Populate impure functions map + FUNC_PARSED_FILES_CACHE.forEach(parsedFile => { + parsedFile.getFunctions().forEach(f => { + if (f.isImpure) { + this.impureMap.set(f.name(true), f) + } else { + this.dropableMap.set(f.name(true), f) + } + }) + }) + const bindResolver = new FunCBindingResolver(file) + RecursiveVisitor.visit(file.rootNode, (node): boolean => { + if (!this.isCall(node)) { + return true + } + let willDrop = false + // Skip impure builtins calls + if (this.isImpureBuiltIn(node)) { + return true + } + // const droppableDef = this.getCallDef(node) + if (this.checkCallWillDrop(node, file, bindResolver)) { + willDrop = true + const range = asLspRange(node) + diagnostics.push({ + severity: lsp.DiagnosticSeverity.Error, + code: "unused-impure", + range, + message: "This call will be dropped due to lack of impure specifier!", + source: "func", + }) + } + this.setCache(node, willDrop) + return true + }) + } + + private checkRecursiveReference( + node: Node, + file: FuncFile, + bindResolver: FunCBindingResolver, + ): boolean { + const expressionParent = parentOfTypeWithCb<{parent: Node; origin: Node}>( + node, + (parent, origin) => { + return {parent, origin} + }, + "expression_statement", + "return_statement", + "function_application", + 
"method_call", + "if_statement", + "while_statement", + "do_while_statement", + "repeat_statement", + "return_statement", + ) + if (!expressionParent) { + // Could happen in incomplete code + return false + } + const parentType = expressionParent.parent.type + // If call is in the block_statement of any kind, it will be a child of expression_statement + // Otherwise it is in condition block of if/while/do while + // Or in arguments clause of other function_application/method_call + if (parentType !== "expression_statement") { + if (parentType == "function_application" || parentType == "method_call") { + return this.checkCallWillDrop(expressionParent.parent, file, bindResolver) + } + // If expression is in condition or return statement it will not be dropped + return false + } + + // We are in the expression expression_statement + // Bind the values from the expression + const resolvedBinding = bindResolver.resolve(expressionParent.parent) + // If no lvalue, non-impure call will drop + if (resolvedBinding.bindings.size === 0) { + return true + } + // If no identifiers referenced in lvalue, means those are whole type and will be dropped + // const affectedIdentifiers = resolvedBinding.bindings.values() + + for (const boundValue of resolvedBinding.bindings.values()) { + // Bound is expression wide. Process only call specific parts of bind. + if (!boundValue.producer_exp.some(n => n.id == node.id)) { + continue + } + // Find references to the bound variables from below the current expression. + const references = new Referent(boundValue.identifier, file) + .findReferences({limit: Infinity}) + .filter(ref => ref.node.startIndex >= node.endIndex) + // Has to be referenced in non impure call, conditional or return statement to not drop + for (const ref of references) { + const parent = parentOfType( + ref.node, + "expression_statement", // But don't go above expression_statement + "function_application", + "method_call", + "if_statement", + "while_statement", + "do_while_statement", + "repeat_statement", + "return_statement", + ) + if (!parent) { + continue + } + if (parent.type !== "expression_statement") { + let willDrop = false + if (this.isCall(parent)) { + willDrop = this.checkCallWillDrop(parent, file, bindResolver) + } + this.setCache(parent, willDrop) + // Only return here in case the call will not drop, otherwise continue + if (willDrop) { + continue + } + return willDrop + } + // Check reference in method call + const refSibling = closestNamedSibling( + ref.node, + "next", + sibl => sibl.type == "method_call", + ) + if (refSibling) { + // If this is a droppable call, go to next ref, else expression is not droppable + if (!this.checkCallWillDrop(refSibling, file, bindResolver)) { + return false + } + } else { + if (!this.checkRecursiveReference(ref.node, ref.file, bindResolver)) { + return false + } + } + } + } + return true + } + + private checkCallWillDrop( + node: Node, + file: FuncFile, + bindResolver: FunCBindingResolver, + ): boolean { + const cachedRes = this.getCache(node) + if (cachedRes !== undefined) { + return cachedRes + } + + const definition = this.getCallDef(node, "dropable") + + if (!definition) { + // If no dropable def found, check that impure is implicit just in case + const willDrop = !(this.getCallDef(node, "impure") ?? this.isImpureBuiltIn(node)) + this.setCache(node, willDrop) + return willDrop + } + + const returnExp = definition.returnType() + if (returnExp !== null) { + // If return type of a function is empty tensor - check no more. 
+ if (returnExp.node.text == "()") { + return true + } + } + const dropRes = this.checkRecursiveReference(node, file, bindResolver) + this.setCache(node, dropRes) + return dropRes + } +} diff --git a/server/src/languages/func/inspections/index.ts b/server/src/languages/func/inspections/index.ts index 87ee79a2..de5cb676 100644 --- a/server/src/languages/func/inspections/index.ts +++ b/server/src/languages/func/inspections/index.ts @@ -8,6 +8,8 @@ import {UnusedVariableInspection} from "@server/languages/func/inspections/Unuse import {UnusedImportInspection} from "@server/languages/func/inspections/UnusedImportInspection" import {UnusedTypeParameterInspection} from "@server/languages/func/inspections/UnusedTypeParameterInspection" +import {UnusedImpureInspection} from "./UnusedImpure" + export async function runFuncInspections( uri: string, file: FuncFile, @@ -18,6 +20,7 @@ export async function runFuncInspections( new UnusedTypeParameterInspection(), new UnusedVariableInspection(), new UnusedImportInspection(), + new UnusedImpureInspection(), ] const settings = await getDocumentSettings(uri) diff --git a/server/src/languages/func/psi/BindingResolver.ts b/server/src/languages/func/psi/BindingResolver.ts new file mode 100644 index 00000000..b33810ef --- /dev/null +++ b/server/src/languages/func/psi/BindingResolver.ts @@ -0,0 +1,501 @@ +import type {Node as SyntaxNode} from "web-tree-sitter" + +import {Func} from "@server/languages/func/psi/Decls" + +import {closestNamedSibling} from "@server/psi/utils" +import {FUNC_PARSED_FILES_CACHE} from "@server/files" + +import {Expression} from "./FuncNode" +import {FuncFile} from "./FuncFile" + +interface Binding { + readonly identifier: SyntaxNode + readonly producer_exp: SyntaxNode[] + readonly type?: string +} + +interface BindingResult { + readonly expression: Expression + readonly lhs: SyntaxNode[] + readonly rhs: SyntaxNode[] + readonly bindings: Map<string, Binding> +} + +export class FunCBindingResolver { + protected funcMap: Map<string, Func> + protected bindings: Map<string, Binding> + protected assignmentOps: Set<string> + + public constructor(protected readonly file: FuncFile) { + this.bindings = new Map() + this.funcMap = new Map() + this.assignmentOps = new Set([ + "=", + "+=", + "-=", + "*=", + "/=", + "~/=", + "^/=", + "%=", + "~%=", + "^%=", + "<<=", + ">>=", + "~>>=", + "^>>=", + "&=", + "|=", + "^=", + ]) + + FUNC_PARSED_FILES_CACHE.forEach(parsedFile => { + parsedFile.getFunctions().forEach(f => { + this.funcMap.set(f.name(), f) + }) + }) + } + + public resolve(expression: SyntaxNode): BindingResult { + const lhs: SyntaxNode[] = [] + const rhs: SyntaxNode[] = [] + let equalsFound = false + + for (const curChild of expression.children) { + if (!curChild) { + continue + } + + if (this.assignmentOps.has(curChild.text)) { + equalsFound = true + if (lhs.length === 0) { + throw new RangeError("Equals encountered before first lhs identifier") + } + } + if (curChild.isNamed) { + if (equalsFound) { + rhs.push(curChild) + } else { + lhs.push(curChild) + } + } + } + + const bindRes: BindingResult = { + expression: new Expression(expression, this.file), + lhs, + rhs, + bindings: new Map(), + } + if (lhs[0].type == "underscore") { + return bindRes + } + if (lhs.length > 1 && rhs.length > 0) { + // Do we even need this?
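+ // More than one lvalue node combined with an rhs is not resolved yet; bail out with an empty binding set.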
+ console.log("TOTO multi lhs bindings") + return bindRes + } + + const pattern = this.simplifyNested(lhs[0]) + if (rhs.length > 0) { + if (rhs.length == 1) { + rhs[0] = this.simplifyNested(rhs[0]) + } + this.walkPattern(pattern, rhs) + } else { + // Without rhs there still may be method calls on left. + // ds~skip_bits(32); ;; Stuff like that + this.bindModifyingCalls(lhs) + } + + // Copy the map for the output + for (const [k, v] of this.bindings.entries()) { + bindRes.bindings.set(k, v) + } + // Free up the map + this.bindings.clear() + return bindRes + } + + protected simplifyNested(node: SyntaxNode): SyntaxNode { + /* + * Dirty code for dirty issues. + * Heplfull in cases like + * (int a, int b) foo() { + * return (42, 43); + * } + * This shit, unfortunately is perfectly legal expression. + * + * var (((((a, b))))) = foo(); + * One could even write + * int ((((a)))) = b; ;; Why are u doing dat to me? + * Or + * b = (((((a))))); ;; Why, dawg!! + * + * And we need to get to the bottom of this without processing + * it all over honest binding mechanism. + * Besides, this simplification singificantly speeds binding up. + */ + try { + const childNode = node.firstNamedChild + // If there is no named children, we're already at the bottom of this. + if (childNode) { + const nodeType = node.type + // Theese types have two named children, one of which is type, + // so binding part is usually a level lower + const specialType = + nodeType == "tensor_vars_declaration" || nodeType == "tuple_vars_declaration" + // If there is more named children, that is where actual binding begins + if (node.namedChildCount == 1 || specialType) { + let nextNode = childNode + if (specialType) { + const specialChildren = node + .childrenForFieldName("vars") + .filter(c => c !== null) + if (specialChildren.length == 1) { + nextNode = specialChildren[0] + } else { + return node + } + } + return this.simplifyNested(nextNode) + } + } + } catch (error) { + console.log(error) + } + return node + } + + protected walkPattern(pattern: SyntaxNode, value: SyntaxNode[]): void { + if (pattern.type == "underscore") { + return + } + + try { + switch (pattern.type) { + case "identifier": { + this.bindIdentifier(pattern, value) + break + } + case "local_vars_declaration": { + const curLhs = pattern.childForFieldName("lhs") + if (!curLhs) { + throw new Error("No lhs in var declaration") + } + this.walkPattern(curLhs, value) + break + } + case "var_declaration": { + const idName = pattern.childForFieldName("name") + if (idName) { + this.bindIdentifier(idName, value) + } + break + } + case "tensor_vars_declaration": + case "tensor_expression": + case "typed_tuple": + case "parenthesized_expression": + case "nested_tensor_declaration": + case "grouped_expression": + case "tuple_vars_declaration": { + this.bindCollection(pattern, value) + break + } + } + } catch (error) { + console.error( + `Failed to waks pattern ${error} ${pattern.toString()}, ${value.join("")}`, + ) + } + } + + protected bindModifyingCalls(value: SyntaxNode[]): void { + value.forEach(curNode => { + if (curNode.type == "method_call") { + // Only modifying calls + if (curNode.children[0]?.text == "~") { + const identifier = curNode.previousNamedSibling + if (identifier && identifier.type == "identifier") { + this.bindToMethodCall(identifier, curNode) + } + } + } else { + // In case calls are in tensor expressions + curNode.descendantsOfType("method_call").forEach(methodCall => { + if (methodCall && methodCall.children[0]?.text == "~") { + const identifier = 
methodCall.previousNamedSibling + if (identifier && identifier.type == "identifier") { + this.bindToMethodCall(identifier, methodCall) + } + } + }) + } + }) + } + protected bindIdentifier( + target: SyntaxNode, + value: SyntaxNode[], + checkMethodRhs: boolean = true, + type?: string, + ): void { + if (checkMethodRhs) { + this.bindModifyingCalls(value) + } + this.bindings.set(target.text, { + identifier: target, + producer_exp: value, + type, + }) + } + + private bindCollection(target: SyntaxNode, value: SyntaxNode[]): void { + if (value.length >= 2) { + value.forEach(curNode => { + if (curNode.type == "method_call") { + this.bindToMethodCall(target, curNode) + } + }) + } else if (value.length == 1) { + const curValue = value[0] + const curValueType = curValue.type + if (curValueType == "function_application") { + this.bindToFunctionCall(target, curValue) + return + } + const filteredTarget = + target.type == "tensor_vars_declaration" || target.type == "tuple_vars_declaration" + ? target.childrenForFieldName("vars").filter(c => c?.isNamed) + : target.namedChildren + if (filteredTarget.length != curValue.namedChildCount) { + throw new Error( + `Arity error binding ${target.toString()} to ${curValue.toString()}`, + ) + } + for (const [i, nextTarget] of filteredTarget.entries()) { + const actualTarget = + nextTarget?.type == "grouped_expression" + ? nextTarget.firstNamedChild + : nextTarget + if (!actualTarget) { + continue + } + + const nextValue = curValue.namedChildren[i] + const actualValue = + nextValue?.type == "grouped_expression" ? nextValue.firstNamedChild : nextValue + if (!actualValue) { + throw new Error(`Undefined next value ${curValue.toString()}`) + } + + this.walkPattern(actualTarget, [actualValue]) + } + /* + } else { + throw new TypeError(`Type ${curValueType} is not yet supported!`) + } + */ + } + } + + private bindToMethodCall(target: SyntaxNode, value: SyntaxNode): void { + const isModifying = value.children[0]?.text == "~" + const methodName = value.childForFieldName("method_name")?.text + if (!methodName) { + throw new Error(`Failed to find method name for ${value.toString()}`) + } + let methodDecl = this.funcMap.get(methodName) + if (!methodDecl) { + // There could be a method with the ~ prefix being part of the name + if (isModifying && !methodName.startsWith("~")) { + methodDecl = this.funcMap.get("~" + methodName) + } + } + if (!methodDecl) { + throw new Error(`Failed to get method declaration ${methodName}`) + } + const retType = methodDecl.returnType() + + if (!retType) { + throw new Error(`Method ${methodName} has no return type`) + } + + // For a non-modifying method, bind as for a normal function call + let bindScope = retType.node + + if (isModifying) { + if (retType.node.type !== "tensor_type") { + throw new TypeError( + `Expected tensor_type for modifying method return type, got ${retType.node.type}`, + ) + } + + const firstArg = closestNamedSibling(value, "prev", sybl => sybl.type == "identifier") + if (!firstArg) { + throw new Error(`First arg not found for modifying method call ${value.toString()}`) + } + this.bindIdentifier(firstArg, [value], false) + // If firstArg is the same as target, we're done here.
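+ // (its binding to this call was already recorded by bindIdentifier above, so there is nothing left to resolve)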
+ if (firstArg.id == target.id) { + return + } + + // Next tensor type + let retTensor: SyntaxNode | undefined + const childrenCount = bindScope.namedChildCount + // First is bound to the first method arg already + for (let i = 1; i < childrenCount; i++) { + const curChild = bindScope.namedChild(i) + if (!curChild) { + continue + } + const childType = curChild.type + if (childType == "primitive_type") { + this.bindIdentifier(curChild, [value], false) + return + } + if (childType == "tensor_type" || childType == "tuple_type") { + retTensor = curChild + break + } + } + if (!retTensor) { + throw new Error( + `Return tensor not defined for method ${methodDecl.node.toString()}`, + ) + } + // If sub tensor is empty, we can return at this point + if (retTensor.namedChildCount == 0) { + return + } + // Otherwise bind to the sub-tensor + bindScope = retTensor + } + + this.bindToReturnType(target, value, this.simplifyNested(bindScope), false) + } + private bindToFunctionCall(target: SyntaxNode, value: SyntaxNode): void { + const funcIdentifier = value.childForFieldName("callee") + if (!funcIdentifier) { + throw new Error(`Calle not found: ${value.toString()}`) + } + const funcName = funcIdentifier.text + const funcDecl = this.funcMap.get(funcName) + if (!funcDecl) { + throw new Error(`Failed to find function declaration ${funcName}`) + } + const retType = funcDecl.returnType() + if (!retType) { + throw new Error(`Function ${funcName} without return type. Grammar failure?`) + } + this.bindToReturnType(target, value, this.simplifyNested(retType.node)) + } + private bindToReturnType( + target: SyntaxNode, + callNode: SyntaxNode, + retType: SyntaxNode, + checkMethodRhs: boolean = true, + ): void { + const targetType = target.type + let targetFiltered: (SyntaxNode | null)[] + // Hacky,but drop types + if (targetType == "tensor_vars_declaration" || targetType == "tuple_vars_declaration") { + targetFiltered = target.childrenForFieldName("vars").filter(v => v?.isNamed) + } else if (targetType == "var_declaration" || targetType == "identifier") { + // Name is only part of var declaration + const identifierNode = target.childForFieldName("name") ?? target + this.bindIdentifier(identifierNode, [callNode], checkMethodRhs) + return + } else if (targetType == "grouped_expression") { + targetFiltered = target.firstNamedChild?.namedChildren ?? [] + } else { + targetFiltered = target.namedChildren + } + + if (targetFiltered.length != retType.namedChildCount) { + if (targetFiltered.length == 1 && retType.type == "primitive_type") { + const targetNode = targetFiltered[0] + if (targetNode) { + this.bindIdentifier(targetNode, [callNode], checkMethodRhs) + return + } + } + throw new Error(`Return type arity error ${target.toString()} ${retType.toString()}`) + } + + for (const [i, pattern] of targetFiltered.entries()) { + const bindRhs = retType.namedChildren[i] + + if (pattern == null || pattern.type == "underscore") { + continue + } + if (!bindRhs) { + throw new Error(`Type node can't be null`) + } + + const bindType = bindRhs.type + const patternType = pattern.type + + switch (patternType) { + case "tuple_vars_declaration": + case "tuple_expression": { + if (bindType != "tuple_type") { + throw new Error(`Can't map ${patternType} to ${bindType}`) + } + this.bindToReturnType(pattern, callNode, bindRhs, checkMethodRhs) + break + } + case "local_vars_declaration": { + const lhs = pattern.childForFieldName("lhs") + if (!lhs) { + throw new Error(`No lhs in local_vars_declaration. 
Broken grammar`) + } + this.bindToReturnType( + this.simplifyNested(lhs), + callNode, + this.simplifyNested(bindRhs), + checkMethodRhs, + ) + break + } + case "grouped_expression": { + const nextChild = pattern.firstNamedChild + if (!nextChild) { + throw new Error("No child for grouped_expression. Borken grammar") + } + this.bindToReturnType( + this.simplifyNested(nextChild), + callNode, + this.simplifyNested(bindRhs), + checkMethodRhs, + ) + break + } + case "tensor_var_declaration": + case "nested_tensor_declaration": + case "tensor_expression": { + if (bindType !== "tensor_type") { + throw new Error(`Cant map ${patternType} to ${bindType}`) + } + this.bindToReturnType(pattern, callNode, bindRhs, checkMethodRhs) + break + } + case "var_declaration": { + const varName = pattern.childForFieldName("name") + if (!varName) { + throw new Error( + `No variable name in var_declaration. Broken grammar ${pattern.toString()}`, + ) + } + this.bindIdentifier(varName, [callNode], checkMethodRhs, bindType) + break + } + case "identifier": { + this.bindIdentifier(pattern, [callNode], checkMethodRhs, bindType) + break + } + } + } + } +} diff --git a/server/src/languages/func/psi/Decls.ts b/server/src/languages/func/psi/Decls.ts index bed353c0..8354827c 100644 --- a/server/src/languages/func/psi/Decls.ts +++ b/server/src/languages/func/psi/Decls.ts @@ -105,6 +105,10 @@ export class FunctionBase extends NamedNode { const methodId = specifiers?.children.find(it => it?.type === "method_id") ?? null return methodId !== null } + public get isImpure(): boolean { + const specifiers = this.node.childForFieldName("specifiers") + return Boolean(specifiers?.children.find(it => it?.type === "impure")) + } public get hasExplicitMethodId(): boolean { // check for diff --git a/server/src/languages/func/psi/Reference.ts b/server/src/languages/func/psi/Reference.ts index 4bca660c..0efcc4ff 100644 --- a/server/src/languages/func/psi/Reference.ts +++ b/server/src/languages/func/psi/Reference.ts @@ -329,6 +329,22 @@ export class Reference { } } } + if (firstChild?.type === "tensor_expression") { + // (int foo, cell bar) = (42, someCall()) + for (const childDeclaration of firstChild.descendantsOfType( + "local_vars_declaration", + )) { + if (!childDeclaration) { + continue + } + const lhs = childDeclaration.childForFieldName("lhs") + if (lhs) { + if (!this.processVariableDeclaration(lhs, proc, file, state)) { + return false + } + } + } + } if (firstChild?.type === "typed_tuple") { // [_, int a, int b] = [1, 2]; @@ -355,7 +371,11 @@ export class Reference { if (!proc.execute(new VarDeclaration(lhs, file), state)) return false } - if (lhs.type === "tuple_vars_declaration" || lhs.type === "tensor_vars_declaration") { + if ( + lhs.type === "tuple_vars_declaration" || + lhs.type === "tensor_vars_declaration" || + lhs.type === "nested_tensor_declaration" + ) { const vars = lhs.childrenForFieldName("vars") for (const variable of vars) { if (!variable) continue diff --git a/server/src/languages/func/tree-sitter-func/grammar.js b/server/src/languages/func/tree-sitter-func/grammar.js index f6dbf92b..84d0f394 100644 --- a/server/src/languages/func/tree-sitter-func/grammar.js +++ b/server/src/languages/func/tree-sitter-func/grammar.js @@ -14,6 +14,9 @@ function commaSep1(rule) { function commaSep2(rule) { return seq(rule, repeat1(seq(",", rule))) } +function commaSep1Trailing(rule) { + return seq(commaSep1(rule), optional(",")) +} const FUNC_GRAMMAR = { source_file: $ => repeat($._top_level_item), @@ -72,7 +75,7 @@ const FUNC_GRAMMAR = { 
seq( field("type_parameters", optional($.type_parameters)), field("return_type", $._type_hint), - field("name", $.identifier), + field("name", $.function_name), choice( seq( field("parameters", $.parameter_list), @@ -161,7 +164,7 @@ const FUNC_GRAMMAR = { return_statement: $ => seq("return", $._expression, ";"), block_statement: $ => seq("{", repeat($._statement), "}"), - expression_statement: $ => prec.right(seq($._expression, optional(";"))), + expression_statement: $ => prec.right(seq($._expression, ";")), empty_statement: _ => ";", repeat_statement: $ => seq("repeat", field("count", $._expression), field("body", $.block_statement)), @@ -201,7 +204,7 @@ const FUNC_GRAMMAR = { _expression: $ => $._expr10, _expr10: $ => - prec( + prec.right( 10, seq( $._expr13, @@ -232,7 +235,22 @@ const FUNC_GRAMMAR = { ), ), - _expr13: $ => prec(13, seq($._expr15, optional(seq("?", $._expression, ":", $._expr13)))), + ternary_condition: $ => $._expr15, + ternary_expression: $ => + prec.right( + 13, + seq( + field("condition", $.ternary_condition), + "?", + field("consequent", $._expression), + ":", + field("alternative", $._expression), + ), + ), + + _expr13: $ => prec(13, choice($._expr15, $.ternary_expression)), + + // _expr13: $ => prec(13, seq($._expr15, optional(seq("?", $._expression, ":", $._expr13)))), _expr15: $ => prec( @@ -287,16 +305,82 @@ const FUNC_GRAMMAR = { ), ), ), - local_vars_declaration: $ => prec.dynamic(90, field("lhs", $._var_declaration_lhs)), - tuple_vars_declaration: $ => - prec(100, seq("[", field("vars", commaSep1($._var_declaration_lhs)), optional(","), "]")), + // ------------------------------------------------------------------ + // local vars + var_declaration: $ => seq(field("type", optional($._type_hint)), field("name", $.identifier)), + + nested_tensor_declaration: $ => + prec( + 101, + seq( + "(", + field( + "vars", + commaSep1Trailing( + choice( + $.nested_tensor_declaration, + $.var_declaration, + $.tuple_vars_declaration, + $.underscore, + ), + ), + ), + ")", + ), + ), tensor_vars_declaration: $ => - prec(100, seq("(", field("vars", commaSep1($._var_declaration_lhs)), optional(","), ")")), - var_declaration: $ => seq(field("type", $._type_hint), field("name", $.identifier)), + prec( + 101, + seq( + field("type", $.var_type), // e.g. 
`var` + "(", + field( + "vars", + commaSep1Trailing( + choice( + $.nested_tensor_declaration, + $.var_declaration, + $.tuple_vars_declaration, + $.underscore, + ), + ), + ), + ")", + ), + ), + + _multiple_vars_declaration: $ => + prec.left(90, seq(choice($.tensor_vars_declaration, $.tuple_vars_declaration))), + local_vars_declaration: $ => + field("lhs", choice($._multiple_vars_declaration, $.var_declaration)), + //local_vars_declaration: $ => prec.dynamic(90, field("lhs", $._var_declaration_lhs)), + + tuple_vars_declaration: $ => + prec( + 101, + seq( + field("type", optional($.var_type)), + "[", + field( + "vars", + commaSep1Trailing( + choice( + $.var_declaration, + $.nested_tensor_declaration, + $.tuple_vars_declaration, + ), + ), + ), + "]", + ), + ), + // tensor_vars_declaration: $ => + // prec(100, seq("(", field("vars", commaSep1Trailing($._var_declaration_lhs)), optional(","), ")")), + // var_declaration: $ => seq(field("type", $._type_hint), field("name", $.identifier)), - _var_declaration_lhs: $ => - choice($.tuple_vars_declaration, $.tensor_vars_declaration, $.var_declaration), + // _var_declaration_lhs: $ => + // choice($.tuple_vars_declaration, $.tensor_vars_declaration, $.var_declaration), type_expression: $ => prec( @@ -332,10 +416,12 @@ const FUNC_GRAMMAR = { _expr100: $ => prec(100, choice($._nontype_expr100)), + // We need named not here, otherwise all children will be glued into expressions fileds individually + grouped_expression: $ => $._expression, parenthesized_expression: $ => seq("(", $._expression, ")"), tensor_expression: $ => - choice(seq("(", ")"), seq("(", field("expressions", commaSep2($._expression)), ")")), - typed_tuple: $ => seq("[", field("expressions", commaSep($._expression)), "]"), + choice(seq("(", ")"), seq("(", field("expressions", commaSep2($.grouped_expression)), ")")), + typed_tuple: $ => seq("[", field("expressions", commaSep($.grouped_expression)), "]"), // ---------------------------------------------------------- // type system @@ -352,15 +438,15 @@ const FUNC_GRAMMAR = { $.type_identifier, $.tensor_type, $.tuple_type, - $._parenthesized_type, + // $._parenthesized_type, ), - _parenthesized_type: $ => seq("(", $._type_hint, ")"), + // _parenthesized_type: $ => alias(seq("(", $._type_hint, ")"), $.tensor_type), primitive_type: $ => choice("int", "cell", "slice", "builder", "cont", "tuple"), // constant_type: $ => choice("int", "slice"), - tensor_type: $ => choice(seq("(", ")"), seq("(", field("types", commaSep2($._type_hint)), ")")), + tensor_type: $ => choice(seq("(", ")"), seq("(", field("types", commaSep1($._type_hint)), ")")), tuple_type: $ => seq("[", field("types", commaSep($._type_hint)), "]"), @@ -385,6 +471,7 @@ const FUNC_GRAMMAR = { // actually, FunC identifiers are much more flexible identifier: _ => /`[^`]+`|[a-zA-Z0-9_\$%][^\s\+\-\*\/%,\.;\(\)\{\}\[\]=\|\^\~]*/, underscore: _ => "_", + function_name: $ => /(`.*`)|((\.|~)?(([$%a-zA-Z0-9_](\w|['?:$%!])+)|([a-zA-Z%$])))/, // multiline_comment: $ => seq('{-', repeat(choice(/./, $.multiline_comment)), '-}'), // unfortunately getting panic while generating parser with support for nested comments @@ -414,5 +501,6 @@ module.exports = grammar({ [$.parameter_list_relaxed, $.parameter_list], [$.tensor_expression, $.tensor_type], [$.typed_tuple, $.tuple_type], + [$.var_declaration, $.type_identifier], ], }) diff --git a/server/src/psi/utils.ts b/server/src/psi/utils.ts index 997c8fbd..99f93724 100644 --- a/server/src/psi/utils.ts +++ b/server/src/psi/utils.ts @@ -13,6 +13,38 @@ export 
function parentOfType(node: SyntaxNode, ...types: readonly string[]): Syn return null } +export function parentOfTypeWithCb( + node: SyntaxNode, + cb: (parent: SyntaxNode, branch: SyntaxNode) => T, + ...types: readonly string[] +): T | null { + let parent = node.parent + let prevNode = node + + for (let i = 0; i < 100; i++) { + if (parent === null) return null + if (types.includes(parent.type)) return cb(parent, prevNode) + prevNode = parent + parent = parent.parent + } + + return null +} +export function closestNamedSibling( + node: SyntaxNode, + direction: "prev" | "next", + cb: (sibling: SyntaxNode) => boolean, +): SyntaxNode | null { + let curSibling = direction == "prev" ? node.previousNamedSibling : node.nextNamedSibling + while (curSibling) { + if (cb(curSibling)) { + return curSibling + } + curSibling = + direction == "prev" ? curSibling.previousNamedSibling : curSibling.nextNamedSibling + } + return null +} export function measureTime(label: string, fn: () => T): T { const startTime = performance.now() diff --git a/server/src/server.ts b/server/src/server.ts index f938b2ac..099b7c7c 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -236,6 +236,29 @@ const showErrorMessage = (msg: string): void => { }) } +async function findFuncStdlib(): Promise<{path: string; uri: string} | null> { + // FunC settings currently doesn't even have stdlib path + // So, we get straight to business + const funcStdlibEnv = process.env["FUNC_STDLIB"] + const testStdlibPath = process.env["TEST_FUNC_STDLIB_PATH"] + + const searchDirs = [testStdlibPath, funcStdlibEnv] + + for (const searchDir of searchDirs) { + if (searchDir) { + const stdlibUri = filePathToUri(searchDir) + if (await existsVFS(globalVFS, stdlibUri)) { + return { + path: searchDir, + uri: stdlibUri, + } + } + } + } + // Since FunC is typically included directly, we can skip error reporting + return null +} + async function findTolkStdlib(settings: ServerSettings, rootDir: string): Promise { if (settings.tolk.stdlib.path !== null && settings.tolk.stdlib.path.length > 0) { return settings.tolk.stdlib.path @@ -337,13 +360,20 @@ async function initialize(): Promise { const stdlibPath = await findTolkStdlib(settings, rootDir) if (stdlibPath !== null) { - reporter.report(50, "Indexing: (1/3) Standard Library") + reporter.report(50, "Indexing: (1/3) TOLK Standard Library") const stdlibUri = filePathToUri(stdlibPath) tolkIndex.withStdlibRoot(new TolkIndexRoot("stdlib", stdlibUri)) const stdlibRoot = new TolkIndexingRoot(stdlibUri, IndexingRootKind.Stdlib) await stdlibRoot.index() } + const funcStdLib = await findFuncStdlib() + if (funcStdLib) { + reporter.report(50, "Indexing: (1/3) FUNC Standard Library") + funcIndex.withStdlibRoot(new FuncIndexRoot("stdlib", funcStdLib.uri)) + const stdlibRoot = new FuncIndexingRoot(funcStdLib.uri, IndexingRootKind.Stdlib) + await stdlibRoot.index() + } setProjectTolkStdlibPath(stdlibPath)