diff --git a/.github/workflows/check.yml b/.github/workflows/check.yml index c2929990..e91c9937 100644 --- a/.github/workflows/check.yml +++ b/.github/workflows/check.yml @@ -12,8 +12,6 @@ jobs: steps: - uses: actions/checkout@v4 - with: - submodules: recursive - name: Install Foundry uses: foundry-rs/foundry-toolchain@v1 @@ -25,7 +23,6 @@ jobs: - name: Check generated files run: | - cd service_contracts make clean-gen make gen if [ -n "$(git status --porcelain)" ]; then @@ -33,7 +30,7 @@ jobs: echo "Uncommitted changes detected:" git status --porcelain echo "" - echo "Please run 'make gen' in service_contracts/ and commit the changes." + echo "Please run 'make gen' and commit the changes." exit 1 fi echo "Generated files are up to date ✓" @@ -44,8 +41,6 @@ jobs: steps: - uses: actions/checkout@v4 - with: - submodules: recursive - name: Install Foundry uses: foundry-rs/foundry-toolchain@v1 @@ -54,11 +49,10 @@ jobs: - name: Check for ABI changes run: | - cd service_contracts if [ ! -d "abi" ]; then echo "Error: No ABI directory found!" echo "ABIs must be checked into the repository to track interface changes." - echo "Please run 'make update-abi' in service_contracts/ and commit the ABIs." + echo "Please run 'make update-abi' and commit the ABIs." exit 1 fi make update-abi @@ -70,7 +64,7 @@ jobs: echo "Diff:" git diff abi/ echo "" - echo "Please run 'make update-abi' in service_contracts/ and commit the changes." + echo "Please run 'make update-abi' and commit the changes." 
exit 1 fi echo "All ABIs are up to date ✓" diff --git a/.github/workflows/contract-size.yml b/.github/workflows/contract-size.yml index 3dd85c9b..61c65253 100644 --- a/.github/workflows/contract-size.yml +++ b/.github/workflows/contract-size.yml @@ -20,22 +20,16 @@ jobs: version: v1.3.5 - name: Install Dependencies - run: | - cd service_contracts - forge install + run: make install - name: Install jq run: sudo apt-get install -y jq - name: Check contract size - run: | - cd service_contracts - make contract-size-check + run: make contract-size-check - name: Build current commit sizes - run: | - cd service_contracts - forge build --sizes --json > current_sizes.json + run: forge build --sizes --json > current_sizes.json - name: Build base branch sizes (PR only) if: github.event_name == 'pull_request' @@ -43,22 +37,21 @@ jobs: git fetch origin main mkdir base_build git worktree add base_build origin/main - cd base_build/service_contracts + cd base_build if forge build --offline --sizes --json > ../base_sizes.json 2>/dev/null; then echo "Offline build succeeded" else echo "Offline build failed, installing dependencies" - forge install + make install forge build --sizes --json > ../base_sizes.json fi - cd ../.. + cd .. git worktree remove --force base_build - name: Compare contract sizes run: | - cd service_contracts if [ -f base_sizes.json ]; then - ./tools/compare_contract_sizes.sh current_sizes.json base_sizes.json + ./packages/warm-storage/tools/compare_contract_sizes.sh current_sizes.json base_sizes.json else echo "No base size data found - skipping delta comparison." 
fi diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index f311b4ac..db20a7e3 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -11,8 +11,6 @@ jobs: steps: - uses: actions/checkout@v4 - with: - submodules: recursive - name: Install Foundry uses: foundry-rs/foundry-toolchain@v1 @@ -21,7 +19,6 @@ jobs: - name: Run Lint run: | - cd service_contracts forge fmt --check forge build ! (forge lint 2>&1 | grep "") diff --git a/.github/workflows/release-please.yml b/.github/workflows/release-please.yml new file mode 100644 index 00000000..1cb361a6 --- /dev/null +++ b/.github/workflows/release-please.yml @@ -0,0 +1,17 @@ +name: Release Please + +on: + push: + branches: + - main + +jobs: + release-please: + runs-on: ubuntu-latest + steps: + - uses: google-github-actions/release-please-action@v4 + id: release + with: + config-file: release-please-config.json + manifest-file: .release-please-manifest.json + diff --git a/.github/workflows/subgraph.yml b/.github/workflows/subgraph.yml index 1b12e9da..c549337f 100644 --- a/.github/workflows/subgraph.yml +++ b/.github/workflows/subgraph.yml @@ -12,8 +12,6 @@ jobs: steps: - uses: actions/checkout@v4 - with: - submodules: recursive - name: Setup Node.js uses: actions/setup-node@v4 diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 1d3ad879..4569176c 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -11,8 +11,6 @@ jobs: steps: - uses: actions/checkout@v4 - with: - submodules: recursive - name: Install Foundry uses: foundry-rs/foundry-toolchain@v1 @@ -20,26 +18,21 @@ jobs: version: v1.3.5 - name: Install Dependencies - run: | - cd service_contracts - make install + run: make install - name: Run build run: | export PATH="/home/runner/.config/.foundry/bin:$PATH" - cd service_contracts make build - name: Run tests run: | export PATH="/home/runner/.config/.foundry/bin:$PATH" - cd service_contracts make test - name: Generate coverage report run: | 
export PATH="/home/runner/.config/.foundry/bin:$PATH" - cd service_contracts echo "::warning::Coverage is currently disabled due to Solidity stack depth limitations" echo "The combination of complex nested mappings, Payments contract interactions, and coverage" echo "instrumentation exceeds Solidity's stack depth limits even with --ir-minimum flag." @@ -52,6 +45,6 @@ jobs: - name: Upload coverage to Codecov uses: codecov/codecov-action@v3 with: - directory: ./service_contracts + directory: ./ files: ./lcov.info fail_ci_if_error: false diff --git a/.gitignore b/.gitignore index 1d5cf883..98d8e6ff 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,18 @@ -# VIM -*.swp +# Compiler files +cache/ +out/ + +# Ignores development broadcast logs +broadcast/ + +# Node modules +node_modules/ + +# Foundry dependencies +/lib + +# Ignore IDEs +.idea + +# Ignore VIM +*.swp \ No newline at end of file diff --git a/.gitmodules b/.gitmodules index de464b85..37fea8de 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,18 +1,3 @@ -[submodule "service_contracts/lib/forge-std"] - path = service_contracts/lib/forge-std - url = https://github.com/foundry-rs/forge-std -[submodule "service_contracts/lib/openzeppelin-contracts"] - path = service_contracts/lib/openzeppelin-contracts - url = https://github.com/OpenZeppelin/openzeppelin-contracts -[submodule "service_contracts/lib/openzeppelin-contracts-upgradeable"] - path = service_contracts/lib/openzeppelin-contracts-upgradeable - url = https://github.com/OpenZeppelin/openzeppelin-contracts-upgradeable -[submodule "service_contracts/lib/fws-payments"] - path = service_contracts/lib/fws-payments - url = https://github.com/FilOzone/fws-payments -[submodule "service_contracts/lib/pdp"] - path = service_contracts/lib/pdp - url = https://github.com/FilOzone/pdp -[submodule "service_contracts/lib/session-key-registry"] - path = service_contracts/lib/session-key-registry - url = https://github.com/FilOzone/SessionKeyRegistry +[submodule 
"lib/fvm-solidity"] + path = lib/fvm-solidity + url = https://github.com/filecoin-project/fvm-solidity diff --git a/Makefile b/Makefile new file mode 100644 index 00000000..4a569d13 --- /dev/null +++ b/Makefile @@ -0,0 +1,420 @@ +# Unified Makefile for Filecoin Services Monorepo +# Consolidates all package tools and provides unified interface + +# Variables +RPC_URL ?= +KEYSTORE ?= +PASSWORD ?= +CHALLENGE_FINALITY ?= +VERBOSE ?= false + +# Package directories +PACKAGES := pay pdp session-key-registry warm-storage + +# Default target +.PHONY: default +default: build test + +# All target including installation +.PHONY: all +all: install build test + +# ============================================================================= +# INSTALLATION & SETUP +# ============================================================================= + +.PHONY: install +install: + @echo "Installing npm dependencies..." + npm install + @echo "Building forge dependencies..." + forge build + @echo "Dependencies installed and built successfully!" + +.PHONY: install-npm +install-npm: + npm install + +.PHONY: install-forge +install-forge: + forge install + +# ============================================================================= +# BUILD & COMPILATION +# ============================================================================= + +.PHONY: build +build: + @echo "Building all packages..." + forge build + +.PHONY: build-pay +build-pay: + @echo "Building pay package..." + forge build packages/pay + +.PHONY: build-pdp +build-pdp: + @echo "Building pdp package..." + forge build packages/pdp + +.PHONY: build-session-key +build-session-key: + @echo "Building session-key-registry package..." + forge build packages/session-key-registry + +.PHONY: build-warm-storage +build-warm-storage: + @echo "Building warm-storage package..." 
+ forge build packages/warm-storage + +# ============================================================================= +# TESTING +# ============================================================================= + +.PHONY: test +test: + @echo "Running all tests..." + forge test + +.PHONY: test-pay +test-pay: + @echo "Testing pay package..." + forge test --match-path 'packages/pay/**/*.t.sol' + +.PHONY: test-pdp +test-pdp: + @echo "Testing pdp package..." + forge test --match-path 'packages/pdp/**/*.t.sol' + +.PHONY: test-session-key +test-session-key: + @echo "Testing session-key-registry package..." + forge test --match-path 'packages/session-key-registry/**/*.t.sol' + +.PHONY: test-warm-storage +test-warm-storage: + @echo "Testing warm-storage package..." + forge test --match-path 'packages/warm-storage/**/*.t.sol' + +.PHONY: test-verbose +test-verbose: + @echo "Running all tests with verbose output..." + forge test --verbosity 2 + +.PHONY: test-gas +test-gas: + @echo "Running tests with gas report..." + forge test --gas-report + +.PHONY: test-coverage +test-coverage: + @echo "Running test coverage..." + forge coverage + +# ============================================================================= +# LINTING & FORMATTING +# ============================================================================= + +.PHONY: lint +lint: + @echo "Checking code formatting..." + forge fmt --check + +.PHONY: format +format: + @echo "Formatting code..." + forge fmt + +.PHONY: lint-fix +lint-fix: format + +# ============================================================================= +# CLEANUP +# ============================================================================= + +.PHONY: clean +clean: + @echo "Cleaning build artifacts..." + forge clean + rm -rf node_modules + rm -rf abi + +.PHONY: clean-cache +clean-cache: + @echo "Cleaning cache..." + forge clean + +.PHONY: clean-deps +clean-deps: + @echo "Cleaning dependencies..." 
+ rm -rf node_modules + +.PHONY: clean-all +clean-all: clean clean-gen + @echo "All artifacts cleaned" + +# ============================================================================= +# ABI EXTRACTION +# ============================================================================= + +# Extract just the ABI arrays into abi/ContractName.abi.json +.PHONY: extract-abis +extract-abis: + mkdir -p abi + @find out -type f -name '*.json' | while read file; do \ + name=$$(basename "$${file%.*}"); \ + jq '.abi' "$${file}" > "abi/$${name}.abi.json"; \ + done + +# ============================================================================= +# CONTRACT SIZE CHECKING +# ============================================================================= + +.PHONY: size-check +size-check: + @echo "Checking contract sizes..." + forge build --sizes + +.PHONY: size-check-pay +size-check-pay: build-pay + @echo "Checking pay package contract sizes..." + forge build --sizes packages/pay + +.PHONY: size-check-pdp +size-check-pdp: build-pdp + @echo "Checking pdp package contract sizes..." + forge build --sizes packages/pdp + +.PHONY: size-check-session-key +size-check-session-key: build-session-key + @echo "Checking session-key-registry package contract sizes..." + forge build --sizes packages/session-key-registry + +.PHONY: size-check-warm-storage +size-check-warm-storage: build-warm-storage + @echo "Checking warm-storage package contract sizes..." 
+ forge build --sizes packages/warm-storage + +# ============================================================================= +# UTILITY TARGETS +# ============================================================================= + +.PHONY: help +help: + @echo "Filecoin Services Monorepo - Available targets:" + @echo "" + @echo "Setup & Installation:" + @echo " install - Install all dependencies (npm + forge)" + @echo " install-npm - Install npm dependencies only" + @echo " install-forge - Install forge dependencies only" + @echo " dev-setup - Complete development environment setup" + @echo "" + @echo "Building:" + @echo " build - Build all packages" + @echo " build-pay - Build pay package only" + @echo " build-pdp - Build pdp package only" + @echo " build-session-key- Build session-key-registry package only" + @echo " build-warm-storage- Build warm-storage package only" + @echo "" + @echo "Testing:" + @echo " test - Run all tests" + @echo " test-pay - Test pay package only" + @echo " test-pdp - Test pdp package only" + @echo " test-session-key - Test session-key-registry package only" + @echo " test-warm-storage- Test warm-storage package only" + @echo " test-verbose - Run all tests with verbose output" + @echo " test-gas - Run tests with gas report" + @echo " test-coverage - Run test coverage" + @echo " coverage - Run coverage with --ir-minimum" + @echo " coverage-lcov - Generate LCOV coverage report" + @echo "" + @echo "Code Quality:" + @echo " lint - Check code formatting" + @echo " format - Format code" + @echo " lint-fix - Alias for format" + @echo " pre-commit - Run pre-commit checks (format, lint, test, size-check)" + @echo "" + @echo "Cleanup:" + @echo " clean - Clean build artifacts and dependencies" + @echo " clean-cache - Clean forge cache only" + @echo " clean-deps - Clean npm dependencies only" + @echo " clean-all - Clean all artifacts (build, generated files)" + @echo " clean-gen - Clean generated files" + @echo "" + @echo "ABI Extraction:" + @echo " 
extract-abis - Extract ABIs from all packages" + @echo "" + @echo "Contract Size:" + @echo " size-check - Check contract sizes for all packages" + @echo " size-check-pay - Check contract sizes for pay package" + @echo " size-check-pdp - Check contract sizes for pdp package" + @echo " size-check-session-key - Check contract sizes for session-key-registry" + @echo " size-check-warm-storage - Check contract sizes for warm-storage" + @echo " contract-size-check - Check contract sizes using warm-storage tools" + @echo "" + @echo "Code Generation:" + @echo " gen - Generate code (storage layout, view contracts)" + @echo " force-gen - Force regeneration of all generated files" + @echo " check-tools - Check required tools (jq, forge)" + @echo "" + @echo "PDP Tools:" + @echo " pdp-create-dataset - Create PDP dataset" + @echo " pdp-test-burn-fee - Test PDP burn fee" + @echo " pdp-upgrade-contract - Upgrade PDP contract" + @echo " pdp-claim-owner - Claim PDP contract ownership" + @echo "" + @echo "Development Workflow:" + @echo " ci - Run CI pipeline (install, build, test, lint, coverage, size-check)" + @echo " release - Run release-please" + @echo " release-create - Create release with release-please" + @echo " release-tag - Tag release with release-please" + @echo "" + @echo "Utilities:" + @echo " help - Show this help message" + @echo " all - Install, build, and test everything" + @echo " list-tools - List available tools in packages" + +.PHONY: list-tools +list-tools: + @echo "Available tools across all packages:" + @echo "" + @for package in $(PACKAGES); do \ + if [ -d "packages/$$package/tools" ]; then \ + echo "$$package package tools:"; \ + ls -la packages/$$package/tools/*.sh 2>/dev/null | awk '{print " " $$9}' | sed 's|packages/.*/tools/||' || echo " (no tools found)"; \ + echo ""; \ + fi; \ + done + +# ============================================================================= +# PACKAGE-SPECIFIC TOOLS (delegation to individual package tools) +# 
============================================================================= + +# PDP specific tools +.PHONY: pdp-create-dataset +pdp-create-dataset: + @echo "Creating PDP dataset..." + cd packages/pdp && ./tools/create_data_set.sh + +.PHONY: pdp-test-burn-fee +pdp-test-burn-fee: + @echo "Testing PDP burn fee..." + cd packages/pdp && ./tools/testBurnFee.sh + +.PHONY: pdp-upgrade-contract +pdp-upgrade-contract: + @echo "Upgrading PDP contract..." + cd packages/pdp && ./tools/upgrade-contract.sh + +.PHONY: pdp-claim-owner +pdp-claim-owner: + @echo "Claiming PDP ownership..." + cd packages/pdp && ./tools/claim-owner.sh + +# ============================================================================= +# CODE GENERATION & COVERAGE +# ============================================================================= + +# Generated files for warm-storage +WARM_STORAGE_LAYOUT=packages/warm-storage/src/lib/FilecoinWarmStorageServiceLayout.sol +WARM_STORAGE_INTERNAL_LIB=packages/warm-storage/src/lib/FilecoinWarmStorageServiceStateInternalLibrary.sol +WARM_STORAGE_VIEW_CONTRACT=packages/warm-storage/src/FilecoinWarmStorageServiceStateView.sol +WARM_STORAGE_LIBRARY_JSON=out/FilecoinWarmStorageServiceStateLibrary.sol/FilecoinWarmStorageServiceStateLibrary.json + +# Code generation targets +.PHONY: gen +gen: check-tools $(WARM_STORAGE_LAYOUT) $(WARM_STORAGE_INTERNAL_LIB) $(WARM_STORAGE_VIEW_CONTRACT) + @echo "Code generation complete" + +.PHONY: force-gen +force-gen: clean-gen gen + @echo "Force regeneration complete" + +.PHONY: clean-gen +clean-gen: + @echo "Removing generated files..." 
+ @rm -f $(WARM_STORAGE_LAYOUT) $(WARM_STORAGE_INTERNAL_LIB) $(WARM_STORAGE_VIEW_CONTRACT) + @rm -rf out/FilecoinWarmStorageServiceStateLibrary.sol + @echo "Generated files removed" + +# Storage layout generation +$(WARM_STORAGE_LAYOUT): packages/warm-storage/tools/generate_storage_layout.sh packages/warm-storage/src/FilecoinWarmStorageService.sol + $^ | forge fmt -r - > $@ + +# JSON compilation for library +$(WARM_STORAGE_LIBRARY_JSON): packages/warm-storage/src/lib/FilecoinWarmStorageServiceStateLibrary.sol + forge build --via-ir $^ + +# View contract generation +$(WARM_STORAGE_VIEW_CONTRACT): packages/warm-storage/tools/generate_view_contract.sh $(WARM_STORAGE_LIBRARY_JSON) + $^ | forge fmt -r - > $@ + +# Internal library generation +%StateInternalLibrary.sol: %StateLibrary.sol + sed -e 's/public/internal/g' -e 's/StateLibrary/StateInternalLibrary/g' $< | awk 'NR == 4 { print "// Code generated - DO NOT EDIT.\n// This file is a generated binding and any changes will be lost.\n// Generated with make $@\n"} {print}' | forge fmt -r - > $@ + +# Check required tools +.PHONY: check-tools +check-tools: + @which jq >/dev/null 2>&1 || (echo "Error: jq is required but not installed" && exit 1) + @JQ_VERSION=$$(jq --version 2>/dev/null | sed 's/jq-//'); \ + MAJOR=$$(echo $$JQ_VERSION | cut -d. -f1); \ + MINOR=$$(echo $$JQ_VERSION | cut -d. -f2); \ + if [ "$$MAJOR" -lt 1 ] || ([ "$$MAJOR" -eq 1 ] && [ "$$MINOR" -lt 7 ]); then \ + echo "Warning: jq version $$JQ_VERSION detected. Version 1.7+ recommended for full functionality"; \ + fi + @which forge >/dev/null 2>&1 || (echo "Error: forge is required but not installed" && exit 1) + +# Coverage targets +.PHONY: coverage +coverage: + @echo "Running coverage with --ir-minimum (required due to stack depth issues)..." + forge coverage --ir-minimum --report summary + +.PHONY: coverage-lcov +coverage-lcov: + @echo "Generating LCOV coverage report with --ir-minimum..." 
+ forge coverage --ir-minimum --report lcov + +# Contract size check +.PHONY: contract-size-check +contract-size-check: + @echo "Checking contract sizes..." + bash packages/warm-storage/tools/check-contract-size.sh packages/ + +# ============================================================================= +# DEVELOPMENT WORKFLOW +# ============================================================================= + +.PHONY: dev-setup +dev-setup: install + @echo "Development environment setup complete!" + +.PHONY: ci +ci: install build test lint + @echo "CI pipeline completed successfully!" + +.PHONY: pre-commit +pre-commit: format lint test + @echo "Pre-commit checks completed!" + +# ============================================================================= +# RELEASE MANAGEMENT +# ============================================================================= + +.PHONY: release +release: + @echo "Creating release..." + npm run release + +.PHONY: release-create +release-create: + @echo "Creating release PR..." + npm run release:create + +.PHONY: release-tag +release-tag: + @echo "Tagging release..." 
+ npm run release:tag + diff --git a/foundry.toml b/foundry.toml new file mode 100644 index 00000000..ac6e87a6 --- /dev/null +++ b/foundry.toml @@ -0,0 +1,47 @@ +[profile.default] +src = 'packages' +test = 'packages' +script = 'scripts' +out = 'out' +libs = ["node_modules", "lib"] +cache_path = 'cache' +solc = "0.8.30" +optimizer = true +optimizer_runs = 200 +via_ir = true + +# Dependencies - using npm packages +remappings = [ + '@openzeppelin/contracts/=node_modules/@openzeppelin/contracts/', + '@openzeppelin/contracts-upgradeable/=node_modules/@openzeppelin/contracts-upgradeable/', + 'forge-std/=node_modules/forge-std/src/', + '@pythnetwork/pyth-sdk-solidity/=node_modules/@pythnetwork/pyth-sdk-solidity/', + '@prb-math/=node_modules/@prb/math/src/', + 'fvm-solidity/=lib/fvm-solidity/src/', + # Cross-package imports as per original proposal + '@filoz/pay/=packages/pay/src/', + '@filoz/pdp/=packages/pdp/src/', + '@filoz/session-key-registry/=packages/session-key-registry/src/', + '@filoz/warm-storage/=packages/warm-storage/src/', + # Legacy aliases for backward compatibility + '@fws-payments/=packages/pay/src/', + '@pdp/=packages/pdp/src/', + '@session-key-registry/=packages/session-key-registry/src/', + '@warm-storage/=packages/warm-storage/src/', +] + +# Allow reading test data files +fs_permissions = [ + { access = "read", path = "./packages" }, + { access = "read", path = "./test" } +] + +[lint] +exclude_lints = [ + "asm-keccak256", + "incorrect-shift", + "mixed-case-function", + "mixed-case-variable", + "pascal-case-struct", + "screaming-snake-case-immutable", +] diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 00000000..ad2c7760 --- /dev/null +++ b/package-lock.json @@ -0,0 +1,3223 @@ +{ + "name": "@filoz/filecoin-services", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "@filoz/filecoin-services", + "version": "1.0.0", + "license": "Apache-2.0 OR MIT", + "workspaces": [ + 
"packages/pay", + "packages/pdp", + "packages/session-key-registry", + "packages/warm-storage" + ], + "dependencies": { + "@openzeppelin/contracts": "^5.3.0", + "@openzeppelin/contracts-upgradeable": "^5.3.0", + "@prb/math": "^4.0.0", + "@pythnetwork/pyth-sdk-solidity": "^3.0.0", + "forge-std": "github:foundry-rs/forge-std#v1.9.7" + }, + "devDependencies": { + "release-please": "^15.0.0" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=9.0.0" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz", + "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-validator-identifier": "^7.27.1", + "js-tokens": "^4.0.0", + "picocolors": "^1.1.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz", + "integrity": "sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@conventional-commits/parser": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/@conventional-commits/parser/-/parser-0.4.1.tgz", + "integrity": "sha512-H2ZmUVt6q+KBccXfMBhbBF14NlANeqHTXL4qCL6QGbMzrc4HDXyzWuxPxPNbz71f/5UkR5DrycP5VO9u7crahg==", + "dev": true, + "license": "ISC", + "dependencies": { + "unist-util-visit": "^2.0.3", + "unist-util-visit-parents": "^3.1.1" + } + }, + "node_modules/@filoz/pay": { + "resolved": "packages/pay", + "link": true + }, + "node_modules/@filoz/pdp": { + "resolved": "packages/pdp", + "link": true + }, + "node_modules/@filoz/session-key-registry": { + "resolved": "packages/session-key-registry", + "link": 
true + }, + "node_modules/@filoz/warm-storage": { + "resolved": "packages/warm-storage", + "link": true + }, + "node_modules/@google-automations/git-file-utils": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/@google-automations/git-file-utils/-/git-file-utils-1.2.6.tgz", + "integrity": "sha512-wNWeNLBtQH39kTayGuJMF1IRVPxfcywH//thQHD+xzRlanTNDI/5WACUvBkIz9bhnEC7ADm5ibA+DX9meU+JwQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@octokit/rest": "^19.0.7", + "@octokit/types": "^9.0.0", + "minimatch": "^5.1.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/@iarna/toml": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@iarna/toml/-/toml-3.0.0.tgz", + "integrity": "sha512-td6ZUkz2oS3VeleBcN+m//Q6HlCFCPrnI0FZhrt/h4XqLEdOyYp2u21nd8MdsR+WJy5r9PTDaHTDDfhf4H4l6Q==", + "dev": true, + "license": "ISC" + }, + "node_modules/@lerna/child-process": { + "version": "6.4.1", + "resolved": "https://registry.npmjs.org/@lerna/child-process/-/child-process-6.4.1.tgz", + "integrity": "sha512-dvEKK0yKmxOv8pccf3I5D/k+OGiLxQp5KYjsrDtkes2pjpCFfQAMbmpol/Tqx6w/2o2rSaRrLsnX8TENo66FsA==", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": "^4.1.0", + "execa": "^5.0.0", + "strong-log-transformer": "^2.1.0" + }, + "engines": { + "node": "^14.15.0 || >=16.0.0" + } + }, + "node_modules/@lerna/collect-updates": { + "version": "6.4.1", + "resolved": "https://registry.npmjs.org/@lerna/collect-updates/-/collect-updates-6.4.1.tgz", + "integrity": "sha512-pzw2/FC+nIqYkknUHK9SMmvP3MsLEjxI597p3WV86cEDN3eb1dyGIGuHiKShtjvT08SKSwpTX+3bCYvLVxtC5Q==", + "deprecated": "Package no longer supported. 
Contact Support at https://www.npmjs.com/support for more info.", + "dev": true, + "license": "MIT", + "dependencies": { + "@lerna/child-process": "6.4.1", + "@lerna/describe-ref": "6.4.1", + "minimatch": "^3.0.4", + "npmlog": "^6.0.2", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || >=16.0.0" + } + }, + "node_modules/@lerna/collect-updates/node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/@lerna/collect-updates/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/@lerna/describe-ref": { + "version": "6.4.1", + "resolved": "https://registry.npmjs.org/@lerna/describe-ref/-/describe-ref-6.4.1.tgz", + "integrity": "sha512-MXGXU8r27wl355kb1lQtAiu6gkxJ5tAisVJvFxFM1M+X8Sq56icNoaROqYrvW6y97A9+3S8Q48pD3SzkFv31Xw==", + "deprecated": "Package no longer supported. Contact Support at https://www.npmjs.com/support for more info.", + "dev": true, + "license": "MIT", + "dependencies": { + "@lerna/child-process": "6.4.1", + "npmlog": "^6.0.2" + }, + "engines": { + "node": "^14.15.0 || >=16.0.0" + } + }, + "node_modules/@lerna/package": { + "version": "6.4.1", + "resolved": "https://registry.npmjs.org/@lerna/package/-/package-6.4.1.tgz", + "integrity": "sha512-TrOah58RnwS9R8d3+WgFFTu5lqgZs7M+e1dvcRga7oSJeKscqpEK57G0xspvF3ycjfXQwRMmEtwPmpkeEVLMzA==", + "deprecated": "Package no longer supported. 
Contact Support at https://www.npmjs.com/support for more info.", + "dev": true, + "license": "MIT", + "dependencies": { + "load-json-file": "^6.2.0", + "npm-package-arg": "8.1.1", + "write-pkg": "^4.0.0" + }, + "engines": { + "node": "^14.15.0 || >=16.0.0" + } + }, + "node_modules/@lerna/package-graph": { + "version": "6.4.1", + "resolved": "https://registry.npmjs.org/@lerna/package-graph/-/package-graph-6.4.1.tgz", + "integrity": "sha512-fQvc59stRYOqxT3Mn7g/yI9/Kw5XetJoKcW5l8XeqKqcTNDURqKnN0qaNBY6lTTLOe4cR7gfXF2l1u3HOz0qEg==", + "deprecated": "Package no longer supported. Contact Support at https://www.npmjs.com/support for more info.", + "dev": true, + "license": "MIT", + "dependencies": { + "@lerna/prerelease-id-from-version": "6.4.1", + "@lerna/validation-error": "6.4.1", + "npm-package-arg": "8.1.1", + "npmlog": "^6.0.2", + "semver": "^7.3.4" + }, + "engines": { + "node": "^14.15.0 || >=16.0.0" + } + }, + "node_modules/@lerna/prerelease-id-from-version": { + "version": "6.4.1", + "resolved": "https://registry.npmjs.org/@lerna/prerelease-id-from-version/-/prerelease-id-from-version-6.4.1.tgz", + "integrity": "sha512-uGicdMFrmfHXeC0FTosnUKRgUjrBJdZwrmw7ZWMb5DAJGOuTzrvJIcz5f0/eL3XqypC/7g+9DoTgKjX3hlxPZA==", + "deprecated": "Package no longer supported. Contact Support at https://www.npmjs.com/support for more info.", + "dev": true, + "license": "MIT", + "dependencies": { + "semver": "^7.3.4" + }, + "engines": { + "node": "^14.15.0 || >=16.0.0" + } + }, + "node_modules/@lerna/query-graph": { + "version": "6.4.1", + "resolved": "https://registry.npmjs.org/@lerna/query-graph/-/query-graph-6.4.1.tgz", + "integrity": "sha512-gBGZLgu2x6L4d4ZYDn4+d5rxT9RNBC+biOxi0QrbaIq83I+JpHVmFSmExXK3rcTritrQ3JT9NCqb+Yu9tL9adQ==", + "deprecated": "Package no longer supported. 
Contact Support at https://www.npmjs.com/support for more info.", + "dev": true, + "license": "MIT", + "dependencies": { + "@lerna/package-graph": "6.4.1" + }, + "engines": { + "node": "^14.15.0 || >=16.0.0" + } + }, + "node_modules/@lerna/run-topologically": { + "version": "6.4.1", + "resolved": "https://registry.npmjs.org/@lerna/run-topologically/-/run-topologically-6.4.1.tgz", + "integrity": "sha512-gXlnAsYrjs6KIUGDnHM8M8nt30Amxq3r0lSCNAt+vEu2sMMEOh9lffGGaJobJZ4bdwoXnKay3uER/TU8E9owMw==", + "deprecated": "Package no longer supported. Contact Support at https://www.npmjs.com/support for more info.", + "dev": true, + "license": "MIT", + "dependencies": { + "@lerna/query-graph": "6.4.1", + "p-queue": "^6.6.2" + }, + "engines": { + "node": "^14.15.0 || >=16.0.0" + } + }, + "node_modules/@lerna/validation-error": { + "version": "6.4.1", + "resolved": "https://registry.npmjs.org/@lerna/validation-error/-/validation-error-6.4.1.tgz", + "integrity": "sha512-fxfJvl3VgFd7eBfVMRX6Yal9omDLs2mcGKkNYeCEyt4Uwlz1B5tPAXyk/sNMfkKV2Aat/mlK5tnY13vUrMKkyA==", + "deprecated": "Package no longer supported. 
Contact Support at https://www.npmjs.com/support for more info.", + "dev": true, + "license": "MIT", + "dependencies": { + "npmlog": "^6.0.2" + }, + "engines": { + "node": "^14.15.0 || >=16.0.0" + } + }, + "node_modules/@octokit/auth-token": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-3.0.4.tgz", + "integrity": "sha512-TWFX7cZF2LXoCvdmJWY7XVPi74aSY0+FfBZNSXEXFkMpjcqsQwDSYVv5FhRFaI0V1ECnwbz4j59T/G+rXNWaIQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 14" + } + }, + "node_modules/@octokit/core": { + "version": "4.2.4", + "resolved": "https://registry.npmjs.org/@octokit/core/-/core-4.2.4.tgz", + "integrity": "sha512-rYKilwgzQ7/imScn3M9/pFfUf4I1AZEH3KhyJmtPdE2zfaXAn2mFfUy4FbKewzc2We5y/LlKLj36fWJLKC2SIQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/auth-token": "^3.0.0", + "@octokit/graphql": "^5.0.0", + "@octokit/request": "^6.0.0", + "@octokit/request-error": "^3.0.0", + "@octokit/types": "^9.0.0", + "before-after-hook": "^2.2.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/@octokit/endpoint": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-7.0.6.tgz", + "integrity": "sha512-5L4fseVRUsDFGR00tMWD/Trdeeihn999rTMGRMC1G/Ldi1uWlWJzI98H4Iak5DB/RVvQuyMYKqSK/R6mbSOQyg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/types": "^9.0.0", + "is-plain-object": "^5.0.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/@octokit/graphql": { + "version": "5.0.6", + "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-5.0.6.tgz", + "integrity": "sha512-Fxyxdy/JH0MnIB5h+UQ3yCoh1FG4kWXfFKkpWqjZHw/p+Kc8Y44Hu/kCgNBT6nU1shNumEchmW/sUO1JuQnPcw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/request": "^6.0.0", + "@octokit/types": "^9.0.0", + "universal-user-agent": "^6.0.0" + }, + 
"engines": { + "node": ">= 14" + } + }, + "node_modules/@octokit/openapi-types": { + "version": "18.1.1", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-18.1.1.tgz", + "integrity": "sha512-VRaeH8nCDtF5aXWnjPuEMIYf1itK/s3JYyJcWFJT8X9pSNnBtriDf7wlEWsGuhPLl4QIH4xM8fqTXDwJ3Mu6sw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@octokit/plugin-paginate-rest": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-6.1.2.tgz", + "integrity": "sha512-qhrmtQeHU/IivxucOV1bbI/xZyC/iOBhclokv7Sut5vnejAIAEXVcGQeRpQlU39E0WwK9lNvJHphHri/DB6lbQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/tsconfig": "^1.0.2", + "@octokit/types": "^9.2.3" + }, + "engines": { + "node": ">= 14" + }, + "peerDependencies": { + "@octokit/core": ">=4" + } + }, + "node_modules/@octokit/plugin-request-log": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@octokit/plugin-request-log/-/plugin-request-log-1.0.4.tgz", + "integrity": "sha512-mLUsMkgP7K/cnFEw07kWqXGF5LKrOkD+lhCrKvPHXWDywAwuDUeDwWBpc69XK3pNX0uKiVt8g5z96PJ6z9xCFA==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "@octokit/core": ">=3" + } + }, + "node_modules/@octokit/plugin-rest-endpoint-methods": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-7.2.3.tgz", + "integrity": "sha512-I5Gml6kTAkzVlN7KCtjOM+Ruwe/rQppp0QU372K1GP7kNOYEKe8Xn5BW4sE62JAHdwpq95OQK/qGNyKQMUzVgA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/types": "^10.0.0" + }, + "engines": { + "node": ">= 14" + }, + "peerDependencies": { + "@octokit/core": ">=3" + } + }, + "node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-10.0.0.tgz", + "integrity": 
"sha512-Vm8IddVmhCgU1fxC1eyinpwqzXPEYu0NrYzD3YZjlGjyftdLBTeqNblRC0jmJmgxbJIsQlyogVeGnrNaaMVzIg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/openapi-types": "^18.0.0" + } + }, + "node_modules/@octokit/request": { + "version": "6.2.8", + "resolved": "https://registry.npmjs.org/@octokit/request/-/request-6.2.8.tgz", + "integrity": "sha512-ow4+pkVQ+6XVVsekSYBzJC0VTVvh/FCTUUgTsboGq+DTeWdyIFV8WSCdo0RIxk6wSkBTHqIK1mYuY7nOBXOchw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/endpoint": "^7.0.0", + "@octokit/request-error": "^3.0.0", + "@octokit/types": "^9.0.0", + "is-plain-object": "^5.0.0", + "node-fetch": "^2.6.7", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/@octokit/request-error": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-3.0.3.tgz", + "integrity": "sha512-crqw3V5Iy2uOU5Np+8M/YexTlT8zxCfI+qu+LxUB7SZpje4Qmx3mub5DfEKSO8Ylyk0aogi6TYdf6kxzh2BguQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/types": "^9.0.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/@octokit/rest": { + "version": "19.0.13", + "resolved": "https://registry.npmjs.org/@octokit/rest/-/rest-19.0.13.tgz", + "integrity": "sha512-/EzVox5V9gYGdbAI+ovYj3nXQT1TtTHRT+0eZPcuC05UFSWO3mdO9UY1C0i2eLF9Un1ONJkAk+IEtYGAC+TahA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/core": "^4.2.1", + "@octokit/plugin-paginate-rest": "^6.1.2", + "@octokit/plugin-request-log": "^1.0.4", + "@octokit/plugin-rest-endpoint-methods": "^7.1.2" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/@octokit/tsconfig": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@octokit/tsconfig/-/tsconfig-1.0.2.tgz", + "integrity": "sha512-I0vDR0rdtP8p2lGMzvsJzbhdOWy405HcGovrspJ8RRibHnyRgggUSNO5AIox5LmqiwmatHKYsvj6VGFHkqS7lA==", + "dev": true, + 
"license": "MIT" + }, + "node_modules/@octokit/types": { + "version": "9.3.2", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-9.3.2.tgz", + "integrity": "sha512-D4iHGTdAnEEVsB8fl95m1hiz7D5YiRdQ9b/OEb3BYRVwbLsGHcRVPz+u+BgRLNk0Q0/4iZCBqDN96j2XNxfXrA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/openapi-types": "^18.0.0" + } + }, + "node_modules/@openzeppelin/contracts": { + "version": "5.4.0", + "resolved": "https://registry.npmjs.org/@openzeppelin/contracts/-/contracts-5.4.0.tgz", + "integrity": "sha512-eCYgWnLg6WO+X52I16TZt8uEjbtdkgLC0SUX/xnAksjjrQI4Xfn4iBRoI5j55dmlOhDv1Y7BoR3cU7e3WWhC6A==", + "license": "MIT" + }, + "node_modules/@openzeppelin/contracts-upgradeable": { + "version": "5.4.0", + "resolved": "https://registry.npmjs.org/@openzeppelin/contracts-upgradeable/-/contracts-upgradeable-5.4.0.tgz", + "integrity": "sha512-STJKyDzUcYuB35Zub1JpWW58JxvrFFVgQ+Ykdr8A9PGXgtq/obF5uoh07k2XmFyPxfnZdPdBdhkJ/n2YxJ87HQ==", + "license": "MIT", + "peerDependencies": { + "@openzeppelin/contracts": "5.4.0" + } + }, + "node_modules/@prb/math": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@prb/math/-/math-4.1.0.tgz", + "integrity": "sha512-ef5Xrlh3BeX4xT5/Wi810dpEPq2bYPndRxgFIaKSU1F/Op/s8af03kyom+mfU7gEpvfIZ46xu8W0duiHplbBMg==", + "license": "MIT" + }, + "node_modules/@pythnetwork/pyth-sdk-solidity": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@pythnetwork/pyth-sdk-solidity/-/pyth-sdk-solidity-3.1.0.tgz", + "integrity": "sha512-NgtEPUTL9r0qqMpIOEtSdUVjrF08fHK6wa7eM8hdjhjzMOti4o053B9oFqpm4lkzJW6eELXuP4sRJOABGmNTUg==", + "license": "Apache-2.0" + }, + "node_modules/@tootallnate/once": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz", + "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 10" + } + }, + 
"node_modules/@types/minimist": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/@types/minimist/-/minimist-1.2.5.tgz", + "integrity": "sha512-hov8bUuiLiyFPGyFPE1lwWhmzYbirOXQNNo40+y3zow8aFVTeyn3VWL0VFFfdNddA8S4Vf0Tc062rzyNr7Paag==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/normalize-package-data": { + "version": "2.4.4", + "resolved": "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.4.tgz", + "integrity": "sha512-37i+OaWTh9qeK4LSHPsyRC7NahnGotNuZvjLSgcPzblpHB3rrCJxAOgI5gCdKm7coonsaX1Of0ILiTcnZjbfxA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/npm-package-arg": { + "version": "6.1.4", + "resolved": "https://registry.npmjs.org/@types/npm-package-arg/-/npm-package-arg-6.1.4.tgz", + "integrity": "sha512-vDgdbMy2QXHnAruzlv68pUtXCjmqUk3WrBAsRboRovsOmxbfn/WiYCjmecyKjGztnMps5dWp4Uq2prp+Ilo17Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/unist": { + "version": "2.0.11", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.11.tgz", + "integrity": "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/yargs": { + "version": "16.0.9", + "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-16.0.9.tgz", + "integrity": "sha512-tHhzvkFXZQeTECenFoRljLBYPZJ7jAVxqqtEI0qTLOmuultnFp4I9yKE17vTuhf7BkhCu7I4XuemPgikDVuYqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/yargs-parser": "*" + } + }, + "node_modules/@types/yargs-parser": { + "version": "21.0.3", + "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.3.tgz", + "integrity": "sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@xmldom/xmldom": { + "version": "0.8.11", + "resolved": 
"https://registry.npmjs.org/@xmldom/xmldom/-/xmldom-0.8.11.tgz", + "integrity": "sha512-cQzWCtO6C8TQiYl1ruKNn2U6Ao4o4WBBcbL61yJl84x+j5sOWWFU9X7DpND8XZG3daDppSsigMdfAIl2upQBRw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "4" + }, + "engines": { + "node": ">= 6.0.0" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/aproba": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/aproba/-/aproba-2.1.0.tgz", + "integrity": "sha512-tLIEcj5GuR2RSTnxNKdkK0dJ/GrC7P38sUkiDmDuHfsHmbagTFAxDVIBltoklXEVIQ/f14IL8IMJ5pn9Hez1Ew==", + "dev": true, + "license": "ISC" + }, + "node_modules/are-we-there-yet": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-3.0.1.tgz", + "integrity": "sha512-QZW4EDmGwlYur0Yyf/b2uGucHQMa8aFUP7eu9ddR73vvhFyt4V0Vl3QHPcTNJ8l6qYOBdxgXdnBXQrHilfRQBg==", + "deprecated": "This package is no longer supported.", + "dev": 
true, + "license": "ISC", + "dependencies": { + "delegates": "^1.0.0", + "readable-stream": "^3.6.0" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true, + "license": "Python-2.0" + }, + "node_modules/array-ify": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/array-ify/-/array-ify-1.0.0.tgz", + "integrity": "sha512-c5AMf34bKdvPhQ7tBGhqkgKNUzMr4WUs+WDtC2ZUGOUncbxKMTvqxYctiseW3+L4bA8ec+GcZ6/A/FW4m8ukng==", + "dev": true, + "license": "MIT" + }, + "node_modules/arrify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", + "integrity": "sha512-3CYzex9M9FGQjCGMGyi6/31c8GJbgb0qGyrx5HWxPd0aCwh4cB2YjMb2Xf9UuoogrMrlO9cTqnB5rI5GHZTcUA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/async-retry": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/async-retry/-/async-retry-1.3.3.tgz", + "integrity": "sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw==", + "dev": true, + "license": "MIT", + "dependencies": { + "retry": "0.13.1" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/before-after-hook": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz", + "integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==", + "dev": true, + "license": "Apache-2.0" + }, + 
"node_modules/boolbase": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", + "integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==", + "dev": true, + "license": "ISC" + }, + "node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/builtins": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/builtins/-/builtins-1.0.3.tgz", + "integrity": "sha512-uYBjakWipfaO/bXI7E8rq6kpwHRZK5cNYrUv2OzZSI/FvmdMyXJ2tG9dKcjEC5YHmHpUAwsargWIZNWdxb/bnQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/camelcase-keys": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-6.2.2.tgz", + "integrity": "sha512-YrwaA0vEKazPBkn0ipTiMpSajYDSe+KjQfrjhcBMxJt/znbvlHd8Pw/Vamaz5EB4Wfhs3SUR3Z9mwRu/P3s3Yg==", + "dev": true, + "license": "MIT", + "dependencies": { + "camelcase": "^5.3.1", + "map-obj": "^4.0.0", + "quick-lru": "^4.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "license": "MIT", + 
"dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/code-suggester": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/code-suggester/-/code-suggester-4.3.4.tgz", + "integrity": "sha512-qOj12mccFX2NALK01WnrwJKCmIwp1TMuskueh2EVaR4bc3xw072yfX9Ojq7yFQL4AmXfTXHKNjSO8lvh0y5MuA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@octokit/rest": "^19.0.5", + "@types/yargs": "^16.0.0", + "async-retry": "^1.3.1", + "diff": "^5.0.0", + "glob": "^7.1.6", + "parse-diff": "^0.11.0", + "yargs": "^16.0.0" + }, + "bin": { + "code-suggester": "build/src/bin/code-suggester.js" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/code-suggester/node_modules/cliui": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^7.0.0" + } + }, + "node_modules/code-suggester/node_modules/yargs": { + "version": "16.2.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", + "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", + "dev": true, + "license": "MIT", + "dependencies": { + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": 
"^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.0", + "y18n": "^5.0.5", + "yargs-parser": "^20.2.2" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true, + "license": "MIT" + }, + "node_modules/color-support": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", + "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==", + "dev": true, + "license": "ISC", + "bin": { + "color-support": "bin.js" + } + }, + "node_modules/compare-func": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/compare-func/-/compare-func-2.0.0.tgz", + "integrity": "sha512-zHig5N+tPWARooBnb0Zx1MFcdfpyJrfTJ3Y5L+IFvUm8rM74hHz66z0gw0x4tijh5CorKkKUCnW82R2vmpeCRA==", + "dev": true, + "license": "MIT", + "dependencies": { + "array-ify": "^1.0.0", + "dot-prop": "^5.1.0" + } + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true, + "license": "MIT" + }, + "node_modules/console-control-strings": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", + 
"integrity": "sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/conventional-changelog-conventionalcommits": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/conventional-changelog-conventionalcommits/-/conventional-changelog-conventionalcommits-6.1.0.tgz", + "integrity": "sha512-3cS3GEtR78zTfMzk0AizXKKIdN4OvSh7ibNz6/DPbhWWQu7LqE/8+/GqSodV+sywUR2gpJAdP/1JFf4XtN7Zpw==", + "dev": true, + "license": "ISC", + "dependencies": { + "compare-func": "^2.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/conventional-changelog-writer": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/conventional-changelog-writer/-/conventional-changelog-writer-6.0.1.tgz", + "integrity": "sha512-359t9aHorPw+U+nHzUXHS5ZnPBOizRxfQsWT5ZDHBfvfxQOAik+yfuhKXG66CN5LEWPpMNnIMHUTCKeYNprvHQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "conventional-commits-filter": "^3.0.0", + "dateformat": "^3.0.3", + "handlebars": "^4.7.7", + "json-stringify-safe": "^5.0.1", + "meow": "^8.1.2", + "semver": "^7.0.0", + "split": "^1.0.1" + }, + "bin": { + "conventional-changelog-writer": "cli.js" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/conventional-commits-filter": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/conventional-commits-filter/-/conventional-commits-filter-3.0.0.tgz", + "integrity": "sha512-1ymej8b5LouPx9Ox0Dw/qAO2dVdfpRFq28e5Y0jJEU8ZrLdy0vOSkkIInwmxErFGhg6SALro60ZrwYFVTUDo4Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "lodash.ismatch": "^4.4.0", + "modify-values": "^1.0.1" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "dev": true, + 
"license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/css-select": { + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/css-select/-/css-select-5.2.2.tgz", + "integrity": "sha512-TizTzUddG/xYLA3NXodFM0fSbNizXjOKhqiQQwvhlspadZokn1KDy0NZFS0wuEubIYAV5/c1/lAr0TaaFXEXzw==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "boolbase": "^1.0.0", + "css-what": "^6.1.0", + "domhandler": "^5.0.2", + "domutils": "^3.0.1", + "nth-check": "^2.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/fb55" + } + }, + "node_modules/css-what": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/css-what/-/css-what-6.2.2.tgz", + "integrity": "sha512-u/O3vwbptzhMs3L1fQE82ZSLHQQfto5gyZzwteVIEyeaY5Fc7R4dapF/BvRoSYFeqfBk4m0V1Vafq5Pjv25wvA==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">= 6" + }, + "funding": { + "url": "https://github.com/sponsors/fb55" + } + }, + "node_modules/dateformat": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/dateformat/-/dateformat-3.0.3.tgz", + "integrity": "sha512-jyCETtSl3VMZMWeRo7iY1FL19ges1t55hMo5yaam4Jrsm5EPL89UQkoQRyiI+Yf4k8r2ZpdngkV8hr1lIdjb3Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": "*" + } + }, + "node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/decamelize": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", + "integrity": 
"sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/decamelize-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/decamelize-keys/-/decamelize-keys-1.1.1.tgz", + "integrity": "sha512-WiPxgEirIV0/eIOMcnFBA3/IJZAZqKnwAwWyvvdi4lsr1WCN22nhdf/3db3DoZcUjTV2SqfzIwNyp6y2xs3nmg==", + "dev": true, + "license": "MIT", + "dependencies": { + "decamelize": "^1.1.0", + "map-obj": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/decamelize-keys/node_modules/map-obj": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-1.0.1.tgz", + "integrity": "sha512-7N/q3lyZ+LVCp7PzuxrJr4KMbBE2hW7BT7YNia330OFxIf4d3r5zVpicP2650l7CPN6RM9zOJRl3NGpqSiw3Eg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/delegates": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", + "integrity": "sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/deprecation": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", + "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/detect-indent": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/detect-indent/-/detect-indent-6.1.0.tgz", + 
"integrity": "sha512-reYkTUJAZb9gUuZ2RvVCNhVHdg62RHnJ7WJl8ftMi4diZ6NWlciOzQN88pUhSELEwflJht4oQDv0F0BMlwaYtA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/diff": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.2.0.tgz", + "integrity": "sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/dom-serializer": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz", + "integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==", + "dev": true, + "license": "MIT", + "dependencies": { + "domelementtype": "^2.3.0", + "domhandler": "^5.0.2", + "entities": "^4.2.0" + }, + "funding": { + "url": "https://github.com/cheeriojs/dom-serializer?sponsor=1" + } + }, + "node_modules/domelementtype": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", + "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fb55" + } + ], + "license": "BSD-2-Clause" + }, + "node_modules/domhandler": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz", + "integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "domelementtype": "^2.3.0" + }, + "engines": { + "node": ">= 4" + }, + "funding": { + "url": "https://github.com/fb55/domhandler?sponsor=1" + } + }, + "node_modules/domutils": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.2.2.tgz", + "integrity": 
"sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "dom-serializer": "^2.0.0", + "domelementtype": "^2.3.0", + "domhandler": "^5.0.3" + }, + "funding": { + "url": "https://github.com/fb55/domutils?sponsor=1" + } + }, + "node_modules/dot-prop": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-5.3.0.tgz", + "integrity": "sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-obj": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/duplexer": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/duplexer/-/duplexer-0.1.2.tgz", + "integrity": "sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg==", + "dev": true, + "license": "MIT" + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true, + "license": "MIT" + }, + "node_modules/entities": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/error-ex": { + "version": "1.3.4", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.4.tgz", + "integrity": "sha512-sqQamAnR14VgCr1A618A3sGrygcpK+HEbenA/HiEAkkUwcZIIB/tgWqHFxWgOyDh4nB4JCRimh79dR5Ywc9MDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-arrayish": "^0.2.1" + } + }, + 
"node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/escodegen": { + "version": "1.14.3", + "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-1.14.3.tgz", + "integrity": "sha512-qFcX0XJkdg+PB3xjZZG/wKSuT1PnQWx57+TVSjIMmILd2yC/6ByYElPwJnslDsuWuSAp4AwJGumarAAmJch5Kw==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "esprima": "^4.0.1", + "estraverse": "^4.2.0", + "esutils": "^2.0.2", + "optionator": "^0.8.1" + }, + "bin": { + "escodegen": "bin/escodegen.js", + "esgenerate": "bin/esgenerate.js" + }, + "engines": { + "node": ">=4.0" + }, + "optionalDependencies": { + "source-map": "~0.6.1" + } + }, + "node_modules/escodegen/node_modules/esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true, + "license": "BSD-2-Clause", + "bin": { + "esparse": "bin/esparse.js", + "esvalidate": "bin/esvalidate.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/esprima": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-1.2.2.tgz", + "integrity": "sha512-+JpPZam9w5DuJ3Q67SqsMGtiHKENSMRVoxvArfJZK01/BfLEObtZ6orJa/MtoGNR/rfMgp5837T41PAmTwAv/A==", + "dev": true, + "bin": { + "esparse": "bin/esparse.js", + "esvalidate": 
"bin/esvalidate.js" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/estraverse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/eventemitter3": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz", + "integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==", + "dev": true, + "license": "MIT" + }, + "node_modules/execa": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", + "dev": true, + "license": "MIT", + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", + "dev": true, + "license": "MIT" + }, + 
"node_modules/figures": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz", + "integrity": "sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==", + "dev": true, + "license": "MIT", + "dependencies": { + "escape-string-regexp": "^1.0.5" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "license": "MIT", + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/forge-std": { + "version": "1.9.7", + "resolved": "git+ssh://git@github.com/foundry-rs/forge-std.git#77041d2ce690e692d6e03cc812b57d1ddaa4d505", + "license": "(Apache-2.0 OR MIT)" + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true, + "license": "ISC" + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/gauge": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/gauge/-/gauge-4.0.4.tgz", + "integrity": "sha512-f9m+BEN5jkg6a0fZjleidjN51VE1X+mPFQ2DJ0uv1V39oCLCbsGe6yjbBnp7eK7z/+GAon99a3nHuqbuuthyPg==", + "deprecated": "This package is no longer supported.", + "dev": true, + "license": "ISC", + 
"dependencies": { + "aproba": "^1.0.3 || ^2.0.0", + "color-support": "^1.1.3", + "console-control-strings": "^1.1.0", + "has-unicode": "^2.0.1", + "signal-exit": "^3.0.7", + "string-width": "^4.2.3", + "strip-ansi": "^6.0.1", + "wide-align": "^1.1.5" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, + "license": "ISC", + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "dev": true, + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob/node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dev": true, + "license": "MIT", + 
"dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/glob/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/handlebars": { + "version": "4.7.8", + "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.7.8.tgz", + "integrity": "sha512-vafaFqs8MZkRrSX7sFVUdo3ap/eNiLnb4IakshzvP56X5Nr1iGKAIqdX6tMlm6HcNRIkr6AxO5jFEoJzzpT8aQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "minimist": "^1.2.5", + "neo-async": "^2.6.2", + "source-map": "^0.6.1", + "wordwrap": "^1.0.0" + }, + "bin": { + "handlebars": "bin/handlebars" + }, + "engines": { + "node": ">=0.4.7" + }, + "optionalDependencies": { + "uglify-js": "^3.1.4" + } + }, + "node_modules/hard-rejection": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/hard-rejection/-/hard-rejection-2.1.0.tgz", + "integrity": "sha512-VIZB+ibDhx7ObhAe7OVtoEbuP4h/MuOTHJ+J8h/eBXotJYl0fBgR72xDFCKgIh22OJZIOVNxBMWuhAr10r8HdA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/has-unicode": { + 
"version": "2.0.1", + "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", + "integrity": "sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/he": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", + "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", + "dev": true, + "license": "MIT", + "bin": { + "he": "bin/he" + } + }, + "node_modules/hosted-git-info": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.1.0.tgz", + "integrity": "sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==", + "dev": true, + "license": "ISC", + "dependencies": { + "lru-cache": "^6.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/http-proxy-agent": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz", + "integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@tootallnate/once": "2", + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/https-proxy-agent": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", + "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", + "dev": 
true, + "license": "MIT", + "dependencies": { + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/human-signals": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=10.17.0" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. 
Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", + "dev": true, + "license": "ISC", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", + "dev": true, + "license": "MIT" + }, + "node_modules/is-core-module": { + "version": "2.16.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", + "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", + "dev": true, + "license": "MIT", + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-obj": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", + "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-plain-obj": { + "version": "1.1.0", + "resolved": 
"https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz", + "integrity": "sha512-yvkRyxmFKEOQ4pNXCmJG5AEQNlXJS5LaONXo5/cLdTZdWvsZ1ioJEonLGAosKlMWE8lwUy/bJzMjcw8az73+Fg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-plain-object": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz", + "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true, + "license": "ISC" + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": 
"https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-stringify-safe": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==", + "dev": true, + "license": "ISC" + }, + "node_modules/jsonpath": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/jsonpath/-/jsonpath-1.1.1.tgz", + "integrity": "sha512-l6Cg7jRpixfbgoWgkrl77dgEj8RPvND0wMH6TwQmi9Qs4TFfS9u5cUFnbeKTwj5ga5Y3BTGGNI28k117LJ009w==", + "dev": true, + "license": "MIT", + "dependencies": { + "esprima": "1.2.2", + "static-eval": "2.0.2", + "underscore": "1.12.1" + } + }, + "node_modules/kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/levn": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", + "integrity": "sha512-0OO4y2iOHix2W6ujICbKIaEQXvFQHue65vUG3pb5EUomzPI90z9hsA1VsO/dbIIpC53J8gxM9Q4Oho0jrCM/yA==", + "dev": true, + "license": "MIT", + "dependencies": { + "prelude-ls": "~1.1.2", + "type-check": "~0.3.2" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "dev": true, + "license": "MIT" + }, + "node_modules/load-json-file": { + "version": 
"6.2.0", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-6.2.0.tgz", + "integrity": "sha512-gUD/epcRms75Cw8RT1pUdHugZYM5ce64ucs2GEISABwkRsOQr0q2wm/MV2TKThycIe5e0ytRweW2RZxclogCdQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.1.15", + "parse-json": "^5.0.0", + "strip-bom": "^4.0.0", + "type-fest": "^0.6.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/load-json-file/node_modules/type-fest": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.6.0.tgz", + "integrity": "sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=8" + } + }, + "node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/lodash.ismatch": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/lodash.ismatch/-/lodash.ismatch-4.4.0.tgz", + "integrity": "sha512-fPMfXjGQEV9Xsq/8MTSgUf255gawYRbjwMyDbcvDhXgV7enSZA0hynz6vMPnpAb5iONEzBHBPsT+0zes5Z301g==", + "dev": true, + "license": "MIT" + }, + "node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/make-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", + "integrity": 
"sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", + "dev": true, + "license": "MIT", + "dependencies": { + "pify": "^4.0.1", + "semver": "^5.6.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/make-dir/node_modules/semver": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/map-obj": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.3.0.tgz", + "integrity": "sha512-hdN1wVrZbb29eBGiGjJbeP8JbKjq1urkHJ/LIP/NY48MZ1QVXUsQBV1G1zvYFHn1XE06cwjBsOI2K3Ulnj1YXQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/meow": { + "version": "8.1.2", + "resolved": "https://registry.npmjs.org/meow/-/meow-8.1.2.tgz", + "integrity": "sha512-r85E3NdZ+mpYk1C6RjPFEMSE+s1iZMuHtsHAqY0DT3jZczl0diWUZ8g6oU7h0M9cD2EL+PzaYghhCLzR0ZNn5Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/minimist": "^1.2.0", + "camelcase-keys": "^6.2.2", + "decamelize-keys": "^1.1.0", + "hard-rejection": "^2.1.0", + "minimist-options": "4.1.0", + "normalize-package-data": "^3.0.0", + "read-pkg-up": "^7.0.1", + "redent": "^3.0.0", + "trim-newlines": "^3.0.0", + "type-fest": "^0.18.0", + "yargs-parser": "^20.2.3" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/meow/node_modules/type-fest": { + "version": "0.18.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.18.1.tgz", + "integrity": "sha512-OIAYXk8+ISY+qTOwkHtKqzAuxchoMiD9Udx+FSGQDuiRR+PJKJHc2NJAXlbhkGwTt/4/nKZxELY1w3ReWOL8mw==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + 
"engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true, + "license": "MIT" + }, + "node_modules/mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/min-indent": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz", + "integrity": "sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/minimist-options": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/minimist-options/-/minimist-options-4.1.0.tgz", + "integrity": 
"sha512-Q4r8ghd80yhO/0j1O3B2BjweX3fiHg9cdOwjJd2J76Q135c+NDxGCqdYKQ1SKBuFfgWbAUzBfvYjPUEeNgqN1A==", + "dev": true, + "license": "MIT", + "dependencies": { + "arrify": "^1.0.1", + "is-plain-obj": "^1.1.0", + "kind-of": "^6.0.3" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/modify-values": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/modify-values/-/modify-values-1.0.1.tgz", + "integrity": "sha512-xV2bxeN6F7oYjZWTe/YPAy6MN2M+sL4u/Rlm2AHCIVGfo2p1yGmBHQ6vHehl4bRTZBdHu3TSkWdYgkwpYzAGSw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/neo-async": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", + "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==", + "dev": true, + "license": "MIT" + }, + "node_modules/node-fetch": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", + "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, + "node_modules/node-html-parser": { + "version": "6.1.13", + "resolved": "https://registry.npmjs.org/node-html-parser/-/node-html-parser-6.1.13.tgz", + "integrity": "sha512-qIsTMOY4C/dAa5Q5vsobRpOOvPfC4pB61UVW2uSwZNUp0QU/jCekTal1vMmbO0DgdHeLUJpv/ARmDqErVxA3Sg==", + "dev": true, + "license": "MIT", + "dependencies": { + "css-select": "^5.1.0", + 
"he": "1.2.0" + } + }, + "node_modules/normalize-package-data": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-3.0.3.tgz", + "integrity": "sha512-p2W1sgqij3zMMyRC067Dg16bfzVH+w7hyegmpIvZ4JNjqtGOVAIvLmjBx3yP7YTe9vKJgkoNOPjwQGogDoMXFA==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "hosted-git-info": "^4.0.1", + "is-core-module": "^2.5.0", + "semver": "^7.3.4", + "validate-npm-package-license": "^3.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/npm-package-arg": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-8.1.1.tgz", + "integrity": "sha512-CsP95FhWQDwNqiYS+Q0mZ7FAEDytDZAkNxQqea6IaAFJTAY9Lhhqyl0irU/6PMc7BGfUmnsbHcqxJD7XuVM/rg==", + "dev": true, + "license": "ISC", + "dependencies": { + "hosted-git-info": "^3.0.6", + "semver": "^7.0.0", + "validate-npm-package-name": "^3.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/npm-package-arg/node_modules/hosted-git-info": { + "version": "3.0.8", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-3.0.8.tgz", + "integrity": "sha512-aXpmwoOhRBrw6X3j0h5RloK4x1OzsxMPyxqIHyNfSe2pypkVTZFpEiRoSipPEPlMrh0HW/XsjkJ5WgnCirpNUw==", + "dev": true, + "license": "ISC", + "dependencies": { + "lru-cache": "^6.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/npmlog": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-6.0.2.tgz", + "integrity": 
"sha512-/vBvz5Jfr9dT/aFWd0FIRf+T/Q2WBsLENygUaFUqstqsycmZAP/t5BvFJTK0viFmSUxiUKTUplWy5vt+rvKIxg==", + "deprecated": "This package is no longer supported.", + "dev": true, + "license": "ISC", + "dependencies": { + "are-we-there-yet": "^3.0.0", + "console-control-strings": "^1.1.0", + "gauge": "^4.0.3", + "set-blocking": "^2.0.0" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/nth-check": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz", + "integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "boolbase": "^1.0.0" + }, + "funding": { + "url": "https://github.com/fb55/nth-check?sponsor=1" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/optionator": { + "version": "0.8.3", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", + "integrity": "sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==", + "dev": true, + "license": "MIT", + "dependencies": { + "deep-is": "~0.1.3", + "fast-levenshtein": "~2.0.6", + "levn": "~0.3.0", + "prelude-ls": "~1.1.2", + "type-check": "~0.3.2", + "word-wrap": 
"~1.2.3" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/p-finally": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz", + "integrity": "sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/p-queue": { + "version": "6.6.2", + "resolved": "https://registry.npmjs.org/p-queue/-/p-queue-6.6.2.tgz", + "integrity": "sha512-RwFpb72c/BhQLEXIZ5K2e+AhgNVmIejGlTgiB9MzZ0e93GRvqZ7uSi0dvRF7/XIXDeNkra2fNHBxTyPDGySpjQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "eventemitter3": "^4.0.4", + "p-timeout": "^3.2.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-timeout": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-3.2.0.tgz", + "integrity": "sha512-rhIwUycgwwKcP9yTOOFK/AKsAopjjCakVqLHePO3CC6Mir1Z99xT+R63jZxAT5lFZLa2inS5h+ZS2GvR99/FBg==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-finally": "^1.0.0" + }, + "engines": { + "node": ">=8" + } + }, + 
"node_modules/p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/parse-diff": { + "version": "0.11.1", + "resolved": "https://registry.npmjs.org/parse-diff/-/parse-diff-0.11.1.tgz", + "integrity": "sha512-Oq4j8LAOPOcssanQkIjxosjATBIEJhCxMCxPhMu+Ci4wdNmAEdx0O+a7gzbR2PyKXgKPvRLIN5g224+dJAsKHA==", + "dev": true, + "license": "MIT" + }, + "node_modules/parse-github-repo-url": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/parse-github-repo-url/-/parse-github-repo-url-1.4.1.tgz", + "integrity": "sha512-bSWyzBKqcSL4RrncTpGsEKoJ7H8a4L3++ifTAbTFeMHyq2wRV+42DGmQcHIrJIvdcacjIOxEuKH/w4tthF17gg==", + "dev": true, + "license": "MIT" + }, + "node_modules/parse-json": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": 
"sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true, + "license": "MIT" + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "license": "ISC" + }, + "node_modules/pify": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", + "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/prelude-ls": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", + "integrity": "sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w==", + "dev": true, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/quick-lru": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-4.0.1.tgz", + "integrity": "sha512-ARhCpm70fzdcvNQfPoy49IaanKkTlRWF2JMzqhcJbhSFRZv7nPTvZJdcY7301IPmvW+/p0RgIWnQDLJxifsQ7g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + 
"node_modules/read-pkg": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-5.2.0.tgz", + "integrity": "sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/normalize-package-data": "^2.4.0", + "normalize-package-data": "^2.5.0", + "parse-json": "^5.0.0", + "type-fest": "^0.6.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/read-pkg-up": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-7.0.1.tgz", + "integrity": "sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg==", + "dev": true, + "license": "MIT", + "dependencies": { + "find-up": "^4.1.0", + "read-pkg": "^5.2.0", + "type-fest": "^0.8.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/read-pkg-up/node_modules/type-fest": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", + "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=8" + } + }, + "node_modules/read-pkg/node_modules/hosted-git-info": { + "version": "2.8.9", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", + "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", + "dev": true, + "license": "ISC" + }, + "node_modules/read-pkg/node_modules/normalize-package-data": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", + "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", + "dev": true, + "license": "BSD-2-Clause", + 
"dependencies": { + "hosted-git-info": "^2.1.4", + "resolve": "^1.10.0", + "semver": "2 || 3 || 4 || 5", + "validate-npm-package-license": "^3.0.1" + } + }, + "node_modules/read-pkg/node_modules/semver": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/read-pkg/node_modules/type-fest": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.6.0.tgz", + "integrity": "sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=8" + } + }, + "node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "dev": true, + "license": "MIT", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/redent": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/redent/-/redent-3.0.0.tgz", + "integrity": "sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==", + "dev": true, + "license": "MIT", + "dependencies": { + "indent-string": "^4.0.0", + "strip-indent": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/release-please": { + "version": "15.13.0", + "resolved": "https://registry.npmjs.org/release-please/-/release-please-15.13.0.tgz", + "integrity": "sha512-xIwTEnikX7RegDSaU+J+wou1QNcUReQEZT2MSTzuBKnk51Hnus12VDnDfJ5E0MoZMTJ0q9lB1c+vl8G/awpexA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { 
+ "@conventional-commits/parser": "^0.4.1", + "@google-automations/git-file-utils": "^1.2.5", + "@iarna/toml": "^3.0.0", + "@lerna/collect-updates": "^6.4.1", + "@lerna/package": "^6.4.1", + "@lerna/package-graph": "^6.4.1", + "@lerna/run-topologically": "^6.4.1", + "@octokit/graphql": "^5.0.0", + "@octokit/request": "^6.0.0", + "@octokit/request-error": "^3.0.0", + "@octokit/rest": "^19.0.0", + "@types/npm-package-arg": "^6.1.0", + "@xmldom/xmldom": "^0.8.4", + "chalk": "^4.0.0", + "code-suggester": "^4.2.0", + "conventional-changelog-conventionalcommits": "^6.0.0", + "conventional-changelog-writer": "^6.0.0", + "conventional-commits-filter": "^3.0.0", + "detect-indent": "^6.1.0", + "diff": "^5.0.0", + "figures": "^3.0.0", + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.1", + "js-yaml": "^4.0.0", + "jsonpath": "^1.1.1", + "node-html-parser": "^6.0.0", + "parse-github-repo-url": "^1.4.1", + "semver": "^7.0.0", + "type-fest": "^3.0.0", + "typescript": "^4.6.4", + "unist-util-visit": "^2.0.3", + "unist-util-visit-parents": "^3.1.1", + "xpath": "^0.0.32", + "yaml": "^2.2.2", + "yargs": "^17.0.0" + }, + "bin": { + "release-please": "build/src/bin/release-please.js" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/resolve": { + "version": "1.22.10", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz", + "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-core-module": "^2.16.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + 
"bin": { + "resolve": "bin/resolve" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/retry": { + "version": "0.13.1", + "resolved": "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz", + "integrity": "sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/semver": { + "version": "7.7.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", + "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/set-blocking": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", + "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==", + "dev": true, + "license": "ISC" + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": 
">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/sort-keys": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/sort-keys/-/sort-keys-2.0.0.tgz", + "integrity": "sha512-/dPCrG1s3ePpWm6yBbxZq5Be1dXGLyLn9Z791chDC3NFrpkVbWGzkBwPN1knaciexFXgRJ7hzdnwZ4stHSDmjg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-plain-obj": "^1.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/spdx-correct": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.2.0.tgz", + "integrity": "sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "spdx-expression-parse": "^3.0.0", + 
"spdx-license-ids": "^3.0.0" + } + }, + "node_modules/spdx-exceptions": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz", + "integrity": "sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w==", + "dev": true, + "license": "CC-BY-3.0" + }, + "node_modules/spdx-expression-parse": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz", + "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "spdx-exceptions": "^2.1.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/spdx-license-ids": { + "version": "3.0.22", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.22.tgz", + "integrity": "sha512-4PRT4nh1EImPbt2jASOKHX7PB7I+e4IWNLvkKFDxNhJlfjbYlleYQh285Z/3mPTHSAK/AvdMmw5BNNuYH8ShgQ==", + "dev": true, + "license": "CC0-1.0" + }, + "node_modules/split": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz", + "integrity": "sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg==", + "dev": true, + "license": "MIT", + "dependencies": { + "through": "2" + }, + "engines": { + "node": "*" + } + }, + "node_modules/static-eval": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/static-eval/-/static-eval-2.0.2.tgz", + "integrity": "sha512-N/D219Hcr2bPjLxPiV+TQE++Tsmrady7TqAJugLy7Xk1EumfDWS/f5dtBbkRCGE7wKKXuYockQoj8Rm2/pVKyg==", + "dev": true, + "license": "MIT", + "dependencies": { + "escodegen": "^1.8.1" + } + }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + 
"dev": true, + "license": "MIT", + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-bom": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz", + "integrity": "sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-final-newline": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/strip-indent": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-3.0.0.tgz", + "integrity": "sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "min-indent": "^1.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strong-log-transformer": { + "version": 
"2.1.0", + "resolved": "https://registry.npmjs.org/strong-log-transformer/-/strong-log-transformer-2.1.0.tgz", + "integrity": "sha512-B3Hgul+z0L9a236FAUC9iZsL+nVHgoCJnqCbN588DjYxvGXaXaaFbfmQ/JhvKjZwsOukuR72XbHv71Qkug0HxA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "duplexer": "^0.1.1", + "minimist": "^1.2.0", + "through": "^2.3.4" + }, + "bin": { + "sl-log-transformer": "bin/sl-log-transformer.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/through": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", + "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==", + "dev": true, + "license": "MIT" + }, + "node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", + "dev": true, + "license": "MIT" + }, + "node_modules/trim-newlines": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-3.0.1.tgz", + "integrity": 
"sha512-c1PTsA3tYrIsLGkJkzHF+w9F2EyxfXGo4UyJc4pFL++FMjnq0HJS69T3M7d//gKrFKwy429bouPescbjecU+Zw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/type-check": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", + "integrity": "sha512-ZCmOJdvOWDBYJlzAoFkC+Q0+bUyEOS1ltgp1MGU03fqHG+dbi9tBFU2Rd9QKiDZFAYrhPh2JUf7rZRIuHRKtOg==", + "dev": true, + "license": "MIT", + "dependencies": { + "prelude-ls": "~1.1.2" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/type-fest": { + "version": "3.13.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-3.13.1.tgz", + "integrity": "sha512-tLq3bSNx+xSpwvAJnzrK0Ep5CLNWjvFTOp71URMaAEWBfRb9nnJiBoUe0tF8bI4ZFO3omgBR6NvnbzVUT3Ly4g==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/typescript": { + "version": "4.9.5", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.9.5.tgz", + "integrity": "sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=4.2.0" + } + }, + "node_modules/uglify-js": { + "version": "3.19.3", + "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.19.3.tgz", + "integrity": "sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ==", + "dev": true, + "license": "BSD-2-Clause", + "optional": true, + "bin": { + "uglifyjs": "bin/uglifyjs" + }, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/underscore": { + "version": "1.12.1", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.12.1.tgz", + "integrity": "sha512-hEQt0+ZLDVUMhebKxL4x1BTtDY7bavVofhZ9KZ4aI26X9SRaE+Y3m83XUL1UP2jn8ynjndwCCpEHdUG+9pP1Tw==", + 
"dev": true, + "license": "MIT" + }, + "node_modules/unist-util-is": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-4.1.0.tgz", + "integrity": "sha512-ZOQSsnce92GrxSqlnEEseX0gi7GH9zTJZ0p9dtu87WRb/37mMPO2Ilx1s/t9vBHrFhbgweUwb+t7cIn5dxPhZg==", + "dev": true, + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-visit": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-2.0.3.tgz", + "integrity": "sha512-iJ4/RczbJMkD0712mGktuGpm/U4By4FfDonL7N/9tATGIF4imikjOuagyMY53tnZq3NP6BcmlrHhEKAfGWjh7Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-is": "^4.0.0", + "unist-util-visit-parents": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-visit-parents": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-3.1.1.tgz", + "integrity": "sha512-1KROIZWo6bcMrZEwiH2UrXDyalAa0uqzWCxCJj6lPOvTve2WkfgCytoDTPaMnodXh1WrXOq0haVYHj99ynJlsg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-is": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/universal-user-agent": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz", + "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": 
"sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "dev": true, + "license": "MIT" + }, + "node_modules/validate-npm-package-license": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", + "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "spdx-correct": "^3.0.0", + "spdx-expression-parse": "^3.0.0" + } + }, + "node_modules/validate-npm-package-name": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-3.0.0.tgz", + "integrity": "sha512-M6w37eVCMMouJ9V/sdPGnC5H4uDr73/+xdq0FBLO3TFFX1+7wiUY6Es328NN+y43tmY+doUdN9g9J21vqB7iLw==", + "dev": true, + "license": "ISC", + "dependencies": { + "builtins": "^1.0.3" + } + }, + "node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", + "dev": true, + "license": "BSD-2-Clause" + }, + "node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "dev": true, + "license": "MIT", + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": 
{ + "node": ">= 8" + } + }, + "node_modules/wide-align": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz", + "integrity": "sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^1.0.2 || 2 || 3 || 4" + } + }, + "node_modules/word-wrap": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/wordwrap": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz", + "integrity": "sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/write-file-atomic": { + "version": "2.4.3", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-2.4.3.tgz", + "integrity": 
"sha512-GaETH5wwsX+GcnzhPgKcKjJ6M2Cq3/iZp1WyY/X1CSqrW+jVNM9Y7D8EC2sM4ZG/V8wZlSniJnCKWPmBYAucRQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "graceful-fs": "^4.1.11", + "imurmurhash": "^0.1.4", + "signal-exit": "^3.0.2" + } + }, + "node_modules/write-json-file": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/write-json-file/-/write-json-file-3.2.0.tgz", + "integrity": "sha512-3xZqT7Byc2uORAatYiP3DHUUAVEkNOswEWNs9H5KXiicRTvzYzYqKjYc4G7p+8pltvAw641lVByKVtMpf+4sYQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "detect-indent": "^5.0.0", + "graceful-fs": "^4.1.15", + "make-dir": "^2.1.0", + "pify": "^4.0.1", + "sort-keys": "^2.0.0", + "write-file-atomic": "^2.4.2" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/write-json-file/node_modules/detect-indent": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/detect-indent/-/detect-indent-5.0.0.tgz", + "integrity": "sha512-rlpvsxUtM0PQvy9iZe640/IWwWYyBsTApREbA1pHOpmOUIl9MkP/U4z7vTtg4Oaojvqhxt7sdufnT0EzGaR31g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/write-pkg": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/write-pkg/-/write-pkg-4.0.0.tgz", + "integrity": "sha512-v2UQ+50TNf2rNHJ8NyWttfm/EJUBWMJcx6ZTYZr6Qp52uuegWw/lBkCtCbnYZEmPRNL61m+u67dAmGxo+HTULA==", + "dev": true, + "license": "MIT", + "dependencies": { + "sort-keys": "^2.0.0", + "type-fest": "^0.4.1", + "write-json-file": "^3.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/write-pkg/node_modules/type-fest": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.4.1.tgz", + "integrity": "sha512-IwzA/LSfD2vC1/YDYMv/zHP4rDF1usCwllsDpbolT3D4fUepIO7f9K70jjmUewU/LmGUKJcwcVtDCpnKk4BPMw==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=6" + } + }, + "node_modules/xpath": { + "version": "0.0.32", + "resolved": 
"https://registry.npmjs.org/xpath/-/xpath-0.0.32.tgz", + "integrity": "sha512-rxMJhSIoiO8vXcWvSifKqhvV96GjiD5wYb8/QHdoRyQvraTpp4IEv944nhGausZZ3u7dhQXteZuZbaqfpB7uYw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.6.0" + } + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=10" + } + }, + "node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true, + "license": "ISC" + }, + "node_modules/yaml": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.1.tgz", + "integrity": "sha512-lcYcMxX2PO9XMGvAJkJ3OsNMw+/7FKes7/hgerGUYWIoWu5j/+YQqcZr5JnPZWzOsEBgMbSbiSTn/dv/69Mkpw==", + "dev": true, + "license": "ISC", + "bin": { + "yaml": "bin.mjs" + }, + "engines": { + "node": ">= 14.6" + } + }, + "node_modules/yargs": { + "version": "17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs-parser": { + "version": "20.2.9", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", + "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", + "dev": true, + "license": "ISC", + "engines": { + "node": 
">=10" + } + }, + "node_modules/yargs/node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "packages/pay": { + "name": "@filoz/pay", + "version": "1.0.0", + "license": "Apache-2.0 OR MIT" + }, + "packages/pdp": { + "name": "@filoz/pdp", + "version": "1.0.0", + "license": "Apache-2.0 OR MIT" + }, + "packages/session-key-registry": { + "name": "@filoz/session-key-registry", + "version": "1.0.0", + "license": "Apache-2.0 OR MIT" + }, + "packages/warm-storage": { + "name": "@filoz/warm-storage", + "version": "1.0.0", + "license": "Apache-2.0 OR MIT" + } + } +} diff --git a/package.json b/package.json new file mode 100644 index 00000000..b970c3e0 --- /dev/null +++ b/package.json @@ -0,0 +1,84 @@ +{ + "name": "@filoz/filecoin-services", + "version": "1.0.0", + "description": "Filecoin Services Monorepo - Smart contracts for Filecoin ecosystem services", + "private": true, + "workspaces": [ + "packages/pay", + "packages/pdp", + "packages/session-key-registry", + "packages/warm-storage" + ], + "scripts": { + "build": "make build", + "test": "make test", + "lint": "make lint", + "clean": "make clean", + "install:all": "make install", + + "forge:build": "forge build", + "forge:test": "forge test", + "forge:clean": "forge clean", + "forge:test:verbose": "forge test --verbosity 2", + "forge:test:gas": "forge test --gas-report", + "forge:coverage": "forge coverage", + + "pay:build": "forge build packages/pay", + "pay:test": "forge test --match-path 'packages/pay/**/*.t.sol'", + "pay:clean": "forge clean", + + "pdp:build": "forge build packages/pdp", + "pdp:test": "forge test --match-path 'packages/pdp/**/*.t.sol'", + "pdp:clean": "forge clean", + + "session-key:build": "forge build 
packages/session-key-registry", + "session-key:test": "forge test --match-path 'packages/session-key-registry/**/*.t.sol'", + "session-key:clean": "forge clean", + + "warm-storage:build": "forge build packages/warm-storage", + "warm-storage:test": "forge test --match-path 'packages/warm-storage/**/*.t.sol'", + "warm-storage:clean": "forge clean", + + "release": "release-please", + "release:create": "release-please create", + "release:tag": "release-please tag", + + "size": "forge build --sizes", + "size:check": "forge build --sizes", + + "deploy:all": "echo 'Deploy scripts to be implemented'", + "deploy:pay": "echo 'Deploy pay scripts to be implemented'", + "deploy:pdp": "echo 'Deploy pdp scripts to be implemented'", + "deploy:session-key": "echo 'Deploy session-key scripts to be implemented'", + "deploy:warm-storage": "echo 'Deploy warm-storage scripts to be implemented'" + }, + "dependencies": { + "@openzeppelin/contracts": "^5.3.0", + "@openzeppelin/contracts-upgradeable": "^5.3.0", + "@pythnetwork/pyth-sdk-solidity": "^3.0.0", + "@prb/math": "^4.0.0", + "forge-std": "github:foundry-rs/forge-std#v1.9.7" + }, + "devDependencies": { + "release-please": "^15.0.0" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=9.0.0" + }, + "repository": { + "type": "git", + "url": "https://github.com/FilOzone/filecoin-services.git" + }, + "keywords": [ + "filecoin", + "solidity", + "smart-contracts", + "blockchain", + "storage", + "pdp", + "payments" + ], + "author": "FilOzone", + "license": "Apache-2.0 OR MIT" +} diff --git a/packages/pay/package.json b/packages/pay/package.json new file mode 100644 index 00000000..6ea6e9a6 --- /dev/null +++ b/packages/pay/package.json @@ -0,0 +1,29 @@ +{ + "name": "@filoz/pay", + "version": "1.0.0", + "description": "Filecoin Payments - Smart contracts for payment processing in Filecoin ecosystem", + "main": "src/index.js", + "files": [ + "src/**/*.sol", + "abi/**/*.json" + ], + "scripts": { + "build": "echo 'Use root forge build'", + 
"test": "echo 'Use root forge test'", + "clean": "echo 'Use root forge clean'", + "lint": "echo 'Use root forge fmt --check'", + "lint:fix": "echo 'Use root forge fmt'" + }, + "repository": { + "type": "git", + "url": "https://github.com/FilOzone/filecoin-pay.git" + }, + "keywords": [ + "filecoin", + "payments", + "solidity", + "smart-contracts" + ], + "author": "FilOzone", + "license": "Apache-2.0 OR MIT" +} diff --git a/packages/pay/src/Dutch.sol b/packages/pay/src/Dutch.sol new file mode 100644 index 00000000..7147fe1a --- /dev/null +++ b/packages/pay/src/Dutch.sol @@ -0,0 +1,31 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.30; + +import {UD60x18, uEXP2_MAX_INPUT, uUNIT} from "@prb-math/UD60x18.sol"; + +/** + * @dev Recurring dutch auction + */ +library Dutch { + // Target 1 auction per week, on average + uint256 public constant RESET_FACTOR = 4; + uint256 public constant HALVING_INTERVAL = 3.5 days; + + uint256 public constant MAX_DECAY = uEXP2_MAX_INPUT * HALVING_INTERVAL / uUNIT; + + /** + * @notice Exponential decay by 1/4 per week + * @param startPrice The initial price in attoFIL at elapsed = 0 + * @param elapsed Seconds of time since the startPrice + * @return price The decayed price in attoFIL + */ + function decay(uint256 startPrice, uint256 elapsed) internal pure returns (uint256 price) { + if (elapsed > MAX_DECAY) { + return 0; + } + UD60x18 coefficient = UD60x18.wrap(startPrice); + UD60x18 decayFactor = UD60x18.wrap(elapsed * uUNIT / HALVING_INTERVAL).exp2(); + + return coefficient.div(decayFactor).unwrap(); + } +} diff --git a/packages/pay/src/Errors.sol b/packages/pay/src/Errors.sol new file mode 100644 index 00000000..5028126a --- /dev/null +++ b/packages/pay/src/Errors.sol @@ -0,0 +1,296 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.27; + +import {IERC20} from "@openzeppelin/contracts/token/ERC20/IERC20.sol"; + +/// @title Errors +/// @notice Centralized library for custom error definitions 
across the protocol +/// @dev Convention: For any error comparing two values, always pass the expected value first, followed by the actual value +library Errors { + /// @notice Rail does not exist or is beyond its last settlement after termination + /// @param railId The ID of the rail + error RailInactiveOrSettled(uint256 railId); + + /// @notice Only the rail client can perform this action + /// @param expected The expected client address + /// @param caller The actual caller address + error OnlyRailClientAllowed(address expected, address caller); + + /// @notice Only the rail operator can perform this action + /// @param expected The expected operator address + /// @param caller The actual caller address + error OnlyRailOperatorAllowed(address expected, address caller); + + /// @notice Only the rail participant (client, operator, or recipient) can perform this action + /// @param expectedFrom The expected client address + /// @param expectedOperator The expected operator address + /// @param expectedTo The expected recipient address + /// @param caller The actual caller address + error OnlyRailParticipantAllowed(address expectedFrom, address expectedOperator, address expectedTo, address caller); + + /// @notice Rail is already terminated + /// @param railId The ID of the rail + error RailAlreadyTerminated(uint256 railId); + + /// @notice Rail is not terminated, but the action requires a terminated rail + /// @param railId The ID of the rail + error RailNotTerminated(uint256 railId); + + /// @notice The provided address is zero, which is not allowed + /// @param varName The name of the variable that was expected to be non-zero + error ZeroAddressNotAllowed(string varName); + + /// @notice One-time payment exceeds the lockup amount for the rail + /// @param railId The ID of the rail + /// @param available The available lockup amount for the rail + /// @param required The required lockup amount for the rail + error OneTimePaymentExceedsLockup(uint256 railId, 
uint256 available, uint256 required); + + /// @notice The caller is not authorized to terminate the rail + /// @dev Only the rail operator or the rail client (with fully settled lockup) can terminate the rail + /// @param railId The ID of the rail being terminated + /// @param allowedClient The rail client address (from) + /// @param allowedOperator The rail operator address + /// @param caller The address attempting to terminate the rail + error NotAuthorizedToTerminateRail(uint256 railId, address allowedClient, address allowedOperator, address caller); + + /// @notice The payer's lockup rate is inconsistent with the rail's payment rate + /// @dev Indicates that the payer's lockup rate is less than the rail's payment rate, which should not occur + /// @param railId The ID of the rail to terminate + /// @param from The address of the payer + /// @param paymentRate The payment rate for the rail + /// @param lockupRate The current lockup rate of the payer + error LockupRateInconsistent(uint256 railId, address from, uint256 paymentRate, uint256 lockupRate); + + /// @notice Ether sent must equal the amount for native token transfers + /// @param required The required amount (must match msg.value) + /// @param sent The msg.value sent with the transaction + error MustSendExactNativeAmount(uint256 required, uint256 sent); + + /// @notice Ether (msg.value) must not be sent when transferring ERC20 tokens + /// @param sent The msg.value sent with the transaction + error NativeTokenNotAccepted(uint256 sent); + + /// @notice Native tokens are not supported in depositWithPermit; only ERC20 tokens are allowed + error NativeTokenNotSupported(); + + /// @notice Attempted to withdraw more than the available unlocked funds + /// @param available The amount of unlocked funds available for withdrawal + /// @param requested The amount requested for withdrawal + error InsufficientUnlockedFunds(uint256 available, uint256 requested); + + /// @notice The receiving contract rejected the 
native token transfer + /// @param to The address to which the transfer was attempted + /// @param amount The amount of native token attempted to send + error NativeTransferFailed(address to, uint256 amount); + + /// @notice The operator is not approved for the client (from address) + /// @param from The address of the client (payer) + /// @param operator The operator attempting the action + error OperatorNotApproved(address from, address operator); + + /// @notice The specified commission rate exceeds the allowed maximum. + /// @param maxAllowed The maximum allowed commission rate in basis points (BPS) + /// @param actual The actual commission rate that was attempted to be set + error CommissionRateTooHigh(uint256 maxAllowed, uint256 actual); + + /// @notice A non-zero commission rate was provided, but no service fee recipient was set + error MissingServiceFeeRecipient(); + + /// @notice Invalid attempt to modify a terminated rail's lockup settings + /// @param actualPeriod The rail's actual period value + /// @param actualLockupFixed The current lockupFixed value + /// @param attemptedPeriod The period value provided + /// @param attemptedLockupFixed The new lockupFixed value proposed + error InvalidTerminatedRailModification( + uint256 actualPeriod, uint256 actualLockupFixed, uint256 attemptedPeriod, uint256 attemptedLockupFixed + ); + + /// @notice The payer's current lockup is insufficient to cover the requested lockup reduction + /// @param from The address of the payer + /// @param token The token involved in the lockup + /// @param currentLockup The payer's current lockup amount + /// @param lockupReduction The reduction attempted to be made + error InsufficientCurrentLockup(IERC20 token, address from, uint256 currentLockup, uint256 lockupReduction); + + /// @notice Cannot change the lockup period due to insufficient funds to cover the current lockup + /// @param token The token for the lockup + /// @param from The address whose account is checked (from) + 
/// @param actualLockupPeriod The current rail lockup period + /// @param attemptedLockupPeriod The new period requested + error LockupPeriodChangeNotAllowedDueToInsufficientFunds( + IERC20 token, address from, uint256 actualLockupPeriod, uint256 attemptedLockupPeriod + ); + + /// @notice Cannot increase the fixed lockup due to insufficient funds to cover the current lockup + /// @param token The token for the lockup + /// @param from The address whose account is checked + /// @param actualLockupFixed The current rail fixed lockup amount + /// @param attemptedLockupFixed The new fixed lockup amount requested + error LockupFixedIncreaseNotAllowedDueToInsufficientFunds( + IERC20 token, address from, uint256 actualLockupFixed, uint256 attemptedLockupFixed + ); + + /// @notice The requested lockup period exceeds the operator's maximum allowed lockup period + /// @param token The token for the lockup + /// @param operator The operator for the rail + /// @param maxAllowedPeriod The operator's maximum allowed lockup period + /// @param requestedPeriod The lockup period requested + error LockupPeriodExceedsOperatorMaximum( + IERC20 token, address operator, uint256 maxAllowedPeriod, uint256 requestedPeriod + ); + + /// @notice The payer's current lockup is less than the old lockup value + /// @param token The token for the lockup + /// @param from The address whose account is checked + /// @param oldLockup The calculated old lockup amount + /// @param currentLockup The current lockup value in the account + error CurrentLockupLessThanOldLockup(IERC20 token, address from, uint256 oldLockup, uint256 currentLockup); + + /// @notice Cannot modify a terminated rail beyond its end epoch + /// @param railId The ID of the rail + /// @param maxSettlementEpoch The last allowed block for modifications + /// @param blockNumber The current block number + error CannotModifyTerminatedRailBeyondEndEpoch(uint256 railId, uint256 maxSettlementEpoch, uint256 blockNumber); + + /// @notice Cannot 
increase the payment rate or change the rate on a terminated rail + /// @param railId The ID of the rail + error RateChangeNotAllowedOnTerminatedRail(uint256 railId); + + /// @notice Account lockup must be fully settled to change the payment rate on an active rail + /// @param railId The ID of the rail + /// @param from The address whose lockup is being checked + /// @param isSettled Whether the account lockup is fully settled + /// @param currentRate The current payment rate + /// @param attemptedRate The attempted new payment rate + error LockupNotSettledRateChangeNotAllowed( + uint256 railId, address from, bool isSettled, uint256 currentRate, uint256 attemptedRate + ); + + /// @notice Payer's lockup rate is less than the old payment rate when updating an active rail + /// @param railId The ID of the rail + /// @param from The address whose lockup is being checked + /// @param lockupRate The current lockup rate of the payer + /// @param oldRate The current payment rate for the rail + error LockupRateLessThanOldRate(uint256 railId, address from, uint256 lockupRate, uint256 oldRate); + + /// @notice The payer does not have enough funds for the one-time payment + /// @param token The token being used for payment + /// @param from The payer's address + /// @param required The amount required (oneTimePayment) + /// @param actual The actual funds available in the payer's account + error InsufficientFundsForOneTimePayment(IERC20 token, address from, uint256 required, uint256 actual); + + /// @notice Cannot settle a terminated rail without validation until after the max settlement epoch has passed + /// @param railId The ID of the rail being settled + /// @param currentBlock The current block number (actual) + /// @param requiredBlock The max settlement epoch block (expected, must be exceeded) + error CannotSettleTerminatedRailBeforeMaxEpoch( + uint256 railId, + uint256 requiredBlock, // expected (maxSettleEpoch + 1) + uint256 currentBlock // actual (block.number) + ); + 
+ /// @notice Cannot settle a rail for epochs in the future. + /// @param railId The ID of the rail being settled + /// @param maxAllowedEpoch The latest epoch that can be settled (expected, must be >= actual) + /// @param attemptedEpoch The epoch up to which settlement was attempted (actual) + error CannotSettleFutureEpochs(uint256 railId, uint256 maxAllowedEpoch, uint256 attemptedEpoch); + + /// @notice No progress was made in settlement; settledUpTo did not advance. + /// @param railId The ID of the rail + /// @param expectedSettledUpTo The expected value for settledUpTo (must be > startEpoch) + /// @param actualSettledUpTo The actual value after settlement attempt + error NoProgressInSettlement(uint256 railId, uint256 expectedSettledUpTo, uint256 actualSettledUpTo); + + /// @notice The payer's current lockup is less than the fixed lockup amount during rail finalization. + /// @param railId The ID of the rail being finalized + /// @param token The token used for the rail + /// @param from The address whose lockup is being reduced + /// @param expectedLockup The expected minimum lockup amount (rail.lockupFixed) + /// @param actualLockup The actual current lockup in the payer's account (payer.lockupCurrent) + error LockupInconsistencyDuringRailFinalization( + uint256 railId, IERC20 token, address from, uint256 expectedLockup, uint256 actualLockup + ); + + /// @notice The next rate change in the queue is scheduled before the current processed epoch, indicating an invalid state. 
+ /// @param nextRateChangeUntilEpoch The untilEpoch of the next rate change in the queue + /// @param processedEpoch The epoch that has been processed up to + error InvalidRateChangeQueueState(uint256 nextRateChangeUntilEpoch, uint256 processedEpoch); + + /// @notice The validator attempted to settle an epoch before the allowed segment start + /// @param railId The ID of the rail being settled + /// @param allowedStart The minimum epoch allowed (segment start) + /// @param attemptedStart The epoch at which settlement was attempted + error ValidatorSettledBeforeSegmentStart(uint256 railId, uint256 allowedStart, uint256 attemptedStart); + + /// @notice The validator attempted to settle an epoch beyond the allowed segment end + /// @param railId The ID of the rail being settled + /// @param allowedEnd The maximum epoch allowed (segment end) + /// @param attemptedEnd The epoch at which settlement was attempted + error ValidatorSettledBeyondSegmentEnd(uint256 railId, uint256 allowedEnd, uint256 attemptedEnd); + + /// @notice The validator returned a modified amount exceeding the maximum allowed for the confirmed epochs + /// @param railId The ID of the rail being settled + /// @param maxAllowed The maximum allowed settlement amount for the segment + /// @param attempted The attempted (modified) settlement amount + error ValidatorModifiedAmountExceedsMaximum(uint256 railId, uint256 maxAllowed, uint256 attempted); + + /// @notice The account does not have enough funds to cover the required settlement amount + /// @param token The token used for the settlement + /// @param from The address of the account being checked + /// @param available The actual funds available in the account + /// @param required The amount required for settlement + error InsufficientFundsForSettlement(IERC20 token, address from, uint256 available, uint256 required); + + /// @notice The payer does not have enough lockup to cover the required settlement amount + /// @param token The token used for 
the settlement + /// @param from The payer address being checked + /// @param available The actual lockup available in the account + /// @param required The required lockup amount for the settlement + error InsufficientLockupForSettlement(IERC20 token, address from, uint256 available, uint256 required); + + /// @notice Invariant violation: The payer's lockup exceeds their available funds after settlement + /// @dev Indicates a critical accounting bug or logic error in the settlement process. + /// @param token The token being checked + /// @param account The address whose lockup is being checked + /// @param lockupCurrent The current lockup amount + /// @param fundsCurrent The current funds available + error LockupExceedsFundsInvariant(IERC20 token, address account, uint256 lockupCurrent, uint256 fundsCurrent); + + /// @notice The rate change queue must be empty after full settlement, but it's not + /// @param nextUntilEpoch The untilEpoch value of the next queued rate change (tail of the queue) + error RateChangeQueueNotEmpty(uint256 nextUntilEpoch); + + /// @notice The attempted operation exceeds the operator's allowed rate usage + /// @param allowed The total rate allowance for the operator + /// @param attemptedUsage The rate usage attempted after increase + error OperatorRateAllowanceExceeded(uint256 allowed, uint256 attemptedUsage); + + /// @notice The attempted operation exceeds the operator's allowed lockup usage + /// @param allowed The total lockup allowance for the operator + /// @param attemptedUsage The lockup usage attempted after increase + error OperatorLockupAllowanceExceeded(uint256 allowed, uint256 attemptedUsage); + + /// @notice Attempted to withdraw more than the accumulated fees for the given token + /// @param token The token address + /// @param available The current accumulated fees + /// @param requested The amount attempted to withdraw + error WithdrawAmountExceedsAccumulatedFees(IERC20 token, uint256 available, uint256 requested); + + 
/// @notice Native token transfer failed during fee withdrawal + /// @param to The recipient address + /// @param amount The amount attempted to send + error FeeWithdrawalNativeTransferFailed(address to, uint256 amount); + + /// @notice Not enough native token sent for the burn operation + /// @param required The minimum required native token amount + /// @param sent The amount of native token sent with the transaction + error InsufficientNativeTokenForBurn(uint256 required, uint256 sent); + + /// @notice The 'to' address must equal the transaction sender (self-recipient enforcement) + /// @dev Used by flows like permit and transfer-with-authorization to ensure only self-deposits + /// @param expected The expected address (msg.sender) + /// @param actual The actual 'to' address provided + error SignerMustBeMsgSender(address expected, address actual); +} diff --git a/packages/pay/src/Payments.sol b/packages/pay/src/Payments.sol new file mode 100644 index 00000000..74eef296 --- /dev/null +++ b/packages/pay/src/Payments.sol @@ -0,0 +1,1832 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.27; + +import {BURN_ADDRESS} from "fvm-solidity/FVMActors.sol"; +import {FVMPay} from "fvm-solidity/FVMPay.sol"; +import {IERC20} from "@openzeppelin/contracts/token/ERC20/IERC20.sol"; +import {IERC20Permit} from "@openzeppelin/contracts/token/ERC20/extensions/IERC20Permit.sol"; +import {SafeERC20} from "@openzeppelin/contracts/token/ERC20/utils/SafeERC20.sol"; +import {ReentrancyGuard} from "@openzeppelin/contracts/utils/ReentrancyGuard.sol"; +import {Strings} from "@openzeppelin/contracts/utils/Strings.sol"; +import {Dutch} from "./Dutch.sol"; +import {Errors} from "./Errors.sol"; +import {RateChangeQueue} from "./RateChangeQueue.sol"; +import {IERC3009} from "./interfaces/IERC3009.sol"; + +uint88 constant UINT88_MAX = 0xffffffffffffffffffffff; + +// FIL max supply cap is 2 billion +uint88 constant MAX_AUCTION_START_PRICE = UINT88_MAX; // 
309,485,009.821345068724781055 FIL +uint88 constant FIRST_AUCTION_START_PRICE = 0.0021 ether; // 0.0021 FIL + +interface IValidator { + struct ValidationResult { + // The actual payment amount determined by the validator after validation of a rail during settlement + uint256 modifiedAmount; + // The epoch up to and including which settlement should occur. + uint256 settleUpto; + // A placeholder note for any additional information the validator wants to send to the caller of `settleRail` + string note; + } + + function validatePayment( + uint256 railId, + uint256 proposedAmount, + // the epoch up to and including which the rail has already been settled + uint256 fromEpoch, + // the epoch up to and including which validation is requested; payment will be validated for (toEpoch - fromEpoch) epochs + uint256 toEpoch, + uint256 rate + ) external returns (ValidationResult memory result); + + function railTerminated(uint256 railId, address terminator, uint256 endEpoch) external; +} + +// @title Payments contract. 
+contract Payments is ReentrancyGuard { + using Dutch for uint256; + using SafeERC20 for IERC20; + using RateChangeQueue for RateChangeQueue.Queue; + + // Maximum commission rate in basis points (100% = 10000 BPS) + uint256 public constant COMMISSION_MAX_BPS = 10000; + + uint256 public constant NETWORK_FEE_NUMERATOR = 1; // 0.5% + uint256 public constant NETWORK_FEE_DENOMINATOR = 200; + + IERC20 private constant NATIVE_TOKEN = IERC20(address(0)); + + // Events + event AccountLockupSettled( + IERC20 indexed token, + address indexed owner, + uint256 lockupCurrent, + uint256 lockupRate, + uint256 lockupLastSettledAt + ); + event OperatorApprovalUpdated( + IERC20 indexed token, + address indexed client, + address indexed operator, + bool approved, + uint256 rateAllowance, + uint256 lockupAllowance, + uint256 maxLockupPeriod + ); + + event RailCreated( + uint256 indexed railId, + address indexed payer, + address indexed payee, + IERC20 token, + address operator, + address validator, + address serviceFeeRecipient, + uint256 commissionRateBps + ); + event RailLockupModified( + uint256 indexed railId, + uint256 oldLockupPeriod, + uint256 newLockupPeriod, + uint256 oldLockupFixed, + uint256 newLockupFixed + ); + event RailOneTimePaymentProcessed( + uint256 indexed railId, uint256 netPayeeAmount, uint256 operatorCommission, uint256 networkFee + ); + event RailRateModified(uint256 indexed railId, uint256 oldRate, uint256 newRate); + event RailSettled( + uint256 indexed railId, + uint256 totalSettledAmount, + uint256 totalNetPayeeAmount, + uint256 operatorCommission, + uint256 networkFee, + uint256 settledUpTo + ); + event RailTerminated(uint256 indexed railId, address indexed by, uint256 endEpoch); + event RailFinalized(uint256 indexed railId); + + event DepositRecorded(IERC20 indexed token, address indexed from, address indexed to, uint256 amount); + event WithdrawRecorded(IERC20 indexed token, address indexed from, address indexed to, uint256 amount); + + struct Account { + 
uint256 funds; + uint256 lockupCurrent; + uint256 lockupRate; + // epoch up to and including which lockup has been settled for the account + uint256 lockupLastSettledAt; + } + + struct Rail { + IERC20 token; + address from; + address to; + address operator; + address validator; + uint256 paymentRate; + uint256 lockupPeriod; + uint256 lockupFixed; + // epoch up to and including which this rail has been settled + uint256 settledUpTo; + RateChangeQueue.Queue rateChangeQueue; + uint256 endEpoch; // Final epoch up to which the rail can be settled (0 if not terminated) + // Operator commission rate in basis points (e.g., 100 BPS = 1%) + uint256 commissionRateBps; + address serviceFeeRecipient; // address to collect operator comission + } + + struct OperatorApproval { + bool isApproved; + uint256 rateAllowance; + uint256 lockupAllowance; + uint256 rateUsage; // Track actual usage for rate + uint256 lockupUsage; // Track actual usage for lockup + uint256 maxLockupPeriod; // Maximum lockup period the operator can set for rails created on behalf of the client + } + + // Counter for generating unique rail IDs + uint256 private _nextRailId = 1; + + // Internal balances + // The self-balance collects network fees + mapping(IERC20 token => mapping(address owner => Account)) public accounts; + + // railId => Rail + mapping(uint256 railId => Rail) internal rails; + + // Struct to hold rail data without the RateChangeQueue (for external returns) + struct RailView { + IERC20 token; + address from; + address to; + address operator; + address validator; + uint256 paymentRate; + uint256 lockupPeriod; + uint256 lockupFixed; + uint256 settledUpTo; + uint256 endEpoch; + // Operator commission rate in basis points (e.g., 100 BPS = 1%) + uint256 commissionRateBps; + address serviceFeeRecipient; // address to collect operator commission + } + + // token => client => operator => Approval + mapping(IERC20 token => mapping(address client => mapping(address operator => OperatorApproval))) public 
+ operatorApprovals; + + // Define a struct for rails by payee information + struct RailInfo { + uint256 railId; // The rail ID + bool isTerminated; // True if rail is terminated + uint256 endEpoch; // End epoch for terminated rails (0 for active rails) + } + + // token => payee => array of railIds + mapping(IERC20 token => mapping(address payee => uint256[])) private payeeRails; + + // token => payer => array of railIds + mapping(IERC20 token => mapping(address payer => uint256[])) private payerRails; + + // pack into one storage slot + struct AuctionInfo { + uint88 startPrice; // highest possible price is MAX_AUCTION_START_PRICE + uint168 startTime; + } + + mapping(IERC20 token => AuctionInfo) public auctionInfo; + + struct SettlementState { + uint256 totalSettledAmount; + uint256 totalNetPayeeAmount; + uint256 totalOperatorCommission; + uint256 totalNetworkFee; + uint256 processedEpoch; + string note; + } + + constructor() { + _nextRailId = 1; + } + + modifier validateRailActive(uint256 railId) { + require(rails[railId].from != address(0), Errors.RailInactiveOrSettled(railId)); + _; + } + + modifier onlyRailClient(uint256 railId) { + require(rails[railId].from == msg.sender, Errors.OnlyRailClientAllowed(rails[railId].from, msg.sender)); + _; + } + + modifier onlyRailOperator(uint256 railId) { + require( + rails[railId].operator == msg.sender, Errors.OnlyRailOperatorAllowed(rails[railId].operator, msg.sender) + ); + _; + } + + modifier validateRailNotTerminated(uint256 railId) { + require(rails[railId].endEpoch == 0, Errors.RailAlreadyTerminated(railId)); + _; + } + + modifier validateRailTerminated(uint256 railId) { + require(isRailTerminated(rails[railId], railId), Errors.RailNotTerminated(railId)); + _; + } + + modifier validateNonZeroAddress(address addr, string memory varName) { + require(addr != address(0), Errors.ZeroAddressNotAllowed(varName)); + _; + } + + modifier validateSignerIsRecipient(address to) { + require(to == msg.sender, 
Errors.SignerMustBeMsgSender(msg.sender, to)); + _; + } + + modifier settleAccountLockupBeforeAndAfter(IERC20 token, address owner, bool settleFull) { + Account storage payer = accounts[token][owner]; + + // Before function execution + performSettlementCheck(token, owner, payer, settleFull, true); + + _; + + // After function execution + performSettlementCheck(token, owner, payer, settleFull, false); + } + + modifier settleAccountLockupBeforeAndAfterForRail(uint256 railId, bool settleFull, uint256 oneTimePayment) { + Rail storage rail = rails[railId]; + + require(rail.from != address(0), Errors.RailInactiveOrSettled(railId)); + + Account storage payer = accounts[rail.token][rail.from]; + + require( + rail.lockupFixed >= oneTimePayment, + Errors.OneTimePaymentExceedsLockup(railId, rail.lockupFixed, oneTimePayment) + ); + + // Before function execution + performSettlementCheck(rail.token, rail.from, payer, settleFull, true); + + // ---- EXECUTE FUNCTION + _; + // ---- FUNCTION EXECUTION COMPLETE + + // After function execution + performSettlementCheck(rail.token, rail.from, payer, settleFull, false); + } + + function performSettlementCheck(IERC20 token, address owner, Account storage payer, bool settleFull, bool isBefore) + internal + { + require( + payer.funds >= payer.lockupCurrent, + isBefore + ? "invariant failure: insufficient funds to cover lockup before function execution" + : "invariant failure: insufficient funds to cover lockup after function execution" + ); + + settleAccountLockup(token, owner, payer); + + // Verify full settlement if required + // TODO: give the user feedback on what they need to deposit in their account to complete the operation. + require( + !settleFull || isAccountLockupFullySettled(payer), + isBefore + ? 
"payers's full account lockup was not met as a precondition of the requested operation" + : "payers's full account lockup was not met as a postcondition of the requested operation" + ); + + require( + payer.funds >= payer.lockupCurrent, + isBefore + ? "invariant failure: insufficient funds to cover lockup before function execution" + : "invariant failure: insufficient funds to cover lockup after function execution" + ); + } + + /// @notice Gets the current state of the target rail or reverts if the rail isn't active. + /// @param railId the ID of the rail. + function getRail(uint256 railId) external view validateRailActive(railId) returns (RailView memory) { + Rail storage rail = rails[railId]; + return RailView({ + token: rail.token, + from: rail.from, + to: rail.to, + operator: rail.operator, + validator: rail.validator, + paymentRate: rail.paymentRate, + lockupPeriod: rail.lockupPeriod, + lockupFixed: rail.lockupFixed, + settledUpTo: rail.settledUpTo, + endEpoch: rail.endEpoch, + commissionRateBps: rail.commissionRateBps, + serviceFeeRecipient: rail.serviceFeeRecipient + }); + } + + /// @notice Updates the approval status and allowances for an operator on behalf of the message sender. + /// @param token The ERC20 token address for which the approval is being set. + /// @param operator The address of the operator whose approval is being modified. + /// @param approved Whether the operator is approved (true) or not (false) to create new rails. + /// @param rateAllowance The maximum payment rate the operator can set across all rails created by the operator on behalf of the message sender. If this is less than the current payment rate, the operator will only be able to reduce rates until they fall below the target. + /// @param lockupAllowance The maximum amount of funds the operator can lock up on behalf of the message sender towards future payments. 
If this exceeds the current total amount of funds locked towards future payments, the operator will only be able to reduce future lockup. + /// @param maxLockupPeriod The maximum number of epochs (blocks) the operator can lock funds for. If this is less than the current lockup period for a rail, the operator will only be able to reduce the lockup period. + function setOperatorApproval( + IERC20 token, + address operator, + bool approved, + uint256 rateAllowance, + uint256 lockupAllowance, + uint256 maxLockupPeriod + ) external nonReentrant validateNonZeroAddress(operator, "operator") { + _setOperatorApproval(token, operator, approved, rateAllowance, lockupAllowance, maxLockupPeriod); + } + + function _setOperatorApproval( + IERC20 token, + address operator, + bool approved, + uint256 rateAllowance, + uint256 lockupAllowance, + uint256 maxLockupPeriod + ) internal { + OperatorApproval storage approval = operatorApprovals[token][msg.sender][operator]; + + // Update approval status and allowances + approval.isApproved = approved; + approval.rateAllowance = rateAllowance; + approval.lockupAllowance = lockupAllowance; + approval.maxLockupPeriod = maxLockupPeriod; + + emit OperatorApprovalUpdated( + token, msg.sender, operator, approved, rateAllowance, lockupAllowance, maxLockupPeriod + ); + } + + /// @notice Increases the rate and lockup allowances for an existing operator approval. + /// @param token The ERC20 token address for which the approval is being increased. + /// @param operator The address of the operator whose allowances are being increased. + /// @param rateAllowanceIncrease The amount to increase the rate allowance by. + /// @param lockupAllowanceIncrease The amount to increase the lockup allowance by. + /// @custom:constraint Operator must already be approved. 
+ function increaseOperatorApproval( + IERC20 token, + address operator, + uint256 rateAllowanceIncrease, + uint256 lockupAllowanceIncrease + ) external nonReentrant validateNonZeroAddress(operator, "operator") { + _increaseOperatorApproval(token, operator, rateAllowanceIncrease, lockupAllowanceIncrease); + } + + function _increaseOperatorApproval( + IERC20 token, + address operator, + uint256 rateAllowanceIncrease, + uint256 lockupAllowanceIncrease + ) internal { + OperatorApproval storage approval = operatorApprovals[token][msg.sender][operator]; + + // Operator must already be approved + require(approval.isApproved, Errors.OperatorNotApproved(msg.sender, operator)); + + // Directly update allowances + approval.rateAllowance += rateAllowanceIncrease; + approval.lockupAllowance += lockupAllowanceIncrease; + + emit OperatorApprovalUpdated( + token, + msg.sender, + operator, + approval.isApproved, + approval.rateAllowance, + approval.lockupAllowance, + approval.maxLockupPeriod + ); + } + + /// @notice Terminates a payment rail, preventing further payments after the rail's lockup period. After calling this method, the lockup period cannot be changed, and the rail's rate and fixed lockup may only be reduced. + /// @param railId The ID of the rail to terminate. + /// @custom:constraint Caller must be a rail client or operator. + /// @custom:constraint Rail must be active and not already terminated. + /// @custom:constraint If called by the client, the payer's account must be fully funded. + /// @custom:constraint If called by the operator, the payer's funding status isn't checked. 
+ function terminateRail(uint256 railId) + external + validateRailActive(railId) + nonReentrant + validateRailNotTerminated(railId) + settleAccountLockupBeforeAndAfterForRail(railId, false, 0) + { + Rail storage rail = rails[railId]; + Account storage payer = accounts[rail.token][rail.from]; + + // Only client with fully settled lockup or operator can terminate a rail + require( + (msg.sender == rail.from && isAccountLockupFullySettled(payer)) || msg.sender == rail.operator, + Errors.NotAuthorizedToTerminateRail(railId, rail.from, rail.operator, msg.sender) + ); + + rail.endEpoch = payer.lockupLastSettledAt + rail.lockupPeriod; + + emit RailTerminated(railId, msg.sender, rail.endEpoch); + + // Notify the validator if one exists + if (rail.validator != address(0)) { + IValidator(rail.validator).railTerminated(railId, msg.sender, rail.endEpoch); + } + + // Remove the rail rate from account lockup rate but don't set rail rate to zero yet. + // The rail rate will be used to settle the rail and so we can't zero it yet. + // However, we remove the rail rate from the client lockup rate because we don't want to + // lock funds for the rail beyond `rail.endEpoch` as we're exiting the rail + // after that epoch. + require( + payer.lockupRate >= rail.paymentRate, + Errors.LockupRateInconsistent(railId, rail.from, rail.paymentRate, payer.lockupRate) + ); + payer.lockupRate -= rail.paymentRate; + + // Reduce operator rate allowance + OperatorApproval storage operatorApproval = operatorApprovals[rail.token][rail.from][rail.operator]; + updateOperatorRateUsage(operatorApproval, rail.paymentRate, 0); + } + + /// @notice Deposits tokens from the message sender's account into `to`'s account. + /// @param token The ERC20 token address to deposit. + /// @param to The address whose account will be credited. + /// @param amount The amount of tokens to deposit. 
+ /// @custom:constraint The message sender must have approved this contract to spend the requested amount via the ERC-20 token (`token`). + function deposit(IERC20 token, address to, uint256 amount) + external + payable + nonReentrant + validateNonZeroAddress(to, "to") + settleAccountLockupBeforeAndAfter(token, to, false) + { + // Transfer tokens from sender to contract + if (token == NATIVE_TOKEN) { + require(msg.value == amount, Errors.MustSendExactNativeAmount(amount, msg.value)); + } else { + require(msg.value == 0, Errors.NativeTokenNotAccepted(msg.value)); + amount = transferIn(token, msg.sender, amount); + } + + accounts[token][to].funds += amount; + + emit DepositRecorded(token, msg.sender, to, amount); + } + + /** + * @notice Deposits tokens using permit (EIP-2612) approval in a single transaction. + * @param token The ERC20 token address to deposit. + * @param to The address whose account will be credited (must be the permit signer). + * @param amount The amount of tokens to deposit. + * @param deadline Permit deadline (timestamp). + * @param v,r,s Permit signature. 
+ */ + function depositWithPermit( + IERC20 token, + address to, + uint256 amount, + uint256 deadline, + uint8 v, + bytes32 r, + bytes32 s + ) external nonReentrant validateNonZeroAddress(to, "to") settleAccountLockupBeforeAndAfter(token, to, false) { + _depositWithPermit(token, to, amount, deadline, v, r, s); + } + + function _depositWithPermit( + IERC20 token, + address to, + uint256 amount, + uint256 deadline, + uint8 v, + bytes32 r, + bytes32 s + ) internal { + // Revert if token is address(0) as permit is not supported for native tokens + require(token != NATIVE_TOKEN, Errors.NativeTokenNotSupported()); + + // Use 'to' as the owner in permit call (the address that signed the permit) + IERC20Permit(address(token)).permit(to, address(this), amount, deadline, v, r, s); + + amount = transferIn(token, to, amount); + + accounts[token][to].funds += amount; + + emit DepositRecorded(token, to, to, amount); + } + + /** + * @notice Deposits tokens using permit (EIP-2612) approval in a single transaction, + * while also setting operator approval. + * @param token The ERC20 token address to deposit and for which the operator approval is being set. + * Note: The token must support EIP-2612 permit functionality. + * @param to The address whose account will be credited (must be the permit signer). + * @param amount The amount of tokens to deposit. + * @param deadline Permit deadline (timestamp). + * @param v,r,s Permit signature. + * @param operator The address of the operator whose approval is being modified. + * @param rateAllowance The maximum payment rate the operator can set across all rails created by the operator + * on behalf of the message sender. If this is less than the current payment rate, the operator will + * only be able to reduce rates until they fall below the target. + * @param lockupAllowance The maximum amount of funds the operator can lock up on behalf of the message sender + * towards future payments. 
If this exceeds the current total amount of funds locked towards future payments, + * the operator will only be able to reduce future lockup. + * @param maxLockupPeriod The maximum number of epochs (blocks) the operator can lock funds for. If this is less than + * the current lockup period for a rail, the operator will only be able to reduce the lockup period. + */ + function depositWithPermitAndApproveOperator( + IERC20 token, + address to, + uint256 amount, + uint256 deadline, + uint8 v, + bytes32 r, + bytes32 s, + address operator, + uint256 rateAllowance, + uint256 lockupAllowance, + uint256 maxLockupPeriod + ) + external + nonReentrant + validateNonZeroAddress(operator, "operator") + validateNonZeroAddress(to, "to") + validateSignerIsRecipient(to) + settleAccountLockupBeforeAndAfter(token, to, false) + { + _setOperatorApproval(token, operator, true, rateAllowance, lockupAllowance, maxLockupPeriod); + _depositWithPermit(token, to, amount, deadline, v, r, s); + } + + /** + * @notice Deposits tokens using permit (EIP-2612) approval in a single transaction, + * while also increasing operator approval allowances. + * @param token The ERC20 token address to deposit and for which the operator approval is being increased. + * Note: The token must support EIP-2612 permit functionality. + * @param to The address whose account will be credited (must be the permit signer). + * @param amount The amount of tokens to deposit. + * @param deadline Permit deadline (timestamp). + * @param v,r,s Permit signature. + * @param operator The address of the operator whose allowances are being increased. + * @param rateAllowanceIncrease The amount to increase the rate allowance by. + * @param lockupAllowanceIncrease The amount to increase the lockup allowance by. + * @custom:constraint Operator must already be approved. 
+ */ + function depositWithPermitAndIncreaseOperatorApproval( + IERC20 token, + address to, + uint256 amount, + uint256 deadline, + uint8 v, + bytes32 r, + bytes32 s, + address operator, + uint256 rateAllowanceIncrease, + uint256 lockupAllowanceIncrease + ) + external + nonReentrant + validateNonZeroAddress(operator, "operator") + validateNonZeroAddress(to, "to") + validateSignerIsRecipient(to) + settleAccountLockupBeforeAndAfter(token, to, false) + { + _increaseOperatorApproval(token, operator, rateAllowanceIncrease, lockupAllowanceIncrease); + _depositWithPermit(token, to, amount, deadline, v, r, s); + } + + /** + * @notice Deposits tokens using an ERC-3009 authorization in a single transaction. + * @param token The ERC-3009-compliant token contract. + * @param to The address whose account within the contract will be credited. + * @param amount The amount of tokens to deposit. + * @param validAfter The timestamp after which the authorization is valid. + * @param validBefore The timestamp before which the authorization is valid. + * @param nonce A unique nonce for the authorization, used to prevent replay attacks. + * @param v,r,s The signature of the authorization. + */ + function depositWithAuthorization( + IERC3009 token, + address to, + uint256 amount, + uint256 validAfter, + uint256 validBefore, + bytes32 nonce, + uint8 v, + bytes32 r, + bytes32 s + ) external nonReentrant validateNonZeroAddress(to, "to") settleAccountLockupBeforeAndAfter(token, to, false) { + _depositWithAuthorization(token, to, amount, validAfter, validBefore, nonce, v, r, s); + } + + /** + * @notice Deposits tokens using an ERC-3009 authorization in a single transaction. + * while also setting operator approval. + * @param token The ERC-3009-compliant token contract. + * @param to The address whose account within the contract will be credited. + * @param amount The amount of tokens to deposit. + * @param validAfter The timestamp after which the authorization is valid. 
+ * @param validBefore The timestamp before which the authorization is valid.
+ * @param nonce A unique nonce for the authorization, used to prevent replay attacks.
+ * @param v,r,s The signature of the authorization.
+ * @param operator The address of the operator whose approval is being modified.
+ * @param rateAllowance The maximum payment rate the operator can set across all rails created by the operator
+ * on behalf of the message sender. If this is less than the current payment rate, the operator will
+ * only be able to reduce rates until they fall below the target.
+ * @param lockupAllowance The maximum amount of funds the operator can lock up on behalf of the message sender
+ * towards future payments. If this exceeds the current total amount of funds locked towards future payments,
+ * the operator will only be able to reduce future lockup.
+ * @param maxLockupPeriod The maximum number of epochs (blocks) the operator can lock funds for. If this is less than
+ * the current lockup period for a rail, the operator will only be able to reduce the lockup period.
+ */
+ function depositWithAuthorizationAndApproveOperator(
+ IERC3009 token,
+ address to,
+ uint256 amount,
+ uint256 validAfter,
+ uint256 validBefore,
+ bytes32 nonce,
+ uint8 v,
+ bytes32 r,
+ bytes32 s,
+ address operator,
+ uint256 rateAllowance,
+ uint256 lockupAllowance,
+ uint256 maxLockupPeriod
+ )
+ external
+ nonReentrant
+ validateNonZeroAddress(operator, "operator")
+ validateNonZeroAddress(to, "to")
+ validateSignerIsRecipient(to)
+ settleAccountLockupBeforeAndAfter(token, to, false)
+ {
+ _setOperatorApproval(token, operator, true, rateAllowance, lockupAllowance, maxLockupPeriod);
+ _depositWithAuthorization(token, to, amount, validAfter, validBefore, nonce, v, r, s);
+ }
+
+ /**
+ * @notice Deposits tokens using an ERC-3009 authorization in a single transaction,
+ * while also increasing operator approval allowances.
+ * @param token The ERC-3009-compliant token contract.
+ * @param to The address whose account within the contract will be credited. + * @param amount The amount of tokens to deposit. + * @param validAfter The timestamp after which the authorization is valid. + * @param validBefore The timestamp before which the authorization is valid. + * @param nonce A unique nonce for the authorization, used to prevent replay attacks. + * @param v,r,s The signature of the authorization. + * @param operator The address of the operator whose allowances are being increased. + * @param rateAllowanceIncrease The amount to increase the rate allowance by. + * @param lockupAllowanceIncrease The amount to increase the lockup allowance by. + * @custom:constraint Operator must already be approved. + */ + function depositWithAuthorizationAndIncreaseOperatorApproval( + IERC3009 token, + address to, + uint256 amount, + uint256 validAfter, + uint256 validBefore, + bytes32 nonce, + uint8 v, + bytes32 r, + bytes32 s, + address operator, + uint256 rateAllowanceIncrease, + uint256 lockupAllowanceIncrease + ) + external + nonReentrant + validateNonZeroAddress(operator, "operator") + validateNonZeroAddress(to, "to") + validateSignerIsRecipient(to) + settleAccountLockupBeforeAndAfter(token, to, false) + { + _increaseOperatorApproval(token, operator, rateAllowanceIncrease, lockupAllowanceIncrease); + _depositWithAuthorization(token, to, amount, validAfter, validBefore, nonce, v, r, s); + } + + function _depositWithAuthorization( + IERC3009 token, + address to, + uint256 amount, + uint256 validAfter, + uint256 validBefore, + bytes32 nonce, + uint8 v, + bytes32 r, + bytes32 s + ) internal { + // Revert if token is address(0) as authorization is not supported for native tokens + require(token != NATIVE_TOKEN, Errors.NativeTokenNotSupported()); + + // Use balance-before/balance-after accounting to correctly handle fee-on-transfer tokens + uint256 balanceBefore = token.balanceOf(address(this)); + + // Call ERC-3009 receiveWithAuthorization. 
+ // This will transfer 'amount' from 'to' to this contract. + // The token contract itself verifies the signature. + token.receiveWithAuthorization(to, address(this), amount, validAfter, validBefore, nonce, v, r, s); + + uint256 balanceAfter = token.balanceOf(address(this)); + amount = balanceAfter - balanceBefore; + + // Credit the beneficiary's internal account + accounts[token][to].funds += amount; + + // Emit an event to record the deposit, marking it as made via an off-chain signature. + emit DepositRecorded(token, to, to, amount); + } + + /// @notice Withdraws tokens from the caller's account to the caller's account, up to the amount of currently available tokens (the tokens not currently locked in rails). + /// @param token The ERC20 token address to withdraw. + /// @param amount The amount of tokens to withdraw. + function withdraw(IERC20 token, uint256 amount) + external + nonReentrant + settleAccountLockupBeforeAndAfter(token, msg.sender, true) + { + return withdrawToInternal(token, msg.sender, amount); + } + + /// @notice Withdraws tokens (`token`) from the caller's account to `to`, up to the amount of currently available tokens (the tokens not currently locked in rails). + /// @param token The ERC20 token address to withdraw. + /// @param to The address to receive the withdrawn tokens. + /// @param amount The amount of tokens to withdraw. 
+ function withdrawTo(IERC20 token, address to, uint256 amount) + external + nonReentrant + validateNonZeroAddress(to, "to") + settleAccountLockupBeforeAndAfter(token, msg.sender, true) + { + return withdrawToInternal(token, to, amount); + } + + function withdrawToInternal(IERC20 token, address to, uint256 amount) internal { + Account storage account = accounts[token][msg.sender]; + uint256 available = account.funds - account.lockupCurrent; + require(amount <= available, Errors.InsufficientUnlockedFunds(available, amount)); + if (token == NATIVE_TOKEN) { + require(FVMPay.pay(to, amount), Errors.NativeTransferFailed(to, amount)); + } else { + uint256 actual = transferOut(token, to, amount); + if (amount != actual) { + amount = actual; + require(amount <= available, Errors.InsufficientUnlockedFunds(available, amount)); + } + } + account.funds -= amount; + + emit WithdrawRecorded(token, msg.sender, to, amount); + } + + function transferOut(IERC20 token, address to, uint256 amount) internal returns (uint256 actual) { + // handle fee-on-transfer and hidden-denominator tokens + uint256 balanceBefore = token.balanceOf(address(this)); + token.safeTransfer(to, amount); + uint256 balanceAfter = token.balanceOf(address(this)); + actual = balanceBefore - balanceAfter; + } + + function transferIn(IERC20 token, address from, uint256 amount) internal returns (uint256 actual) { + // handle fee-on-transfer and hidden-denominator tokens + uint256 balanceBefore = token.balanceOf(address(this)); + token.safeTransferFrom(from, address(this), amount); + uint256 balanceAfter = token.balanceOf(address(this)); + actual = balanceAfter - balanceBefore; + } + + /// @notice Create a new rail from `from` to `to`, operated by the caller. + /// @param token The ERC20 token address for payments on this rail. + /// @param from The client address (payer) for this rail. + /// @param to The recipient address for payments on this rail. 
+ /// @param validator Optional address of a validator contract (can be address(0) for no validation).
+ /// @param commissionRateBps Optional operator commission in basis points (0-10000).
+ /// @param serviceFeeRecipient Address to receive operator commission
+ /// @return The ID of the newly created rail.
+ /// @custom:constraint Caller must be approved as an operator by the client (from address).
+ function createRail(
+ IERC20 token,
+ address from,
+ address to,
+ address validator,
+ uint256 commissionRateBps,
+ address serviceFeeRecipient
+ ) external nonReentrant validateNonZeroAddress(from, "from") validateNonZeroAddress(to, "to") returns (uint256) {
+ address operator = msg.sender;
+
+ // Check if operator is approved - approval is required for rail creation
+ OperatorApproval storage approval = operatorApprovals[token][from][operator];
+ require(approval.isApproved, Errors.OperatorNotApproved(from, operator));
+
+ // Validate commission rate
+ require(
+ commissionRateBps <= COMMISSION_MAX_BPS, Errors.CommissionRateTooHigh(COMMISSION_MAX_BPS, commissionRateBps)
+ );
+
+ require(commissionRateBps == 0 || serviceFeeRecipient != address(0), Errors.MissingServiceFeeRecipient());
+
+ uint256 railId = _nextRailId++;
+
+ Rail storage rail = rails[railId];
+ rail.token = token;
+ rail.from = from;
+ rail.to = to;
+ rail.operator = operator;
+ rail.validator = validator;
+ rail.settledUpTo = block.number;
+ rail.endEpoch = 0;
+ rail.commissionRateBps = commissionRateBps;
+ rail.serviceFeeRecipient = serviceFeeRecipient;
+
+ // Record this rail in the payee's and payer's lists
+ payeeRails[token][to].push(railId);
+ payerRails[token][from].push(railId);
+
+ emit RailCreated(railId, from, to, token, operator, validator, serviceFeeRecipient, commissionRateBps);
+
+ return railId;
+ }
+
+ /// @notice Modifies the fixed lockup and lockup period of a rail.
+ /// - If the rail has already been terminated, the lockup period may not be altered and the fixed lockup may only be reduced. + /// - If the rail is active, the lockup may only be modified if the payer's account is fully funded and will remain fully funded after the operation. + /// @param railId The ID of the rail to modify. + /// @param period The new lockup period (in epochs/blocks). + /// @param lockupFixed The new fixed lockup amount. + /// @custom:constraint Caller must be the rail operator. + /// @custom:constraint Operator must have sufficient lockup allowance to cover any increases the lockup period or the fixed lockup. + function modifyRailLockup(uint256 railId, uint256 period, uint256 lockupFixed) + external + validateRailActive(railId) + onlyRailOperator(railId) + nonReentrant + settleAccountLockupBeforeAndAfterForRail(railId, false, 0) + { + Rail storage rail = rails[railId]; + bool isTerminated = isRailTerminated(rail, railId); + + uint256 oldLockupPeriod = rail.lockupPeriod; + uint256 oldLockupFixed = rail.lockupFixed; + + if (isTerminated) { + modifyTerminatedRailLockup(rail, period, lockupFixed); + } else { + modifyNonTerminatedRailLockup(rail, period, lockupFixed); + } + + emit RailLockupModified(railId, oldLockupPeriod, period, oldLockupFixed, lockupFixed); + } + + function modifyTerminatedRailLockup(Rail storage rail, uint256 period, uint256 lockupFixed) internal { + require( + period == rail.lockupPeriod && lockupFixed <= rail.lockupFixed, + Errors.InvalidTerminatedRailModification(rail.lockupPeriod, rail.lockupFixed, period, lockupFixed) + ); + + Account storage payer = accounts[rail.token][rail.from]; + + // Calculate the fixed lockup reduction - this is the only change allowed for terminated rails + uint256 lockupReduction = rail.lockupFixed - lockupFixed; + + // Update payer's lockup - subtract the exact reduction amount + require( + payer.lockupCurrent >= lockupReduction, + Errors.InsufficientCurrentLockup(rail.token, rail.from, 
payer.lockupCurrent, lockupReduction)
+ );
+ payer.lockupCurrent -= lockupReduction;
+
+ // Reduce operator lockup usage to reflect the reduced fixed lockup
+ OperatorApproval storage operatorApproval = operatorApprovals[rail.token][rail.from][rail.operator];
+ updateOperatorLockupUsage(operatorApproval, rail.lockupFixed, lockupFixed);
+
+ rail.lockupFixed = lockupFixed;
+ }
+
+ function modifyNonTerminatedRailLockup(Rail storage rail, uint256 period, uint256 lockupFixed) internal {
+ Account storage payer = accounts[rail.token][rail.from];
+
+ // Don't allow changing the lockup period or increasing the fixed lockup unless the payer's
+ // account is fully settled.
+ if (!isAccountLockupFullySettled(payer)) {
+ require(
+ period == rail.lockupPeriod,
+ Errors.LockupPeriodChangeNotAllowedDueToInsufficientFunds(
+ rail.token, rail.from, rail.lockupPeriod, period
+ )
+ );
+
+ require(
+ lockupFixed <= rail.lockupFixed,
+ Errors.LockupFixedIncreaseNotAllowedDueToInsufficientFunds(
+ rail.token, rail.from, rail.lockupFixed, lockupFixed
+ )
+ );
+ }
+
+ // Get operator approval
+ OperatorApproval storage operatorApproval = operatorApprovals[rail.token][rail.from][rail.operator];
+
+ // Check if period exceeds the max lockup period allowed for this operator
+ // Only enforce this constraint when increasing the period, not when decreasing
+ if (period > rail.lockupPeriod) {
+ require(
+ period <= operatorApproval.maxLockupPeriod,
+ Errors.LockupPeriodExceedsOperatorMaximum(
+ rail.token, rail.operator, operatorApproval.maxLockupPeriod, period
+ )
+ );
+ }
+
+ // Calculate current (old) lockup.
+ uint256 oldLockup = rail.lockupFixed + (rail.paymentRate * rail.lockupPeriod);
+
+ // Calculate new lockup amount with new parameters
+ uint256 newLockup = lockupFixed + (rail.paymentRate * period);
+
+ require(
+ payer.lockupCurrent >= oldLockup,
+ Errors.CurrentLockupLessThanOldLockup(rail.token, rail.from, oldLockup, payer.lockupCurrent)
+ );
+
+ // We blindly update the payer's lockup. 
If they don't have enough funds to cover the new + // amount, we'll revert in the post-condition. + payer.lockupCurrent = payer.lockupCurrent - oldLockup + newLockup; + + updateOperatorLockupUsage(operatorApproval, oldLockup, newLockup); + + // Update rail lockup parameters + rail.lockupPeriod = period; + rail.lockupFixed = lockupFixed; + } + + /// @notice Modifies the payment rate and optionally makes a one-time payment. + /// - If the rail has already been terminated, one-time payments can be made and the rate may always be decreased (but never increased) regardless of the status of the payer's account. + /// - If the payer's account isn't fully funded and the rail is active (not terminated), the rail's payment rate may not be changed at all (increased or decreased). + /// - Regardless of the payer's account status, one-time payments will always go through provided that the rail has sufficient fixed lockup to cover the payment. + /// @param railId The ID of the rail to modify. + /// @param newRate The new payment rate (per epoch). This new rate applies starting the next epoch after the current one. + /// @param oneTimePayment Optional one-time payment amount to transfer immediately, taken out of the rail's fixed lockup. + /// @custom:constraint Caller must be the rail operator. + /// @custom:constraint Operator must have sufficient rate and lockup allowances for any increases. 
+ function modifyRailPayment(uint256 railId, uint256 newRate, uint256 oneTimePayment) + external + nonReentrant + validateRailActive(railId) + onlyRailOperator(railId) + settleAccountLockupBeforeAndAfterForRail(railId, false, oneTimePayment) + { + Rail storage rail = rails[railId]; + Account storage payer = accounts[rail.token][rail.from]; + Account storage payee = accounts[rail.token][rail.to]; + + uint256 oldRate = rail.paymentRate; + bool isTerminated = isRailTerminated(rail, railId); + + // Validate rate changes based on rail state and account lockup + if (isTerminated) { + uint256 maxSettlementEpoch = maxSettlementEpochForTerminatedRail(rail, railId); + require( + block.number < maxSettlementEpoch, + Errors.CannotModifyTerminatedRailBeyondEndEpoch(railId, maxSettlementEpoch, block.number) + ); + + require(newRate <= oldRate, Errors.RateChangeNotAllowedOnTerminatedRail(railId)); + } else { + bool isSettled = isAccountLockupFullySettled(payer); + require( + isSettled || newRate == oldRate, + Errors.LockupNotSettledRateChangeNotAllowed(railId, rail.from, isSettled, oldRate, newRate) + ); + } + + // enqueuing rate change + enqueueRateChange(rail, oldRate, newRate); + + // Calculate the effective lockup period + uint256 effectiveLockupPeriod; + if (isTerminated) { + effectiveLockupPeriod = remainingEpochsForTerminatedRail(rail, railId); + } else { + effectiveLockupPeriod = rail.lockupPeriod; + } + + // Verify one-time payment doesn't exceed fixed lockup + require( + rail.lockupFixed >= oneTimePayment, + Errors.OneTimePaymentExceedsLockup(railId, rail.lockupFixed, oneTimePayment) + ); + + // Update the rail fixed lockup and payment rate + rail.lockupFixed = rail.lockupFixed - oneTimePayment; + rail.paymentRate = newRate; + + OperatorApproval storage operatorApproval = operatorApprovals[rail.token][rail.from][rail.operator]; + + // Update payer's lockup rate - only if the rail is not terminated + // for terminated rails, the payer's lockup rate is already updated 
during rail termination + if (!isTerminated) { + require( + payer.lockupRate >= oldRate, + Errors.LockupRateLessThanOldRate(railId, rail.from, oldRate, payer.lockupRate) + ); + payer.lockupRate = payer.lockupRate - oldRate + newRate; + updateOperatorRateUsage(operatorApproval, oldRate, newRate); + } + + // Update payer's current lockup with effective lockup period calculation + // Remove old rate lockup for the effective period, add new rate lockup for the same period + payer.lockupCurrent = + payer.lockupCurrent - (oldRate * effectiveLockupPeriod) + (newRate * effectiveLockupPeriod) - oneTimePayment; + + updateOperatorLockupUsage(operatorApproval, oldRate * effectiveLockupPeriod, newRate * effectiveLockupPeriod); + + // Update operator allowance for one-time payment + updateOperatorAllowanceForOneTimePayment(operatorApproval, oneTimePayment); + + emit RailRateModified(railId, oldRate, newRate); + + // --- Process the One-Time Payment --- + processOneTimePayment(railId, payer, payee, rail, oneTimePayment); + } + + function enqueueRateChange(Rail storage rail, uint256 oldRate, uint256 newRate) internal { + // If rate hasn't changed or rail is already settled up to current block, nothing to do + if (newRate == oldRate || rail.settledUpTo == block.number) { + return; + } + + // Skip putting a 0-rate entry on an empty queue + if (oldRate == 0 && rail.rateChangeQueue.isEmpty()) { + rail.settledUpTo = block.number; + return; + } + + // Only queue the previous rate once per epoch + if (rail.rateChangeQueue.isEmpty() || rail.rateChangeQueue.peekTail().untilEpoch != block.number) { + // For validated rails, we need to enqueue the old rate. + // This ensures that the old rate is applied up to and including the current block. + // The new rate will be applicable starting from the next block. 
+ rail.rateChangeQueue.enqueue(oldRate, block.number); + } + } + + function calculateAndPayFees(uint256 amount, IERC20 token, address serviceFeeRecipient, uint256 commissionRateBps) + internal + returns (uint256 netPayeeAmount, uint256 operatorCommission, uint256 fee) + { + // ceil() + fee = (amount * NETWORK_FEE_NUMERATOR + (NETWORK_FEE_DENOMINATOR - 1)) / NETWORK_FEE_DENOMINATOR; + if (token == NATIVE_TOKEN) { + require(FVMPay.burn(fee), Errors.NativeTransferFailed(BURN_ADDRESS, fee)); + } else { + accounts[token][address(this)].funds += fee; + // start fee auction if necessary + AuctionInfo storage auction = auctionInfo[token]; + if (auction.startPrice == 0) { + auction.startPrice = FIRST_AUCTION_START_PRICE; + auction.startTime = uint168(block.timestamp); + } + } + amount -= fee; + + // Calculate operator commission (if any) based on remaining amount + operatorCommission = 0; + if (commissionRateBps > 0) { + operatorCommission = (amount * commissionRateBps) / COMMISSION_MAX_BPS; + } + + // Calculate net amount for payee + netPayeeAmount = amount - operatorCommission; + + // Credit operator (if commission exists) + if (operatorCommission > 0) { + Account storage serviceFeeRecipientAccount = accounts[token][serviceFeeRecipient]; + serviceFeeRecipientAccount.funds += operatorCommission; + } + } + + function processOneTimePayment( + uint256 railId, + Account storage payer, + Account storage payee, + Rail storage rail, + uint256 oneTimePayment + ) internal { + if (oneTimePayment > 0) { + require( + payer.funds >= oneTimePayment, + Errors.InsufficientFundsForOneTimePayment(rail.token, rail.from, oneTimePayment, payer.funds) + ); + + // Transfer funds from payer (full amount) + payer.funds -= oneTimePayment; + + // Calculate fees, pay operator commission and track platform fees + (uint256 netPayeeAmount, uint256 operatorCommission, uint256 networkFee) = + calculateAndPayFees(oneTimePayment, rail.token, rail.serviceFeeRecipient, rail.commissionRateBps); + + // Credit 
payee (net amount after fees)
+ payee.funds += netPayeeAmount;
+
+ emit RailOneTimePaymentProcessed(railId, netPayeeAmount, operatorCommission, networkFee);
+ }
+ }
+
+ /// @notice Settles payments for a terminated rail without validation. This may only be called by the rail client (payer) and after the terminated rail's max settlement epoch has passed. It's an escape-hatch to unblock payments in an otherwise stuck rail (e.g., due to a buggy validator contract) and it always pays in full.
+ /// @param railId The ID of the rail to settle.
+ /// @return totalSettledAmount The total amount settled and transferred.
+ /// @return totalNetPayeeAmount The net amount credited to the payee after fees.
+ /// @return totalOperatorCommission The commission credited to the operator.
+ /// @return totalNetworkFee The fee accrued for burning FIL.
+ /// @return finalSettledEpoch The epoch up to which settlement was actually completed.
+ /// @return note Additional information about the settlement.
+ function settleTerminatedRailWithoutValidation(uint256 railId)
+ external
+ nonReentrant
+ validateRailActive(railId)
+ validateRailTerminated(railId)
+ onlyRailClient(railId)
+ settleAccountLockupBeforeAndAfterForRail(railId, false, 0)
+ returns (
+ uint256 totalSettledAmount,
+ uint256 totalNetPayeeAmount,
+ uint256 totalOperatorCommission,
+ uint256 totalNetworkFee,
+ uint256 finalSettledEpoch,
+ string memory note
+ )
+ {
+ // Verify the current epoch is greater than the max settlement epoch
+ uint256 maxSettleEpoch = maxSettlementEpochForTerminatedRail(rails[railId], railId);
+ require(
+ block.number > maxSettleEpoch,
+ Errors.CannotSettleTerminatedRailBeforeMaxEpoch(railId, maxSettleEpoch + 1, block.number)
+ );
+
+ return settleRailInternal(railId, maxSettleEpoch, true);
+ }
+
+ /// @notice Settles payments for a rail up to the specified epoch. 
Settlement may fail to reach the target epoch if either the client lacks the funds to pay up to the current epoch or the validator refuses to settle the entire requested range. + /// @param railId The ID of the rail to settle. + /// @param untilEpoch The epoch up to which to settle (must not exceed current block number). + /// @return totalSettledAmount The total amount settled and transferred. + /// @return totalNetPayeeAmount The net amount credited to the payee after fees. + /// @return totalOperatorCommission The commission credited to the operator. + /// @return totalNetworkFee The fee accrued to burn FIL. + /// @return finalSettledEpoch The epoch up to which settlement was actually completed. + /// @return note Additional information about the settlement (especially from validation). + function settleRail(uint256 railId, uint256 untilEpoch) + public + nonReentrant + validateRailActive(railId) + settleAccountLockupBeforeAndAfterForRail(railId, false, 0) + returns ( + uint256 totalSettledAmount, + uint256 totalNetPayeeAmount, + uint256 totalOperatorCommission, + uint256 totalNetworkFee, + uint256 finalSettledEpoch, + string memory note + ) + { + return settleRailInternal(railId, untilEpoch, false); + } + + function settleRailInternal(uint256 railId, uint256 untilEpoch, bool skipValidation) + internal + returns ( + uint256 totalSettledAmount, + uint256 totalNetPayeeAmount, + uint256 totalOperatorCommission, + uint256 totalNetworkFee, + uint256 finalSettledEpoch, + string memory note + ) + { + require(untilEpoch <= block.number, Errors.CannotSettleFutureEpochs(railId, untilEpoch, block.number)); + + Rail storage rail = rails[railId]; + Account storage payer = accounts[rail.token][rail.from]; + + // Handle terminated and fully settled rails that are still not finalised + if (isRailTerminated(rail, railId) && rail.settledUpTo >= rail.endEpoch) { + finalizeTerminatedRail(railId, rail, payer); + return (0, 0, 0, 0, rail.settledUpTo, "rail fully settled and 
finalized"); + } + + // Calculate the maximum settlement epoch based on account lockup + uint256 maxSettlementEpoch; + if (!isRailTerminated(rail, railId)) { + maxSettlementEpoch = min(untilEpoch, payer.lockupLastSettledAt); + } else { + maxSettlementEpoch = min(untilEpoch, rail.endEpoch); + } + + uint256 startEpoch = rail.settledUpTo; + // Nothing to settle (already settled or zero-duration) + if (startEpoch >= maxSettlementEpoch) { + return ( + 0, + 0, + 0, + 0, + startEpoch, + string.concat("already settled up to epoch ", Strings.toString(maxSettlementEpoch)) + ); + } + + // Process settlement depending on whether rate changes exist + if (rail.rateChangeQueue.isEmpty()) { + (totalSettledAmount, totalNetPayeeAmount, totalOperatorCommission, totalNetworkFee, note) = + _settleSegment(railId, startEpoch, maxSettlementEpoch, rail.paymentRate, skipValidation); + + require( + rail.settledUpTo > startEpoch, Errors.NoProgressInSettlement(railId, startEpoch + 1, rail.settledUpTo) + ); + } else { + (totalSettledAmount, totalNetPayeeAmount, totalOperatorCommission, totalNetworkFee, note) = + _settleWithRateChanges(railId, rail.paymentRate, startEpoch, maxSettlementEpoch, skipValidation); + } + finalSettledEpoch = rail.settledUpTo; + note = checkAndFinalizeTerminatedRail(railId, rail, payer, note); + + emit RailSettled( + railId, totalSettledAmount, totalNetPayeeAmount, totalOperatorCommission, totalNetworkFee, finalSettledEpoch + ); + + return + (totalSettledAmount, totalNetPayeeAmount, totalOperatorCommission, totalNetworkFee, finalSettledEpoch, note); + } + + function checkAndFinalizeTerminatedRail( + uint256 railId, + Rail storage rail, + Account storage payer, + string memory regularNote + ) internal returns (string memory) { + // Check if rail is a terminated rail that's now fully settled + if (isRailTerminated(rail, railId) && rail.settledUpTo >= maxSettlementEpochForTerminatedRail(rail, railId)) { + finalizeTerminatedRail(railId, rail, payer); + return 
string.concat(regularNote, "terminated rail fully settled and finalized."); + } + + return regularNote; + } + + function finalizeTerminatedRail(uint256 railId, Rail storage rail, Account storage payer) internal { + // Reduce the lockup by the fixed amount + require( + payer.lockupCurrent >= rail.lockupFixed, + Errors.LockupInconsistencyDuringRailFinalization( + railId, rail.token, rail.from, rail.lockupFixed, payer.lockupCurrent + ) + ); + payer.lockupCurrent -= rail.lockupFixed; + + // Get operator approval for finalization update + OperatorApproval storage operatorApproval = operatorApprovals[rail.token][rail.from][rail.operator]; + // Calculate current (old) lockup. + uint256 oldLockup = rail.lockupFixed + (rail.paymentRate * rail.lockupPeriod); + + updateOperatorLockupUsage(operatorApproval, oldLockup, 0); + + // Zero out the rail to mark it as inactive + _zeroOutRail(rail); + + emit RailFinalized(railId); + } + + function _settleWithRateChanges( + uint256 railId, + uint256 currentRate, + uint256 startEpoch, + uint256 targetEpoch, + bool skipValidation + ) + internal + returns ( + uint256 totalSettledAmount, + uint256 totalNetPayeeAmount, + uint256 totalOperatorCommission, + uint256 totalNetworkFee, + string memory note + ) + { + Rail storage rail = rails[railId]; + RateChangeQueue.Queue storage rateQueue = rail.rateChangeQueue; + + SettlementState memory state = SettlementState({ + totalSettledAmount: 0, + totalNetPayeeAmount: 0, + totalOperatorCommission: 0, + totalNetworkFee: 0, + processedEpoch: startEpoch, + note: "" + }); + + // Process each segment until we reach the target epoch or hit an early exit condition + while (state.processedEpoch < targetEpoch) { + (uint256 segmentEndBoundary, uint256 segmentRate) = + _getNextSegmentBoundary(rateQueue, currentRate, state.processedEpoch, targetEpoch); + + // if current segment rate is zero, advance settlement to end of this segment and continue + if (segmentRate == 0) { + rail.settledUpTo = segmentEndBoundary; + 
state.processedEpoch = segmentEndBoundary; + + // Remove the processed rate change from the queue if it exists AND we have processed it entirely + if (!rateQueue.isEmpty() && segmentEndBoundary >= rateQueue.peek().untilEpoch) { + rateQueue.dequeue(); + } + + // Continue to next segment + continue; + } + + // Settle the current segment with potentially validated outcomes + ( + uint256 segmentSettledAmount, + uint256 segmentNetPayeeAmount, + uint256 segmentOperatorCommission, + uint256 segmentNetworkFee, + string memory validationNote + ) = _settleSegment(railId, state.processedEpoch, segmentEndBoundary, segmentRate, skipValidation); + + // If validator returned no progress, exit early without updating state + if (rail.settledUpTo <= state.processedEpoch) { + return ( + state.totalSettledAmount, + state.totalNetPayeeAmount, + state.totalOperatorCommission, + state.totalNetworkFee, + validationNote + ); + } + + // Add the settled amounts to our running totals + state.totalSettledAmount += segmentSettledAmount; + state.totalNetPayeeAmount += segmentNetPayeeAmount; + state.totalNetworkFee += segmentNetworkFee; + state.totalOperatorCommission += segmentOperatorCommission; + + // If validator partially settled the segment, exit early + if (rail.settledUpTo < segmentEndBoundary) { + return ( + state.totalSettledAmount, + state.totalNetPayeeAmount, + state.totalOperatorCommission, + state.totalNetworkFee, + validationNote + ); + } + + // Successfully settled full segment, update tracking values + state.processedEpoch = rail.settledUpTo; + state.note = validationNote; + + // Remove the processed rate change from the queue + if (!rateQueue.isEmpty() && segmentEndBoundary >= rateQueue.peek().untilEpoch) { + rateQueue.dequeue(); + } + } + + // We've successfully settled up to the target epoch + return ( + state.totalSettledAmount, + state.totalNetPayeeAmount, + state.totalOperatorCommission, + state.totalNetworkFee, + state.note + ); + } + + function _getNextSegmentBoundary( + 
RateChangeQueue.Queue storage rateQueue, + uint256 currentRate, + uint256 processedEpoch, + uint256 targetEpoch + ) internal view returns (uint256 segmentEndBoundary, uint256 segmentRate) { + // Default boundary is the target we want to reach + segmentEndBoundary = targetEpoch; + segmentRate = currentRate; + + // If we have rate changes in the queue, use the rate from the next change + if (!rateQueue.isEmpty()) { + RateChangeQueue.RateChange memory nextRateChange = rateQueue.peek(); + + // Validate rate change queue consistency + require( + nextRateChange.untilEpoch >= processedEpoch, + Errors.InvalidRateChangeQueueState(nextRateChange.untilEpoch, processedEpoch) + ); + + // Boundary is the minimum of our target or the next rate change epoch + segmentEndBoundary = min(targetEpoch, nextRateChange.untilEpoch); + segmentRate = nextRateChange.rate; + } + } + + function _settleSegment(uint256 railId, uint256 epochStart, uint256 epochEnd, uint256 rate, bool skipValidation) + internal + returns ( + uint256 settledAmount, + uint256 netPayeeAmount, + uint256 operatorCommission, + uint256 networkFee, + string memory note + ) + { + Rail storage rail = rails[railId]; + Account storage payer = accounts[rail.token][rail.from]; + Account storage payee = accounts[rail.token][rail.to]; + + if (rate == 0) { + rail.settledUpTo = epochEnd; + return (0, 0, 0, 0, "Zero rate payment rail"); + } + + // Calculate the default settlement values (without validation) + uint256 duration = epochEnd - epochStart; + settledAmount = rate * duration; + uint256 settledUntilEpoch = epochEnd; + note = ""; + + // If this rail has an validator and we're not skipping validation, let it decide on the final settlement amount + if (rail.validator != address(0) && !skipValidation) { + IValidator validator = IValidator(rail.validator); + IValidator.ValidationResult memory result = + validator.validatePayment(railId, settledAmount, epochStart, epochEnd, rate); + + // Ensure validator doesn't settle beyond our 
segment's end boundary + require( + result.settleUpto <= epochEnd, + Errors.ValidatorSettledBeyondSegmentEnd(railId, epochEnd, result.settleUpto) + ); + require( + result.settleUpto >= epochStart, + Errors.ValidatorSettledBeforeSegmentStart(railId, epochStart, result.settleUpto) + ); + + settledUntilEpoch = result.settleUpto; + settledAmount = result.modifiedAmount; + note = result.note; + + // Ensure validator doesn't allow more payment than the maximum possible + // for the epochs they're confirming + uint256 maxAllowedAmount = rate * (settledUntilEpoch - epochStart); + + require( + result.modifiedAmount <= maxAllowedAmount, + Errors.ValidatorModifiedAmountExceedsMaximum(railId, maxAllowedAmount, result.modifiedAmount) + ); + } + + // Verify payer has sufficient funds for the settlement + require( + payer.funds >= settledAmount, + Errors.InsufficientFundsForSettlement(rail.token, rail.from, settledAmount, payer.funds) + ); + + // Verify payer has sufficient lockup for the settlement + require( + payer.lockupCurrent >= settledAmount, + Errors.InsufficientLockupForSettlement(rail.token, rail.from, payer.lockupCurrent, settledAmount) + ); + uint256 actualSettledDuration = settledUntilEpoch - epochStart; + uint256 requiredLockup = rate * actualSettledDuration; + + // Transfer funds from payer (always pays full settled amount) + payer.funds -= settledAmount; + + // Calculate fees, pay operator commission and track platform fees + (netPayeeAmount, operatorCommission, networkFee) = + calculateAndPayFees(settledAmount, rail.token, rail.serviceFeeRecipient, rail.commissionRateBps); + + // Credit payee + payee.funds += netPayeeAmount; + + // Reduce lockup based on actual settled duration, not requested duration + // so that if the validator only settles for a partial duration, we only reduce the client lockup by the actual locked amount + // for that reduced duration. 
+ payer.lockupCurrent -= requiredLockup; + + // Update the rail's settled epoch + rail.settledUpTo = settledUntilEpoch; + + // Invariant check: lockup should never exceed funds + require( + payer.lockupCurrent <= payer.funds, + Errors.LockupExceedsFundsInvariant(rail.token, rail.from, payer.lockupCurrent, payer.funds) + ); + } + + function isAccountLockupFullySettled(Account storage account) internal view returns (bool) { + return account.lockupLastSettledAt == block.number; + } + + // attempts to settle account lockup up to and including the current epoch + // returns the actual epoch upto and including which the lockup was settled + function settleAccountLockup(IERC20 token, address owner, Account storage account) internal returns (uint256) { + uint256 currentEpoch = block.number; + uint256 elapsedTime = currentEpoch - account.lockupLastSettledAt; + + if (elapsedTime <= 0) { + return account.lockupLastSettledAt; + } + + if (account.lockupRate == 0) { + account.lockupLastSettledAt = currentEpoch; + + // Emit event for zero rate case + emit AccountLockupSettled( + token, owner, account.lockupCurrent, account.lockupRate, account.lockupLastSettledAt + ); + return currentEpoch; + } + + uint256 additionalLockup = account.lockupRate * elapsedTime; + + // we have sufficient funds to cover account lockup upto and including the current epoch + if (account.funds >= account.lockupCurrent + additionalLockup) { + account.lockupCurrent += additionalLockup; + account.lockupLastSettledAt = currentEpoch; + } else { + require( + account.funds >= account.lockupCurrent, + Errors.LockupExceedsFundsInvariant(token, owner, account.lockupCurrent, account.funds) + ); + + // If insufficient, calculate the fractional epoch where funds became insufficient + uint256 availableFunds = account.funds - account.lockupCurrent; + + if (availableFunds == 0) { + return account.lockupLastSettledAt; + } + + // Round down to the nearest whole epoch + uint256 fractionalEpochs = availableFunds / 
account.lockupRate; + + // Apply lockup up to this point + account.lockupCurrent += account.lockupRate * fractionalEpochs; + account.lockupLastSettledAt = account.lockupLastSettledAt + fractionalEpochs; + } + + // event emission for all other cases where state changed + emit AccountLockupSettled(token, owner, account.lockupCurrent, account.lockupRate, account.lockupLastSettledAt); + return account.lockupLastSettledAt; + } + + function remainingEpochsForTerminatedRail(Rail storage rail, uint256 railId) + internal + view + validateRailTerminated(railId) + returns (uint256) + { + // If current block beyond end epoch, return 0 + if (block.number > rail.endEpoch) { + return 0; + } + + // Return the number of epochs (blocks) remaining until end epoch + return rail.endEpoch - block.number; + } + + function isRailTerminated(Rail storage rail, uint256 railId) internal view returns (bool) { + require(rail.from != address(0), Errors.RailInactiveOrSettled(railId)); + return rail.endEpoch > 0; + } + + // Get the final settlement epoch for a terminated rail + function maxSettlementEpochForTerminatedRail(Rail storage rail, uint256 railId) + internal + view + validateRailTerminated(railId) + returns (uint256) + { + return rail.endEpoch; + } + + function _zeroOutRail(Rail storage rail) internal { + // IMPORTANT: Do not use `require(cond, Errors.Custom(peekTail()))` here, + // because Solidity evaluates all arguments before checking the condition. + // That would call `peekTail()` even if the queue is empty, causing an unwanted revert. + // Use `if (!cond) revert Errors.Custom(peekTail());` to safely handle the error. 
+ // Check if queue is empty before clearing + if (!rail.rateChangeQueue.isEmpty()) { + revert Errors.RateChangeQueueNotEmpty(rail.rateChangeQueue.peekTail().untilEpoch); + } + + rail.token = IERC20(address(0)); + rail.from = address(0); // This now marks the rail as inactive + rail.to = address(0); + rail.operator = address(0); + rail.validator = address(0); + rail.paymentRate = 0; + rail.lockupFixed = 0; + rail.lockupPeriod = 0; + rail.settledUpTo = 0; + rail.endEpoch = 0; + rail.commissionRateBps = 0; + } + + function updateOperatorRateUsage(OperatorApproval storage approval, uint256 oldRate, uint256 newRate) internal { + if (newRate > oldRate) { + uint256 rateIncrease = newRate - oldRate; + // If the increase exceeds the allowance, revert + require( + approval.rateUsage + rateIncrease <= approval.rateAllowance, + Errors.OperatorRateAllowanceExceeded(approval.rateAllowance, approval.rateUsage + rateIncrease) + ); + approval.rateUsage += rateIncrease; + } else if (oldRate > newRate) { + uint256 rateDecrease = oldRate - newRate; + approval.rateUsage = approval.rateUsage > rateDecrease ? approval.rateUsage - rateDecrease : 0; + } + } + + function updateOperatorLockupUsage(OperatorApproval storage approval, uint256 oldLockup, uint256 newLockup) + internal + { + if (newLockup > oldLockup) { + uint256 lockupIncrease = newLockup - oldLockup; + // If the increase exceeds the allowance, revert + require( + approval.lockupUsage + lockupIncrease <= approval.lockupAllowance, + Errors.OperatorLockupAllowanceExceeded(approval.lockupAllowance, approval.lockupUsage + lockupIncrease) + ); + approval.lockupUsage += lockupIncrease; + } else if (oldLockup > newLockup) { + uint256 lockupDecrease = oldLockup - newLockup; + approval.lockupUsage = approval.lockupUsage > lockupDecrease ? 
approval.lockupUsage - lockupDecrease : 0; + } + } + + function updateOperatorAllowanceForOneTimePayment(OperatorApproval storage approval, uint256 oneTimePayment) + internal + { + if (oneTimePayment == 0) return; + + // Reduce lockup usage + approval.lockupUsage = approval.lockupUsage - oneTimePayment; + + // Reduce lockup allowance + approval.lockupAllowance = + oneTimePayment > approval.lockupAllowance ? 0 : approval.lockupAllowance - oneTimePayment; + } + + /** + * @notice Gets all rails where the given address is the payer for a specific token. + * @param payer The address of the payer to get rails for. + * @param token The token address to filter rails by. + * @param offset The offset to start from. + * @param limit Maximum number of entries to return + * @return results Array of RailInfo structs containing rail IDs and termination status. + * @return nextOffset The next offset to use for pagination. + * @return total The total number of rails. + */ + function getRailsForPayerAndToken(address payer, IERC20 token, uint256 offset, uint256 limit) + external + view + returns (RailInfo[] memory results, uint256 nextOffset, uint256 total) + { + return _getRailsForAddressAndToken(payerRails[token][payer], offset, limit); + } + + /** + * @notice Gets all rails where the given address is the payee for a specific token. + * @param payee The address of the payee to get rails for. + * @param token The token address to filter rails by. + * @param offset The offset to start from. + * @param limit Maximum number of entries to return + * @return results Array of RailInfo structs containing rail IDs and termination status. + * @return nextOffset The next offset to use for pagination. + * @return total The total number of rails. 
+ */ + function getRailsForPayeeAndToken(address payee, IERC20 token, uint256 offset, uint256 limit) + external + view + returns (RailInfo[] memory results, uint256 nextOffset, uint256 total) + { + return _getRailsForAddressAndToken(payeeRails[token][payee], offset, limit); + } + + /** + * @dev Internal function to get rails for either a payer or payee. + * @param allRailIds The array of rail IDs to filter rails by. + * @param offset The offset to start from. + * @param limit Maximum number of entries to return + * @return results Array of RailInfo structs containing rail IDs and termination status. + * @return nextOffset The next offset to use for pagination. + * @return total The total number of rails. + */ + function _getRailsForAddressAndToken(uint256[] storage allRailIds, uint256 offset, uint256 limit) + internal + view + returns (RailInfo[] memory results, uint256 nextOffset, uint256 total) + { + uint256 railsLength = allRailIds.length; + if (limit == 0) limit = railsLength; + if (offset >= railsLength) return (new RailInfo[](0), railsLength, railsLength); + uint256 end = offset + limit > railsLength ? 
railsLength : offset + limit; + + results = new RailInfo[](end - offset); + uint256 resultCount = 0; + + for (uint256 i = offset; i < end; i++) { + uint256 railId = allRailIds[i]; + Rail storage rail = rails[railId]; + + // Skip non-existent rails + if (rail.from == address(0)) continue; + + // Add rail info to results + results[resultCount] = RailInfo({railId: railId, isTerminated: rail.endEpoch > 0, endEpoch: rail.endEpoch}); + resultCount++; + } + + // Truncate + assembly ("memory-safe") { + mstore(results, resultCount) + } + + return (results, end, railsLength); + } + + /// @notice Number of pending rate-change entries for a rail + function getRateChangeQueueSize(uint256 railId) external view returns (uint256) { + return rails[railId].rateChangeQueue.size(); + } + + /** + * @notice Gets information about an account - when it would go into debt, total balance, available balance, and lockup rate. + * @param token The token address to get account info for. + * @param owner The address of the account owner. + * @return fundedUntilEpoch The epoch at which the account would go into debt given current lockup rate and balance. + * @return currentFunds The current funds in the account. + * @return availableFunds The funds available after accounting for simulated lockup. + * @return currentLockupRate The current lockup rate per epoch. + */ + function getAccountInfoIfSettled(IERC20 token, address owner) + external + view + returns (uint256 fundedUntilEpoch, uint256 currentFunds, uint256 availableFunds, uint256 currentLockupRate) + { + Account storage account = accounts[token][owner]; + + currentFunds = account.funds; + currentLockupRate = account.lockupRate; + + uint256 currentEpoch = block.number; + + fundedUntilEpoch = account.lockupRate == 0 + ? type(uint256).max + : account.lockupLastSettledAt + (account.funds - account.lockupCurrent) / account.lockupRate; + uint256 simulatedSettledAt = fundedUntilEpoch >= currentEpoch ? 
currentEpoch : fundedUntilEpoch; + uint256 simulatedLockupCurrent = + account.lockupCurrent + account.lockupRate * (simulatedSettledAt - account.lockupLastSettledAt); + availableFunds = account.funds - simulatedLockupCurrent; + + return (fundedUntilEpoch, currentFunds, availableFunds, currentLockupRate); + } + + /** + * @notice Burn FIL to buy the network fees + * @param token Which kind of fees to buy + * @param recipient Receives the purchased fees + * @param requested Exact amount of fees transferred + */ + function burnForFees(IERC20 token, address recipient, uint256 requested) external payable nonReentrant { + Account storage fees = accounts[token][address(this)]; + uint256 available = fees.funds; + require(available >= requested, Errors.WithdrawAmountExceedsAccumulatedFees(token, available, requested)); + + AuctionInfo storage auction = auctionInfo[token]; + uint256 auctionPrice = uint256(auction.startPrice).decay(block.timestamp - auction.startTime); + require(msg.value >= auctionPrice, Errors.InsufficientNativeTokenForBurn(msg.value, auctionPrice)); + + auctionPrice *= Dutch.RESET_FACTOR; + if (auctionPrice > MAX_AUCTION_START_PRICE) { + auctionPrice = MAX_AUCTION_START_PRICE; + } + auction.startPrice = uint88(auctionPrice); + auction.startTime = uint168(block.timestamp); + + require(FVMPay.burn(msg.value), Errors.NativeTransferFailed(BURN_ADDRESS, msg.value)); + + uint256 actual = transferOut(token, recipient, requested); + fees.funds = available - actual; + } +} + +function min(uint256 a, uint256 b) pure returns (uint256) { + return a < b ? 
a : b; +} diff --git a/packages/pay/src/RateChangeQueue.sol b/packages/pay/src/RateChangeQueue.sol new file mode 100644 index 00000000..d8a3c8e3 --- /dev/null +++ b/packages/pay/src/RateChangeQueue.sol @@ -0,0 +1,57 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.27; + +library RateChangeQueue { + struct RateChange { + // The payment rate to apply + uint256 rate; + // The epoch up to and including which this rate will be used to settle a rail + uint256 untilEpoch; + } + + struct Queue { + uint256 head; + RateChange[] changes; + } + + function enqueue(Queue storage queue, uint256 rate, uint256 untilEpoch) internal { + queue.changes.push(RateChange(rate, untilEpoch)); + } + + function dequeue(Queue storage queue) internal returns (RateChange memory) { + RateChange[] storage c = queue.changes; + require(queue.head < c.length, "Queue is empty"); + RateChange memory change = c[queue.head]; + delete c[queue.head]; + + if (isEmpty(queue)) { + queue.head = 0; + // The array is already empty, waste no time zeroing it. 
+ assembly { + sstore(c.slot, 0) + } + } else { + queue.head++; + } + + return change; + } + + function peek(Queue storage queue) internal view returns (RateChange memory) { + require(queue.head < queue.changes.length, "Queue is empty"); + return queue.changes[queue.head]; + } + + function peekTail(Queue storage queue) internal view returns (RateChange memory) { + require(queue.head < queue.changes.length, "Queue is empty"); + return queue.changes[queue.changes.length - 1]; + } + + function isEmpty(Queue storage queue) internal view returns (bool) { + return queue.head == queue.changes.length; + } + + function size(Queue storage queue) internal view returns (uint256) { + return queue.changes.length - queue.head; + } +} diff --git a/packages/pay/src/interfaces/IERC3009.sol b/packages/pay/src/interfaces/IERC3009.sol new file mode 100644 index 00000000..b37fab4c --- /dev/null +++ b/packages/pay/src/interfaces/IERC3009.sol @@ -0,0 +1,34 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.27; + +import {IERC20} from "@openzeppelin/contracts/token/ERC20/IERC20.sol"; + +interface IERC3009 is IERC20 { + /** + * @notice Receive a transfer with a signed authorization from the payer + * @dev This has an additional check to ensure that the payee's address matches + * the caller of this function to prevent front-running attacks. 
+ * @param from Payer's address (Authorizer) + * @param to Payee's address + * @param value Amount to be transferred + * @param validAfter The time after which this is valid (unix time) + * @param validBefore The time before which this is valid (unix time) + * @param nonce Unique nonce + * @param v v of the signature + * @param r r of the signature + * @param s s of the signature + */ + function receiveWithAuthorization( + address from, + address to, + uint256 value, + uint256 validAfter, + uint256 validBefore, + bytes32 nonce, + uint8 v, + bytes32 r, + bytes32 s + ) external; + + function authorizationState(address user, bytes32 nonce) external view returns (bool used); +} diff --git a/packages/pay/test/AccountLockupSettlement.t.sol b/packages/pay/test/AccountLockupSettlement.t.sol new file mode 100644 index 00000000..6f624cc9 --- /dev/null +++ b/packages/pay/test/AccountLockupSettlement.t.sol @@ -0,0 +1,277 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.27; + +import {Test} from "forge-std/Test.sol"; +import {Payments} from "../src/Payments.sol"; +import {PaymentsTestHelpers} from "./helpers/PaymentsTestHelpers.sol"; +import {BaseTestHelper} from "./helpers/BaseTestHelper.sol"; + +contract AccountLockupSettlementTest is Test, BaseTestHelper { + PaymentsTestHelpers helper; + Payments payments; + + // Define constants + uint256 internal constant DEPOSIT_AMOUNT = 100 ether; + uint256 internal constant MAX_LOCKUP_PERIOD = 100; + + function setUp() public { + helper = new PaymentsTestHelpers(); + helper.setupStandardTestEnvironment(); + payments = helper.payments(); + // Setup operator approval for potential rails + helper.setupOperatorApproval( + USER1, + OPERATOR, + 10 ether, // rateAllowance + 100 ether, // lockupAllowance + MAX_LOCKUP_PERIOD // maxLockupPeriod + ); + } + + function testSettlementWithNoLockupRate() public { + // Setup: deposit funds + helper.makeDeposit(USER1, USER1, DEPOSIT_AMOUNT); + + // No rails created, so lockup rate 
should be 0 + + // Advance blocks to create a settlement gap without a rate + helper.advanceBlocks(10); + + // Trigger settlement with a new deposit + helper.makeDeposit(USER1, USER1, DEPOSIT_AMOUNT); + + // Verify settlement occurred + helper.assertAccountState(USER1, DEPOSIT_AMOUNT * 2, 0, 0, block.number); + } + + function testSimpleLockupAccumulation() public { + // Setup: deposit funds + helper.makeDeposit(USER1, USER1, DEPOSIT_AMOUNT); + + // Define a lockup rate + uint256 lockupRate = 2 ether; + uint256 lockupPeriod = 2; + + // Create rail with the desired rate + uint256 railId = helper.setupRailWithParameters( + USER1, + USER2, + OPERATOR, + lockupRate, // payment rate + lockupPeriod, // lockup period + 0, // no fixed lockup + address(0), // no fixed lockup + SERVICE_FEE_RECIPIENT // operator comission fee receiver + ); + assertEq(railId, 1); + + // Note: Settlement begins at the current block + // Advance blocks to create a settlement gap + uint256 elapsedBlocks = 5; + helper.advanceBlocks(elapsedBlocks); + + // Trigger settlement with a new deposit + helper.makeDeposit(USER1, USER1, DEPOSIT_AMOUNT); + + // The correct expected value is: + uint256 initialLockup = lockupRate * lockupPeriod; + uint256 accumulatedLockup = lockupRate * elapsedBlocks; + uint256 expectedLockup = initialLockup + accumulatedLockup; + + // Verify settlement occurred + helper.assertAccountState(USER1, DEPOSIT_AMOUNT * 2, expectedLockup, lockupRate, block.number); + } + + function testPartialSettlement() public { + uint256 lockupRate = 20 ether; + + helper.makeDeposit( + USER1, + USER1, + DEPOSIT_AMOUNT / 2 // 50 + ); + + // Create rail with the high rate (this will set the railway's settledUpTo to the current block) + helper.setupRailWithParameters( + USER1, + USER2, + OPERATOR, + lockupRate, // Very high payment rate (20 ether per block) + 1, // lockup period + 0, // no fixed lockup + address(0), // no fixed lockup + SERVICE_FEE_RECIPIENT + ); + + // When a rail is created, its 
settledUpTo is set to the current block + // Initial account lockup value should be lockupRate * lockupPeriod = 20 ether * 1 = 20 ether + // Initial funds are DEPOSIT_AMOUNT / 2 = 50 ether + + // Advance many blocks to exceed available funds + uint256 advancedBlocks = 10; + helper.advanceBlocks(advancedBlocks); + + // Deposit additional funds, which will trigger settlement + helper.makeDeposit(USER1, USER1, DEPOSIT_AMOUNT / 2); + + // Verify partial settlement + uint256 expectedSettlementBlock = 5; // lockupRate is 20, so we only have enough funds to pay for 5 epochs) + uint256 expectedLockup = DEPOSIT_AMOUNT; + + // Verify settlement state using helper function + helper.assertAccountState( + USER1, + DEPOSIT_AMOUNT, // expected funds + expectedLockup, // expected lockup + lockupRate, // expected lockup rate + expectedSettlementBlock // expected settlement block + ); + } + + function testSettlementAfterGap() public { + helper.makeDeposit( + USER1, + USER1, + DEPOSIT_AMOUNT * 2 // 200 ether + ); + + uint256 lockupRate = 1 ether; // 1 token per block + uint256 lockupPeriod = 30; + uint256 initialLockup = 10 ether; + + // Create rail + helper.setupRailWithParameters( + USER1, + USER2, + OPERATOR, + lockupRate, // 1 token per block + lockupPeriod, // Lockup period of 30 blocks + initialLockup, // initial fixed lockup of 10 ether + address(0), // no fixed lockup + SERVICE_FEE_RECIPIENT // operator comission fee receiver + ); + + // Roll forward many blocks + helper.advanceBlocks(30); + + // Trigger settlement with a new deposit + helper.makeDeposit(USER1, USER1, DEPOSIT_AMOUNT); + + // Verify settlement occurred + uint256 expectedLockup = initialLockup + (lockupRate * 30) + (lockupRate * lockupPeriod); // accumulated lockup // future lockup + + // Verify settlement occurred + helper.assertAccountState( + USER1, + DEPOSIT_AMOUNT * 3, // expected funds + expectedLockup, // expected lockup + lockupRate, // expected lockup rate + block.number // expected settlement block + 
); + } + + function testSettlementInvariants() public { + // Setup: deposit a specific amount + helper.makeDeposit(USER1, USER1, DEPOSIT_AMOUNT); + + // Scenario 1: Lockup exactly matches funds by creating a rail with fixed lockup + // exactly matching the deposit amount + + // Create a rail with fixed lockup = all available funds + helper.setupRailWithParameters( + USER1, + USER2, + OPERATOR, + 0, // no payment rate + 10, // Lockup period + DEPOSIT_AMOUNT, // fixed lockup equal to all funds + address(0), // no fixed lockup + SERVICE_FEE_RECIPIENT // operator comission fee receiver + ); + + // Verify the account state + // Verify the account state using helper function + helper.assertAccountState( + USER1, + DEPOSIT_AMOUNT, + DEPOSIT_AMOUNT, + 0, // no payment rate + block.number + ); + + helper.makeDeposit(USER1, USER1, 1); // Adding more funds + + // Scenario 2: Verify we can't create a situation where lockup > funds + // We'll try to create a rail with an impossibly high fixed lockup + + // Increase operator approval allowance + + helper.setupOperatorApproval( + USER1, + OPERATOR, + 0, // no rate allowance needed + DEPOSIT_AMOUNT * 3, // much higher lockup allowance + MAX_LOCKUP_PERIOD // max lockup period + ); + + // Try to set up a rail with lockup > funds which should fail + vm.startPrank(OPERATOR); + uint256 railId = payments.createRail( + helper.testToken(), + USER1, + USER2, + address(0), + 0, + SERVICE_FEE_RECIPIENT // operator comission fee receiver + ); + + // This should fail because lockupFixed > available funds + vm.expectRevert("invariant failure: insufficient funds to cover lockup after function execution"); + payments.modifyRailLockup(railId, 10, DEPOSIT_AMOUNT * 2); + vm.stopPrank(); + } + + function testWithdrawWithLockupSettlement() public { + helper.makeDeposit( + USER1, + USER1, + DEPOSIT_AMOUNT * 2 // Deposit 200 ether + ); + // Set a lockup rate and an existing lockup via a rail + uint256 lockupRate = 1 ether; + uint256 initialLockup = 50 
ether; + uint256 lockupPeriod = 10; + + // Create rail with fixed + rate-based lockup + helper.setupRailWithParameters( + USER1, + USER2, + OPERATOR, + lockupRate, // 1 ether per block + lockupPeriod, // Lockup period of 10 blocks + initialLockup, // 50 ether fixed lockup + address(0), // no fixed lockup + SERVICE_FEE_RECIPIENT // operator commision receiver + ); + + // Total lockup at rail creation: 50 ether fixed + (1 ether * 10 blocks) = 60 ether + // Available for withdrawal at creation: 200 ether - 60 ether = 140 ether + + // Try to withdraw more than available (should fail) + helper.expectWithdrawalToFail(USER1, 140 ether, 150 ether); + + // Withdraw exactly the available amount (should succeed and also settle account lockup) + helper.makeWithdrawal(USER1, 140 ether); + + // Verify account state after withdrawal + // Remaining funds: 200 - 140 = 60 ether + // Remaining lockup: 60 ether (unchanged because no blocks passed) + helper.assertAccountState( + USER1, + 60 ether, // expected funds + 60 ether, // expected lockup + lockupRate, // expected lockup rate + block.number // expected settlement block + ); + } +} diff --git a/packages/pay/test/AccountManagement.t.sol b/packages/pay/test/AccountManagement.t.sol new file mode 100644 index 00000000..6863bd8f --- /dev/null +++ b/packages/pay/test/AccountManagement.t.sol @@ -0,0 +1,533 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.27; + +import {MockFVMTest} from "fvm-solidity/mocks/MockFVMTest.sol"; +import {IERC20} from "@openzeppelin/contracts/token/ERC20/IERC20.sol"; + +import {Payments} from "../src/Payments.sol"; +import {PaymentsTestHelpers} from "./helpers/PaymentsTestHelpers.sol"; +import {BaseTestHelper} from "./helpers/BaseTestHelper.sol"; +import {Errors} from "../src/Errors.sol"; + +contract AccountManagementTest is MockFVMTest, BaseTestHelper { + PaymentsTestHelpers helper; + Payments payments; + + uint256 internal constant DEPOSIT_AMOUNT = 100 ether; + uint256 internal 
constant INITIAL_BALANCE = 1000 ether; + uint256 internal constant MAX_LOCKUP_PERIOD = 100; + + IERC20 private constant NATIVE_TOKEN = IERC20(address(0)); + + function setUp() public override { + super.setUp(); + // Create test helpers and setup environment + helper = new PaymentsTestHelpers(); + helper.setupStandardTestEnvironment(); + payments = helper.payments(); + } + + function testBasicDeposit() public { + helper.makeDeposit(USER1, USER1, DEPOSIT_AMOUNT); + } + + function testNativeDeposit() public { + helper.makeNativeDeposit(USER1, USER1, DEPOSIT_AMOUNT); + } + + function testMultipleDeposits() public { + helper.makeDeposit(USER1, USER1, DEPOSIT_AMOUNT); + helper.makeDeposit(USER1, USER1, DEPOSIT_AMOUNT + 1); + } + + function testDepositToAnotherUser() public { + helper.makeDeposit(USER1, USER2, DEPOSIT_AMOUNT); + } + + /*////////////////////////////////////////////////////////////// + DEPOSIT WITH PERMIT TESTS + //////////////////////////////////////////////////////////////*/ + + function testDepositWithPermit() public { + helper.makeDepositWithPermit(user1Sk, USER1, DEPOSIT_AMOUNT); + } + + function testDepositWithPermitExpiredPermitReverts() public { + helper.expectExpiredPermitToRevert(user1Sk, USER1, DEPOSIT_AMOUNT); + } + + function testDepositWithPermitZeroAmountNoEffect() public { + helper.makeDepositWithPermit(user1Sk, USER1, 0); + } + + function testDepositWithPermitMultiple() public { + helper.makeDepositWithPermit(user1Sk, USER1, DEPOSIT_AMOUNT); + helper.makeDepositWithPermit(user1Sk, USER1, DEPOSIT_AMOUNT); + } + + function testDepositWithPermitRevertsForNativeToken() public { + helper.expectNativeTokenDepositWithPermitToRevert(user1Sk, USER1, DEPOSIT_AMOUNT); + } + + function testDepositWithPermitInvalidPermitReverts() public { + helper.expectInvalidPermitToRevert(user1Sk, USER1, DEPOSIT_AMOUNT); + } + + function testDepositWithPermitToAnotherUser() public { + helper.makeDepositWithPermitToAnotherUser(user1Sk, RELAYER, DEPOSIT_AMOUNT); + } + 
+ function testNativeDepositWithInsufficientNativeTokens() public { + vm.startPrank(USER1); + + // Sending less native value than the declared deposit amount must revert + vm.expectRevert( + abi.encodeWithSelector(Errors.MustSendExactNativeAmount.selector, DEPOSIT_AMOUNT, DEPOSIT_AMOUNT - 1) + ); + payments.deposit{value: DEPOSIT_AMOUNT - 1}(NATIVE_TOKEN, USER1, DEPOSIT_AMOUNT); + + vm.stopPrank(); + } + + function testDepositWithZeroRecipient() public { + vm.startPrank(USER1); + + IERC20 testToken = helper.testToken(); + + // Using straightforward expectRevert without message + vm.expectRevert(); + payments.deposit(testToken, address(0), DEPOSIT_AMOUNT); + + vm.stopPrank(); + } + + function testDepositWithInsufficientBalance() public { + vm.startPrank(USER1); + vm.expectRevert(); + helper.makeDeposit(USER1, USER1, INITIAL_BALANCE + 1); + vm.stopPrank(); + } + + function testDepositWithInsufficientAllowance() public { + // Reset allowance to a small amount + vm.startPrank(USER1); + IERC20 testToken = helper.testToken(); + testToken.approve(address(payments), DEPOSIT_AMOUNT / 2); + + // Attempt deposit with more than approved + vm.expectRevert(); + payments.deposit(testToken, USER1, DEPOSIT_AMOUNT); + vm.stopPrank(); + } + + /*////////////////////////////////////////////////////////////// + WITHDRAWAL TESTS + //////////////////////////////////////////////////////////////*/ + + function testBasicWithdrawal() public { + helper.makeDeposit(USER1, USER1, DEPOSIT_AMOUNT); + helper.makeWithdrawal(USER1, DEPOSIT_AMOUNT / 2); + } + + function testNativeWithdrawal() public { + helper.makeNativeDeposit(USER1, USER1, DEPOSIT_AMOUNT); + helper.makeNativeWithdrawal(USER1, DEPOSIT_AMOUNT / 2); + } + + function testMultipleWithdrawals() public { + // Setup: deposit first + helper.makeDeposit(USER1, USER1, DEPOSIT_AMOUNT); + + // Test multiple withdrawals + helper.makeWithdrawal(USER1, DEPOSIT_AMOUNT / 4); + helper.makeWithdrawal(USER1, DEPOSIT_AMOUNT / 4); + } + + function testWithdrawToAnotherAddress() public { + // Setup: 
deposit first + helper.makeDeposit(USER1, USER1, DEPOSIT_AMOUNT); + + // Test withdrawTo + helper.makeWithdrawalTo(USER1, USER2, DEPOSIT_AMOUNT / 2); + } + + function testWithdrawEntireBalance() public { + // Setup: deposit first + helper.makeDeposit(USER1, USER1, DEPOSIT_AMOUNT); + + // Withdraw everything + helper.makeWithdrawal(USER1, DEPOSIT_AMOUNT); + } + + function testWithdrawExcessAmount() public { + // Setup: deposit first + helper.makeDeposit(USER1, USER1, DEPOSIT_AMOUNT); + + // Try to withdraw more than available + helper.expectWithdrawalToFail(USER1, DEPOSIT_AMOUNT, DEPOSIT_AMOUNT + 1); + } + + function testWithdrawToWithZeroRecipient() public { + vm.startPrank(USER1); + + IERC20 testToken = helper.testToken(); + + // Test zero recipient address + vm.expectRevert(); + payments.withdrawTo(testToken, address(0), DEPOSIT_AMOUNT); + + vm.stopPrank(); + } + + /*////////////////////////////////////////////////////////////// + LOCKUP/SETTLEMENT TESTS + //////////////////////////////////////////////////////////////*/ + + function testWithdrawWithLockedFunds() public { + // First, deposit funds + helper.makeDeposit(USER1, USER1, DEPOSIT_AMOUNT); + + // Define locked amount to be half of the deposit + uint256 lockedAmount = DEPOSIT_AMOUNT / 2; + + // Create a rail with a fixed lockup amount to achieve the required locked funds + helper.setupOperatorApproval( + USER1, + OPERATOR, + 100 ether, // rateAllowance + lockedAmount, // lockupAllowance exactly matches what we need + MAX_LOCKUP_PERIOD // max lockup period + ); + + // Create rail with the fixed lockup + helper.setupRailWithParameters( + USER1, + USER2, + OPERATOR, + 0, // no payment rate + 0, // no lockup period + lockedAmount, // fixed lockup of half the deposit + address(0), // no fixed lockup + SERVICE_FEE_RECIPIENT // operator commission receiver + ); + + // Verify lockup worked by checking account state + helper.assertAccountState( + USER1, + DEPOSIT_AMOUNT, // expected funds + lockedAmount, // expected 
lockup + 0, // expected rate (not set in this test) + block.number // expected last settled + ); + + // Try to withdraw more than unlocked funds + helper.expectWithdrawalToFail(USER1, DEPOSIT_AMOUNT - lockedAmount, DEPOSIT_AMOUNT); + + // Should be able to withdraw up to unlocked amount + helper.makeWithdrawal(USER1, DEPOSIT_AMOUNT - lockedAmount); + } + + function testSettlementDuringDeposit() public { + // First deposit + helper.makeDeposit(USER1, USER1, DEPOSIT_AMOUNT); + + // Setup operator approval with sufficient allowances + helper.setupOperatorApproval( + USER1, + OPERATOR, + 100 ether, // rateAllowance + 1000 ether, // lockupAllowance + MAX_LOCKUP_PERIOD // max lockup period + ); + + uint256 lockupRate = 0.5 ether; // 0.5 token per block + + // Create a rail that will set the lockup rate to 0.5 ether per block + // This creates a lockup rate of 0.5 ether/block for the account + helper.setupRailWithParameters( + USER1, + USER2, + OPERATOR, + lockupRate, // payment rate (creates lockup rate) + 10, // lockup period + 0, // no fixed lockup + address(0), // no fixed lockup + SERVICE_FEE_RECIPIENT // operator commission receiver + ); + + // Create a second rail to get to 1 ether lockup rate on the account + helper.setupRailWithParameters( + USER1, + USER2, + OPERATOR, + lockupRate, // payment rate (creates another 0.5 ether/block lockup rate) + 10, // lockup period + 0, // no fixed lockup + address(0), // no fixed lockup + SERVICE_FEE_RECIPIENT // operator commission receiver + ); + + // Advance 10 blocks to create settlement gap + helper.advanceBlocks(10); + + // Make another deposit to trigger settlement + helper.makeDeposit(USER1, USER1, DEPOSIT_AMOUNT); + + // Check all states match expectations using assertAccountState helper + helper.assertAccountState( + USER1, + DEPOSIT_AMOUNT * 2, // expected funds + 20 ether, // expected lockup (2 rails × 0.5 ether per block × 10 blocks + future lockup of 10 ether) + lockupRate * 2, // expected rate (2 * 0.5 ether) + 
block.number // expected last settled + ); + } + + /*////////////////////////////////////////////////////////////// + ACCOUNT INFO TESTS + //////////////////////////////////////////////////////////////*/ + + function testGetAccountInfoNoLockups() public { + // Setup: deposit funds + helper.makeDeposit(USER1, USER1, DEPOSIT_AMOUNT); + + // Get account info + (uint256 fundedUntil, uint256 totalBalance, uint256 availableBalance, uint256 lockupRate) = + payments.getAccountInfoIfSettled(helper.testToken(), USER1); + + // Verify account state + assertEq(totalBalance, DEPOSIT_AMOUNT, "total balance mismatch"); + assertEq(availableBalance, DEPOSIT_AMOUNT, "available balance mismatch"); + assertEq(lockupRate, 0, "lockup rate should be 0"); + assertEq(fundedUntil, type(uint256).max, "funded until should be max"); + } + + function testGetAccountInfoWithFixedLockup() public { + // Setup: deposit funds + helper.makeDeposit(USER1, USER1, DEPOSIT_AMOUNT); + + // Setup operator approval + helper.setupOperatorApproval(USER1, OPERATOR, 100 ether, DEPOSIT_AMOUNT, MAX_LOCKUP_PERIOD); + + // Create rail with fixed lockup + uint256 fixedLockup = DEPOSIT_AMOUNT / 2; + helper.setupRailWithParameters( + USER1, + USER2, + OPERATOR, + 0, + 0, + fixedLockup, + address(0), + SERVICE_FEE_RECIPIENT // operator commission receiver + ); + + // Get account info + (uint256 fundedUntil, uint256 totalBalance, uint256 availableBalance, uint256 lockupRate) = + payments.getAccountInfoIfSettled(helper.testToken(), USER1); + + // Verify account state + assertEq(totalBalance, DEPOSIT_AMOUNT, "total balance mismatch"); + assertEq(availableBalance, DEPOSIT_AMOUNT - fixedLockup, "available balance mismatch"); + assertEq(lockupRate, 0, "lockup rate should be 0"); + assertEq(fundedUntil, type(uint256).max, "funded until should be max with no rate"); + } + + // Helper function to calculate simulated lockup and available balance + function calculateSimulatedLockupAndBalance( + uint256 funds, + uint256 
lockupCurrent, + uint256 lockupRate, + uint256 lockupLastSettledAt + ) internal view returns (uint256 simulatedLockupCurrent, uint256 availableBalance) { + uint256 currentEpoch = block.number; + uint256 elapsedTime = currentEpoch - lockupLastSettledAt; + simulatedLockupCurrent = lockupCurrent; + + if (elapsedTime > 0 && lockupRate > 0) { + uint256 additionalLockup = lockupRate * elapsedTime; + + if (funds >= lockupCurrent + additionalLockup) { + simulatedLockupCurrent = lockupCurrent + additionalLockup; + } else { + uint256 availableFunds = funds - lockupCurrent; + if (availableFunds > 0) { + uint256 fractionalEpochs = availableFunds / lockupRate; + simulatedLockupCurrent = lockupCurrent + (lockupRate * fractionalEpochs); + } + } + } + + availableBalance = funds > simulatedLockupCurrent ? funds - simulatedLockupCurrent : 0; + } + + function testGetAccountInfoWithRateLockup() public { + // Setup: deposit funds + helper.makeDeposit(USER1, USER1, DEPOSIT_AMOUNT); + + // Setup operator approval + helper.setupOperatorApproval(USER1, OPERATOR, 100 ether, DEPOSIT_AMOUNT, MAX_LOCKUP_PERIOD); + + uint256 lockupRate = 1 ether; // 1 token per block + uint256 lockupPeriod = 10; + + // Create rail with rate lockup + helper.setupRailWithParameters( + USER1, + USER2, + OPERATOR, + lockupRate, + lockupPeriod, + 0, + address(0), + SERVICE_FEE_RECIPIENT // operator commission receiver + ); + + // Advance 5 blocks + helper.advanceBlocks(5); + + // Get raw account data for debugging + (uint256 funds, uint256 lockupCurrent, uint256 lockupRate2, uint256 lockupLastSettledAt) = + payments.accounts(helper.testToken(), USER1); + + (, uint256 availableBalance) = + calculateSimulatedLockupAndBalance(funds, lockupCurrent, lockupRate2, lockupLastSettledAt); + + // Get account info + (uint256 fundedUntil, uint256 totalBalance1, uint256 availableBalance1, uint256 lockupRate1) = + payments.getAccountInfoIfSettled(helper.testToken(), USER1); + + // Verify account state + assertEq(totalBalance1, 
DEPOSIT_AMOUNT, "total balance mismatch"); + assertEq(availableBalance1, availableBalance, "available balance mismatch"); + assertEq(lockupRate1, lockupRate, "lockup rate mismatch"); + assertEq(fundedUntil, block.number + (availableBalance / lockupRate), "funded until mismatch"); + } + + function testGetAccountInfoWithPartialSettlement() public { + // Setup: deposit funds + helper.makeDeposit(USER1, USER1, DEPOSIT_AMOUNT); + + // Setup operator approval + helper.setupOperatorApproval(USER1, OPERATOR, 100 ether, DEPOSIT_AMOUNT, MAX_LOCKUP_PERIOD); + + uint256 lockupRate = 2 ether; // 2 tokens per block + uint256 lockupPeriod = 10; + + // Create rail with rate lockup + helper.setupRailWithParameters( + USER1, + USER2, + OPERATOR, + lockupRate, + lockupPeriod, + 0, + address(0), + SERVICE_FEE_RECIPIENT // operator commission receiver + ); + + // Advance blocks to create partial settlement + helper.advanceBlocks(5); + + // Get raw account data for debugging + (uint256 funds, uint256 lockupCurrent, uint256 lockupRate2, uint256 lockupLastSettledAt) = + payments.accounts(helper.testToken(), USER1); + + (, uint256 availableBalance) = + calculateSimulatedLockupAndBalance(funds, lockupCurrent, lockupRate2, lockupLastSettledAt); + + // Get account info + (uint256 fundedUntil, uint256 totalBalance2, uint256 availableBalance2, uint256 lockupRate3) = + payments.getAccountInfoIfSettled(helper.testToken(), USER1); + + // Verify account state + assertEq(totalBalance2, DEPOSIT_AMOUNT, "total balance mismatch"); + assertEq(availableBalance2, availableBalance, "available balance mismatch"); + assertEq(lockupRate3, lockupRate, "lockup rate mismatch"); + assertEq(fundedUntil, block.number + (availableBalance / lockupRate), "funded until mismatch"); + } + + function testGetAccountInfoInDebt() public { + // Setup: deposit funds + helper.makeDeposit(USER1, USER1, DEPOSIT_AMOUNT); + + // Setup operator approval + helper.setupOperatorApproval(USER1, OPERATOR, 100 ether, DEPOSIT_AMOUNT, 
MAX_LOCKUP_PERIOD); + + uint256 lockupRate = 2 ether; // 2 tokens per block + uint256 lockupPeriod = 10; + + // Create rail with rate lockup + helper.setupRailWithParameters( + USER1, + USER2, + OPERATOR, + lockupRate, + lockupPeriod, + 0, + address(0), + SERVICE_FEE_RECIPIENT // operator commission receiver + ); + + // Advance blocks to create debt + helper.advanceBlocks(60); // This will create debt as 60 * 2 > DEPOSIT_AMOUNT + + // Get account info + (uint256 fundedUntil, uint256 totalBalance3, uint256 availableBalance3, uint256 lockupRate3) = + payments.getAccountInfoIfSettled(helper.testToken(), USER1); + + // Verify account state + assertEq(totalBalance3, DEPOSIT_AMOUNT, "total balance mismatch"); + assertEq(availableBalance3, 0, "available balance should be 0"); + assertEq(lockupRate3, lockupRate, "lockup rate mismatch"); + assertTrue(fundedUntil < block.number, "funded until should be in the past"); + } + + function testGetAccountInfoAfterRateChange() public { + // Setup: deposit funds + helper.makeDeposit(USER1, USER1, DEPOSIT_AMOUNT); + + // Setup operator approval + helper.setupOperatorApproval(USER1, OPERATOR, 100 ether, DEPOSIT_AMOUNT, MAX_LOCKUP_PERIOD); + + uint256 initialRate = 1 ether; // 1 token per block + uint256 lockupPeriod = 10; + + // Create rail with initial rate + uint256 railId = helper.setupRailWithParameters( + USER1, + USER2, + OPERATOR, + initialRate, + lockupPeriod, + 0, + address(0), + SERVICE_FEE_RECIPIENT // operator commission receiver + ); + + // Advance some blocks + helper.advanceBlocks(5); + + // Change the rate + uint256 newRate = 2 ether; // 2 tokens per block + vm.prank(OPERATOR); + payments.modifyRailPayment(railId, newRate, 0); + + // Get raw account data for debugging + (uint256 funds, uint256 lockupCurrent, uint256 lockupRate2, uint256 lockupLastSettledAt) = + payments.accounts(helper.testToken(), USER1); + + (, uint256 availableBalance) = + calculateSimulatedLockupAndBalance(funds, lockupCurrent, lockupRate2, 
lockupLastSettledAt); + + // Get account info + (uint256 fundedUntil, uint256 totalBalance4, uint256 availableBalance4, uint256 lockupRate4) = + payments.getAccountInfoIfSettled(helper.testToken(), USER1); + + // Verify account state + assertEq(totalBalance4, DEPOSIT_AMOUNT, "total balance mismatch"); + assertEq(availableBalance4, availableBalance, "available balance mismatch"); + assertEq(lockupRate4, newRate, "lockup rate mismatch"); + assertEq(fundedUntil, block.number + (availableBalance / newRate), "funded until mismatch"); + } +} diff --git a/packages/pay/test/Burn.t.sol b/packages/pay/test/Burn.t.sol new file mode 100644 index 00000000..5ca158cf --- /dev/null +++ b/packages/pay/test/Burn.t.sol @@ -0,0 +1,259 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.27; + +import {IERC20} from "@openzeppelin/contracts/token/ERC20/IERC20.sol"; +import {MockFVMTest} from "fvm-solidity/mocks/MockFVMTest.sol"; +import {BURN_ADDRESS} from "fvm-solidity/FVMActors.sol"; + +import {Dutch} from "../src/Dutch.sol"; +import {Errors} from "../src/Errors.sol"; +import {FIRST_AUCTION_START_PRICE, MAX_AUCTION_START_PRICE, Payments} from "../src/Payments.sol"; +import {PaymentsTestHelpers} from "./helpers/PaymentsTestHelpers.sol"; + +contract BurnTest is MockFVMTest { + using Dutch for uint256; + + PaymentsTestHelpers helper = new PaymentsTestHelpers(); + Payments payments; + uint256 testTokenRailId; + uint256 nativeTokenRailId; + + IERC20 private testToken; + IERC20 private constant NATIVE_TOKEN = IERC20(address(0)); + address private payer; + address private payee; + address private operator; + address private recipient; + + function setUp() public override { + // Mock the FVM precompiles + super.setUp(); + + helper.setupStandardTestEnvironment(); + payments = helper.payments(); + + testToken = helper.testToken(); + operator = helper.OPERATOR(); + payer = helper.USER1(); + payee = helper.USER2(); + recipient = helper.USER3(); + + vm.prank(payer); + 
payments.setOperatorApproval(testToken, operator, true, 5 * 10 ** 18, 5 * 10 ** 18, 28800); + vm.prank(payer); + payments.setOperatorApproval(NATIVE_TOKEN, operator, true, 5 * 10 ** 18, 5 * 10 ** 18, 28800); + + vm.prank(operator); + testTokenRailId = payments.createRail(testToken, payer, payee, address(0), 0, address(0)); + vm.prank(operator); + nativeTokenRailId = payments.createRail(NATIVE_TOKEN, payer, payee, address(0), 0, address(0)); + + vm.prank(payer); + testToken.approve(address(payments), 5 * 10 ** 18); + vm.prank(payer); + payments.deposit(testToken, payer, 5 * 10 ** 18); + + vm.prank(payer); + payments.deposit{value: 5 * 10 ** 18}(NATIVE_TOKEN, payer, 5 * 10 ** 18); + } + + function testBurn() public { + uint256 newRate = 9 * 10 ** 16; + vm.prank(operator); + payments.modifyRailPayment(testTokenRailId, newRate, 0); + + vm.roll(vm.getBlockNumber() + 10); + + (uint256 availableBefore,,,) = payments.accounts(testToken, address(payments)); + assertEq(availableBefore, 0); + + vm.prank(payer); + payments.settleRail(testTokenRailId, vm.getBlockNumber()); + + (uint256 available,,,) = payments.accounts(testToken, address(payments)); + assertEq(available, 10 * newRate * payments.NETWORK_FEE_NUMERATOR() / payments.NETWORK_FEE_DENOMINATOR()); + + vm.expectRevert( + abi.encodeWithSelector( + Errors.WithdrawAmountExceedsAccumulatedFees.selector, testToken, available, available + 1 + ) + ); + payments.burnForFees{value: FIRST_AUCTION_START_PRICE}(testToken, recipient, available + 1); + + vm.expectRevert( + abi.encodeWithSelector( + Errors.InsufficientNativeTokenForBurn.selector, FIRST_AUCTION_START_PRICE - 1, FIRST_AUCTION_START_PRICE + ) + ); + payments.burnForFees{value: FIRST_AUCTION_START_PRICE - 1}(testToken, recipient, available); + + payments.burnForFees{value: FIRST_AUCTION_START_PRICE}(testToken, recipient, available); + uint256 received = testToken.balanceOf(recipient); + assertEq(available, received); + + (uint256 availableAfter,,,) = 
payments.accounts(testToken, address(payments)); + assertEq(availableAfter, 0); + + assertEq(BURN_ADDRESS.balance, FIRST_AUCTION_START_PRICE); + + uint256 oneTimePayment = 2 * 10 ** 16; + + vm.prank(operator); + payments.modifyRailLockup(testTokenRailId, 20, oneTimePayment); + + newRate = 11 * 10 ** 16; + vm.prank(operator); + payments.modifyRailPayment(testTokenRailId, newRate, oneTimePayment); + + (uint256 startPrice, uint256 startTime) = payments.auctionInfo(testToken); + assertEq(startTime, block.timestamp); + assertEq(startPrice, FIRST_AUCTION_START_PRICE * Dutch.RESET_FACTOR); + + vm.roll(vm.getBlockNumber() + 17); + + (available,,,) = payments.accounts(testToken, address(payments)); + assertEq(available, oneTimePayment * payments.NETWORK_FEE_NUMERATOR() / payments.NETWORK_FEE_DENOMINATOR()); + + vm.prank(payer); + payments.settleRail(testTokenRailId, vm.getBlockNumber()); + + (available,,,) = payments.accounts(testToken, address(payments)); + assertEq( + available, + (17 * newRate + oneTimePayment) * payments.NETWORK_FEE_NUMERATOR() / payments.NETWORK_FEE_DENOMINATOR() + ); + + vm.warp(startTime + 11 days); + uint256 expectedPrice = startPrice.decay(11 days); + + vm.expectRevert( + abi.encodeWithSelector( + Errors.WithdrawAmountExceedsAccumulatedFees.selector, testToken, available, available + 1 + ) + ); + payments.burnForFees{value: expectedPrice}(testToken, recipient, available + 1); + + vm.expectRevert( + abi.encodeWithSelector(Errors.InsufficientNativeTokenForBurn.selector, expectedPrice - 1, expectedPrice) + ); + payments.burnForFees{value: expectedPrice - 1}(testToken, recipient, available); + + // can buy less than full amount + uint256 remainder = 113; + payments.burnForFees{value: expectedPrice}(testToken, recipient, available - remainder); + + uint256 totalReceived = testToken.balanceOf(recipient); + assertEq(received + available - remainder, totalReceived); + + (available,,,) = payments.accounts(testToken, address(payments)); + assertEq(available, 
remainder); + + assertEq(BURN_ADDRESS.balance, FIRST_AUCTION_START_PRICE + expectedPrice); + } + + function testNativeAutoBurned() public { + uint256 newRate = 7 * 10 ** 16; + vm.prank(operator); + payments.modifyRailPayment(nativeTokenRailId, newRate, 0); + + vm.roll(vm.getBlockNumber() + 12); + + assertEq(BURN_ADDRESS.balance, 0); + + (uint256 availableBefore,,,) = payments.accounts(NATIVE_TOKEN, address(payments)); + assertEq(availableBefore, 0); + + vm.prank(payer); + payments.settleRail(nativeTokenRailId, vm.getBlockNumber()); + + (uint256 availableAfter,,,) = payments.accounts(NATIVE_TOKEN, address(payments)); + assertEq(availableAfter, 0); + + assertEq( + BURN_ADDRESS.balance, 12 * newRate * payments.NETWORK_FEE_NUMERATOR() / payments.NETWORK_FEE_DENOMINATOR() + ); + } + + function testBurnNoOp() public { + uint256 startPrice; + uint256 startTime; + for (uint256 i = 0; i < 5; i++) { + (startPrice, startTime) = payments.auctionInfo(testToken); + assertEq(startPrice.decay(vm.getBlockTimestamp() - startTime), 0); + payments.burnForFees(testToken, recipient, 0); + (startPrice, startTime) = payments.auctionInfo(testToken); + assertEq(startPrice, 0); + assertEq(startTime, vm.getBlockTimestamp()); + } + + uint256 newRate = 9 * 10 ** 16; + vm.prank(operator); + payments.modifyRailPayment(testTokenRailId, newRate, 0); + vm.roll(vm.getBlockNumber() + 10); + // verify that settling rail in this situation still restarts the auction + vm.prank(payer); + payments.settleRail(testTokenRailId, vm.getBlockNumber()); + vm.prank(operator); + payments.modifyRailPayment(testTokenRailId, 0, 0); + + (startPrice, startTime) = payments.auctionInfo(testToken); + assertEq(startPrice, FIRST_AUCTION_START_PRICE); + assertEq(startTime, vm.getBlockTimestamp()); + + // wait until the price is 0 again + uint256 heatDeath = vm.getBlockTimestamp() + 10 ** 24; + vm.warp(heatDeath); + + for (uint256 i = 0; i < 5; i++) { + (startPrice, startTime) = payments.auctionInfo(testToken); + 
assertEq(startPrice.decay(vm.getBlockTimestamp() - startTime), 0); + payments.burnForFees(testToken, recipient, 0); + (startPrice, startTime) = payments.auctionInfo(testToken); + assertEq(startPrice, 0); + assertEq(startTime, vm.getBlockTimestamp()); + } + + // verify that settling rail in this situation still restarts the auction + vm.roll(vm.getBlockNumber() + 1); + vm.prank(operator); + payments.modifyRailPayment(testTokenRailId, newRate, 0); + vm.roll(vm.getBlockNumber() + 10); + vm.prank(payer); + payments.settleRail(testTokenRailId, vm.getBlockNumber()); + + (startPrice, startTime) = payments.auctionInfo(testToken); + assertEq(startPrice, FIRST_AUCTION_START_PRICE); + assertEq(startTime, vm.getBlockTimestamp()); + } + + // test escalating fees up to uint max + function testInferno() public { + // start the auction + uint256 newRate = 19 * 10 ** 14; + vm.prank(operator); + payments.modifyRailPayment(testTokenRailId, newRate, 0); + vm.roll(vm.getBlockNumber() + 10); + vm.prank(payer); + payments.settleRail(testTokenRailId, vm.getBlockNumber()); + + uint256 startPrice; + uint256 startTime; + uint256 available; + uint256 expectedStartPrice = FIRST_AUCTION_START_PRICE; + // repeatedly end the auction, multiplying the burn + for (uint256 i = 0; i < 256; i++) { + (available,,,) = payments.accounts(testToken, address(payments)); + (startPrice, startTime) = payments.auctionInfo(testToken); + assertEq(startPrice, expectedStartPrice); + assertEq(startTime, vm.getBlockTimestamp()); + vm.deal(recipient, startPrice); + vm.prank(recipient); + payments.burnForFees{value: startPrice}(testToken, recipient, available); + expectedStartPrice *= Dutch.RESET_FACTOR; + if (expectedStartPrice > MAX_AUCTION_START_PRICE) { + expectedStartPrice = MAX_AUCTION_START_PRICE; + } + } + assertEq(expectedStartPrice, MAX_AUCTION_START_PRICE); + } +} diff --git a/packages/pay/test/BurnExtraFeeToken.t.sol b/packages/pay/test/BurnExtraFeeToken.t.sol new file mode 100644 index 00000000..7525b834 
--- /dev/null +++ b/packages/pay/test/BurnExtraFeeToken.t.sol @@ -0,0 +1,82 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.27; + +import {MockFVMTest} from "fvm-solidity/mocks/MockFVMTest.sol"; +import {BURN_ADDRESS} from "fvm-solidity/FVMActors.sol"; + +import {PaymentsTestHelpers} from "./helpers/PaymentsTestHelpers.sol"; +import {ExtraFeeToken} from "./mocks/ExtraFeeToken.sol"; +import {FIRST_AUCTION_START_PRICE, Payments} from "../src/Payments.sol"; + +contract BurnFeeOnTransferTokenTest is MockFVMTest { + PaymentsTestHelpers helper = new PaymentsTestHelpers(); + Payments payments; + ExtraFeeToken feeToken; + + uint256 railId; + + address operator; + address payer; + address payee; + address recipient; + + function setUp() public override { + // Mock the FVM precompiles + super.setUp(); + + helper.setupStandardTestEnvironment(); + payments = helper.payments(); + operator = helper.OPERATOR(); + payer = helper.USER1(); + payee = helper.USER2(); + recipient = helper.USER3(); + } + + function testBurnFeeOnTransferToken() public { + feeToken = new ExtraFeeToken(10 ** 16); + + feeToken.mint(payer, 50000 * 10 ** 18); + vm.prank(payer); + feeToken.approve(address(payments), 50000 * 10 ** 18); + vm.prank(payer); + payments.deposit(feeToken, payer, 500 * 10 ** 18); + + (uint256 balance,,,) = payments.accounts(feeToken, payer); + assertEq(balance, 500 * 10 ** 18); + + vm.prank(payer); + payments.setOperatorApproval(feeToken, operator, true, 50000 * 10 ** 18, 500 * 10 ** 18, 28800); + + vm.prank(operator); + railId = payments.createRail(feeToken, payer, payee, address(0), 0, address(0)); + + uint256 newRate = 100 * 10 ** 16; + + vm.prank(operator); + payments.modifyRailPayment(railId, newRate, 0); + + vm.roll(vm.getBlockNumber() + 10); + + vm.prank(payer); + payments.settleRail(railId, vm.getBlockNumber()); + + (uint256 available,,,) = payments.accounts(feeToken, address(payments)); + assertEq(available, 10 * newRate * 
payments.NETWORK_FEE_NUMERATOR() / payments.NETWORK_FEE_DENOMINATOR()); + + vm.expectRevert(); + payments.burnForFees{value: FIRST_AUCTION_START_PRICE}(feeToken, recipient, available); + + uint256 requested = available - feeToken.transferFee(); + vm.expectRevert(); + payments.burnForFees{value: FIRST_AUCTION_START_PRICE}(feeToken, recipient, requested + 1); + + payments.burnForFees{value: FIRST_AUCTION_START_PRICE}(feeToken, recipient, requested); + uint256 received = feeToken.balanceOf(recipient); + assertEq(requested, received); + + (uint256 availableAfter,,,) = payments.accounts(feeToken, address(payments)); + assertEq(availableAfter, 0); + + assertEq(BURN_ADDRESS.balance, FIRST_AUCTION_START_PRICE); + } +} diff --git a/packages/pay/test/BurnFeeOnTransferToken.t.sol b/packages/pay/test/BurnFeeOnTransferToken.t.sol new file mode 100644 index 00000000..3674c4f1 --- /dev/null +++ b/packages/pay/test/BurnFeeOnTransferToken.t.sol @@ -0,0 +1,75 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.27; + +import {MockFVMTest} from "fvm-solidity/mocks/MockFVMTest.sol"; +import {BURN_ADDRESS} from "fvm-solidity/FVMActors.sol"; + +import {PaymentsTestHelpers} from "./helpers/PaymentsTestHelpers.sol"; +import {MockFeeOnTransferTokenWithPermit} from "./mocks/MockFeeOnTransferTokenWithPermit.sol"; +import {FIRST_AUCTION_START_PRICE, Payments} from "../src/Payments.sol"; + +contract BurnFeeOnTransferTokenTest is MockFVMTest { + PaymentsTestHelpers helper = new PaymentsTestHelpers(); + Payments payments; + MockFeeOnTransferTokenWithPermit feeToken; + + uint256 railId; + + address operator; + address payer; + address payee; + address recipient; + + function setUp() public override { + // Mock the FVM precompiles + super.setUp(); + + helper.setupStandardTestEnvironment(); + payments = helper.payments(); + operator = helper.OPERATOR(); + payer = helper.USER1(); + payee = helper.USER2(); + recipient = helper.USER3(); + } + + function testBurnFeeOnTransferToken() 
public { + feeToken = new MockFeeOnTransferTokenWithPermit("FeeToken", "FEE", 100); + + feeToken.mint(payer, 50000 * 10 ** 18); + vm.prank(payer); + feeToken.approve(address(payments), 50000 * 10 ** 18); + vm.prank(payer); + payments.deposit(feeToken, payer, 500 * 10 ** 18); + + (uint256 balance,,,) = payments.accounts(feeToken, payer); + assertEq(balance, 495 * 10 ** 18); + + vm.prank(payer); + payments.setOperatorApproval(feeToken, operator, true, 50000 * 10 ** 18, 500 * 10 ** 18, 28800); + + vm.prank(operator); + railId = payments.createRail(feeToken, payer, payee, address(0), 0, address(0)); + + uint256 newRate = 100 * 10 ** 16; + + vm.prank(operator); + payments.modifyRailPayment(railId, newRate, 0); + + vm.roll(vm.getBlockNumber() + 10); + + vm.prank(payer); + payments.settleRail(railId, vm.getBlockNumber()); + + (uint256 available,,,) = payments.accounts(feeToken, address(payments)); + assertEq(available, 10 * newRate * payments.NETWORK_FEE_NUMERATOR() / payments.NETWORK_FEE_DENOMINATOR()); + + payments.burnForFees{value: FIRST_AUCTION_START_PRICE}(feeToken, recipient, available); + uint256 received = feeToken.balanceOf(recipient); + assertEq(available * 99 / 100, received); + + (uint256 availableAfter,,,) = payments.accounts(feeToken, address(payments)); + assertEq(availableAfter, 0); + + assertEq(BURN_ADDRESS.balance, FIRST_AUCTION_START_PRICE); + } +} diff --git a/packages/pay/test/DepositWithAuthorization.t.sol b/packages/pay/test/DepositWithAuthorization.t.sol new file mode 100644 index 00000000..b8f1b6bc --- /dev/null +++ b/packages/pay/test/DepositWithAuthorization.t.sol @@ -0,0 +1,289 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.27; + +import {Test} from "forge-std/Test.sol"; +import {Payments} from "../src/Payments.sol"; +import {MockERC20} from "./mocks/MockERC20.sol"; +import {PaymentsTestHelpers} from "./helpers/PaymentsTestHelpers.sol"; +import {BaseTestHelper} from "./helpers/BaseTestHelper.sol"; + +contract 
DepositWithAuthorization is Test, BaseTestHelper { + MockERC20 testToken; + PaymentsTestHelpers helper; + Payments payments; + + uint256 constant DEPOSIT_AMOUNT = 1000 ether; + uint256 constant RATE_ALLOWANCE = 100 ether; + uint256 constant LOCKUP_ALLOWANCE = 1000 ether; + uint256 constant MAX_LOCKUP_PERIOD = 100; + uint256 internal constant INITIAL_BALANCE = 1000 ether; + + function setUp() public { + helper = new PaymentsTestHelpers(); + helper.setupStandardTestEnvironment(); + payments = helper.payments(); + + testToken = helper.testToken(); + } + + function testDepositWithAuthorization_HappyPath() public { + uint256 fromPrivateKey = user1Sk; + address from = vm.addr(fromPrivateKey); + address to = from; + uint256 validForSeconds = 60; + uint256 amount = DEPOSIT_AMOUNT; + + // Windows + uint256 validAfter = 0; // valid immediately + uint256 validBefore = block.timestamp + validForSeconds; + + // Nonce: generate a unique bytes32 per authorization + // For tests you can make it deterministic: + bytes32 nonce = keccak256(abi.encodePacked("auth-nonce", from, to, amount, block.number)); + + // Pre-state capture + uint256 fromBalanceBefore = helper._balanceOf(from, false); + uint256 paymentsBalanceBefore = helper._balanceOf(address(payments), false); + Payments.Account memory toAccountBefore = helper._getAccountData(to, false); + + // Build signature + (uint8 v, bytes32 r, bytes32 s) = helper.getReceiveWithAuthorizationSignature( + fromPrivateKey, + testToken, + from, + address(payments), // receiveWithAuthorization pays to Payments contract + amount, + validAfter, + validBefore, + nonce + ); + + // Execute deposit via authorization + vm.startPrank(from); + + payments.depositWithAuthorization(testToken, to, amount, validAfter, validBefore, nonce, v, r, s); + + vm.stopPrank(); + + // Post-state capture + uint256 fromBalanceAfter = helper._balanceOf(from, false); + uint256 paymentsBalanceAfter = helper._balanceOf(address(payments), false); + Payments.Account memory 
toAccountAfter = helper._getAccountData(from, false); + + // Assertions + helper._assertDepositBalances( + fromBalanceBefore, + fromBalanceAfter, + paymentsBalanceBefore, + paymentsBalanceAfter, + toAccountBefore, + toAccountAfter, + amount + ); + + // Verify authorization is consumed on the token + bool used = testToken.authorizationState(from, nonce); + assertTrue(used); + } + + function testDepositWithAuthorization_Revert_ReplayNonceUsed() public { + uint256 fromPrivateKey = user1Sk; + uint256 amount = DEPOSIT_AMOUNT; + uint256 validForSeconds = 60; + + address from = vm.addr(fromPrivateKey); + address to = from; + uint256 validAfter = 0; + uint256 validBefore = block.timestamp + validForSeconds; + bytes32 nonce = keccak256(abi.encodePacked("auth-nonce", from, to, amount, block.number)); + + (uint8 v, bytes32 r, bytes32 s) = helper.getReceiveWithAuthorizationSignature( + fromPrivateKey, testToken, from, address(payments), amount, validAfter, validBefore, nonce + ); + + vm.startPrank(from); + payments.depositWithAuthorization(testToken, to, amount, validAfter, validBefore, nonce, v, r, s); + // Second attempt with same nonce must revert + vm.expectRevert("EIP3009: authorization already used"); + payments.depositWithAuthorization(testToken, to, amount, validAfter, validBefore, nonce, v, r, s); + vm.stopPrank(); + } + + function testDepositWithAuthorization_Revert_InvalidSignature_WrongSigner() public { + address from = vm.addr(user1Sk); + address to = from; + uint256 amount = DEPOSIT_AMOUNT; + uint256 validAfter = 0; + uint256 validBefore = block.timestamp + 60; + bytes32 nonce = keccak256(abi.encodePacked("auth-nonce", from, to, amount, block.number)); + + // Generate signature with a different private key + (uint8 v, bytes32 r, bytes32 s) = helper.getReceiveWithAuthorizationSignature( + user2Sk, testToken, from, address(payments), amount, validAfter, validBefore, nonce + ); + + vm.startPrank(from); + vm.expectRevert("EIP3009: invalid signature"); + 
payments.depositWithAuthorization(testToken, to, amount, validAfter, validBefore, nonce, v, r, s); + vm.stopPrank(); + } + + function testDepositWithAuthorization_Revert_InvalidSignature_Corrupted() public { + address from = vm.addr(user1Sk); + address to = from; + uint256 amount = DEPOSIT_AMOUNT; + uint256 validAfter = 0; + uint256 validBefore = block.timestamp + 60; + bytes32 nonce = keccak256(abi.encodePacked("auth-nonce", from, to, amount, block.number)); + + (uint8 v, bytes32 r, bytes32 s) = helper.getReceiveWithAuthorizationSignature( + user1Sk, testToken, from, address(payments), amount, validAfter, validBefore, nonce + ); + + // Corrupt r + r = bytes32(uint256(r) ^ 3); + + vm.startPrank(from); + vm.expectRevert("EIP712: invalid signature"); // invalid signature should revert + payments.depositWithAuthorization(testToken, to, amount, validAfter, validBefore, nonce, v, r, s); + vm.stopPrank(); + } + + function testDepositWithAuthorization_Revert_ExpiredAuthorization() public { + address from = vm.addr(user1Sk); + address to = from; + uint256 amount = DEPOSIT_AMOUNT; + uint256 validAfter = 0; + uint256 validBefore = block.timestamp + 1; + bytes32 nonce = keccak256(abi.encodePacked("auth-nonce", from, to, amount, block.number)); + + (uint8 v, bytes32 r, bytes32 s) = helper.getReceiveWithAuthorizationSignature( + user1Sk, testToken, from, address(payments), amount, validAfter, validBefore, nonce + ); + + // advance beyond validBefore + vm.warp(validBefore + 1); + + vm.startPrank(from); + vm.expectRevert("EIP3009: authorization expired"); // expired window should revert + payments.depositWithAuthorization(testToken, to, amount, validAfter, validBefore, nonce, v, r, s); + vm.stopPrank(); + } + + function testDepositWithAuthorization_Revert_NotYetValidAuthorization() public { + address from = vm.addr(user1Sk); + address to = from; + uint256 amount = DEPOSIT_AMOUNT; + uint256 validAfter = block.timestamp + 60; + uint256 validBefore = validAfter + 300; + bytes32 
nonce = keccak256(abi.encodePacked("auth-nonce", from, to, amount, block.number)); + + // Pre-state capture + uint256 fromBalanceBefore = helper._balanceOf(from, false); + uint256 paymentsBalanceBefore = helper._balanceOf(address(payments), false); + Payments.Account memory toAccountBefore = helper._getAccountData(to, false); + + (uint8 v, bytes32 r, bytes32 s) = helper.getReceiveWithAuthorizationSignature( + user1Sk, testToken, from, address(payments), amount, validAfter, validBefore, nonce + ); + + vm.startPrank(from); + vm.expectRevert("EIP3009: authorization not yet valid"); // not yet valid + payments.depositWithAuthorization(testToken, to, amount, validAfter, validBefore, nonce, v, r, s); + + // Now advance to validAfter + 1 and succeed + vm.warp(validAfter + 1); + payments.depositWithAuthorization(testToken, to, amount, validAfter, validBefore, nonce, v, r, s); + vm.stopPrank(); + + // Post-state capture + uint256 fromBalanceAfter = helper._balanceOf(from, false); + uint256 paymentsBalanceAfter = helper._balanceOf(address(payments), false); + Payments.Account memory toAccountAfter = helper._getAccountData(from, false); + + // Assertions + helper._assertDepositBalances( + fromBalanceBefore, + fromBalanceAfter, + paymentsBalanceBefore, + paymentsBalanceAfter, + toAccountBefore, + toAccountAfter, + amount + ); + + // Verify authorization is consumed on the token + bool used = testToken.authorizationState(from, nonce); + assertTrue(used); + } + + function testDepositWithAuthorization_SubmittedByDifferentSender() public { + address from = vm.addr(user1Sk); + address to = from; + uint256 amount = DEPOSIT_AMOUNT; + uint256 validAfter = 0; + uint256 validBefore = block.timestamp + 300; + bytes32 nonce = keccak256(abi.encodePacked("auth-nonce", from, to, amount, block.number)); + + (uint8 v, bytes32 r, bytes32 s) = helper.getReceiveWithAuthorizationSignature( + user1Sk, testToken, from, address(payments), amount, validAfter, validBefore, nonce + ); + + // Pre-state 
capture + uint256 fromBalanceBefore = helper._balanceOf(from, false); + uint256 paymentsBalanceBefore = helper._balanceOf(address(payments), false); + Payments.Account memory toAccountBefore = helper._getAccountData(to, false); + + // Attempt to submit as a different user + address relayer = vm.addr(user2Sk); + vm.startPrank(relayer); + payments.depositWithAuthorization(testToken, to, amount, validAfter, validBefore, nonce, v, r, s); + vm.stopPrank(); + + // Post-state capture + uint256 fromBalanceAfter = helper._balanceOf(from, false); + uint256 paymentsBalanceAfter = helper._balanceOf(address(payments), false); + Payments.Account memory toAccountAfter = helper._getAccountData(to, false); + + // Assertions + helper._assertDepositBalances( + fromBalanceBefore, + fromBalanceAfter, + paymentsBalanceBefore, + paymentsBalanceAfter, + toAccountBefore, + toAccountAfter, + amount + ); + + // Verify authorization is consumed on the token + bool used = testToken.authorizationState(from, nonce); + assertTrue(used); + } + + function testDepositWithAuthorization_Revert_InsufficientBalance() public { + helper.depositWithAuthorizationInsufficientBalance(user1Sk); + } + + function testDepositWithAuthorization_Revert_DomainMismatchWrongToken() public { + address from = vm.addr(user1Sk); + address to = from; + uint256 amount = DEPOSIT_AMOUNT; + uint256 validAfter = 0; + uint256 validBefore = block.timestamp + 300; + bytes32 nonce = keccak256(abi.encodePacked("auth-nonce", from, to, amount, block.number)); + + // Create a second token + MockERC20 otherToken = new MockERC20("OtherToken", "OTK"); + + // Sign against otherToken domain + (uint8 v, bytes32 r, bytes32 s) = helper.getReceiveWithAuthorizationSignature( + user1Sk, otherToken, from, address(payments), amount, validAfter, validBefore, nonce + ); + + vm.startPrank(from); + vm.expectRevert("EIP3009: invalid signature"); // domain mismatch + payments.depositWithAuthorization(testToken, to, amount, validAfter, validBefore, nonce, 
v, r, s); + vm.stopPrank(); + } +} diff --git a/packages/pay/test/DepositWithAuthorizationAndOperatorApproval.t.sol b/packages/pay/test/DepositWithAuthorizationAndOperatorApproval.t.sol new file mode 100644 index 00000000..63ddd277 --- /dev/null +++ b/packages/pay/test/DepositWithAuthorizationAndOperatorApproval.t.sol @@ -0,0 +1,530 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.27; + +import {Test} from "forge-std/Test.sol"; +import {Payments} from "../src/Payments.sol"; +import {MockERC20} from "./mocks/MockERC20.sol"; +import {PaymentsTestHelpers} from "./helpers/PaymentsTestHelpers.sol"; +import {BaseTestHelper} from "./helpers/BaseTestHelper.sol"; +import {Errors} from "../src/Errors.sol"; + +contract DepositWithAuthorization is Test, BaseTestHelper { + MockERC20 testToken; + PaymentsTestHelpers helper; + Payments payments; + + uint256 constant DEPOSIT_AMOUNT = 1000 ether; + uint256 constant RATE_ALLOWANCE = 100 ether; + uint256 constant LOCKUP_ALLOWANCE = 1000 ether; + uint256 constant MAX_LOCKUP_PERIOD = 100; + uint256 internal constant INITIAL_BALANCE = 1000 ether; + + function setUp() public { + helper = new PaymentsTestHelpers(); + helper.setupStandardTestEnvironment(); + payments = helper.payments(); + + testToken = helper.testToken(); + } + + function testDepositWithAuthorizationAndOperatorApproval_HappyPath() public { + uint256 fromPrivateKey = user1Sk; + uint256 validForSeconds = 60; + uint256 amount = DEPOSIT_AMOUNT; + + helper.depositWithAuthorizationAndOperatorApproval( + fromPrivateKey, amount, validForSeconds, OPERATOR, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, MAX_LOCKUP_PERIOD + ); + } + + function testDepositWithAuthorizationAndOperatorApproval_ZeroAmount() public { + uint256 fromPrivateKey = user1Sk; + uint256 validForSeconds = 60; + uint256 amount = 0; // Zero amount + + helper.depositWithAuthorizationAndOperatorApproval( + fromPrivateKey, amount, validForSeconds, OPERATOR, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, MAX_LOCKUP_PERIOD 
+ ); + } + + function testDepositWithAuthorizationAndOperatorApproval_Revert_InvalidSignature() public { + address from = vm.addr(user1Sk); + address to = from; + uint256 amount = DEPOSIT_AMOUNT; + uint256 validAfter = 0; + uint256 validBefore = block.timestamp + 60; + bytes32 nonce = keccak256(abi.encodePacked("auth-nonce", from, to, amount, block.number)); + + // Build signature with wrong private key + (uint8 v, bytes32 r, bytes32 s) = helper.getReceiveWithAuthorizationSignature( + user2Sk, testToken, from, address(payments), amount, validAfter, validBefore, nonce + ); + + vm.startPrank(from); + + vm.expectRevert("EIP3009: invalid signature"); + payments.depositWithAuthorizationAndApproveOperator( + testToken, + to, + amount, + validAfter, + validBefore, + nonce, + v, + r, + s, + OPERATOR, + RATE_ALLOWANCE, + LOCKUP_ALLOWANCE, + MAX_LOCKUP_PERIOD + ); + + vm.stopPrank(); + } + + function testDepositWithAuthorizationAndOperatorApproval_Revert_InvalidSignature_Corrupted() public { + address from = vm.addr(user1Sk); + address to = from; + uint256 amount = DEPOSIT_AMOUNT; + uint256 validAfter = 0; + uint256 validBefore = block.timestamp + 60; + bytes32 nonce = keccak256(abi.encodePacked("auth-nonce", from, to, amount, block.number)); + + (uint8 v, bytes32 r, bytes32 s) = helper.getReceiveWithAuthorizationSignature( + user1Sk, testToken, from, address(payments), amount, validAfter, validBefore, nonce + ); + + // Corrupt r + r = bytes32(uint256(r) ^ 3); + + vm.startPrank(from); + vm.expectRevert("EIP712: invalid signature"); // invalid signature should revert + payments.depositWithAuthorizationAndApproveOperator( + testToken, + to, + amount, + validAfter, + validBefore, + nonce, + v, + r, + s, + OPERATOR, + RATE_ALLOWANCE, + LOCKUP_ALLOWANCE, + MAX_LOCKUP_PERIOD + ); + } + + function testDepositWithAuthorizationAndOperatorApproval_Revert_ExpiredAuthorization() public { + address from = vm.addr(user1Sk); + address to = from; + uint256 amount = DEPOSIT_AMOUNT; + uint256 
validAfter = 0; + uint256 validBefore = block.timestamp + 1; + bytes32 nonce = keccak256(abi.encodePacked("auth-nonce", from, to, amount, block.number)); + + (uint8 v, bytes32 r, bytes32 s) = helper.getReceiveWithAuthorizationSignature( + user1Sk, testToken, from, address(payments), amount, validAfter, validBefore, nonce + ); + + // advance beyond validBefore + vm.warp(validBefore + 1); + + vm.startPrank(from); + vm.expectRevert("EIP3009: authorization expired"); // expired window should revert + payments.depositWithAuthorizationAndApproveOperator( + testToken, + to, + amount, + validAfter, + validBefore, + nonce, + v, + r, + s, + OPERATOR, + RATE_ALLOWANCE, + LOCKUP_ALLOWANCE, + MAX_LOCKUP_PERIOD + ); + } + + function testDepositWithAuthorizationAndOperatorApproval_Revert_NotYetValidAuthorization() public { + address from = vm.addr(user1Sk); + address to = from; + uint256 amount = DEPOSIT_AMOUNT; + uint256 validAfter = block.timestamp + 60; + uint256 validBefore = validAfter + 300; + bytes32 nonce = keccak256(abi.encodePacked("auth-nonce", from, to, amount, block.number)); + + (uint8 v, bytes32 r, bytes32 s) = helper.getReceiveWithAuthorizationSignature( + user1Sk, testToken, from, address(payments), amount, validAfter, validBefore, nonce + ); + + vm.startPrank(from); + vm.expectRevert("EIP3009: authorization not yet valid"); // not yet valid + payments.depositWithAuthorizationAndApproveOperator( + testToken, + to, + amount, + validAfter, + validBefore, + nonce, + v, + r, + s, + OPERATOR, + RATE_ALLOWANCE, + LOCKUP_ALLOWANCE, + MAX_LOCKUP_PERIOD + ); + } + + function testDepositWithAuthorizationAndOperatorApproval_Revert_DifferentSender() public { + address from = vm.addr(user1Sk); + address to = from; + uint256 amount = DEPOSIT_AMOUNT; + uint256 validAfter = 0; + uint256 validBefore = block.timestamp + 60; + bytes32 nonce = keccak256(abi.encodePacked("auth-nonce", from, to, amount, block.number)); + + (uint8 v, bytes32 r, bytes32 s) = 
helper.getReceiveWithAuthorizationSignature( + user1Sk, testToken, from, address(payments), amount, validAfter, validBefore, nonce + ); + + // Attempt to submit as a different user + from = vm.addr(user2Sk); + vm.startPrank(from); + vm.expectRevert(abi.encodeWithSelector(Errors.SignerMustBeMsgSender.selector, from, to)); + payments.depositWithAuthorizationAndApproveOperator( + testToken, + to, + amount, + validAfter, + validBefore, + nonce, + v, + r, + s, + OPERATOR, + RATE_ALLOWANCE, + LOCKUP_ALLOWANCE, + MAX_LOCKUP_PERIOD + ); + vm.stopPrank(); + } + + function testDepositWithAuthorizationAndIncreaseOperatorApproval_HappyPath() public { + uint256 fromPrivateKey = user1Sk; + address from = vm.addr(fromPrivateKey); + address to = from; + uint256 validForSeconds = 60 * 60; + uint256 amount = DEPOSIT_AMOUNT; + + // Step 1: First establish initial operator approval with deposit + helper.depositWithAuthorizationAndOperatorApproval( + fromPrivateKey, amount, validForSeconds, OPERATOR, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, MAX_LOCKUP_PERIOD + ); + + // Step 2: Verify initial approval state + (bool isApproved, uint256 initialRateAllowance, uint256 initialLockupAllowance,,,) = + payments.operatorApprovals(testToken, USER1, OPERATOR); + assertEq(isApproved, true); + assertEq(initialRateAllowance, RATE_ALLOWANCE); + assertEq(initialLockupAllowance, LOCKUP_ALLOWANCE); + + // Step 3: Prepare for the increase operation + uint256 additionalDeposit = 500 ether; + uint256 rateIncrease = 50 ether; + uint256 lockupIncrease = 500 ether; + + // Give USER1 more tokens for the additional deposit + testToken.mint(USER1, additionalDeposit); + + uint256 validAfter = 0; + uint256 validBefore = validAfter + validForSeconds; + bytes32 nonce = keccak256(abi.encodePacked("auth-nonce", from, to, additionalDeposit, block.number)); + + (uint8 v, bytes32 r, bytes32 s) = helper.getReceiveWithAuthorizationSignature( + user1Sk, testToken, from, address(payments), additionalDeposit, validAfter, 
validBefore, nonce + ); + + // Record initial account state + (uint256 initialFunds,,,) = payments.accounts(testToken, USER1); + + // Step 4: Execute depositWithAuthorizationAndIncreaseOperatorApproval + vm.startPrank(USER1); + payments.depositWithAuthorizationAndIncreaseOperatorApproval( + testToken, + to, + additionalDeposit, + validAfter, + validBefore, + nonce, + v, + r, + s, + OPERATOR, + rateIncrease, + lockupIncrease + ); + + vm.stopPrank(); + + // Step 5: Verify results + // Check deposit was successful + (uint256 finalFunds,,,) = payments.accounts(testToken, USER1); + assertEq(finalFunds, initialFunds + additionalDeposit); + + // Check operator approval was increased + (, uint256 finalRateAllowance, uint256 finalLockupAllowance,,,) = + payments.operatorApprovals(testToken, USER1, OPERATOR); + assertEq(finalRateAllowance, initialRateAllowance + rateIncrease); + assertEq(finalLockupAllowance, initialLockupAllowance + lockupIncrease); + } + + function testDepositWithAuthorizationAndIncreaseOperatorApproval_ZeroIncrease() public { + uint256 fromPrivateKey = user1Sk; + address from = vm.addr(fromPrivateKey); + address to = from; + uint256 validForSeconds = 60 * 60; + uint256 amount = DEPOSIT_AMOUNT; + + // Step 1: First establish initial operator approval with deposit + helper.depositWithAuthorizationAndOperatorApproval( + fromPrivateKey, amount, validForSeconds, OPERATOR, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, MAX_LOCKUP_PERIOD + ); + + // Step 2: Verify initial approval state + (bool isApproved, uint256 initialRateAllowance, uint256 initialLockupAllowance,,,) = + payments.operatorApprovals(testToken, USER1, OPERATOR); + assertEq(isApproved, true); + assertEq(initialRateAllowance, RATE_ALLOWANCE); + assertEq(initialLockupAllowance, LOCKUP_ALLOWANCE); + + // Step 3: Prepare for the increase operation + uint256 additionalDeposit = 500 ether; + uint256 rateIncrease = 0; + uint256 lockupIncrease = 0; + + // Give USER1 more tokens for the additional deposit + 
testToken.mint(USER1, additionalDeposit); + + uint256 validAfter = 0; + uint256 validBefore = validAfter + validForSeconds; + bytes32 nonce = keccak256(abi.encodePacked("auth-nonce", from, to, additionalDeposit, block.number)); + + (uint8 v, bytes32 r, bytes32 s) = helper.getReceiveWithAuthorizationSignature( + user1Sk, testToken, from, address(payments), additionalDeposit, validAfter, validBefore, nonce + ); + + // Record initial account state + (uint256 initialFunds,,,) = payments.accounts(testToken, USER1); + + // Step 4: Execute depositWithAuthorizationAndIncreaseOperatorApproval + vm.startPrank(USER1); + payments.depositWithAuthorizationAndIncreaseOperatorApproval( + testToken, + to, + additionalDeposit, + validAfter, + validBefore, + nonce, + v, + r, + s, + OPERATOR, + rateIncrease, + lockupIncrease + ); + + vm.stopPrank(); + + // Step 5: Verify results + // Check deposit was successful + (uint256 finalFunds,,,) = payments.accounts(testToken, USER1); + assertEq(finalFunds, initialFunds + additionalDeposit); + + (, uint256 finalRateAllowance, uint256 finalLockupAllowance,,,) = + payments.operatorApprovals(testToken, USER1, OPERATOR); + assertEq(finalRateAllowance, initialRateAllowance); // No change + assertEq(finalLockupAllowance, initialLockupAllowance); // No change + } + + function testDepositWithAuthorizationAndIncreaseOperatorApproval_InvalidSignature() public { + uint256 fromPrivateKey = user1Sk; + address from = vm.addr(fromPrivateKey); + address to = from; + uint256 validForSeconds = 60 * 60; + uint256 amount = DEPOSIT_AMOUNT; + + // First establish initial operator approval with deposit + helper.depositWithAuthorizationAndOperatorApproval( + fromPrivateKey, amount, validForSeconds, OPERATOR, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, MAX_LOCKUP_PERIOD + ); + + // Verify initial approval state + (bool isApproved, uint256 initialRateAllowance, uint256 initialLockupAllowance,,,) = + payments.operatorApprovals(testToken, USER1, OPERATOR); + assertEq(isApproved, 
true); + assertEq(initialRateAllowance, RATE_ALLOWANCE); + assertEq(initialLockupAllowance, LOCKUP_ALLOWANCE); + + // Prepare for the increase operation + uint256 additionalDeposit = 500 ether; + uint256 rateIncrease = 0; + uint256 lockupIncrease = 0; + + // Give USER1 more tokens for the additional deposit + testToken.mint(USER1, additionalDeposit); + + uint256 validAfter = 0; + uint256 validBefore = validAfter + validForSeconds; + bytes32 nonce = keccak256(abi.encodePacked("auth-nonce", from, to, additionalDeposit, block.number)); + + // Create invalid permit signature (wrong private key) + (uint8 v, bytes32 r, bytes32 s) = helper.getReceiveWithAuthorizationSignature( + user2Sk, testToken, from, address(payments), additionalDeposit, validAfter, validBefore, nonce + ); + + vm.startPrank(USER1); + vm.expectRevert("EIP3009: invalid signature"); + payments.depositWithAuthorizationAndIncreaseOperatorApproval( + testToken, + to, + additionalDeposit, + validAfter, + validBefore, + nonce, + v, + r, + s, + OPERATOR, + rateIncrease, + lockupIncrease + ); + vm.stopPrank(); + + (, uint256 finalRateAllowance, uint256 finalLockupAllowance,,,) = + payments.operatorApprovals(testToken, USER1, OPERATOR); + assertEq(finalRateAllowance, initialRateAllowance); // No change + assertEq(finalLockupAllowance, initialLockupAllowance); // No change + } + + function testDepositWithAuthorizationAndIncreaseOperatorApproval_WithExistingUsage() public { + uint256 fromPrivateKey = user1Sk; + address from = vm.addr(fromPrivateKey); + address to = from; + uint256 validForSeconds = 60 * 60; + uint256 amount = DEPOSIT_AMOUNT; + + // First establish initial operator approval with deposit + helper.depositWithAuthorizationAndOperatorApproval( + fromPrivateKey, amount, validForSeconds, OPERATOR, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, MAX_LOCKUP_PERIOD + ); + + // Create rail and use some allowance to establish existing usage + uint256 railId = helper.createRail(USER1, USER2, OPERATOR, address(0), 
SERVICE_FEE_RECIPIENT); + uint256 paymentRate = 30 ether; + uint256 lockupFixed = 200 ether; + + vm.startPrank(OPERATOR); + payments.modifyRailPayment(railId, paymentRate, 0); + payments.modifyRailLockup(railId, 0, lockupFixed); + vm.stopPrank(); + + // Verify some allowance is used + (, uint256 preRateAllowance, uint256 preLockupAllowance, uint256 preRateUsage, uint256 preLockupUsage,) = + payments.operatorApprovals(testToken, USER1, OPERATOR); + assertEq(preRateUsage, paymentRate); + assertEq(preLockupUsage, lockupFixed); + + // Setup for additional deposit with increase + uint256 additionalDeposit = 500 ether; + uint256 rateIncrease = 70 ether; + uint256 lockupIncrease = 800 ether; + + testToken.mint(USER1, additionalDeposit); + + uint256 validAfter = 0; + uint256 validBefore = validAfter + validForSeconds; + bytes32 nonce = keccak256(abi.encodePacked("auth-nonce", from, to, additionalDeposit, block.number)); + + (uint8 v, bytes32 r, bytes32 s) = helper.getReceiveWithAuthorizationSignature( + user1Sk, testToken, from, address(payments), additionalDeposit, validAfter, validBefore, nonce + ); + + (uint256 initialFunds,,,) = payments.accounts(testToken, USER1); + + // Execute increase with existing usage + vm.startPrank(USER1); + payments.depositWithAuthorizationAndIncreaseOperatorApproval( + testToken, + to, + additionalDeposit, + validAfter, + validBefore, + nonce, + v, + r, + s, + OPERATOR, + rateIncrease, + lockupIncrease + ); + vm.stopPrank(); + + // Verify results + (uint256 finalFunds,,,) = payments.accounts(testToken, USER1); + assertEq(finalFunds, initialFunds + additionalDeposit); + + (, uint256 finalRateAllowance, uint256 finalLockupAllowance, uint256 finalRateUsage, uint256 finalLockupUsage,) + = payments.operatorApprovals(testToken, USER1, OPERATOR); + assertEq(finalRateAllowance, preRateAllowance + rateIncrease); + assertEq(finalLockupAllowance, preLockupAllowance + lockupIncrease); + assertEq(finalRateUsage, preRateUsage); // Usage unchanged + 
assertEq(finalLockupUsage, preLockupUsage); // Usage unchanged + } + + function testDepositWithAuthorizationAndIncreaseOperatorApproval_Revert_DifferentSender() public { + address from = vm.addr(user1Sk); + address to = from; + uint256 amount = DEPOSIT_AMOUNT; + uint256 validAfter = 0; + uint256 validBefore = block.timestamp + 60; + bytes32 nonce = keccak256(abi.encodePacked("auth-nonce", from, to, amount, block.number)); + + (uint8 v, bytes32 r, bytes32 s) = helper.getReceiveWithAuthorizationSignature( + user1Sk, testToken, from, address(payments), amount, validAfter, validBefore, nonce + ); + + // First establish initial operator approval with deposit + helper.depositWithAuthorizationAndOperatorApproval( + user1Sk, amount, 60 * 60, OPERATOR, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, MAX_LOCKUP_PERIOD + ); + + // Verify initial approval state + (bool isApproved, uint256 initialRateAllowance, uint256 initialLockupAllowance,,,) = + payments.operatorApprovals(testToken, USER1, OPERATOR); + assertEq(isApproved, true); + assertEq(initialRateAllowance, RATE_ALLOWANCE); + assertEq(initialLockupAllowance, LOCKUP_ALLOWANCE); + + // Prepare for the increase operation + uint256 rateIncrease = 10 ether; + uint256 lockupIncrease = 10 ether; + + // Give USER1 more tokens for the additional deposit + testToken.mint(USER1, amount); + + // Attempt to submit as a different user + from = vm.addr(user2Sk); + vm.startPrank(from); + vm.expectRevert(abi.encodeWithSelector(Errors.SignerMustBeMsgSender.selector, from, to)); + payments.depositWithAuthorizationAndIncreaseOperatorApproval( + testToken, to, amount, validAfter, validBefore, nonce, v, r, s, OPERATOR, rateIncrease, lockupIncrease + ); + vm.stopPrank(); + } +} diff --git a/packages/pay/test/DepositWithPermitAndOperatorApproval.t.sol b/packages/pay/test/DepositWithPermitAndOperatorApproval.t.sol new file mode 100644 index 00000000..77cdf563 --- /dev/null +++ b/packages/pay/test/DepositWithPermitAndOperatorApproval.t.sol @@ -0,0 +1,304 @@ 
+// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.27; + +import {Test} from "forge-std/Test.sol"; +import {Payments} from "../src/Payments.sol"; +import {MockERC20} from "./mocks/MockERC20.sol"; +import {PaymentsTestHelpers} from "./helpers/PaymentsTestHelpers.sol"; +import {BaseTestHelper} from "./helpers/BaseTestHelper.sol"; +import {Errors} from "../src/Errors.sol"; + +contract DepositWithPermitAndOperatorApproval is Test, BaseTestHelper { + MockERC20 testToken; + PaymentsTestHelpers helper; + Payments payments; + + uint256 constant DEPOSIT_AMOUNT = 1000 ether; + uint256 constant RATE_ALLOWANCE = 100 ether; + uint256 constant LOCKUP_ALLOWANCE = 1000 ether; + uint256 constant MAX_LOCKUP_PERIOD = 100; + uint256 internal constant INITIAL_BALANCE = 1000 ether; + + function setUp() public { + helper = new PaymentsTestHelpers(); + helper.setupStandardTestEnvironment(); + payments = helper.payments(); + + testToken = helper.testToken(); + } + + function testDepositWithPermitAndOperatorApproval_HappyPath() public { + helper.makeDepositWithPermitAndOperatorApproval( + user1Sk, DEPOSIT_AMOUNT, OPERATOR, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, MAX_LOCKUP_PERIOD + ); + } + + function testDepositWithPermitAndOperatorApproval_ZeroAmount() public { + helper.makeDepositWithPermitAndOperatorApproval( + user1Sk, 0, OPERATOR, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, MAX_LOCKUP_PERIOD + ); + } + + function testDepositWithPermitAndOperatorApproval_MultipleDeposits() public { + uint256 firstDepositAmount = 500 ether; + uint256 secondDepositAmount = 300 ether; + + helper.makeDepositWithPermitAndOperatorApproval( + user1Sk, firstDepositAmount, OPERATOR, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, MAX_LOCKUP_PERIOD + ); + helper.makeDepositWithPermitAndOperatorApproval( + user1Sk, secondDepositAmount, OPERATOR, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, MAX_LOCKUP_PERIOD + ); + } + + function testDepositWithPermitAndOperatorApproval_InvalidPermitReverts() public { + 
helper.expectInvalidPermitAndOperatorApprovalToRevert( + user1Sk, DEPOSIT_AMOUNT, OPERATOR, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, MAX_LOCKUP_PERIOD + ); + } + + function testDepositWithPermitAndOperatorApproval_Revert_DifferentSender() public { + address from = USER1; + uint256 deadline = block.timestamp + 1 hours; + + // get signature for permit + (uint8 v, bytes32 r, bytes32 s) = + helper.getPermitSignature(user1Sk, from, address(payments), DEPOSIT_AMOUNT, deadline); + + vm.startPrank(RELAYER); + vm.expectRevert(abi.encodeWithSelector(Errors.SignerMustBeMsgSender.selector, RELAYER, from)); + payments.depositWithPermitAndApproveOperator( + testToken, + from, + DEPOSIT_AMOUNT, + deadline, + v, + r, + s, + OPERATOR, + RATE_ALLOWANCE, + LOCKUP_ALLOWANCE, + MAX_LOCKUP_PERIOD + ); + vm.stopPrank(); + } + + // SECTION: Deposit With Permit And Increase Operator Approval Tests + + function testDepositWithPermitAndIncreaseOperatorApproval_HappyPath() public { + // Step 1: First establish initial operator approval with deposit + helper.makeDepositWithPermitAndOperatorApproval( + user1Sk, DEPOSIT_AMOUNT, OPERATOR, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, MAX_LOCKUP_PERIOD + ); + + // Step 2: Verify initial approval state + (bool isApproved, uint256 initialRateAllowance, uint256 initialLockupAllowance,,,) = + payments.operatorApprovals(testToken, USER1, OPERATOR); + assertEq(isApproved, true); + assertEq(initialRateAllowance, RATE_ALLOWANCE); + assertEq(initialLockupAllowance, LOCKUP_ALLOWANCE); + + // Step 3: Prepare for the increase operation + uint256 additionalDeposit = 500 ether; + uint256 rateIncrease = 50 ether; + uint256 lockupIncrease = 500 ether; + + // Give USER1 more tokens for the additional deposit + testToken.mint(USER1, additionalDeposit); + + // Get permit signature for the additional deposit + uint256 deadline = block.timestamp + 1 hours; + (uint8 v, bytes32 r, bytes32 s) = + helper.getPermitSignature(user1Sk, USER1, address(payments), additionalDeposit, deadline); + + 
// Record initial account state + (uint256 initialFunds,,,) = payments.accounts(testToken, USER1); + + // Step 4: Execute depositWithPermitAndIncreaseOperatorApproval + vm.startPrank(USER1); + payments.depositWithPermitAndIncreaseOperatorApproval( + testToken, USER1, additionalDeposit, deadline, v, r, s, OPERATOR, rateIncrease, lockupIncrease + ); + vm.stopPrank(); + + // Step 5: Verify results + // Check deposit was successful + (uint256 finalFunds,,,) = payments.accounts(testToken, USER1); + assertEq(finalFunds, initialFunds + additionalDeposit); + + // Check operator approval was increased + (, uint256 finalRateAllowance, uint256 finalLockupAllowance,,,) = + payments.operatorApprovals(testToken, USER1, OPERATOR); + assertEq(finalRateAllowance, initialRateAllowance + rateIncrease); + assertEq(finalLockupAllowance, initialLockupAllowance + lockupIncrease); + } + + function testDepositWithPermitAndIncreaseOperatorApproval_ZeroIncrease() public { + // First establish initial operator approval with deposit + helper.makeDepositWithPermitAndOperatorApproval( + user1Sk, DEPOSIT_AMOUNT, OPERATOR, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, MAX_LOCKUP_PERIOD + ); + + // Verify initial approval state + (bool isApproved, uint256 initialRateAllowance, uint256 initialLockupAllowance,,,) = + payments.operatorApprovals(testToken, USER1, OPERATOR); + assertEq(isApproved, true); + assertEq(initialRateAllowance, RATE_ALLOWANCE); + assertEq(initialLockupAllowance, LOCKUP_ALLOWANCE); + + // Setup for additional deposit with zero increases + uint256 additionalDeposit = 500 ether; + testToken.mint(USER1, additionalDeposit); + + uint256 deadline = block.timestamp + 1 hours; + (uint8 v, bytes32 r, bytes32 s) = + helper.getPermitSignature(user1Sk, USER1, address(payments), additionalDeposit, deadline); + + (uint256 initialFunds,,,) = payments.accounts(testToken, USER1); + + // Execute with zero increases + vm.startPrank(USER1); + payments.depositWithPermitAndIncreaseOperatorApproval( + testToken, 
+ USER1, + additionalDeposit, + deadline, + v, + r, + s, + OPERATOR, + 0, // Zero rate increase + 0 // Zero lockup increase + ); + vm.stopPrank(); + + // Verify deposit occurred but allowances unchanged + (uint256 finalFunds,,,) = payments.accounts(testToken, USER1); + assertEq(finalFunds, initialFunds + additionalDeposit); + + (, uint256 finalRateAllowance, uint256 finalLockupAllowance,,,) = + payments.operatorApprovals(testToken, USER1, OPERATOR); + assertEq(finalRateAllowance, initialRateAllowance); // No change + assertEq(finalLockupAllowance, initialLockupAllowance); // No change + } + + function testDepositWithPermitAndIncreaseOperatorApproval_InvalidPermit() public { + // First establish initial operator approval with deposit + helper.makeDepositWithPermitAndOperatorApproval( + user1Sk, DEPOSIT_AMOUNT, OPERATOR, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, MAX_LOCKUP_PERIOD + ); + + // Setup for additional deposit with invalid permit + uint256 additionalDeposit = 500 ether; + testToken.mint(USER1, additionalDeposit); + + uint256 deadline = block.timestamp + 1 hours; + + // Create invalid permit signature (wrong private key) + (uint8 v, bytes32 r, bytes32 s) = + helper.getPermitSignature(user2Sk, USER1, address(payments), additionalDeposit, deadline); + + vm.startPrank(USER1); + vm.expectRevert( + abi.encodeWithSignature( + "ERC2612InvalidSigner(address,address)", + vm.addr(user2Sk), // Wrong signer address + USER1 // Intended recipient + ) + ); + payments.depositWithPermitAndIncreaseOperatorApproval( + testToken, USER1, additionalDeposit, deadline, v, r, s, OPERATOR, 50 ether, 500 ether + ); + vm.stopPrank(); + } + + function testDepositWithPermitAndIncreaseOperatorApproval_WithExistingUsage() public { + // First establish initial operator approval with deposit + helper.makeDepositWithPermitAndOperatorApproval( + user1Sk, DEPOSIT_AMOUNT, OPERATOR, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, MAX_LOCKUP_PERIOD + ); + + // Create rail and use some allowance to establish existing 
usage + uint256 railId = helper.createRail(USER1, USER2, OPERATOR, address(0), SERVICE_FEE_RECIPIENT); + uint256 paymentRate = 30 ether; + uint256 lockupFixed = 200 ether; + + vm.startPrank(OPERATOR); + payments.modifyRailPayment(railId, paymentRate, 0); + payments.modifyRailLockup(railId, 0, lockupFixed); + vm.stopPrank(); + + // Verify some allowance is used + (, uint256 preRateAllowance, uint256 preLockupAllowance, uint256 preRateUsage, uint256 preLockupUsage,) = + payments.operatorApprovals(testToken, USER1, OPERATOR); + assertEq(preRateUsage, paymentRate); + assertEq(preLockupUsage, lockupFixed); + + // Setup for additional deposit with increase + uint256 additionalDeposit = 500 ether; + uint256 rateIncrease = 70 ether; + uint256 lockupIncrease = 800 ether; + + testToken.mint(USER1, additionalDeposit); + + uint256 deadline = block.timestamp + 1 hours; + (uint8 v, bytes32 r, bytes32 s) = + helper.getPermitSignature(user1Sk, USER1, address(payments), additionalDeposit, deadline); + + (uint256 initialFunds,,,) = payments.accounts(testToken, USER1); + + // Execute increase with existing usage + vm.startPrank(USER1); + payments.depositWithPermitAndIncreaseOperatorApproval( + testToken, USER1, additionalDeposit, deadline, v, r, s, OPERATOR, rateIncrease, lockupIncrease + ); + vm.stopPrank(); + + // Verify results + (uint256 finalFunds,,,) = payments.accounts(testToken, USER1); + assertEq(finalFunds, initialFunds + additionalDeposit); + + (, uint256 finalRateAllowance, uint256 finalLockupAllowance, uint256 finalRateUsage, uint256 finalLockupUsage,) + = payments.operatorApprovals(testToken, USER1, OPERATOR); + assertEq(finalRateAllowance, preRateAllowance + rateIncrease); + assertEq(finalLockupAllowance, preLockupAllowance + lockupIncrease); + assertEq(finalRateUsage, preRateUsage); // Usage unchanged + assertEq(finalLockupUsage, preLockupUsage); // Usage unchanged + } + + function testDepositWithPermitAndIncreaseOperatorApproval_Revert_DifferentSender() public { + 
address from = USER1; + + // Step 1: First establish initial operator approval with deposit + helper.makeDepositWithPermitAndOperatorApproval( + user1Sk, DEPOSIT_AMOUNT, OPERATOR, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, MAX_LOCKUP_PERIOD + ); + + // Step 2: Verify initial approval state + (bool isApproved, uint256 initialRateAllowance, uint256 initialLockupAllowance,,,) = + payments.operatorApprovals(testToken, USER1, OPERATOR); + assertEq(isApproved, true); + assertEq(initialRateAllowance, RATE_ALLOWANCE); + assertEq(initialLockupAllowance, LOCKUP_ALLOWANCE); + + // Step 3: Prepare for the increase operation + uint256 additionalDeposit = 500 ether; + uint256 rateIncrease = 50 ether; + uint256 lockupIncrease = 500 ether; + + // Give USER1 more tokens for the additional deposit + testToken.mint(USER1, additionalDeposit); + + // Get permit signature for the additional deposit + uint256 deadline = block.timestamp + 1 hours; + (uint8 v, bytes32 r, bytes32 s) = + helper.getPermitSignature(user1Sk, USER1, address(payments), additionalDeposit, deadline); + + vm.startPrank(RELAYER); + vm.expectRevert(abi.encodeWithSelector(Errors.SignerMustBeMsgSender.selector, RELAYER, from)); + payments.depositWithPermitAndIncreaseOperatorApproval( + testToken, USER1, additionalDeposit, deadline, v, r, s, OPERATOR, rateIncrease, lockupIncrease + ); + vm.stopPrank(); + } +} diff --git a/packages/pay/test/Dutch.t.sol b/packages/pay/test/Dutch.t.sol new file mode 100644 index 00000000..1e6a7b0c --- /dev/null +++ b/packages/pay/test/Dutch.t.sol @@ -0,0 +1,66 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.30; + +import {Test} from "forge-std/Test.sol"; +import {Dutch} from "../src/Dutch.sol"; + +contract ExternalDutch { + using Dutch for uint256; + + function dutch(uint256 startPrice, uint256 elapsed) external pure returns (uint256) { + return startPrice.decay(elapsed); + } +} + +contract DutchTest is Test { + using Dutch for uint256; + + function checkExactDecay(uint256 
startPrice) internal pure { + assertEq(startPrice.decay(0), startPrice); + assertEq(startPrice.decay(3.5 days), startPrice / 2); + assertEq(startPrice.decay(7 days), startPrice / 4); + assertEq(startPrice.decay(14 days), startPrice / 16); + assertEq(startPrice.decay(21 days), startPrice / 64); + assertEq(startPrice.decay(28 days), startPrice / 256); + assertEq(startPrice.decay(35 days), startPrice / 1024); + } + + function testDecay() public pure { + checkExactDecay(0.00000001 ether); + checkExactDecay(0.01 ether); + checkExactDecay(9 ether); + checkExactDecay(11 ether); + checkExactDecay(13 ether); + checkExactDecay(1300000 ether); + } + + function testMaxDecayU256() public pure { + uint256 maxPrice = 0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff; + + assertEq(maxPrice.decay(0), maxPrice); + assertEq(maxPrice.decay(10000000), 12852371374314799914919560702529050018701224735495877087613516410500); + assertEq(maxPrice.decay(50000000), 1950746206018947071427216775); + assertEq(maxPrice.decay(58060000), 18480601319969968529); + assertEq(maxPrice.decay(Dutch.MAX_DECAY - 1), 18446828639436756833); + assertEq(maxPrice.decay(Dutch.MAX_DECAY), 18446786356524694827); + assertEq(maxPrice.decay(Dutch.MAX_DECAY + 1), 0); + } + + function testMaxDecayFIL() public pure { + uint256 maxPrice = 2 * 10 ** 27; // max FIL supply + + assertEq(maxPrice.decay(0), maxPrice); + assertEq(maxPrice.decay(90 days), 36329437917604310558); + assertEq(maxPrice.decay(10000000), 221990491042506894); + assertEq(maxPrice.decay(20000000), 24639889); + assertEq(maxPrice.decay(23000000), 25423); + assertEq(maxPrice.decay(26000000), 26); + assertEq(maxPrice.decay(26500000), 8); + assertEq(maxPrice.decay(27000000), 2); + assertEq(maxPrice.decay(27425278), 1); + assertEq(maxPrice.decay(27425279), 0); + assertEq(maxPrice.decay(Dutch.MAX_DECAY - 1), 0); + assertEq(maxPrice.decay(Dutch.MAX_DECAY), 0); + assertEq(maxPrice.decay(Dutch.MAX_DECAY + 1), 0); + } +} diff --git 
a/packages/pay/test/FeeOnTransferVulnerability.t.sol b/packages/pay/test/FeeOnTransferVulnerability.t.sol new file mode 100644 index 00000000..a658484a --- /dev/null +++ b/packages/pay/test/FeeOnTransferVulnerability.t.sol @@ -0,0 +1,180 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.27; + +import {Test, console} from "forge-std/Test.sol"; +import {Payments} from "../src/Payments.sol"; +import {MockFeeOnTransferTokenWithPermit} from "./mocks/MockFeeOnTransferTokenWithPermit.sol"; +import {PaymentsTestHelpers} from "./helpers/PaymentsTestHelpers.sol"; +import {BaseTestHelper} from "./helpers/BaseTestHelper.sol"; +import {MessageHashUtils} from "@openzeppelin/contracts/utils/cryptography/MessageHashUtils.sol"; + +contract FeeOnTransferVulnerabilityTest is Test, BaseTestHelper { + PaymentsTestHelpers helper; + Payments payments; + MockFeeOnTransferTokenWithPermit feeToken; + + uint256 internal constant INITIAL_BALANCE = 10000 ether; + uint256 internal constant DEPOSIT_AMOUNT = 1000 ether; + uint256 internal constant FEE_PERCENTAGE = 200; // 2% fee + + function setUp() public { + // Create test helpers and setup environment + helper = new PaymentsTestHelpers(); + helper.setupStandardTestEnvironment(); + payments = helper.payments(); + + // Create fee-on-transfer token with 2% fee + feeToken = new MockFeeOnTransferTokenWithPermit("PermitFeeToken", "PFEE", FEE_PERCENTAGE); + + // Mint tokens to users + feeToken.mint(USER1, INITIAL_BALANCE); + feeToken.mint(USER2, INITIAL_BALANCE); + + // Approve payments contract + vm.prank(USER1); + feeToken.approve(address(payments), type(uint256).max); + + vm.prank(USER2); + feeToken.approve(address(payments), type(uint256).max); + } + + function testFeeOnTransferVulnerabilityBasic() public { + // Record initial balances + uint256 contractBalanceBefore = feeToken.balanceOf(address(payments)); + + // User1 deposits 1000 tokens, but due to 2% fee, only 980 actually reach the contract + vm.prank(USER1); + 
payments.deposit(feeToken, USER1, DEPOSIT_AMOUNT); + + // Check actual token balance vs recorded balance + uint256 contractBalanceAfter = feeToken.balanceOf(address(payments)); + uint256 actualTokensReceived = contractBalanceAfter - contractBalanceBefore; + uint256 expectedTokensReceived = DEPOSIT_AMOUNT - (DEPOSIT_AMOUNT * FEE_PERCENTAGE / 10000); + + // The contract actually received less due to fee + assertEq(actualTokensReceived, expectedTokensReceived, "Contract received expected amount after fee"); + assertLt(actualTokensReceived, DEPOSIT_AMOUNT, "Contract received less than deposit amount"); + + // The payments contract also knows it does not have the full amount + (, uint256 recordedFunds,,) = payments.getAccountInfoIfSettled(feeToken, USER1); + assertEq(recordedFunds, expectedTokensReceived, "Contract recorded full deposit amount"); + } + + function testFeeOnTransferWithDepositWithPermit() public { + // Record initial balances + uint256 contractBalanceBefore = feeToken.balanceOf(address(payments)); + + // Prepare permit parameters + uint256 deadline = block.timestamp + 1 hours; + + // Get permit signature + (uint8 v, bytes32 r, bytes32 s) = + getPermitSignature(feeToken, user1Sk, USER1, address(payments), DEPOSIT_AMOUNT, deadline); + + // User1 deposits 1000 tokens using permit, but due to 2% fee, only 980 actually reach the contract + vm.prank(USER1); + payments.depositWithPermit(feeToken, USER1, DEPOSIT_AMOUNT, deadline, v, r, s); + + // Check actual token balance vs recorded balance + uint256 contractBalanceAfter = feeToken.balanceOf(address(payments)); + uint256 actualTokensReceived = contractBalanceAfter - contractBalanceBefore; + uint256 expectedTokensReceived = DEPOSIT_AMOUNT - (DEPOSIT_AMOUNT * FEE_PERCENTAGE / 10000); + + // The contract actually received less due to fee + assertEq(actualTokensReceived, expectedTokensReceived, "Contract received expected amount after fee"); + assertLt(actualTokensReceived, DEPOSIT_AMOUNT, "Contract received less 
than deposit amount"); + + // With the fix, the payments contract should record the actual amount received + (, uint256 recordedFunds,,) = payments.getAccountInfoIfSettled(feeToken, USER1); + assertEq(recordedFunds, expectedTokensReceived, "Contract recorded actual received amount"); + + console.log("Deposit amount:", DEPOSIT_AMOUNT); + console.log("Actual tokens received:", actualTokensReceived); + console.log("Recorded balance:", recordedFunds); + console.log("Discrepancy:", recordedFunds > actualTokensReceived ? recordedFunds - actualTokensReceived : 0); + } + + function getPermitSignature( + MockFeeOnTransferTokenWithPermit token, + uint256 privateKey, + address owner, + address spender, + uint256 value, + uint256 deadline + ) internal view returns (uint8 v, bytes32 r, bytes32 s) { + uint256 nonce = token.nonces(owner); + bytes32 domainSeparator = token.DOMAIN_SEPARATOR(); + + bytes32 structHash = keccak256( + abi.encode( + keccak256("Permit(address owner,address spender,uint256 value,uint256 nonce,uint256 deadline)"), + owner, + spender, + value, + nonce, + deadline + ) + ); + + bytes32 digest = MessageHashUtils.toTypedDataHash(domainSeparator, structHash); + + (v, r, s) = vm.sign(privateKey, digest); + } + + function testFeeOnTransferWithDepositWithPermitAndApproveOperator() public { + // Record initial balances + uint256 contractBalanceBefore = feeToken.balanceOf(address(payments)); + + // Prepare permit and operator approval parameters + uint256 deadline = block.timestamp + 1 hours; + uint256 rateAllowance = 10 ether; + uint256 lockupAllowance = 100 ether; + uint256 maxLockupPeriod = 100; + + // Get permit signature + (uint8 v, bytes32 r, bytes32 s) = + getPermitSignature(feeToken, user1Sk, USER1, address(payments), DEPOSIT_AMOUNT, deadline); + + // User1 deposits 1000 tokens using permit and approves operator, but due to 2% fee, only 980 actually reach the contract + vm.prank(USER1); + payments.depositWithPermitAndApproveOperator( + feeToken, + USER1, + 
DEPOSIT_AMOUNT, + deadline, + v, + r, + s, + OPERATOR, + rateAllowance, + lockupAllowance, + maxLockupPeriod + ); + + // Check actual token balance vs recorded balance + uint256 contractBalanceAfter = feeToken.balanceOf(address(payments)); + uint256 actualTokensReceived = contractBalanceAfter - contractBalanceBefore; + uint256 expectedTokensReceived = DEPOSIT_AMOUNT - (DEPOSIT_AMOUNT * FEE_PERCENTAGE / 10000); + + // The contract actually received less due to fee + assertEq(actualTokensReceived, expectedTokensReceived, "Contract received expected amount after fee"); + assertLt(actualTokensReceived, DEPOSIT_AMOUNT, "Contract received less than deposit amount"); + + // With the fix, the payments contract should record the actual amount received + (, uint256 recordedFunds,,) = payments.getAccountInfoIfSettled(feeToken, USER1); + assertEq(recordedFunds, expectedTokensReceived, "Contract recorded actual received amount"); + + // Verify operator approval was set correctly + (bool isApproved, uint256 actualRateAllowance, uint256 actualLockupAllowance,,, uint256 actualMaxLockupPeriod) = + payments.operatorApprovals(feeToken, USER1, OPERATOR); + assertEq(isApproved, true, "Operator should be approved"); + assertEq(actualRateAllowance, rateAllowance, "Rate allowance should be set"); + assertEq(actualLockupAllowance, lockupAllowance, "Lockup allowance should be set"); + assertEq(actualMaxLockupPeriod, maxLockupPeriod, "Max lockup period should be set"); + + console.log("Deposit amount:", DEPOSIT_AMOUNT); + console.log("Actual tokens received:", actualTokensReceived); + console.log("Recorded balance:", recordedFunds); + console.log("Operator approved:", isApproved); + } +} diff --git a/packages/pay/test/Fees.t.sol b/packages/pay/test/Fees.t.sol new file mode 100644 index 00000000..c5ccfb65 --- /dev/null +++ b/packages/pay/test/Fees.t.sol @@ -0,0 +1,153 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT + +pragma solidity ^0.8.27; + +import {Test} from "forge-std/Test.sol"; 
+import {Payments} from "../src/Payments.sol"; +import {MockERC20} from "./mocks/MockERC20.sol"; +import {PaymentsTestHelpers} from "./helpers/PaymentsTestHelpers.sol"; +import {RailSettlementHelpers} from "./helpers/RailSettlementHelpers.sol"; +import {BaseTestHelper} from "./helpers/BaseTestHelper.sol"; + +contract FeesTest is Test, BaseTestHelper { + PaymentsTestHelpers helper; + RailSettlementHelpers settlementHelper; + Payments payments; + + // Multiple tokens for testing + MockERC20 token1; + MockERC20 token2; + MockERC20 token3; + + uint256 constant INITIAL_BALANCE = 5000 ether; + uint256 constant DEPOSIT_AMOUNT = 200 ether; + uint256 constant MAX_LOCKUP_PERIOD = 100; + + // Payment rates for each rail + uint256 constant RAIL1_RATE = 5 ether; + uint256 constant RAIL2_RATE = 10 ether; + uint256 constant RAIL3_RATE = 15 ether; + + // Rail IDs + uint256 rail1Id; + uint256 rail2Id; + uint256 rail3Id; + + function setUp() public { + // Initialize helpers + helper = new PaymentsTestHelpers(); + helper.setupStandardTestEnvironment(); + payments = helper.payments(); + + settlementHelper = new RailSettlementHelpers(); + settlementHelper.initialize(payments, helper); + + // Set up 3 different tokens + token1 = MockERC20(helper.testToken()); // Use the default token from the helper + token2 = new MockERC20("Token 2", "TK2"); + token3 = new MockERC20("Token 3", "TK3"); + + // Initialize tokens and make deposits + setupTokensAndDeposits(); + + // Create rails with different tokens + createRails(); + } + + function setupTokensAndDeposits() internal { + // Mint tokens to users + // Token 1 is already handled by the helper + token2.mint(USER1, INITIAL_BALANCE); + token3.mint(USER1, INITIAL_BALANCE); + + // Approve transfers for all tokens + vm.startPrank(USER1); + token1.approve(address(payments), type(uint256).max); + token2.approve(address(payments), type(uint256).max); + token3.approve(address(payments), type(uint256).max); + vm.stopPrank(); + + // Make deposits with all 
tokens + helper.makeDeposit(USER1, USER1, DEPOSIT_AMOUNT); // Uses token1 + + // Make deposits with token2 and token3 + vm.startPrank(USER1); + payments.deposit(token2, USER1, DEPOSIT_AMOUNT); + payments.deposit(token3, USER1, DEPOSIT_AMOUNT); + vm.stopPrank(); + } + + function createRails() internal { + // Set up operator approvals for each token + helper.setupOperatorApproval( + USER1, // from + OPERATOR, // operator + RAIL1_RATE, // rate allowance for token1 + RAIL1_RATE * 10, // lockup allowance (enough for the period) + MAX_LOCKUP_PERIOD // max lockup period + ); + + // Operator approvals for token2 and token3 + vm.startPrank(USER1); + payments.setOperatorApproval( + token2, + OPERATOR, + true, // approved + RAIL2_RATE, // rate allowance for token2 + RAIL2_RATE * 10, // lockup allowance (enough for the period) + MAX_LOCKUP_PERIOD // max lockup period + ); + + payments.setOperatorApproval( + token3, + OPERATOR, + true, // approved + RAIL3_RATE, // rate allowance for token3 + RAIL3_RATE * 10, // lockup allowance (enough for the period) + MAX_LOCKUP_PERIOD // max lockup period + ); + vm.stopPrank(); + + // Create rails with different tokens + rail1Id = helper.setupRailWithParameters( + USER1, + USER2, + OPERATOR, + RAIL1_RATE, + 10, // lockupPeriod + 0, // No fixed lockup + address(0), // No validator + SERVICE_FEE_RECIPIENT // operator commision receiver + ); + + // Create a rail with token2 + vm.startPrank(OPERATOR); + rail2Id = payments.createRail( + token2, + USER1, // from + USER2, // to + address(0), // no validator + 0, // no commission + SERVICE_FEE_RECIPIENT // operator commision receiver + ); + + // Set rail2 parameters + payments.modifyRailPayment(rail2Id, RAIL2_RATE, 0); + payments.modifyRailLockup(rail2Id, 10, 0); // 10 blocks, no fixed lockup + + // Create a rail with token3 + rail3Id = payments.createRail( + token3, + USER1, // from + USER2, // to + address(0), // no validator + 0, // no commission + SERVICE_FEE_RECIPIENT // operator commision 
receiver + ); + + // Set rail3 parameters + payments.modifyRailPayment(rail3Id, RAIL3_RATE, 0); + payments.modifyRailLockup(rail3Id, 10, 0); // 10 blocks, no fixed lockup + vm.stopPrank(); + } +} diff --git a/packages/pay/test/OperatorApproval.t.sol b/packages/pay/test/OperatorApproval.t.sol new file mode 100644 index 00000000..3e6b6530 --- /dev/null +++ b/packages/pay/test/OperatorApproval.t.sol @@ -0,0 +1,957 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT + +pragma solidity ^0.8.27; + +import {IERC20} from "@openzeppelin/contracts/token/ERC20/IERC20.sol"; + +import {Test} from "forge-std/Test.sol"; +import {Payments} from "../src/Payments.sol"; +import {MockERC20} from "./mocks/MockERC20.sol"; +import {PaymentsTestHelpers} from "./helpers/PaymentsTestHelpers.sol"; +import {BaseTestHelper} from "./helpers/BaseTestHelper.sol"; +import {Errors} from "../src/Errors.sol"; + +contract OperatorApprovalTest is Test, BaseTestHelper { + MockERC20 secondToken; + PaymentsTestHelpers helper; + Payments payments; + + uint256 constant DEPOSIT_AMOUNT = 1000 ether; + uint256 constant RATE_ALLOWANCE = 100 ether; + uint256 constant LOCKUP_ALLOWANCE = 1000 ether; + uint256 constant MAX_LOCKUP_PERIOD = 100; + IERC20 private constant NATIVE_TOKEN = IERC20(address(0)); + + function setUp() public { + helper = new PaymentsTestHelpers(); + helper.setupStandardTestEnvironment(); + payments = helper.payments(); + + // Deposit funds for client + helper.makeDeposit(USER1, USER1, DEPOSIT_AMOUNT); + } + + function testNativeFIL() public { + vm.startPrank(USER1); + payments.setOperatorApproval(NATIVE_TOKEN, OPERATOR, true, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, MAX_LOCKUP_PERIOD); + vm.stopPrank(); + } + + function testInvalidAddresses() public { + // Test zero operator address + vm.startPrank(USER1); + vm.expectRevert(abi.encodeWithSelector(Errors.ZeroAddressNotAllowed.selector, "operator")); + payments.setOperatorApproval( + IERC20(address(0x1)), address(0), true, RATE_ALLOWANCE, 
LOCKUP_ALLOWANCE, MAX_LOCKUP_PERIOD + ); + vm.stopPrank(); + } + + function testModifyingAllowances() public { + // Setup initial approval + helper.setupOperatorApproval(USER1, OPERATOR, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, MAX_LOCKUP_PERIOD); + + // Increase allowances + helper.setupOperatorApproval(USER1, OPERATOR, RATE_ALLOWANCE * 2, LOCKUP_ALLOWANCE * 2, MAX_LOCKUP_PERIOD); + + // Decrease allowances + helper.setupOperatorApproval(USER1, OPERATOR, RATE_ALLOWANCE / 2, LOCKUP_ALLOWANCE / 2, MAX_LOCKUP_PERIOD); + } + + function testRevokingAndReapprovingOperator() public { + // Setup initial approval + helper.setupOperatorApproval(USER1, OPERATOR, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, MAX_LOCKUP_PERIOD); + + // Revoke approval + helper.revokeOperatorApprovalAndVerify(USER1, OPERATOR); + + // Reapprove operator + helper.setupOperatorApproval(USER1, OPERATOR, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, MAX_LOCKUP_PERIOD); + } + + function testRateTrackingWithMultipleRails() public { + // Setup initial approval + helper.setupOperatorApproval(USER1, OPERATOR, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, MAX_LOCKUP_PERIOD); + + // Create a rail + uint256 railId = helper.createRail(USER1, USER2, OPERATOR, address(0), SERVICE_FEE_RECIPIENT); + + // Verify no allowance consumed yet + helper.verifyOperatorAllowances( + USER1, OPERATOR, true, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, 0, 0, MAX_LOCKUP_PERIOD + ); + + // 1. Set initial payment rate + uint256 initialRate = 10 ether; + vm.startPrank(OPERATOR); + payments.modifyRailPayment(railId, initialRate, 0); + vm.stopPrank(); + + // Verify rate usage matches initial rate + helper.verifyOperatorAllowances( + USER1, OPERATOR, true, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, initialRate, 0, MAX_LOCKUP_PERIOD + ); + + // 2. 
Increase payment rate + uint256 increasedRate = 15 ether; + vm.startPrank(OPERATOR); + payments.modifyRailPayment(railId, increasedRate, 0); + vm.stopPrank(); + + // Verify rate usage increased + helper.verifyOperatorAllowances( + USER1, OPERATOR, true, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, increasedRate, 0, MAX_LOCKUP_PERIOD + ); + + // 3. Decrease payment rate + uint256 decreasedRate = 5 ether; + vm.startPrank(OPERATOR); + payments.modifyRailPayment(railId, decreasedRate, 0); + vm.stopPrank(); + + // Verify rate usage decreased + helper.verifyOperatorAllowances( + USER1, OPERATOR, true, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, decreasedRate, 0, MAX_LOCKUP_PERIOD + ); + + // 4. Create second rail and set rate + uint256 railId2 = helper.createRail(USER1, USER2, OPERATOR, address(0), SERVICE_FEE_RECIPIENT); + + uint256 rate2 = 15 ether; + vm.startPrank(OPERATOR); + payments.modifyRailPayment(railId2, rate2, 0); + vm.stopPrank(); + + // Verify combined rate usage + helper.verifyOperatorAllowances( + USER1, OPERATOR, true, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, decreasedRate + rate2, 0, MAX_LOCKUP_PERIOD + ); + } + + function testRateLimitEnforcement() public { + // Setup initial approval with limited rate allowance + uint256 limitedRateAllowance = 10 ether; + helper.setupOperatorApproval(USER1, OPERATOR, limitedRateAllowance, LOCKUP_ALLOWANCE, MAX_LOCKUP_PERIOD); + + // Create rail + uint256 railId = helper.createRail(USER1, USER2, OPERATOR, address(0), SERVICE_FEE_RECIPIENT); + + // Set rate to exactly the limit + vm.startPrank(OPERATOR); + payments.modifyRailPayment(railId, limitedRateAllowance, 0); + vm.stopPrank(); + + // Now try to exceed the limit - should revert + vm.startPrank(OPERATOR); + vm.expectRevert( + abi.encodeWithSelector( + Errors.OperatorRateAllowanceExceeded.selector, limitedRateAllowance, limitedRateAllowance + 1 ether + ) + ); + payments.modifyRailPayment(railId, limitedRateAllowance + 1 ether, 0); + vm.stopPrank(); + } + + // SECTION: Lockup Allowance Tracking 
+ + function testLockupTracking() public { + // Setup initial approval + helper.setupOperatorApproval(USER1, OPERATOR, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, MAX_LOCKUP_PERIOD); + + // Create rail + uint256 railId = helper.createRail(USER1, USER2, OPERATOR, address(0), SERVICE_FEE_RECIPIENT); + + // Set payment rate + uint256 paymentRate = 10 ether; + vm.startPrank(OPERATOR); + payments.modifyRailPayment(railId, paymentRate, 0); + vm.stopPrank(); + + // 1. Set initial lockup + uint256 lockupPeriod = 5; // 5 blocks + uint256 initialFixedLockup = 100 ether; + + vm.startPrank(OPERATOR); + payments.modifyRailLockup(railId, lockupPeriod, initialFixedLockup); + vm.stopPrank(); + + // Calculate expected lockup usage + uint256 expectedLockupUsage = initialFixedLockup + (paymentRate * lockupPeriod); + + // Verify lockup usage + helper.verifyOperatorAllowances( + USER1, OPERATOR, true, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, paymentRate, expectedLockupUsage, MAX_LOCKUP_PERIOD + ); + + // 2. Increase fixed lockup + uint256 increasedFixedLockup = 200 ether; + vm.startPrank(OPERATOR); + payments.modifyRailLockup(railId, lockupPeriod, increasedFixedLockup); + vm.stopPrank(); + + // Calculate updated expected lockup usage + uint256 updatedExpectedLockupUsage = increasedFixedLockup + (paymentRate * lockupPeriod); + + // Verify increased lockup usage + helper.verifyOperatorAllowances( + USER1, + OPERATOR, + true, + RATE_ALLOWANCE, + LOCKUP_ALLOWANCE, + paymentRate, + updatedExpectedLockupUsage, + MAX_LOCKUP_PERIOD + ); + + // 3. 
Decrease fixed lockup + uint256 decreasedFixedLockup = 50 ether; + vm.startPrank(OPERATOR); + payments.modifyRailLockup(railId, lockupPeriod, decreasedFixedLockup); + vm.stopPrank(); + + // Calculate reduced expected lockup usage + uint256 finalExpectedLockupUsage = decreasedFixedLockup + (paymentRate * lockupPeriod); + + // Verify decreased lockup usage + helper.verifyOperatorAllowances( + USER1, + OPERATOR, + true, + RATE_ALLOWANCE, + LOCKUP_ALLOWANCE, + paymentRate, + finalExpectedLockupUsage, + MAX_LOCKUP_PERIOD + ); + } + + function testLockupLimitEnforcement() public { + // Setup initial approval with limited lockup allowance + uint256 limitedLockupAllowance = 100 ether; + helper.setupOperatorApproval(USER1, OPERATOR, RATE_ALLOWANCE, limitedLockupAllowance, MAX_LOCKUP_PERIOD); + + // Create rail + uint256 railId = helper.createRail(USER1, USER2, OPERATOR, address(0), SERVICE_FEE_RECIPIENT); + + // Set payment rate + uint256 paymentRate = 10 ether; + vm.startPrank(OPERATOR); + payments.modifyRailPayment(railId, paymentRate, 0); + vm.stopPrank(); + + // Try to set fixed lockup that exceeds allowance + uint256 excessiveLockup = 110 ether; + (,,,, uint256 currentLockupUsage,) = payments.operatorApprovals(helper.testToken(), USER1, OPERATOR); + uint256 attemptedUsage = currentLockupUsage + excessiveLockup; + vm.startPrank(OPERATOR); + vm.expectRevert( + abi.encodeWithSelector( + Errors.OperatorLockupAllowanceExceeded.selector, limitedLockupAllowance, attemptedUsage + ) + ); + payments.modifyRailLockup(railId, 0, excessiveLockup); + vm.stopPrank(); + } + + function testAllowanceEdgeCases() public { + // 1. 
Test exact allowance consumption + uint256 exactRateAllowance = 10 ether; + uint256 exactLockupAllowance = 100 ether; + helper.setupOperatorApproval(USER1, OPERATOR, exactRateAllowance, exactLockupAllowance, MAX_LOCKUP_PERIOD); + + // Create rail + uint256 railId = helper.createRail(USER1, USER2, OPERATOR, address(0), SERVICE_FEE_RECIPIENT); + + // Use exactly the available rate allowance + vm.startPrank(OPERATOR); + payments.modifyRailPayment(railId, exactRateAllowance, 0); + vm.stopPrank(); + + // Use exactly the available lockup allowance + vm.startPrank(OPERATOR); + payments.modifyRailLockup(railId, 0, exactLockupAllowance); + vm.stopPrank(); + + // Verify allowances are fully consumed + helper.verifyOperatorAllowances( + USER1, + OPERATOR, + true, + exactRateAllowance, + exactLockupAllowance, + exactRateAllowance, + exactLockupAllowance, + MAX_LOCKUP_PERIOD + ); + + // 2. Test zero allowance behavior + helper.setupOperatorApproval(USER1, OPERATOR, 0, 0, MAX_LOCKUP_PERIOD); + + // Create rail with zero allowances + uint256 railId2 = helper.createRail(USER1, USER2, OPERATOR, address(0), SERVICE_FEE_RECIPIENT); + + // Attempt to set non-zero rate (should fail) + vm.startPrank(OPERATOR); + vm.expectRevert( + abi.encodeWithSelector(Errors.OperatorRateAllowanceExceeded.selector, 0, exactRateAllowance + 1) + ); + payments.modifyRailPayment(railId2, 1, 0); + vm.stopPrank(); + + // Attempt to set non-zero lockup (should fail) + vm.startPrank(OPERATOR); + vm.expectRevert( + abi.encodeWithSelector(Errors.OperatorLockupAllowanceExceeded.selector, 0, exactLockupAllowance + 1) + ); + payments.modifyRailLockup(railId2, 0, 1); + vm.stopPrank(); + } + + function testOperatorAuthorizationBoundaries() public { + // 1. Test unapproved operator + // Try to create a rail and expect it to fail + helper.expectcreateRailToRevertWithoutOperatorApproval(); + + // 2. 
Setup approval and create rail + helper.setupOperatorApproval(USER1, OPERATOR, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, MAX_LOCKUP_PERIOD); + + uint256 railId = helper.createRail(USER1, USER2, OPERATOR, address(0), SERVICE_FEE_RECIPIENT); + + // 3. Test non-operator rail modification + vm.startPrank(USER1); + vm.expectRevert(abi.encodeWithSelector(Errors.OnlyRailOperatorAllowed.selector, OPERATOR, USER1)); + payments.modifyRailPayment(railId, 10 ether, 0); + vm.stopPrank(); + + // 4. Revoke approval and verify operator can't create new rails + vm.startPrank(USER1); + payments.setOperatorApproval( + helper.testToken(), OPERATOR, false, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, MAX_LOCKUP_PERIOD + ); + vm.stopPrank(); + + // Verify operator approval was revoked + // Try to create a rail and expect it to fail + helper.expectcreateRailToRevertWithoutOperatorApproval(); + + // 5. Verify operator can still modify existing rails after approval revocation + vm.startPrank(OPERATOR); + payments.modifyRailPayment(railId, 5 ether, 0); + vm.stopPrank(); + + // 6. 
Test client authorization (operator can't set approvals for client) + vm.startPrank(OPERATOR); + payments.setOperatorApproval( + helper.testToken(), USER2, true, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, MAX_LOCKUP_PERIOD + ); + vm.stopPrank(); + + // Verify operator approval was not set for client + (bool isApproved,,,,,) = payments.operatorApprovals(helper.testToken(), USER1, OPERATOR); + assertFalse(isApproved, "Second operator should not be approved for client"); + } + + function testOneTimePaymentScenarios() public { + // Setup approval + helper.setupOperatorApproval(USER1, OPERATOR, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, MAX_LOCKUP_PERIOD); + + // Create rail with fixed lockup + uint256 railId = helper.createRail(USER1, USER2, OPERATOR, address(0), SERVICE_FEE_RECIPIENT); + + uint256 paymentRate = 10 ether; + uint256 fixedLockup = 100 ether; + + vm.startPrank(OPERATOR); + payments.modifyRailPayment(railId, paymentRate, 0); + payments.modifyRailLockup(railId, 0, fixedLockup); + vm.stopPrank(); + + uint256 oneTimeAmount = 30 ether; + helper.executeOneTimePayment(railId, OPERATOR, oneTimeAmount); + + // 2. Test complete fixed lockup consumption using one time payment + uint256 remainingFixedLockup = fixedLockup - oneTimeAmount; + vm.startPrank(OPERATOR); + payments.modifyRailPayment(railId, paymentRate, remainingFixedLockup); + vm.stopPrank(); + + // Verify fixed lockup is now zero + Payments.RailView memory rail = payments.getRail(railId); + assertEq(rail.lockupFixed, 0, "Fixed lockup should be zero"); + + // 3. 
Test excessive payment reverts + vm.startPrank(OPERATOR); + vm.expectRevert( + abi.encodeWithSelector(Errors.OneTimePaymentExceedsLockup.selector, railId, rail.lockupFixed, 1) + ); + payments.modifyRailPayment(railId, paymentRate, 1); // Lockup is now 0, so any payment should fail + vm.stopPrank(); + } + + function testAllowanceChangesWithOneTimePayments() public { + // Setup approval + helper.setupOperatorApproval(USER1, OPERATOR, RATE_ALLOWANCE, 1000 ether, MAX_LOCKUP_PERIOD); + + // Create rail + uint256 railId = helper.createRail(USER1, USER2, OPERATOR, address(0), SERVICE_FEE_RECIPIENT); + + uint256 paymentRate = 10 ether; + uint256 fixedLockup = 800 ether; + + vm.startPrank(OPERATOR); + payments.modifyRailPayment(railId, paymentRate, 0); + payments.modifyRailLockup(railId, 0, fixedLockup); + vm.stopPrank(); + + // 1. Test allowance reduction after fixed lockup set + vm.startPrank(USER1); + payments.setOperatorApproval( + helper.testToken(), + OPERATOR, + true, + RATE_ALLOWANCE, + 500 ether, // below fixed lockup of 800 ether, + MAX_LOCKUP_PERIOD + ); + vm.stopPrank(); + + // Operator should still be able to make one-time payments up to the fixed lockup + vm.startPrank(OPERATOR); + payments.modifyRailPayment(railId, paymentRate, 300 ether); + vm.stopPrank(); + + // Check that one-time payment succeeded despite reduced allowance + Payments.RailView memory rail = payments.getRail(railId); + assertEq(rail.lockupFixed, fixedLockup - 300 ether, "Fixed lockup not reduced correctly"); + + // 2. 
Test zero allowance after fixed lockup set + vm.startPrank(USER1); + payments.setOperatorApproval( + helper.testToken(), + OPERATOR, + true, + RATE_ALLOWANCE, + 0, // zero allowance + MAX_LOCKUP_PERIOD + ); + vm.stopPrank(); + + // Operator should still be able to make one-time payments up to the fixed lockup + vm.startPrank(OPERATOR); + payments.modifyRailPayment(railId, paymentRate, 200 ether); + vm.stopPrank(); + + // Check that one-time payment succeeded despite zero allowance + rail = payments.getRail(railId); + assertEq(rail.lockupFixed, 300 ether, "Fixed lockup not reduced correctly"); + } + + function test_OperatorCanReduceUsageOfExistingRailDespiteInsufficientAllowance() public { + // Client allows operator to use up to 90 rate/30 lockup + helper.setupOperatorApproval(USER1, OPERATOR, 90 ether, 30 ether, MAX_LOCKUP_PERIOD); + + // Operator creates a rail using 50 rate/20 lockup + uint256 railId = helper.createRail(USER1, USER2, OPERATOR, address(0), SERVICE_FEE_RECIPIENT); + + vm.startPrank(OPERATOR); + payments.modifyRailPayment(railId, 50 ether, 0); + payments.modifyRailLockup(railId, 0, 20 ether); + vm.stopPrank(); + + // Client reduces allowance to below what's already being used + vm.startPrank(USER1); + payments.setOperatorApproval( + helper.testToken(), + OPERATOR, + true, + 40 ether, // below current usage of 50 ether + 15 ether, // below current usage of 20 ether + MAX_LOCKUP_PERIOD + ); + vm.stopPrank(); + + // Operator should still be able to reduce usage of rate/lockup on existing rail + vm.startPrank(OPERATOR); + payments.modifyRailPayment(railId, 30 ether, 0); + payments.modifyRailLockup(railId, 0, 10 ether); + vm.stopPrank(); + + // Allowance - usage should be 40 - 30 = 10 for rate, 15 - 10 = 5 for lockup + ( + , + /*bool isApproved*/ + uint256 rateAllowance, + uint256 lockupAllowance, + uint256 rateUsage, + uint256 lockupUsage, + ) = helper.payments().operatorApprovals(helper.testToken(), USER1, OPERATOR); + assertEq(rateAllowance - 
rateUsage, 10 ether); + assertEq(lockupAllowance - lockupUsage, 5 ether); + + // Even though the operator can reduce usage on existing rails despite insufficient allowance, + // they should not be able to create new rail configurations with non-zero rate/lockup + + // Create a new rail, which should succeed + uint256 railId2 = helper.createRail(USER1, USER2, OPERATOR, address(0), SERVICE_FEE_RECIPIENT); + + uint256 attemptedUsage = rateUsage + 11 ether; + + // But attempting to set non-zero rate on the new rail should fail due to insufficient allowance + vm.startPrank(OPERATOR); + vm.expectRevert( + abi.encodeWithSelector(Errors.OperatorRateAllowanceExceeded.selector, rateAllowance, attemptedUsage) + ); + payments.modifyRailPayment(railId2, 11 ether, 0); + vm.stopPrank(); + + (,,,, lockupUsage,) = payments.operatorApprovals(helper.testToken(), USER1, OPERATOR); + uint256 oldLockupFixed = payments.getRail(railId2).lockupFixed; + uint256 newLockupFixed = 6 ether; + uint256 lockupIncrease = 0; + if (newLockupFixed > oldLockupFixed) { + lockupIncrease = newLockupFixed - oldLockupFixed; + } + attemptedUsage = lockupUsage + lockupIncrease; + + // Similarly, attempting to set non-zero lockup on the new rail should fail + vm.startPrank(OPERATOR); + vm.expectRevert( + abi.encodeWithSelector(Errors.OperatorLockupAllowanceExceeded.selector, lockupAllowance, attemptedUsage) + ); + payments.modifyRailLockup(railId2, 0, 6 ether); + vm.stopPrank(); + } + + function testAllowanceReductionScenarios() public { + // 1. 
Test reducing rate allowance below current usage + // Setup approval + helper.setupOperatorApproval( + USER1, + OPERATOR, + 100 ether, // 100 ether rate allowance + 1000 ether, + MAX_LOCKUP_PERIOD + ); + + // Create rail and set rate + uint256 railId = helper.createRail(USER1, USER2, OPERATOR, address(0), SERVICE_FEE_RECIPIENT); + + vm.startPrank(OPERATOR); + payments.modifyRailPayment(railId, 50 ether, 0); + vm.stopPrank(); + + // Client reduces rate allowance below current usage + vm.startPrank(USER1); + payments.setOperatorApproval( + helper.testToken(), + OPERATOR, + true, + 30 ether, // below current usage of 50 ether + 1000 ether, + MAX_LOCKUP_PERIOD + ); + vm.stopPrank(); + + // Operator should be able to decrease rate + vm.startPrank(OPERATOR); + payments.modifyRailPayment(railId, 30 ether, 0); // Decrease to allowance + vm.stopPrank(); + + ( + , // isApproved + uint256 rateAllowance, + , + , + , + ) = payments.operatorApprovals(helper.testToken(), USER1, OPERATOR); + uint256 attemptedRateUsage = 40 ether; + // Operator should not be able to increase rate above current allowance + vm.startPrank(OPERATOR); + vm.expectRevert( + abi.encodeWithSelector(Errors.OperatorRateAllowanceExceeded.selector, rateAllowance, attemptedRateUsage) + ); + payments.modifyRailPayment(railId, attemptedRateUsage, 0); // Try to increase above allowance + vm.stopPrank(); + + // 2. 
Test zeroing rate allowance after usage + vm.startPrank(USER1); + payments.setOperatorApproval( + helper.testToken(), + OPERATOR, + true, + 0, // zero allowance + 1000 ether, + MAX_LOCKUP_PERIOD + ); + vm.stopPrank(); + + // Operator should be able to decrease rate + vm.startPrank(OPERATOR); + payments.modifyRailPayment(railId, 20 ether, 0); + vm.stopPrank(); + + // Operator should not be able to increase rate at all + vm.startPrank(OPERATOR); + // Payments.OperatorApproval approval = payments.operatorApprovals(helper.testToken(), USER1, OPERATOR); + vm.expectRevert(abi.encodeWithSelector(Errors.OperatorRateAllowanceExceeded.selector, 0, 21 ether)); + payments.modifyRailPayment(railId, 21 ether, 0); + vm.stopPrank(); + + // 3. Test reducing lockup allowance below current usage + // Create a new rail for lockup testing + uint256 railId2 = helper.createRail(USER1, USER2, OPERATOR, address(0), SERVICE_FEE_RECIPIENT); + + // Reset approval with high lockup + helper.setupOperatorApproval(USER1, OPERATOR, 50 ether, 1000 ether, MAX_LOCKUP_PERIOD); + + // Set fixed lockup + vm.startPrank(OPERATOR); + payments.modifyRailPayment(railId2, 10 ether, 0); + payments.modifyRailLockup(railId2, 0, 500 ether); + vm.stopPrank(); + + // Client reduces lockup allowance below current usage + vm.startPrank(USER1); + payments.setOperatorApproval( + helper.testToken(), + OPERATOR, + true, + 50 ether, + 300 ether, // below current usage of 500 ether + MAX_LOCKUP_PERIOD + ); + vm.stopPrank(); + + // Operator should be able to decrease fixed lockup + vm.startPrank(OPERATOR); + payments.modifyRailLockup(railId2, 0, 200 ether); + vm.stopPrank(); + + // Operator should not be able to increase fixed lockup above current allowance + vm.startPrank(OPERATOR); + vm.expectRevert(abi.encodeWithSelector(Errors.OperatorLockupAllowanceExceeded.selector, 300 ether, 400 ether)); + payments.modifyRailLockup(railId2, 0, 400 ether); + vm.stopPrank(); + } + + function testComprehensiveApprovalLifecycle() public 
{ + // This test combines multiple approval lifecycle aspects into one comprehensive test + + // Setup approval + helper.setupOperatorApproval(USER1, OPERATOR, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, MAX_LOCKUP_PERIOD); + + // Create two rails with different parameters + uint256 railId1 = helper.createRail(USER1, USER2, OPERATOR, address(0), SERVICE_FEE_RECIPIENT); + + uint256 railId2 = helper.createRail(USER1, USER2, OPERATOR, address(0), SERVICE_FEE_RECIPIENT); + + // Set parameters for first rail + uint256 rate1 = 10 ether; + uint256 lockupPeriod1 = 5; + uint256 fixedLockup1 = 50 ether; + + vm.startPrank(OPERATOR); + payments.modifyRailPayment(railId1, rate1, 0); + payments.modifyRailLockup(railId1, lockupPeriod1, fixedLockup1); + vm.stopPrank(); + + // Set parameters for second rail + uint256 rate2 = 15 ether; + uint256 lockupPeriod2 = 3; + uint256 fixedLockup2 = 30 ether; + + vm.startPrank(OPERATOR); + payments.modifyRailPayment(railId2, rate2, 0); + payments.modifyRailLockup(railId2, lockupPeriod2, fixedLockup2); + vm.stopPrank(); + + // Calculate expected usage + uint256 expectedRateUsage = rate1 + rate2; + uint256 expectedLockupUsage = fixedLockup1 + (rate1 * lockupPeriod1) + fixedLockup2 + (rate2 * lockupPeriod2); + + // Verify combined usage + helper.verifyOperatorAllowances( + USER1, + OPERATOR, + true, + RATE_ALLOWANCE, + LOCKUP_ALLOWANCE, + expectedRateUsage, + expectedLockupUsage, + MAX_LOCKUP_PERIOD + ); + + // Make one-time payment for first rail + uint256 oneTimeAmount = 20 ether; + vm.startPrank(OPERATOR); + payments.modifyRailPayment(railId1, rate1, oneTimeAmount); + vm.stopPrank(); + + // Revoke approval + vm.startPrank(USER1); + payments.setOperatorApproval( + helper.testToken(), OPERATOR, false, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, MAX_LOCKUP_PERIOD + ); + vm.stopPrank(); + + // Operator should still be able to modify existing rails + vm.startPrank(OPERATOR); + payments.modifyRailPayment(railId1, rate1 - 2 ether, 0); + 
payments.modifyRailLockup(railId2, lockupPeriod2, fixedLockup2 - 10 ether); + vm.stopPrank(); + + // Testing that operator shouldn't be able to create a new rail using try/catch + helper.expectcreateRailToRevertWithoutOperatorApproval(); + + // Reapprove with reduced allowances + vm.startPrank(USER1); + payments.setOperatorApproval( + helper.testToken(), + OPERATOR, + true, + 20 ether, // Only enough for current rails + 100 ether, + MAX_LOCKUP_PERIOD + ); + vm.stopPrank(); + + // Operator should be able to create a new rail + uint256 railId3 = helper.createRail(USER1, USER2, OPERATOR, address(0), SERVICE_FEE_RECIPIENT); + + // But should not be able to exceed the new allowance + vm.startPrank(OPERATOR); + (, uint256 rateAllowance,, uint256 rateUsage,,) = + payments.operatorApprovals(helper.testToken(), USER1, OPERATOR); + uint256 attempted = rateUsage + 10 ether; // Attempt to set rate above allowance + vm.expectRevert(abi.encodeWithSelector(Errors.OperatorRateAllowanceExceeded.selector, rateAllowance, attempted)); + payments.modifyRailPayment(railId3, 10 ether, 0); // Would exceed new rate allowance + vm.stopPrank(); + } + + function testMaxLockupPeriodEnforcement() public { + // Setup initial approval with limited lockup period + uint256 limitedMaxLockupPeriod = 5; // 5 blocks max lockup period + helper.setupOperatorApproval(USER1, OPERATOR, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, limitedMaxLockupPeriod); + + // Create rail + uint256 railId = helper.createRail(USER1, USER2, OPERATOR, address(0), SERVICE_FEE_RECIPIENT); + + // Set payment rate + uint256 paymentRate = 10 ether; + vm.startPrank(OPERATOR); + payments.modifyRailPayment(railId, paymentRate, 0); + vm.stopPrank(); + + // Set lockup period exactly at the limit + vm.startPrank(OPERATOR); + payments.modifyRailLockup(railId, limitedMaxLockupPeriod, 50 ether); + vm.stopPrank(); + + // Now try to exceed the max lockup period - should revert + vm.startPrank(OPERATOR); + vm.expectRevert( + abi.encodeWithSelector( + 
Errors.LockupPeriodExceedsOperatorMaximum.selector, + helper.testToken(), + OPERATOR, + limitedMaxLockupPeriod, + limitedMaxLockupPeriod + 1 + ) + ); + payments.modifyRailLockup(railId, limitedMaxLockupPeriod + 1, 50 ether); + vm.stopPrank(); + } + + // Verify that operators can reduce lockup period even if it's over the max + function testReducingLockupPeriodBelowMax() public { + // Setup initial approval with high max lockup period + uint256 initialMaxLockupPeriod = 20; // 20 blocks initially + helper.setupOperatorApproval(USER1, OPERATOR, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, initialMaxLockupPeriod); + // Create rail + uint256 railId = helper.createRail(USER1, USER2, OPERATOR, address(0), SERVICE_FEE_RECIPIENT); + // Set payment rate and high lockup period + uint256 paymentRate = 10 ether; + vm.startPrank(OPERATOR); + payments.modifyRailPayment(railId, paymentRate, 0); + payments.modifyRailLockup(railId, 15, 50 ether); // 15 blocks period + vm.stopPrank(); + + // Now client reduces max lockup period + vm.startPrank(USER1); + uint256 finalMaxLockupPeriod = 5; // Reduce to 5 blocks + helper.setupOperatorApproval(USER1, OPERATOR, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, finalMaxLockupPeriod); + vm.stopPrank(); + + // Operator should be able to reduce period below the new max + vm.startPrank(OPERATOR); + payments.modifyRailLockup(railId, 4, 50 ether); // Lower to 4 blocks + vm.stopPrank(); + + // But not increase it above the new max, even though it's lower than what it was + vm.startPrank(OPERATOR); + vm.expectRevert( + abi.encodeWithSelector( + Errors.LockupPeriodExceedsOperatorMaximum.selector, + helper.testToken(), + OPERATOR, + finalMaxLockupPeriod, + 6 + ) + ); + payments.modifyRailLockup(railId, 6, 50 ether); // Try to increase to 6 blocks, which is over the new max of 5 + vm.stopPrank(); + } + + // SECTION: Increase Operator Approval Tests + + function testIncreaseOperatorApproval_HappyPath() public { + // Setup initial approval + helper.setupOperatorApproval(USER1, 
OPERATOR, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, MAX_LOCKUP_PERIOD); + + // Verify initial state + (bool isApproved, uint256 rateAllowance, uint256 lockupAllowance,,, uint256 maxLockupPeriod) = + payments.operatorApprovals(helper.testToken(), USER1, OPERATOR); + assertEq(isApproved, true); + assertEq(rateAllowance, RATE_ALLOWANCE); + assertEq(lockupAllowance, LOCKUP_ALLOWANCE); + assertEq(maxLockupPeriod, MAX_LOCKUP_PERIOD); + + // Increase allowances + uint256 rateIncrease = 50 ether; + uint256 lockupIncrease = 500 ether; + + vm.startPrank(USER1); + payments.increaseOperatorApproval(helper.testToken(), OPERATOR, rateIncrease, lockupIncrease); + vm.stopPrank(); + + // Verify increased allowances + (isApproved, rateAllowance, lockupAllowance,,, maxLockupPeriod) = + payments.operatorApprovals(helper.testToken(), USER1, OPERATOR); + assertEq(isApproved, true); + assertEq(rateAllowance, RATE_ALLOWANCE + rateIncrease); + assertEq(lockupAllowance, LOCKUP_ALLOWANCE + lockupIncrease); + assertEq(maxLockupPeriod, MAX_LOCKUP_PERIOD); // Should remain unchanged + } + + function testIncreaseOperatorApproval_ZeroIncrease() public { + // Setup initial approval + helper.setupOperatorApproval(USER1, OPERATOR, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, MAX_LOCKUP_PERIOD); + + // Increase by zero (should work but not change anything) + vm.startPrank(USER1); + payments.increaseOperatorApproval(helper.testToken(), OPERATOR, 0, 0); + vm.stopPrank(); + + // Verify allowances remain the same + (bool isApproved, uint256 rateAllowance, uint256 lockupAllowance,,, uint256 maxLockupPeriod) = + payments.operatorApprovals(helper.testToken(), USER1, OPERATOR); + assertEq(isApproved, true); + assertEq(rateAllowance, RATE_ALLOWANCE); + assertEq(lockupAllowance, LOCKUP_ALLOWANCE); + assertEq(maxLockupPeriod, MAX_LOCKUP_PERIOD); + } + + function testIncreaseOperatorApproval_OperatorNotApproved() public { + // Get token address before setting up expectRevert + IERC20 tokenAddress = helper.testToken(); + + // Try 
to increase approval for non-approved operator + vm.startPrank(USER1); + vm.expectRevert(abi.encodeWithSelector(Errors.OperatorNotApproved.selector, USER1, OPERATOR)); + payments.increaseOperatorApproval(tokenAddress, OPERATOR, 50 ether, 500 ether); + vm.stopPrank(); + } + + function testIncreaseOperatorApproval_ZeroOperatorAddress() public { + // Get token address before setting up expectRevert + IERC20 tokenAddress = helper.testToken(); + + // Try to increase approval for zero address operator + vm.startPrank(USER1); + vm.expectRevert(abi.encodeWithSelector(Errors.ZeroAddressNotAllowed.selector, "operator")); + payments.increaseOperatorApproval(tokenAddress, address(0), 50 ether, 500 ether); + vm.stopPrank(); + } + + function testIncreaseOperatorApproval_AfterRevocation() public { + // Setup initial approval + helper.setupOperatorApproval(USER1, OPERATOR, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, MAX_LOCKUP_PERIOD); + + // Revoke approval + helper.revokeOperatorApprovalAndVerify(USER1, OPERATOR); + + // Get token address before setting up expectRevert + IERC20 tokenAddress = helper.testToken(); + + // Try to increase revoked approval + vm.startPrank(USER1); + vm.expectRevert(abi.encodeWithSelector(Errors.OperatorNotApproved.selector, USER1, OPERATOR)); + payments.increaseOperatorApproval(tokenAddress, OPERATOR, 50 ether, 500 ether); + vm.stopPrank(); + } + + function testIncreaseOperatorApproval_WithExistingUsage() public { + // Setup initial approval + helper.setupOperatorApproval(USER1, OPERATOR, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, MAX_LOCKUP_PERIOD); + + // Create rail and use some allowance + uint256 railId = helper.createRail(USER1, USER2, OPERATOR, address(0), SERVICE_FEE_RECIPIENT); + uint256 paymentRate = 30 ether; + uint256 lockupFixed = 200 ether; + + vm.startPrank(OPERATOR); + payments.modifyRailPayment(railId, paymentRate, 0); + payments.modifyRailLockup(railId, 0, lockupFixed); + vm.stopPrank(); + + // Verify usage before increase + (, uint256 
rateAllowanceBefore, uint256 lockupAllowanceBefore, uint256 rateUsage, uint256 lockupUsage,) = + payments.operatorApprovals(helper.testToken(), USER1, OPERATOR); + assertEq(rateUsage, paymentRate); + assertEq(lockupUsage, lockupFixed); + + // Increase allowances + uint256 rateIncrease = 70 ether; + uint256 lockupIncrease = 800 ether; + + vm.startPrank(USER1); + payments.increaseOperatorApproval(helper.testToken(), OPERATOR, rateIncrease, lockupIncrease); + vm.stopPrank(); + + // Verify allowances increased but usage remains the same + (, uint256 rateAllowanceAfter, uint256 lockupAllowanceAfter, uint256 rateUsageAfter, uint256 lockupUsageAfter,) + = payments.operatorApprovals(helper.testToken(), USER1, OPERATOR); + assertEq(rateAllowanceAfter, rateAllowanceBefore + rateIncrease); + assertEq(lockupAllowanceAfter, lockupAllowanceBefore + lockupIncrease); + assertEq(rateUsageAfter, rateUsage); // Usage should remain unchanged + assertEq(lockupUsageAfter, lockupUsage); // Usage should remain unchanged + } + + function testIncreaseOperatorApproval_MultipleIncreases() public { + // Setup initial approval + helper.setupOperatorApproval(USER1, OPERATOR, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, MAX_LOCKUP_PERIOD); + + // First increase + uint256 firstRateIncrease = 25 ether; + uint256 firstLockupIncrease = 250 ether; + + vm.startPrank(USER1); + payments.increaseOperatorApproval(helper.testToken(), OPERATOR, firstRateIncrease, firstLockupIncrease); + vm.stopPrank(); + + // Second increase + uint256 secondRateIncrease = 35 ether; + uint256 secondLockupIncrease = 350 ether; + + vm.startPrank(USER1); + payments.increaseOperatorApproval(helper.testToken(), OPERATOR, secondRateIncrease, secondLockupIncrease); + vm.stopPrank(); + + // Verify cumulative increases + (, uint256 rateAllowance, uint256 lockupAllowance,,,) = + payments.operatorApprovals(helper.testToken(), USER1, OPERATOR); + assertEq(rateAllowance, RATE_ALLOWANCE + firstRateIncrease + secondRateIncrease); + 
assertEq(lockupAllowance, LOCKUP_ALLOWANCE + firstLockupIncrease + secondLockupIncrease); + } +} diff --git a/packages/pay/test/OperatorApprovalUsageLeak.t.sol b/packages/pay/test/OperatorApprovalUsageLeak.t.sol new file mode 100644 index 00000000..60e3d6b9 --- /dev/null +++ b/packages/pay/test/OperatorApprovalUsageLeak.t.sol @@ -0,0 +1,162 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT + +pragma solidity ^0.8.27; + +import {IERC20} from "@openzeppelin/contracts/token/ERC20/IERC20.sol"; +import {Test} from "forge-std/Test.sol"; +import {Payments} from "../src/Payments.sol"; +import {PaymentsTestHelpers} from "./helpers/PaymentsTestHelpers.sol"; +import {BaseTestHelper} from "./helpers/BaseTestHelper.sol"; +import {console} from "forge-std/console.sol"; + +contract OperatorApprovalUsageLeakTest is Test, BaseTestHelper { + PaymentsTestHelpers helper; + Payments payments; + IERC20 testToken; + + uint256 constant DEPOSIT_AMOUNT = 1000 ether; + uint256 constant RATE_ALLOWANCE = 200 ether; + uint256 constant LOCKUP_ALLOWANCE = 2000 ether; + uint256 constant MAX_LOCKUP_PERIOD = 100; + + function setUp() public { + helper = new PaymentsTestHelpers(); + helper.setupStandardTestEnvironment(); + payments = helper.payments(); + testToken = helper.testToken(); + + // Deposit funds for client + helper.makeDeposit(USER1, USER1, DEPOSIT_AMOUNT); + } + + function testOperatorLockupUsageLeakOnRailFinalization() public { + // Setup operator approval + helper.setupOperatorApproval(USER1, OPERATOR, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, MAX_LOCKUP_PERIOD); + + // Create a rail + uint256 railId = helper.createRail(USER1, USER2, OPERATOR, address(0), SERVICE_FEE_RECIPIENT); + + // Set payment rate and lockup + uint256 paymentRate = 10 ether; + uint256 lockupPeriod = 10; // 10 blocks + uint256 lockupFixed = 100 ether; + + vm.startPrank(OPERATOR); + payments.modifyRailPayment(railId, paymentRate, 0); + payments.modifyRailLockup(railId, lockupPeriod, lockupFixed); + vm.stopPrank(); + + // 
Calculate expected lockup usage + uint256 expectedLockupUsage = lockupFixed + (paymentRate * lockupPeriod); + + console.log("Initial lockup usage calculation:"); + console.log(" Fixed lockup:", lockupFixed); + console.log(" Rate-based lockup:", paymentRate * lockupPeriod); + console.log(" Total expected:", expectedLockupUsage); + + // Verify initial lockup usage is correct + helper.verifyOperatorAllowances( + USER1, OPERATOR, true, RATE_ALLOWANCE, LOCKUP_ALLOWANCE, paymentRate, expectedLockupUsage, MAX_LOCKUP_PERIOD + ); + + // Terminate the rail (by client) + vm.startPrank(USER1); + payments.terminateRail(railId); + vm.stopPrank(); + + // Get the account's lockup settled epoch + (,,, uint256 lockupLastSettledAt) = payments.accounts(testToken, USER1); + + // Calculate the rail's end epoch + uint256 endEpoch = lockupLastSettledAt + lockupPeriod; + + console.log("\nAfter termination:"); + console.log(" Current block:", block.number); + console.log(" Lockup last settled at:", lockupLastSettledAt); + console.log(" Rail end epoch:", endEpoch); + + // Move time forward to after the rail's end epoch + vm.roll(endEpoch + 1); + + console.log("\nAfter time advance:"); + console.log(" Current block:", block.number); + + // Settle the rail completely - this will trigger finalizeTerminatedRail + vm.startPrank(USER2); // Payee can settle + (uint256 settledAmount,,,, uint256 finalEpoch,) = payments.settleRail(railId, endEpoch); + vm.stopPrank(); + + console.log("\nAfter settlement:"); + console.log(" Settled amount:", settledAmount); + console.log(" Final epoch:", finalEpoch); + + // Check operator lockup usage after finalization + (,,, uint256 rateUsageAfter, uint256 lockupUsageAfter,) = payments.operatorApprovals(testToken, USER1, OPERATOR); + + console.log("\nFinal operator usage:"); + console.log(" Rate usage:", rateUsageAfter); + console.log(" Lockup usage:", lockupUsageAfter); + + // Assert the correct behavior: lockup usage should be 0 after finalization + 
assertEq(lockupUsageAfter, 0, "Lockup usage should be 0 after rail finalization"); + assertEq(rateUsageAfter, 0, "Rate usage should be 0 after rail finalization"); + } + + function testMultipleRailsShowCumulativeLeak() public { + // Setup operator approval with higher allowances + helper.setupOperatorApproval(USER1, OPERATOR, RATE_ALLOWANCE * 5, LOCKUP_ALLOWANCE * 5, MAX_LOCKUP_PERIOD); + + uint256 totalLeakedUsage = 0; + + // Create and terminate multiple rails to show cumulative effect + for (uint256 i = 1; i <= 3; i++) { + console.log("\n=== Rail", i, "==="); + + // Create rail + uint256 railId = helper.createRail(USER1, USER2, OPERATOR, address(0), SERVICE_FEE_RECIPIENT); + + // Set payment rate and lockup + uint256 paymentRate = 10 ether * i; + uint256 lockupPeriod = 5 * i; + uint256 lockupFixed = 50 ether * i; + + vm.startPrank(OPERATOR); + payments.modifyRailPayment(railId, paymentRate, 0); + payments.modifyRailLockup(railId, lockupPeriod, lockupFixed); + vm.stopPrank(); + + // Terminate the rail + vm.startPrank(USER1); + payments.terminateRail(railId); + vm.stopPrank(); + + // Get end epoch + (,,, uint256 lockupLastSettledAt) = payments.accounts(testToken, USER1); + uint256 endEpoch = lockupLastSettledAt + lockupPeriod; + + // Move time forward + vm.roll(endEpoch + 1); + + // Settle to trigger finalization + vm.startPrank(USER2); + payments.settleRail(railId, endEpoch); + vm.stopPrank(); + + // Track leaked usage + uint256 leakedForThisRail = paymentRate * lockupPeriod; + totalLeakedUsage += leakedForThisRail; + + console.log(" Leaked usage from this rail:", leakedForThisRail); + } + + // Check final operator lockup usage + (,,,, uint256 finalLockupUsage,) = payments.operatorApprovals(testToken, USER1, OPERATOR); + + console.log("\n=== FINAL OPERATOR USAGE ==="); + console.log("Final operator lockup usage:", finalLockupUsage); + console.log("Expected (correct) lockup usage: 0"); + + // Assert the correct behavior: all lockup usage should be cleared after 
all rails are finalized + assertEq(finalLockupUsage, 0, "All lockup usage should be cleared after finalizing all rails"); + } +} diff --git a/packages/pay/test/PayeeFaultArbitrationBug.t.sol b/packages/pay/test/PayeeFaultArbitrationBug.t.sol new file mode 100644 index 00000000..fcc84bdf --- /dev/null +++ b/packages/pay/test/PayeeFaultArbitrationBug.t.sol @@ -0,0 +1,140 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT + +pragma solidity ^0.8.27; + +import {Test} from "forge-std/Test.sol"; +import {Payments} from "../src/Payments.sol"; +import {MockERC20} from "./mocks/MockERC20.sol"; +import {MockValidator} from "./mocks/MockValidator.sol"; +import {PaymentsTestHelpers} from "./helpers/PaymentsTestHelpers.sol"; +import {BaseTestHelper} from "./helpers/BaseTestHelper.sol"; +import {console} from "forge-std/console.sol"; + +contract PayeeFaultArbitrationBugTest is Test, BaseTestHelper { + PaymentsTestHelpers helper; + Payments payments; + MockERC20 token; + MockValidator validator; + + uint256 constant DEPOSIT_AMOUNT = 200 ether; + + function setUp() public { + helper = new PaymentsTestHelpers(); + helper.setupStandardTestEnvironment(); + payments = helper.payments(); + token = MockERC20(address(helper.testToken())); + + // Create an validator that will reduce payment when payee fails + validator = new MockValidator(MockValidator.ValidatorMode.REDUCE_AMOUNT); + validator.configure(20); // Only approve 20% of requested payment (simulating payee fault) + + helper.makeDeposit(USER1, USER1, DEPOSIT_AMOUNT); + } + + function testLockupReturnedWithFaultTermination() public { + uint256 paymentRate = 5 ether; + uint256 lockupPeriod = 12; + uint256 fixedLockup = 10 ether; + + uint256 railId = helper.setupRailWithParameters( + USER1, USER2, OPERATOR, paymentRate, lockupPeriod, fixedLockup, address(validator), SERVICE_FEE_RECIPIENT + ); + + uint256 expectedTotalLockup = fixedLockup + (paymentRate * lockupPeriod); + + console.log("\n=== FIXED LOCKUP TEST ==="); + 
console.log("Fixed lockup:", fixedLockup); + console.log("Rate-based lockup:", paymentRate * lockupPeriod); + console.log("Expected total lockup:", expectedTotalLockup); + + // SP fails immediately, terminate + vm.prank(OPERATOR); + payments.terminateRail(railId); + + // Verify that railTerminated was called on the validator with correct parameters + assertTrue(validator.railTerminatedCalled(), "railTerminated should have been called"); + assertEq(validator.lastTerminatedRailId(), railId, "Incorrect railId passed to validator"); + assertEq(validator.lastTerminator(), OPERATOR, "Incorrect terminator passed to validator"); + + // Get the rail to verify the endEpoch matches + Payments.RailView memory rail = payments.getRail(railId); + assertEq(validator.lastEndEpoch(), rail.endEpoch, "Incorrect endEpoch passed to validator"); + + helper.advanceBlocks(15); + + vm.prank(USER1); + payments.settleRail(railId, block.number); + + Payments.Account memory payerFinal = helper.getAccountData(USER1); + + console.log("Lockup after:", payerFinal.lockupCurrent); + console.log("Expected lockup:", expectedTotalLockup); + + require(payerFinal.lockupCurrent == 0, "Payee fault bug: Fixed lockup not fully returned"); + } + + function testLockupReturnedWithFault() public { + uint256 paymentRate = 5 ether; + uint256 lockupPeriod = 12; + uint256 fixedLockup = 10 ether; + + uint256 railId = helper.setupRailWithParameters( + USER1, USER2, OPERATOR, paymentRate, lockupPeriod, fixedLockup, address(validator), SERVICE_FEE_RECIPIENT + ); + + uint256 expectedTotalLockup = fixedLockup + (paymentRate * lockupPeriod); + + console.log("\n=== FIXED LOCKUP TEST ==="); + console.log("Fixed lockup:", fixedLockup); + console.log("Rate-based lockup:", paymentRate * lockupPeriod); + console.log("Expected total lockup:", expectedTotalLockup); + + vm.prank(OPERATOR); + helper.advanceBlocks(15); + + vm.prank(USER1); + payments.settleRail(railId, block.number); + + Payments.Account memory payerFinal = 
helper.getAccountData(USER1); + + console.log("Lockup after:", payerFinal.lockupCurrent); + console.log("Expected lockup:", expectedTotalLockup); + + require(payerFinal.lockupCurrent == expectedTotalLockup, "Payee fault bug: Fixed lockup not fully returned"); + } + + function testLockupReturnedWithFaultReducedDuration() public { + uint256 paymentRate = 5 ether; + uint256 lockupPeriod = 12; + uint256 fixedLockup = 10 ether; + + MockValidator dv = new MockValidator(MockValidator.ValidatorMode.REDUCE_DURATION); + dv.configure(20); // Only approve 20% of requested duration + + uint256 railId = helper.setupRailWithParameters( + USER1, USER2, OPERATOR, paymentRate, lockupPeriod, fixedLockup, address(dv), SERVICE_FEE_RECIPIENT + ); + + // we will try to settle for 15 epochs, but the validator will only approve 20% of the duration i.e. 3 epochs + // this means that funds for the remaining 12 epochs will still be locked up. + uint256 expectedTotalLockup = fixedLockup + (paymentRate * lockupPeriod) + (12 * paymentRate); + + console.log("\n=== FIXED LOCKUP TEST ==="); + console.log("Fixed lockup:", fixedLockup); + console.log("Rate-based lockup:", paymentRate * lockupPeriod); + console.log("Expected total lockup:", expectedTotalLockup); + + vm.prank(OPERATOR); + helper.advanceBlocks(15); + + vm.prank(USER1); + payments.settleRail(railId, block.number); + + Payments.Account memory payerFinal = helper.getAccountData(USER1); + + console.log("Lockup after:", payerFinal.lockupCurrent); + console.log("Expected lockup:", expectedTotalLockup); + + require(payerFinal.lockupCurrent == expectedTotalLockup, "Payee fault bug: Fixed lockup not fully returned"); + } +} diff --git a/packages/pay/test/PaymentsAccessControl.t.sol b/packages/pay/test/PaymentsAccessControl.t.sol new file mode 100644 index 00000000..de06e30a --- /dev/null +++ b/packages/pay/test/PaymentsAccessControl.t.sol @@ -0,0 +1,170 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.27; + +import {Test} 
from "forge-std/Test.sol"; +import {Payments} from "../src/Payments.sol"; +import {PaymentsTestHelpers} from "./helpers/PaymentsTestHelpers.sol"; +import {BaseTestHelper} from "./helpers/BaseTestHelper.sol"; +import {Errors} from "../src/Errors.sol"; + +contract AccessControlTest is Test, BaseTestHelper { + Payments payments; + PaymentsTestHelpers helper; + + uint256 constant DEPOSIT_AMOUNT = 100 ether; + uint256 constant MAX_LOCKUP_PERIOD = 100; + uint256 railId; + + function setUp() public { + helper = new PaymentsTestHelpers(); + helper.setupStandardTestEnvironment(); + payments = helper.payments(); + + // Setup operator approval + helper.setupOperatorApproval( + USER1, + OPERATOR, + 10 ether, // rateAllowance + 100 ether, // lockupAllowance + MAX_LOCKUP_PERIOD // maxLockupPeriod + ); + + // Deposit funds for client + helper.makeDeposit(USER1, USER1, DEPOSIT_AMOUNT); + + // Create a rail for testing + railId = helper.createRail(USER1, USER2, OPERATOR, address(0), SERVICE_FEE_RECIPIENT); + + // Set up rail parameters + vm.startPrank(OPERATOR); + payments.modifyRailPayment(railId, 1 ether, 0); // 1 ether per block + payments.modifyRailLockup(railId, 10, 10 ether); // 10 block lockup period, 10 ether fixed + vm.stopPrank(); + } + + function testTerminateRail_SucceedsWhenCalledByClient() public { + vm.startPrank(USER1); + payments.terminateRail(railId); + vm.stopPrank(); + } + + function testTerminateRail_SucceedsWhenCalledByOperator() public { + vm.startPrank(OPERATOR); + payments.terminateRail(railId); + vm.stopPrank(); + } + + function testTerminateRail_RevertsWhenCalledByRecipient() public { + vm.startPrank(USER2); + vm.expectRevert( + abi.encodeWithSelector(Errors.NotAuthorizedToTerminateRail.selector, railId, USER1, OPERATOR, USER2) + ); + payments.terminateRail(railId); + vm.stopPrank(); + } + + function testTerminateRail_RevertsWhenCalledByUnauthorized() public { + vm.startPrank(address(0x99)); + vm.expectRevert( + 
abi.encodeWithSelector(Errors.NotAuthorizedToTerminateRail.selector, railId, USER1, OPERATOR, address(0x99)) + ); + payments.terminateRail(railId); + vm.stopPrank(); + } + + function testModifyRailLockup_SucceedsWhenCalledByOperator() public { + vm.startPrank(OPERATOR); + payments.modifyRailLockup(railId, 20, 20 ether); + vm.stopPrank(); + } + + function testModifyRailLockup_RevertsWhenCalledByClient() public { + vm.startPrank(USER1); + vm.expectRevert(abi.encodeWithSelector(Errors.OnlyRailOperatorAllowed.selector, OPERATOR, USER1)); + payments.modifyRailLockup(railId, 20, 20 ether); + vm.stopPrank(); + } + + function testModifyRailLockup_RevertsWhenCalledByRecipient() public { + vm.startPrank(USER2); + vm.expectRevert(abi.encodeWithSelector(Errors.OnlyRailOperatorAllowed.selector, OPERATOR, USER2)); + payments.modifyRailLockup(railId, 20, 20 ether); + vm.stopPrank(); + } + + function testModifyRailLockup_RevertsWhenCalledByUnauthorized() public { + vm.startPrank(address(0x99)); + vm.expectRevert(abi.encodeWithSelector(Errors.OnlyRailOperatorAllowed.selector, OPERATOR, address(0x99))); + payments.modifyRailLockup(railId, 20, 20 ether); + vm.stopPrank(); + } + + function testModifyRailPayment_SucceedsWhenCalledByOperator() public { + vm.startPrank(OPERATOR); + payments.modifyRailPayment(railId, 2 ether, 0); + vm.stopPrank(); + } + + function testModifyRailPayment_RevertsWhenCalledByClient() public { + vm.startPrank(USER1); + vm.expectRevert(abi.encodeWithSelector(Errors.OnlyRailOperatorAllowed.selector, OPERATOR, USER1)); + payments.modifyRailPayment(railId, 2 ether, 0); + vm.stopPrank(); + } + + function testModifyRailPayment_RevertsWhenCalledByRecipient() public { + vm.startPrank(USER2); + vm.expectRevert(abi.encodeWithSelector(Errors.OnlyRailOperatorAllowed.selector, OPERATOR, USER2)); + payments.modifyRailPayment(railId, 2 ether, 0); + vm.stopPrank(); + } + + function testModifyRailPayment_RevertsWhenCalledByUnauthorized() public { + 
vm.startPrank(address(0x99)); + vm.expectRevert(abi.encodeWithSelector(Errors.OnlyRailOperatorAllowed.selector, OPERATOR, address(0x99))); + payments.modifyRailPayment(railId, 2 ether, 0); + vm.stopPrank(); + } + + function testSettleTerminatedRailWithoutValidation_RevertsWhenCalledByOperator() public { + // 2. Add more funds + helper.makeDeposit( + USER1, + USER1, + 100 ether // Plenty of funds + ); + + // Terminate the rail + vm.startPrank(USER1); + payments.terminateRail(railId); + vm.stopPrank(); + + // Attempt to settle from operator account + vm.startPrank(OPERATOR); + vm.expectRevert(abi.encodeWithSelector(Errors.OnlyRailClientAllowed.selector, USER1, OPERATOR)); + payments.settleTerminatedRailWithoutValidation(railId); + vm.stopPrank(); + } + + function testTerminateRail_OnlyOperatorCanTerminateWhenLockupNotFullySettled() public { + // Advance blocks to create an unsettled state + helper.advanceBlocks(500); + + // Client should not be able to terminate because lockup is not fully settled + vm.startPrank(USER1); + vm.expectRevert( + abi.encodeWithSelector(Errors.NotAuthorizedToTerminateRail.selector, railId, USER1, OPERATOR, USER1) + ); + payments.terminateRail(railId); + vm.stopPrank(); + + // Operator should be able to terminate even when lockup is not fully settled + vm.startPrank(OPERATOR); + payments.terminateRail(railId); + vm.stopPrank(); + + // Verify the rail was terminated by checking its end epoch is set + Payments.RailView memory railView = payments.getRail(railId); + assertTrue(railView.endEpoch > 0, "Rail was not terminated properly"); + } +} diff --git a/packages/pay/test/PaymentsEvents.t.sol b/packages/pay/test/PaymentsEvents.t.sol new file mode 100644 index 00000000..c2edd9d1 --- /dev/null +++ b/packages/pay/test/PaymentsEvents.t.sol @@ -0,0 +1,368 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.27; + +import {Test} from "forge-std/Test.sol"; +import {Payments} from "../src/Payments.sol"; +import {PaymentsTestHelpers} 
from "./helpers/PaymentsTestHelpers.sol"; +import {BaseTestHelper} from "./helpers/BaseTestHelper.sol"; +import {MockERC20} from "./mocks/MockERC20.sol"; + +/** + * @title PaymentsEventsTest + * @dev Test contract for verifying all events emitted by the Payments contract + */ +contract PaymentsEventsTest is Test, BaseTestHelper { + Payments public payments; + PaymentsTestHelpers public helper; + MockERC20 public testToken; + + uint256 constant DEPOSIT_AMOUNT = 100 ether; + uint256 constant MAX_LOCKUP_PERIOD = 100; + uint256 railId; + + function setUp() public { + helper = new PaymentsTestHelpers(); + helper.setupStandardTestEnvironment(); + payments = helper.payments(); + testToken = helper.testToken(); + + // Setup operator approval + helper.setupOperatorApproval( + USER1, + OPERATOR, + 10 ether, // rateAllowance + 100 ether, // lockupAllowance + MAX_LOCKUP_PERIOD // maxLockupPeriod + ); + + // Deposit funds for client + helper.makeDeposit(USER1, USER1, DEPOSIT_AMOUNT); + } + + /** + * @dev Test for AccountLockupSettled event + */ + function testAccountLockupSettledEvent() public { + // Create a rail to trigger account lockup changes + railId = helper.createRail(USER1, USER2, OPERATOR, address(0), SERVICE_FEE_RECIPIENT); + + // Set up rail parameters which will trigger account settlement + vm.startPrank(OPERATOR); + + payments.modifyRailLockup(railId, 5, 0 ether); + + // This will trigger account lockup settlement + // account.lockupCurrent = rate * period = 25 ether + payments.modifyRailPayment(railId, 5 ether, 0); // 1 ether per block + + vm.stopPrank(); + + helper.advanceBlocks(5); + + vm.startPrank(OPERATOR); + + // Expect the event to be emitted + // lockupCurrent = 25 ether ( from modifyRailPayment ) + 5 * 5 ether ( elapsedTime * lockupRate) + vm.expectEmit(true, true, true, true); + emit Payments.AccountLockupSettled(testToken, USER1, 50 ether, 5 ether, block.number); + emit Payments.RailLockupModified(railId, 5, 10, 0, 0); + + 
payments.modifyRailLockup(railId, 10, 0 ether); + + vm.stopPrank(); + } + + /** + * @dev Test for OperatorApprovalSet event + */ + function testOperatorApprovalUpdatedEvent() public { + vm.startPrank(USER1); + + // Expect the event to be emitted + vm.expectEmit(true, true, true, true); + emit Payments.OperatorApprovalUpdated(testToken, USER1, OPERATOR2, true, 5 ether, 50 ether, MAX_LOCKUP_PERIOD); + + // Set operator approval + payments.setOperatorApproval( + testToken, + OPERATOR2, + true, + 5 ether, // rateAllowance + 50 ether, // lockupAllowance + MAX_LOCKUP_PERIOD // maxLockupPeriod + ); + + vm.stopPrank(); + } + + /** + * @dev Test for RailCreated event + */ + function testRailCreatedEvent() public { + vm.startPrank(OPERATOR); + + // Expect the event to be emitted + vm.expectEmit(true, true, true, true); + emit Payments.RailCreated( + 1, // railId (assuming this is the first rail) + USER1, // payer + USER2, // payee + testToken, // token + OPERATOR, // operator + address(0), // validator + SERVICE_FEE_RECIPIENT, // serviceFeeRecipient + 0 // commissionRateBps + ); + + // Create rail + payments.createRail( + testToken, + USER1, + USER2, + address(0), // validator + 0, // commissionRateBps + SERVICE_FEE_RECIPIENT // serviceFeeRecipient + ); + + vm.stopPrank(); + } + + /** + * @dev Test for RailLockupModified event + */ + function testRailLockupModifiedEvent() public { + // Create a rail first + railId = helper.createRail(USER1, USER2, OPERATOR, address(0), SERVICE_FEE_RECIPIENT); + + vm.startPrank(OPERATOR); + + // Expect the event to be emitted + vm.expectEmit(true, false, false, true); + emit Payments.RailLockupModified(railId, 0, 10, 0, 10 ether); + + // Modify rail lockup + payments.modifyRailLockup(railId, 10, 10 ether); + + vm.stopPrank(); + } + + /** + * @dev Test for RailOneTimePayment event + */ + function testRailOneTimePaymentEvent() public { + // Create a rail first + railId = helper.createRail(USER1, USER2, OPERATOR, address(0), 
SERVICE_FEE_RECIPIENT); + + // Set up rail parameters + vm.startPrank(OPERATOR); + payments.modifyRailPayment(railId, 1 ether, 0); + payments.modifyRailLockup(railId, 10, 10 ether); + + // calcualate expected values + Payments.RailView memory rail = payments.getRail(railId); + uint256 oneTimeAmount = 5 ether; + uint256 expectedNetworkFee = + oneTimeAmount * payments.NETWORK_FEE_NUMERATOR() / payments.NETWORK_FEE_DENOMINATOR(); + uint256 expectedOperatorCommission = + ((oneTimeAmount - expectedNetworkFee) * rail.commissionRateBps) / payments.COMMISSION_MAX_BPS(); + uint256 expectedNetPayeeAmount = oneTimeAmount - expectedOperatorCommission - expectedNetworkFee; + + // expect the event to be emitted + vm.expectEmit(true, false, false, true); + emit Payments.RailOneTimePaymentProcessed( + railId, expectedNetPayeeAmount, expectedOperatorCommission, expectedNetworkFee + ); + + // Execute one-time payment by calling modifyRailPayment with the current rate and a one-time payment amount + + payments.modifyRailPayment(railId, 1 ether, oneTimeAmount); + + vm.stopPrank(); + } + + /** + * @dev Test for RailPaymentRateModified event + */ + function testRailPaymentRateModifiedEvent() public { + // Create a rail first + railId = helper.createRail(USER1, USER2, OPERATOR, address(0), SERVICE_FEE_RECIPIENT); + + vm.startPrank(OPERATOR); + + // Expect the event to be emitted + vm.expectEmit(true, false, false, true); + emit Payments.RailRateModified(railId, 0, 1 ether); + + // Modify rail payment rate + payments.modifyRailPayment(railId, 1 ether, 0); + + vm.stopPrank(); + } + + /** + * @dev Test for RailSettled event + */ + function testRailSettledEvent() public { + // Create and set up a rail + railId = helper.createRail(USER1, USER2, OPERATOR, address(0), SERVICE_FEE_RECIPIENT); + + vm.startPrank(OPERATOR); + payments.modifyRailPayment(railId, 1 ether, 0); + payments.modifyRailLockup(railId, 10, 10 ether); + vm.stopPrank(); + + // Advance blocks to accumulate payment + 
helper.advanceBlocks(5); + + vm.startPrank(USER1); + + // expected values + Payments.RailView memory rail = payments.getRail(railId); + uint256 totalSettledAmount = 5 * rail.paymentRate; + uint256 totalNetworkFee = + 5 * rail.paymentRate * payments.NETWORK_FEE_NUMERATOR() / payments.NETWORK_FEE_DENOMINATOR(); + uint256 totalOperatorCommission = + ((totalSettledAmount - totalNetworkFee) * rail.commissionRateBps) / payments.COMMISSION_MAX_BPS(); + uint256 totalNetPayeeAmount = totalSettledAmount - totalNetworkFee - totalOperatorCommission; + + // Expect the event to be emitted + vm.expectEmit(true, true, false, true); + emit Payments.RailSettled( + railId, totalSettledAmount, totalNetPayeeAmount, totalOperatorCommission, totalNetworkFee, block.number + ); + + // Settle rail + payments.settleRail(railId, block.number); + + vm.stopPrank(); + } + + /** + * @dev Test for RailTerminated event + */ + function testRailTerminatedEvent() public { + // Create and set up a rail + railId = helper.createRail(USER1, USER2, OPERATOR, address(0), SERVICE_FEE_RECIPIENT); + + vm.startPrank(OPERATOR); + payments.modifyRailPayment(railId, 1 ether, 0); + payments.modifyRailLockup(railId, 10, 10 ether); + vm.stopPrank(); + + vm.startPrank(USER1); + + // expected end epoch + Payments.RailView memory rail = payments.getRail(railId); + uint256 expectedEndEpoch = block.number + rail.lockupPeriod; + // Expect the event to be emitted + vm.expectEmit(true, true, false, true); + emit Payments.RailTerminated(railId, USER1, expectedEndEpoch); + + // Terminate rail + payments.terminateRail(railId); + + vm.stopPrank(); + } + + /** + * @dev Test for RailFinalized event + */ + function testRailFinalizedEvent() public { + // Create and set up a rail + railId = helper.createRail(USER1, USER2, OPERATOR, address(0), SERVICE_FEE_RECIPIENT); + + vm.startPrank(OPERATOR); + payments.modifyRailPayment(railId, 1 ether, 0); + payments.modifyRailLockup(railId, 10, 10 ether); + vm.stopPrank(); + + // Terminate the 
rail + vm.startPrank(USER1); + payments.terminateRail(railId); + vm.stopPrank(); + + // Get the rail to check its end epoch + Payments.RailView memory rail = payments.getRail(railId); + + // Advance blocks past the end epoch + helper.advanceBlocks(rail.lockupPeriod + 1); + + vm.startPrank(USER1); + + // Expect the event to be emitted + vm.expectEmit(true, false, false, true); + emit Payments.RailFinalized(railId); + + // Settle terminated rail to trigger finalization + payments.settleTerminatedRailWithoutValidation(railId); + + vm.stopPrank(); + } + + /** + * @dev Test for DepositRecorded event + */ + function testDepositRecordedEvent() public { + vm.startPrank(USER1); + + // Make sure we have approval + testToken.approve(address(payments), 10 ether); + + // Expect the event to be emitted + // Only check the first three indexed parameters + vm.expectEmit(true, true, true, true); + emit Payments.AccountLockupSettled(testToken, USER2, 0, 0, block.number); + emit Payments.DepositRecorded(testToken, USER1, USER2, 10 ether); // Amount not checked + + // Deposit tokens + payments.deposit(testToken, USER2, 10 ether); + + vm.stopPrank(); + + // Test event in DepositWithPermit + // Use a private key for signing + uint256 privateKey = 1; + address signer = vm.addr(privateKey); + + // Mint tokens to the signer + MockERC20(testToken).mint(signer, 50 ether); + + uint256 depositAmount = 10 ether; + uint256 deadline = block.timestamp + 1 hours; + + // Get signature components + (uint8 v, bytes32 r, bytes32 s) = + helper.getPermitSignature(privateKey, signer, address(payments), depositAmount, deadline); + + vm.startPrank(signer); + + // Expect the event to be emitted + vm.expectEmit(true, true, false, true); + emit Payments.AccountLockupSettled(testToken, signer, 0, 0, block.number); + emit Payments.DepositRecorded(testToken, signer, signer, depositAmount); + + // Deposit with permit + payments.depositWithPermit(testToken, signer, depositAmount, deadline, v, r, s); + + 
vm.stopPrank(); + } + + /** + * @dev Test for WithdrawRecorded event + */ + function testWithdrawRecordedEvent() public { + // First make a deposit to USER2 + helper.makeDeposit(USER1, USER2, 10 ether); + + vm.startPrank(USER2); + + // Expect the event to be emitted + vm.expectEmit(true, true, true, true); + emit Payments.WithdrawRecorded(testToken, USER2, USER2, 5 ether); + + // Withdraw tokens + payments.withdraw(testToken, 5 ether); + + vm.stopPrank(); + } +} diff --git a/packages/pay/test/RailGetters.t.sol b/packages/pay/test/RailGetters.t.sol new file mode 100644 index 00000000..f9366187 --- /dev/null +++ b/packages/pay/test/RailGetters.t.sol @@ -0,0 +1,378 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT + +pragma solidity ^0.8.27; + +import {IERC20} from "@openzeppelin/contracts/token/ERC20/IERC20.sol"; + +import {Test} from "forge-std/Test.sol"; +import {Payments} from "../src/Payments.sol"; +import {MockERC20} from "./mocks/MockERC20.sol"; +import {PaymentsTestHelpers} from "./helpers/PaymentsTestHelpers.sol"; +import {RailSettlementHelpers} from "./helpers/RailSettlementHelpers.sol"; +import {BaseTestHelper} from "./helpers/BaseTestHelper.sol"; + +contract PayeeRailsTest is Test, BaseTestHelper { + PaymentsTestHelpers helper; + RailSettlementHelpers settlementHelper; + Payments payments; + MockERC20 token; + + // Secondary token for multi-token testing + MockERC20 token2; + + uint256 constant INITIAL_BALANCE = 5000 ether; + uint256 constant DEPOSIT_AMOUNT = 200 ether; + uint256 constant MAX_LOCKUP_PERIOD = 100; + + // Rail IDs for tests + uint256 rail1Id; + uint256 rail2Id; + uint256 rail3Id; + uint256 rail4Id; // Different token + uint256 rail5Id; // Different payee + + function setUp() public { + helper = new PaymentsTestHelpers(); + helper.setupStandardTestEnvironment(); + payments = helper.payments(); + token = MockERC20(address(helper.testToken())); + + // Create settlement helper + settlementHelper = new RailSettlementHelpers(); + 
settlementHelper.initialize(payments, helper); + + // Create a second token for multi-token tests + token2 = new MockERC20("Token 2", "TK2"); + token2.mint(USER1, INITIAL_BALANCE); + + // Make deposits to test accounts + helper.makeDeposit(USER1, USER1, DEPOSIT_AMOUNT); + + // For token2 + vm.startPrank(USER1); + token2.approve(address(payments), type(uint256).max); + payments.deposit(token2, USER1, DEPOSIT_AMOUNT); + vm.stopPrank(); + + // Setup operator approvals + helper.setupOperatorApproval( + USER1, // from + OPERATOR, // operator + 15 ether, // rate allowance (sum of all rates: 5+3+2+1 = 11 ether) + 200 ether, // lockup allowance, + MAX_LOCKUP_PERIOD // maximum lockup period + ); + + // Setup approval for token2 + vm.startPrank(USER1); + payments.setOperatorApproval( + token2, + OPERATOR, + true, // approved + 10 ether, // rate allowance + 100 ether, // lockup allowance + MAX_LOCKUP_PERIOD // maximum lockup period + ); + vm.stopPrank(); + + // Create different rails for testing + createTestRails(); + } + + function createTestRails() internal { + // Rail 1: Standard rail with token1 and USER2 as payee + rail1Id = helper.setupRailWithParameters( + USER1, // from + USER2, // to (payee) + OPERATOR, // operator + 5 ether, // rate + 10, // lockupPeriod + 0, // No fixed lockup + address(0), // No validator + SERVICE_FEE_RECIPIENT // operator commision receiver + ); + + // Rail 2: Another rail with token1 and USER2 as payee + rail2Id = helper.setupRailWithParameters( + USER1, // from + USER2, // to (payee) + OPERATOR, // operator + 3 ether, // rate + 10, // lockupPeriod + 0, // No fixed lockup + address(0), // No validator + SERVICE_FEE_RECIPIENT // operator commision receiver + ); + + // Rail 3: Will be terminated + rail3Id = helper.setupRailWithParameters( + USER1, // from + USER2, // to (payee) + OPERATOR, // operator + 2 ether, // rate + 5, // lockupPeriod + 0, // No fixed lockup + address(0), // No validator + SERVICE_FEE_RECIPIENT // operator commision 
receiver + ); + + // Rail 4: With token2 and USER2 as payee + vm.startPrank(OPERATOR); + rail4Id = payments.createRail( + token2, + USER1, // from + USER2, // to (payee) + address(0), // no validator + 0, // no commission + SERVICE_FEE_RECIPIENT // operator commision receiver + ); + payments.modifyRailPayment(rail4Id, 4 ether, 0); + payments.modifyRailLockup(rail4Id, 10, 0); + vm.stopPrank(); + + // Rail 5: With token1 but USER3 as payee + rail5Id = helper.setupRailWithParameters( + USER1, // from + USER3, // to (payee) + OPERATOR, // operator + 1 ether, // rate + 10, // lockupPeriod + 0, // No fixed lockup + address(0), // No validator + SERVICE_FEE_RECIPIENT // operator commision receiver + ); + + // Terminate Rail 3 + vm.prank(OPERATOR); + payments.terminateRail(rail3Id); + } + + function testGetRailsForPayeeAndToken() public view { + // Test getting all rails for USER2 and token1 (should include terminated) + (Payments.RailInfo[] memory rails,,) = payments.getRailsForPayeeAndToken(USER2, token, 0, 3); + + // Should include 3 rails: rail1Id, rail2Id, and rail3Id (terminated) + assertEq(rails.length, 3, "Should have 3 rails for USER2 with token1"); + + // Verify the rail IDs and their termination status + bool foundRail1 = false; + bool foundRail2 = false; + bool foundRail3 = false; + + for (uint256 i = 0; i < rails.length; i++) { + if (rails[i].railId == rail1Id) { + foundRail1 = true; + assertFalse(rails[i].isTerminated, "Rail 1 should not be terminated"); + assertEq(rails[i].endEpoch, 0, "Rail 1 should have 0 endEpoch"); + } else if (rails[i].railId == rail2Id) { + foundRail2 = true; + assertFalse(rails[i].isTerminated, "Rail 2 should not be terminated"); + assertEq(rails[i].endEpoch, 0, "Rail 2 should have 0 endEpoch"); + } else if (rails[i].railId == rail3Id) { + foundRail3 = true; + assertTrue(rails[i].isTerminated, "Rail 3 should be terminated"); + assertTrue(rails[i].endEpoch > 0, "Rail 3 should have non-zero endEpoch"); + } + } + + assertTrue(foundRail1, 
"Rail 1 not found"); + assertTrue(foundRail2, "Rail 2 not found"); + assertTrue(foundRail3, "Rail 3 not found"); + + // Test different token (should only return rails for that token) + (Payments.RailInfo[] memory token2Rail,,) = payments.getRailsForPayeeAndToken(USER2, token2, 0, 0); + + // Should include only 1 rail with token2: rail4Id + assertEq(token2Rail.length, 1, "Should have 1 rail for USER2 with token2"); + assertEq(token2Rail[0].railId, rail4Id, "Rail ID should match rail4Id"); + + // Test different payee (should only return rails for that payee) + (Payments.RailInfo[] memory user3Rails,,) = payments.getRailsForPayeeAndToken(USER3, token, 0, 0); + + // Should include only 1 rail for USER3: rail5Id + assertEq(user3Rails.length, 1, "Should have 1 rail for USER3 with token1"); + assertEq(user3Rails[0].railId, rail5Id, "Rail ID should match rail5Id"); + } + + function testGetRailsForPayerAndToken() public view { + // Test getting all rails for USER1 (payer) and token1 (should include terminated) + (Payments.RailInfo[] memory rails,,) = payments.getRailsForPayerAndToken(USER1, token, 0, 4); + + // Should include 4 rails: rail1Id, rail2Id, rail3Id (terminated), and rail5Id + assertEq(rails.length, 4, "Should have 4 rails for USER1 with token1"); + + // Verify the rail IDs and their termination status + bool foundRail1 = false; + bool foundRail2 = false; + bool foundRail3 = false; + bool foundRail5 = false; + + for (uint256 i = 0; i < rails.length; i++) { + if (rails[i].railId == rail1Id) { + foundRail1 = true; + assertFalse(rails[i].isTerminated, "Rail 1 should not be terminated"); + assertEq(rails[i].endEpoch, 0, "Rail 1 should have 0 endEpoch"); + } else if (rails[i].railId == rail2Id) { + foundRail2 = true; + assertFalse(rails[i].isTerminated, "Rail 2 should not be terminated"); + assertEq(rails[i].endEpoch, 0, "Rail 2 should have 0 endEpoch"); + } else if (rails[i].railId == rail3Id) { + foundRail3 = true; + assertTrue(rails[i].isTerminated, "Rail 3 should 
be terminated"); + assertTrue(rails[i].endEpoch > 0, "Rail 3 should have non-zero endEpoch"); + } else if (rails[i].railId == rail5Id) { + foundRail5 = true; + assertFalse(rails[i].isTerminated, "Rail 5 should not be terminated"); + assertEq(rails[i].endEpoch, 0, "Rail 5 should have 0 endEpoch"); + } + } + + assertTrue(foundRail1, "Rail 1 not found"); + assertTrue(foundRail2, "Rail 2 not found"); + assertTrue(foundRail3, "Rail 3 not found"); + assertTrue(foundRail5, "Rail 5 not found"); + + // Test different token (should only return rails for that token) + (Payments.RailInfo[] memory token2Rails,,) = payments.getRailsForPayerAndToken(USER1, token2, 0, 0); + + // Should include only 1 rail with token2: rail4Id + assertEq(token2Rails.length, 1, "Should have 1 rail for USER1 with token2"); + assertEq(token2Rails[0].railId, rail4Id, "Rail ID should match rail4Id"); + } + + function testRailsBeyondEndEpoch() public { + // Get the initial rails when Rail 3 is terminated but not beyond its end epoch + (Payments.RailInfo[] memory initialPayeeRails,,) = payments.getRailsForPayeeAndToken(USER2, token, 0, 3); + (Payments.RailInfo[] memory initialPayerRails,,) = payments.getRailsForPayerAndToken(USER1, token, 0, 4); + + // Should include all 3 rails for payee + assertEq(initialPayeeRails.length, 3, "Should have 3 rails initially for payee"); + // Should include all 4 rails for payer + assertEq(initialPayerRails.length, 4, "Should have 4 rails initially for payer"); + + // Get the endEpoch for Rail 3 + uint256 endEpoch; + for (uint256 i = 0; i < initialPayeeRails.length; i++) { + if (initialPayeeRails[i].railId == rail3Id) { + endEpoch = initialPayeeRails[i].endEpoch; + break; + } + } + + // Advance blocks beyond the end epoch of Rail 3 + uint256 blocksToAdvance = endEpoch - block.number + 1; + helper.advanceBlocks(blocksToAdvance); + + // IMPORTANT: Settle the rail now that we're beyond its end epoch + // This will finalize the rail (set rail.from = address(0)) + 
vm.prank(USER1); // Settle as the client + payments.settleRail(rail3Id, endEpoch); + + // Get rails again for both payee and payer + (Payments.RailInfo[] memory finalPayeeRails,,) = payments.getRailsForPayeeAndToken(USER2, token, 0, 3); + (Payments.RailInfo[] memory finalPayerRails,,) = payments.getRailsForPayerAndToken(USER1, token, 0, 4); + + // Should include only 2 rails now for payee, as Rail 3 is beyond its end epoch + assertEq(finalPayeeRails.length, 2, "Should have 2 rails for payee after advancing beyond end epoch"); + + // Should include only 3 rails now for payer, as Rail 3 is beyond its end epoch + assertEq(finalPayerRails.length, 3, "Should have 3 rails for payer after advancing beyond end epoch"); + + // Verify Rail 3 is no longer included in payee rails + bool railFoundInPayeeRails = false; + for (uint256 i = 0; i < finalPayeeRails.length; i++) { + if (finalPayeeRails[i].railId == rail3Id) { + railFoundInPayeeRails = true; + break; + } + } + + // Verify Rail 3 is no longer included in payer rails + bool railFoundInPayerRails = false; + for (uint256 i = 0; i < finalPayerRails.length; i++) { + if (finalPayerRails[i].railId == rail3Id) { + railFoundInPayerRails = true; + break; + } + } + + assertFalse(railFoundInPayeeRails, "Rail 3 should not be included in payee rails after its end epoch"); + + assertFalse(railFoundInPayerRails, "Rail 3 should not be included in payer rails after its end epoch"); + } + + function testEmptyResult() public view { + // Test non-existent payee + (Payments.RailInfo[] memory nonExistentPayee,,) = payments.getRailsForPayeeAndToken(address(0x123), token, 0, 0); + assertEq(nonExistentPayee.length, 0, "Should return empty array for non-existent payee"); + + // Test non-existent payer + (Payments.RailInfo[] memory nonExistentPayer,,) = payments.getRailsForPayerAndToken(address(0x123), token, 0, 0); + assertEq(nonExistentPayer.length, 0, "Should return empty array for non-existent payer"); + + // Test non-existent token for payee 
+ (Payments.RailInfo[] memory nonExistentTokenForPayee,,) = + payments.getRailsForPayeeAndToken(USER2, IERC20(address(0x456)), 0, 0); + assertEq(nonExistentTokenForPayee.length, 0, "Should return empty array for non-existent token with payee"); + + // Test non-existent token for payer + (Payments.RailInfo[] memory nonExistentTokenForPayer,,) = + payments.getRailsForPayerAndToken(USER1, IERC20(address(0x456)), 0, 0); + assertEq(nonExistentTokenForPayer.length, 0, "Should return empty array for non-existent token with payer"); + } + + function testPagination() public view { + // Test pagination for payee rails (USER2 has 3 rails with token1) + + // Test getting first 2 rails + (Payments.RailInfo[] memory page1, uint256 nextOffset1, uint256 total1) = + payments.getRailsForPayeeAndToken(USER2, token, 0, 2); + + assertEq(page1.length, 2, "First page should have 2 rails"); + assertEq(nextOffset1, 2, "Next offset should be 2"); + assertEq(total1, 3, "Total should be 3"); + + // Test getting remaining rail + (Payments.RailInfo[] memory page2, uint256 nextOffset2, uint256 total2) = + payments.getRailsForPayeeAndToken(USER2, token, nextOffset1, 2); + + assertEq(page2.length, 1, "Second page should have 1 rail"); + assertEq(nextOffset2, 3, "Next offset should be 3 (end of array)"); + assertEq(total2, 3, "Total should still be 3"); + + // Verify no duplicate rails between pages + bool duplicateFound = false; + for (uint256 i = 0; i < page1.length; i++) { + for (uint256 j = 0; j < page2.length; j++) { + if (page1[i].railId == page2[j].railId) { + duplicateFound = true; + break; + } + } + } + assertFalse(duplicateFound, "No duplicate rails should exist between pages"); + + // Test offset beyond array length + (Payments.RailInfo[] memory emptyPage, uint256 nextOffset3, uint256 total3) = + payments.getRailsForPayeeAndToken(USER2, token, 10, 2); + + assertEq(emptyPage.length, 0, "Should return empty array for offset beyond length"); + assertEq(nextOffset3, 3, "Next offset should 
equal total length"); + assertEq(total3, 3, "Total should still be 3"); + + // Test pagination for payer rails (USER1 has 4 rails with token1) + (Payments.RailInfo[] memory payerPage1, uint256 payerNext1, uint256 payerTotal1) = + payments.getRailsForPayerAndToken(USER1, token, 0, 3); + + assertEq(payerPage1.length, 3, "Payer first page should have 3 rails"); + assertEq(payerNext1, 3, "Payer next offset should be 3"); + assertEq(payerTotal1, 4, "Payer total should be 4"); + + (Payments.RailInfo[] memory payerPage2, uint256 payerNext2, uint256 payerTotal2) = + payments.getRailsForPayerAndToken(USER1, token, payerNext1, 3); + + assertEq(payerPage2.length, 1, "Payer second page should have 1 rail"); + assertEq(payerNext2, 4, "Payer next offset should be 4 (end of array)"); + assertEq(payerTotal2, 4, "Payer total should still be 4"); + } +} diff --git a/packages/pay/test/RailSettlement.t.sol b/packages/pay/test/RailSettlement.t.sol new file mode 100644 index 00000000..b6f95d5b --- /dev/null +++ b/packages/pay/test/RailSettlement.t.sol @@ -0,0 +1,962 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT + +pragma solidity ^0.8.27; + +import {Test} from "forge-std/Test.sol"; +import {Payments} from "../src/Payments.sol"; +import {MockERC20} from "./mocks/MockERC20.sol"; +import {MockValidator} from "./mocks/MockValidator.sol"; +import {PaymentsTestHelpers} from "./helpers/PaymentsTestHelpers.sol"; +import {RailSettlementHelpers} from "./helpers/RailSettlementHelpers.sol"; +import {console} from "forge-std/console.sol"; +import {BaseTestHelper} from "./helpers/BaseTestHelper.sol"; +import {Errors} from "../src/Errors.sol"; + +contract RailSettlementTest is Test, BaseTestHelper { + PaymentsTestHelpers helper; + RailSettlementHelpers settlementHelper; + Payments payments; + MockERC20 token; + + uint256 constant DEPOSIT_AMOUNT = 200 ether; + uint256 constant MAX_LOCKUP_PERIOD = 100; + + function setUp() public { + helper = new PaymentsTestHelpers(); + 
helper.setupStandardTestEnvironment(); + payments = helper.payments(); + token = MockERC20(address(helper.testToken())); + + // Create settlement helper with the helper that has the initialized payment contract + settlementHelper = new RailSettlementHelpers(); + // Initialize the settlement helper with our Payments instance + settlementHelper.initialize(payments, helper); + + // Make deposits to test accounts for testing + helper.makeDeposit(USER1, USER1, DEPOSIT_AMOUNT); + } + + //-------------------------------- + // 1. Basic Settlement Flow Tests + //-------------------------------- + + function testBasicSettlement() public { + // Create a rail with a simple rate + uint256 rate = 5 ether; + uint256 railId = helper.setupRailWithParameters( + USER1, + USER2, + OPERATOR, + rate, + 10, // lockupPeriod + 0, // No fixed lockup + address(0), // No validator + SERVICE_FEE_RECIPIENT // operator commision receiver + ); + + // Advance a few blocks + helper.advanceBlocks(5); + + // Settle for the elapsed blocks + uint256 expectedAmount = rate * 5; // 5 blocks * 5 ether + console.log("block.number", block.number); + + settlementHelper.settleRailAndVerify(railId, block.number, expectedAmount, block.number); + } + + function testSettleRailInDebt() public { + uint256 rate = 50 ether; + uint256 railId = helper.setupRailWithParameters( + USER1, + USER2, + OPERATOR, + rate, + 3, // lockupPeriod - total locked: 150 ether (3 * 50) + 0, // No fixed lockup + address(0), + SERVICE_FEE_RECIPIENT // operator commision receiver + ); + + // Advance 7 blocks + helper.advanceBlocks(7); + + // With 200 ether deposit and 150 ether locked, we can only pay for 1 epoch (50 ether) + uint256 expectedAmount = 50 ether; + uint256 expectedEpoch = 2; // Initial epoch (1) + 1 epoch + + // First settlement + settlementHelper.settleRailAndVerify(railId, block.number, expectedAmount, expectedEpoch); + + // Settle again - should be a no-op since we're already settled to the expected epoch + 
settlementHelper.settleRailAndVerify(railId, block.number, 0, expectedEpoch); + + // Add more funds and settle again + uint256 additionalDeposit = 300 ether; + helper.makeDeposit(USER1, USER1, additionalDeposit); + + // Should be able to settle the remaining 6 epochs + uint256 expectedAmount2 = rate * 6; // 6 more epochs * 50 ether + + // Third settlement + settlementHelper.settleRailAndVerify(railId, block.number, expectedAmount2, block.number); + } + + function testSettleRailWithRateChange() public { + // Set up a rail + uint256 rate = 5 ether; + uint256 railId = helper.setupRailWithParameters( + USER1, + USER2, + OPERATOR, + rate, + 10, // lockupPeriod + 0, // No fixed lockup + address(0), // Standard validator + SERVICE_FEE_RECIPIENT // operator commision receiver + ); + uint256 newRate1 = 6 ether; + uint256 newRate2 = 7 ether; + + // Set the rate to 6 ether after 7 blocks + helper.advanceBlocks(7); + + // Increase operator allowances to allow rate modification + // We increase rate allowance = 5 + 6 + 7 ether and add buffer for lockup + uint256 rateAllowance = rate + newRate1 + newRate2; + uint256 lockupAllowance = (rate + newRate1 + newRate2) * 10; + helper.setupOperatorApproval(USER1, OPERATOR, rateAllowance, lockupAllowance, MAX_LOCKUP_PERIOD); + + // Operator increases the payment rate from 5 ETH to 6 ETH per block for epochs (9-14) + // This creates a rate change queue + vm.prank(OPERATOR); + payments.modifyRailPayment(railId, newRate1, 0); + vm.stopPrank(); + + // Advance 6 blocks + helper.advanceBlocks(6); + + // Operator increases the payment rate from 6 ETH to 7 ETH per block for epochs (15-21) + // This creates a rate change queue + vm.prank(OPERATOR); + payments.modifyRailPayment(railId, newRate2, 0); + vm.stopPrank(); + + // Advance 6 blocks + helper.advanceBlocks(7); + + // expectedAmount = 5 * 7 + 6 * 6 + 7 * 7 = 120 ether + uint256 expectedAmount = rate * 7 + newRate1 * 6 + newRate2 * 7; + + // settle and verify + 
settlementHelper.settleRailAndVerify(railId, block.number, expectedAmount, block.number); + } + + //-------------------------------- + // 2. Validation Scenarios + //-------------------------------- + + function testValidationWithStandardApproval() public { + // Deploy a standard validator that approves everything + MockValidator validator = new MockValidator(MockValidator.ValidatorMode.STANDARD); + + // Create a rail with the validator + uint256 rate = 5 ether; + uint256 railId = helper.setupRailWithParameters( + USER1, + USER2, + OPERATOR, + rate, + 10, // lockupPeriod + 0, // No fixed lockup + address(validator), // Standard validator + SERVICE_FEE_RECIPIENT // operator commision receiver + ); + + // Advance several blocks + helper.advanceBlocks(5); + + // Verify standard validator approves full amount + uint256 expectedAmount = rate * 5; // 5 blocks * 5 ether + + // Settle with validation + RailSettlementHelpers.SettlementResult memory result = + settlementHelper.settleRailAndVerify(railId, block.number, expectedAmount, block.number); + + // Verify validaton note + assertEq(result.note, "Standard approved payment", "Validator note should match"); + } + + function testValidationWithMultipleRateChanges() public { + // Deploy a standard validator that approves everything + MockValidator validator = new MockValidator(MockValidator.ValidatorMode.STANDARD); + + // Setup operator approval first + helper.setupOperatorApproval( + USER1, // from + OPERATOR, + 10, + 100 ether, + MAX_LOCKUP_PERIOD // lockup period + ); + + // Create a rail with the validator + uint256 rate = 1; + uint256 expectedAmount = 0; + uint256 railId = helper.setupRailWithParameters( + USER1, + USER2, + OPERATOR, + rate, + 10, // lockupPeriod + 0, // No fixed lockup + address(validator), // Standard validator + SERVICE_FEE_RECIPIENT // operator commision receiver + ); + + vm.startPrank(OPERATOR); + while (rate++ < 10) { + // Advance several blocks + payments.modifyRailPayment(railId, rate, 0); + 
expectedAmount += rate * 5; + helper.advanceBlocks(5); + } + vm.stopPrank(); + + // Settle with validation + RailSettlementHelpers.SettlementResult memory result = + settlementHelper.settleRailAndVerify(railId, block.number, expectedAmount, block.number); + + // Verify validator note + assertEq(result.note, "Standard approved payment", "Validator note should match"); + } + + function testValidationWithReducedAmount() public { + // Deploy an validator that reduces payment amounts + MockValidator validator = new MockValidator(MockValidator.ValidatorMode.REDUCE_AMOUNT); + validator.configure(80); // 80% of the original amount + + // Create a rail with the validator + uint256 rate = 10 ether; + uint256 railId = helper.setupRailWithParameters( + USER1, + USER2, + OPERATOR, + rate, + 10, // lockupPeriod + 0, // No fixed lockup + address(validator), // Reduced amount validator + SERVICE_FEE_RECIPIENT // operator commision receiver + ); + + // Advance several blocks + helper.advanceBlocks(5); + + // Verify reduced amount (80% of original) + uint256 expectedAmount = (rate * 5 * 80) / 100; // 5 blocks * 10 ether * 80% + uint256 expectedNetworkFee = + expectedAmount * payments.NETWORK_FEE_NUMERATOR() / payments.NETWORK_FEE_DENOMINATOR(); + uint256 expectedNetPayeeAmount = expectedAmount - expectedNetworkFee; + + // Settle with validation - verify against NET payee amount + RailSettlementHelpers.SettlementResult memory result = + settlementHelper.settleRailAndVerify(railId, block.number, expectedAmount, block.number); + + assertEq(result.netPayeeAmount, expectedNetPayeeAmount, "Net payee amount incorrect"); + assertEq(result.operatorCommission, 0, "Operator commission incorrect"); + + // Verify validator note + assertEq(result.note, "Validator reduced payment amount", "Validator note should match"); + } + + function testValidationWithReducedDuration() public { + // Deploy an validator that reduces settlement duration + MockValidator validator = new 
MockValidator(MockValidator.ValidatorMode.REDUCE_DURATION); + validator.configure(60); // 60% of the original duration + + // Create a rail with the validator + uint256 rate = 10 ether; + uint256 railId = helper.setupRailWithParameters( + USER1, + USER2, + OPERATOR, + rate, + 10, // lockupPeriod + 0, // No fixed lockup + address(validator), // Reduced duration validator + SERVICE_FEE_RECIPIENT // operator commision receiver + ); + + // Advance several blocks + uint256 advanceBlocks = 5; + helper.advanceBlocks(advanceBlocks); + + // Calculate expected settlement duration (60% of 5 blocks) + uint256 expectedDuration = (advanceBlocks * 60) / 100; + uint256 expectedSettledUpto = block.number - advanceBlocks + expectedDuration; + uint256 expectedAmount = rate * expectedDuration; // expectedDuration blocks * 10 ether + + // Settle with validation + RailSettlementHelpers.SettlementResult memory result = + settlementHelper.settleRailAndVerify(railId, block.number, expectedAmount, expectedSettledUpto); + + // Verify validator note + assertEq(result.note, "Validator reduced settlement duration", "Validator note should match"); + } + + function testMaliciousValidatorHandling() public { + // Deploy a malicious validator + MockValidator validator = new MockValidator(MockValidator.ValidatorMode.MALICIOUS); + + // Create a rail with the validator + uint256 rate = 5 ether; + uint256 railId = helper.setupRailWithParameters( + USER1, + USER2, + OPERATOR, + rate, + 10, // lockupPeriod + 0, // No fixed lockup + address(validator), // Malicious validator + SERVICE_FEE_RECIPIENT // operator commision receiver + ); + + // Advance several blocks + helper.advanceBlocks(5); + + // Attempt settlement with malicious validator - should revert + vm.prank(USER1); + vm.expectRevert( + abi.encodeWithSelector( + Errors.ValidatorSettledBeyondSegmentEnd.selector, railId, block.number, block.number + 10 + ) + ); + payments.settleRail(railId, block.number); + + // Set the validator to return invalid 
amount but valid settlement duration + validator.setMode(MockValidator.ValidatorMode.CUSTOM_RETURN); + uint256 proposedAmount = rate * 5; // 5 blocks * 5 ether + uint256 invalidAmount = proposedAmount * 2; // Double the correct amount + validator.setCustomValues(invalidAmount, block.number, "Attempting excessive payment"); + + // Attempt settlement with excessive amount - should also revert + vm.prank(USER1); + // error ValidatorModifiedAmountExceedsMaximum(uint256 railId, uint256 maxAllowed, uint256 attempted); + vm.expectRevert( + abi.encodeWithSelector( + Errors.ValidatorModifiedAmountExceedsMaximum.selector, railId, proposedAmount, invalidAmount + ) + ); + payments.settleRail(railId, block.number); + } + + //-------------------------------- + // 3. Termination and Edge Cases + //-------------------------------- + + function testRailTerminationAndSettlement() public { + uint256 rate = 10 ether; + uint256 lockupPeriod = 5; + uint256 railId = helper.setupRailWithParameters( + USER1, + USER2, + OPERATOR, + rate, + lockupPeriod, // lockupPeriod + 0, // No fixed lockup + address(0), // No validator + SERVICE_FEE_RECIPIENT // operator commision receiver + ); + + // Advance several blocks + helper.advanceBlocks(3); + + // First settlement + uint256 expectedAmount1 = rate * 3; // 3 blocks * 10 ether + settlementHelper.settleRailAndVerify(railId, block.number, expectedAmount1, block.number); + + // Terminate the rail + vm.prank(OPERATOR); + payments.terminateRail(railId); + + // Verify rail was terminated - check endEpoch is set + Payments.RailView memory rail = payments.getRail(railId); + assertTrue(rail.endEpoch > 0, "Rail should be terminated"); + + // Verify endEpoch calculation: should be the lockupLastSettledAt (current block) + lockupPeriod + Payments.Account memory account = helper.getAccountData(USER1); + assertEq( + rail.endEpoch, + account.lockupLastSettledAt + rail.lockupPeriod, + "End epoch should be account lockup last settled at + lockup period" + ); + + 
// Advance more blocks + helper.advanceBlocks(10); + + // Get balances before final settlement + Payments.Account memory userBefore = helper.getAccountData(USER1); + Payments.Account memory recipientBefore = helper.getAccountData(USER2); + + // Final settlement after termination + vm.prank(USER1); + + ( + uint256 settledAmount, + uint256 netPayeeAmount, + uint256 totalOperatorCommission, + uint256 totalNetworkFee, + uint256 settledUpto, + ) = payments.settleRail(railId, block.number); + + // Verify that total settled amount is equal to the sum of net payee amount and operator commission + assertEq( + settledAmount, + netPayeeAmount + totalOperatorCommission + totalNetworkFee, + "Mismatch in settled amount breakdown" + ); + + // Should settle up to endEpoch, which is lockupPeriod blocks after the last settlement + uint256 expectedAmount2 = rate * lockupPeriod; // lockupPeriod = 5 blocks + assertEq(settledAmount, expectedAmount2, "Final settlement amount incorrect"); + assertEq(settledUpto, rail.endEpoch, "Final settled up to incorrect"); + + // Get balances after settlement + Payments.Account memory userAfter = helper.getAccountData(USER1); + Payments.Account memory recipientAfter = helper.getAccountData(USER2); + + assertEq( + userBefore.funds - userAfter.funds, expectedAmount2, "User funds not reduced correctly in final settlement" + ); + assertEq( + recipientAfter.funds - recipientBefore.funds, + netPayeeAmount, + "Recipient funds not increased correctly in final settlement" + ); + + // Verify account lockup is cleared after full settlement + assertEq(userAfter.lockupCurrent, 0, "Account lockup should be cleared after full rail settlement"); + assertEq(userAfter.lockupRate, 0, "Account lockup rate should be zero after full rail settlement"); + } + + function testSettleAlreadyFullySettledRail() public { + // Create a rail with standard rate + uint256 rate = 5 ether; + uint256 railId = helper.setupRailWithParameters( + USER1, + USER2, + OPERATOR, + rate, + 10, // 
lockupPeriod + 0, // No fixed lockup + address(0), // No validator + SERVICE_FEE_RECIPIENT // operator commision receiver + ); + + // Settle immediately without advancing blocks - should be a no-op + RailSettlementHelpers.SettlementResult memory result = + settlementHelper.settleRailAndVerify(railId, block.number, 0, block.number); + + console.log("result.note", result.note); + + // Verify the note indicates already settled + assertTrue( + bytes(result.note).length > 0 + && stringsEqual(result.note, string.concat("already settled up to epoch ", vm.toString(block.number))), + "Note should indicate already settled" + ); + } + + function testSettleRailWithRateChangeQueueForReducedAmountValidation() public { + // Deploy an validator that reduces the payment amount by a percentage + uint256 factor = 80; // 80% of the original amount + MockValidator validator = new MockValidator(MockValidator.ValidatorMode.REDUCE_AMOUNT); + validator.configure(factor); + + // Create a rail with the validator + uint256 rate = 5 ether; + uint256 lockupPeriod = 10; + uint256 railId = helper.setupRailWithParameters( + USER1, + USER2, + OPERATOR, + rate, + lockupPeriod, + 0, // No fixed lockup + address(validator), + SERVICE_FEE_RECIPIENT // operator commision receiver + ); + + // Simulate 5 blocks passing (blocks 1-5) + helper.advanceBlocks(5); + + // Increase operator allowances to allow rate modification + // We double the rate allowance and add buffer for lockup + (, uint256 rateAllowance, uint256 lockupAllowance,,,) = helper.getOperatorAllowanceAndUsage(USER1, OPERATOR); + helper.setupOperatorApproval(USER1, OPERATOR, rateAllowance * 2, lockupAllowance + 10 * rate, MAX_LOCKUP_PERIOD); + + // Operator doubles the payment rate from 5 ETH to 10 ETH per block + // This creates a rate change in the queue + vm.prank(OPERATOR); + payments.modifyRailPayment(railId, rate * 2, 0); + vm.stopPrank(); + + // Simulate 5 blocks passing (blocks 6-10) + helper.advanceBlocks(5); + + // Calculate expected 
settlement: + // Phase 1 (blocks 1-5): 5 blocks at 5 ETH/block → 25 ETH total -> after validation (80%) -> 20 ETH total + // Phase 2 (blocks 6-10): 5 blocks at 10 ETH/block → 50 ETH total -> after validation (80%) -> 40 ETH total + // Total after validation (80%) -> 60 ETH total + uint256 expectedDurationOldRate = 5; // Epochs 1-5 ( rate = 5 ) + uint256 expectedDurationNewRate = 5; // Epochs 6-10 ( rate = 10 ) + uint256 expectedAmountOldRate = (rate * expectedDurationOldRate * factor) / 100; // 20 ETH (25 * 0.8) + uint256 expectedAmountNewRate = ((rate * 2) * expectedDurationNewRate * factor) / 100; // 40 ETH (50 * 0.8) + uint256 expectedAmount = expectedAmountOldRate + expectedAmountNewRate; // 60 ETH total + + // settle and verify rail + RailSettlementHelpers.SettlementResult memory result = + settlementHelper.settleRailAndVerify(railId, block.number, expectedAmount, block.number); + + console.log("result.note", result.note); + } + + function testSettleRailWithRateChangeQueueForReducedDurationValidation() public { + // Deploy an validator that reduces the duration by a percentage + uint256 factor = 60; // 60% of the original duration + MockValidator validator = new MockValidator(MockValidator.ValidatorMode.REDUCE_DURATION); + validator.configure(factor); + + // Create a rail with the validator + uint256 rate = 5 ether; + uint256 lockupPeriod = 10; + uint256 railId = helper.setupRailWithParameters( + USER1, + USER2, + OPERATOR, + rate, + lockupPeriod, + 0, // No fixed lockup + address(validator), + SERVICE_FEE_RECIPIENT // operator commision receiver + ); + + // Simulate 5 blocks passing (blocks 1-5) + helper.advanceBlocks(5); + + // Initial settlement for the first 5 blocks ( epochs 1-5 ) + // Duration reduction: 5 blocks * 60% = 3 blocks settled + // Amount: 3 blocks * 5 ETH = 15 ETH + // LastSettledUpto: 1 + (6 - 1) * 60% = 4 + vm.prank(USER1); + payments.settleRail(railId, block.number); + uint256 lastSettledUpto = 1 + ((block.number - 1) * factor) / 100; // 
validator only settles for 60% of the duration (block.number - lastSettledUpto = epoch 1) + vm.stopPrank(); + + // update operator allowances for rate modification + (, uint256 rateAllowance, uint256 lockupAllowance,,,) = helper.getOperatorAllowanceAndUsage(USER1, OPERATOR); + helper.setupOperatorApproval(USER1, OPERATOR, rateAllowance * 2, lockupAllowance + 10 * rate, MAX_LOCKUP_PERIOD); + + // Operator doubles the payment rate from 5 ETH to 10 ETH per block + // This creates a rate change in the queue + vm.prank(OPERATOR); + payments.modifyRailPayment(railId, rate * 2, 0); + vm.stopPrank(); + + // Simulate 5 blocks passing (blocks 6-10) + helper.advanceBlocks(5); + + // Expected settlement calculation: + // - Rate change was at block 5, creating a boundary + // - Duration reduction applies only to the first rate segment (epochs 1-5) + // - We already settled 3 blocks (1-3) in the first settlement + // - Remaining in first segment: 2 blocks (4-5) at original rate + // - Duration reduction: 2 blocks * 60% = 1.2 blocks (truncated to 1 block) + // - Amount: 1 epoch * 5 ETH/epoch = 5 ETH + // - rail.settledUpto = 4 + 1 = 5 < segmentBoundary ( 6 ) => doesn't go to next settlement segment (epochs 6-10) + uint256 firstSegmentEndBoundary = 6; // Block where rate change occurred + uint256 expectedDuration = ((firstSegmentEndBoundary - lastSettledUpto) * factor) / 100; // (6-3)*0.6 = 1.8 → 1 block + uint256 expectedSettledUpto = lastSettledUpto + expectedDuration; // 4 + 1 = 5 + uint256 expectedAmount = rate * expectedDuration; // 5 ETH/epoch * 1 epoch = 5 ETH + + // settle and verify rail + RailSettlementHelpers.SettlementResult memory result = + settlementHelper.settleRailAndVerify(railId, block.number, expectedAmount, expectedSettledUpto); + + console.log("result.note", result.note); + } + + function testModifyRailPayment_SkipsZeroRateEnqueue() public { + uint256 initialRate = 0; + uint256 railId = helper.setupRailWithParameters( + USER1, + USER2, + OPERATOR, + 
initialRate, + 10, // lockupPeriod + 0, // fixed lockup + address(0), // no arbiter + SERVICE_FEE_RECIPIENT // operator commision receiver + ); + + // give the operator enough allowance to change the rate + helper.setupOperatorApproval(USER1, OPERATOR, 10 ether, 100 ether, MAX_LOCKUP_PERIOD); + + // advance a few blocks so there is “history” to mark as settled + helper.advanceBlocks(4); + uint256 beforeBlock = block.number; + + // change rate: 0 → 5 ether + vm.prank(OPERATOR); + payments.modifyRailPayment(railId, 5 ether, 0); + vm.stopPrank(); + + // queue must still be empty + assertEq(payments.getRateChangeQueueSize(railId), 0, "queue should stay empty"); + + // settledUpTo must equal the block where modification occurred + Payments.RailView memory rv = payments.getRail(railId); + assertEq(rv.settledUpTo, beforeBlock, "settledUpTo should equal current block"); + } + + //-------------------------------- + // Helper Functions + //-------------------------------- + + // Helper to compare strings + function stringsEqual(string memory a, string memory b) internal pure returns (bool) { + return keccak256(abi.encodePacked(a)) == keccak256(abi.encodePacked(b)); + } + + function testSettlementWithOperatorCommission() public { + // Setup operator approval first + helper.setupOperatorApproval( + USER1, // from + OPERATOR, + 10 ether, // rate allowance + 100 ether, // lockup allowance + MAX_LOCKUP_PERIOD // max lockup period + ); + + // Create rail with 2% operator commission (200 BPS) + uint256 operatorCommissionBps = 200; + uint256 railId; + vm.startPrank(OPERATOR); + railId = payments.createRail( + token, + USER1, + USER2, + address(0), // no validator + operatorCommissionBps, + SERVICE_FEE_RECIPIENT // operator commision receiver + ); + vm.stopPrank(); + + // Set rail parameters using modify functions + uint256 rate = 10 ether; + uint256 lockupPeriod = 5; + vm.startPrank(OPERATOR); + payments.modifyRailPayment(railId, rate, 0); + payments.modifyRailLockup(railId, 
lockupPeriod, 0); // no fixed lockup + vm.stopPrank(); + + // Advance time + uint256 elapsedBlocks = 5; + helper.advanceBlocks(elapsedBlocks); + + // --- Balances Before --- + Payments.Account memory payerBefore = helper.getAccountData(USER1); + Payments.Account memory payeeBefore = helper.getAccountData(USER2); + Payments.Account memory operatorBefore = helper.getAccountData(OPERATOR); + Payments.Account memory serviceFeeRecipientBefore = helper.getAccountData(SERVICE_FEE_RECIPIENT); + + // --- Expected Calculations --- + uint256 expectedSettledAmount = rate * elapsedBlocks; + uint256 expectedNetworkFee = + expectedSettledAmount * payments.NETWORK_FEE_NUMERATOR() / payments.NETWORK_FEE_DENOMINATOR(); + uint256 expectedOperatorCommission = + ((expectedSettledAmount - expectedNetworkFee) * operatorCommissionBps) / payments.COMMISSION_MAX_BPS(); + uint256 expectedNetPayeeAmount = expectedSettledAmount - expectedNetworkFee - expectedOperatorCommission; + + // --- Settle Rail --- + vm.startPrank(USER1); // Any participant can settle + ( + uint256 settledAmount, + uint256 netPayeeAmount, + uint256 operatorCommission, + uint256 totalNetworkFee, + uint256 settledUpto, + ) = payments.settleRail(railId, block.number); + vm.stopPrank(); + + // --- Verification --- + + // 1. Return values from settleRail + assertEq(settledAmount, expectedSettledAmount, "Returned settledAmount incorrect"); + assertEq(netPayeeAmount, expectedNetPayeeAmount, "Returned netPayeeAmount incorrect"); + assertEq(operatorCommission, expectedOperatorCommission, "Returned operatorCommission incorrect"); + assertEq(totalNetworkFee, expectedNetworkFee, "Returned networkFee incorrect"); + assertEq(settledUpto, block.number, "Returned settledUpto incorrect"); + + // 2. 
Balances after settlement + Payments.Account memory payerAfter = helper.getAccountData(USER1); + Payments.Account memory payeeAfter = helper.getAccountData(USER2); + Payments.Account memory operatorAfter = helper.getAccountData(OPERATOR); + Payments.Account memory serviceFeeRecipientAfter = helper.getAccountData(SERVICE_FEE_RECIPIENT); + + assertEq(payerAfter.funds, payerBefore.funds - expectedSettledAmount, "Payer funds mismatch"); + assertEq(payeeAfter.funds, payeeBefore.funds + expectedNetPayeeAmount, "Payee funds mismatch"); + assertEq(operatorAfter.funds, operatorBefore.funds, "Operator funds mismatch"); + assertEq( + serviceFeeRecipientAfter.funds, + serviceFeeRecipientBefore.funds + expectedOperatorCommission, + "Service fee recipient funds mismatch" + ); + } + + function testSettleRailWithNonZeroZeroNonZeroRateSequence() public { + // Setup operator approval for rate modifications + helper.setupOperatorApproval( + USER1, + OPERATOR, + 25 ether, // rate allowance + 200 ether, // lockup allowance + MAX_LOCKUP_PERIOD + ); + + // Create a rail with initial rate + uint256 initialRate = 5 ether; + uint256 railId = helper.setupRailWithParameters( + USER1, + USER2, + OPERATOR, + initialRate, + 10, // lockupPeriod + 0, // No fixed lockup + address(0), // No arbiter + SERVICE_FEE_RECIPIENT // operator commision receiver + ); + + // Advance 3 blocks at initial rate (5 ether/block) + helper.advanceBlocks(3); + + // Change rate to zero + vm.prank(OPERATOR); + payments.modifyRailPayment(railId, 0, 0); + vm.stopPrank(); + + // Advance 4 blocks at zero rate (no payment) + helper.advanceBlocks(4); + + // Change rate to new non-zero rate + uint256 finalRate = 8 ether; + vm.prank(OPERATOR); + payments.modifyRailPayment(railId, finalRate, 0); + vm.stopPrank(); + + // Advance 5 blocks at final rate (8 ether/block) + helper.advanceBlocks(5); + + // Calculate expected settlement: + // Phase 1 (blocks 1-3): 3 blocks at 5 ether/block = 15 ether + // Phase 2 (blocks 4-7): 4 blocks 
at 0 ether/block = 0 ether + // Phase 3 (blocks 8-12): 5 blocks at 8 ether/block = 40 ether + // Total expected: 15 + 0 + 40 = 55 ether + uint256 expectedAmount = (initialRate * 3) + (0 * 4) + (finalRate * 5); + + // Settle and verify + RailSettlementHelpers.SettlementResult memory result = + settlementHelper.settleRailAndVerify(railId, block.number, expectedAmount, block.number); + + console.log("Non-zero -> Zero -> Non-zero settlement note:", result.note); + } + + function testSettleRailWithZeroNonZeroZeroRateSequence() public { + // Setup operator approval for rate modifications + helper.setupOperatorApproval( + USER1, + OPERATOR, + 15 ether, // rate allowance + 150 ether, // lockup allowance + MAX_LOCKUP_PERIOD + ); + + // Create a rail starting with zero rate + uint256 initialRate = 0; + uint256 railId = helper.setupRailWithParameters( + USER1, + USER2, + OPERATOR, + initialRate, + 10, // lockupPeriod + 0, // No fixed lockup + address(0), // No arbiter + SERVICE_FEE_RECIPIENT // operator commision receiver + ); + + // Advance 2 blocks at zero rate (no payment) + helper.advanceBlocks(2); + + // Change rate to non-zero + uint256 middleRate = 6 ether; + vm.prank(OPERATOR); + payments.modifyRailPayment(railId, middleRate, 0); + vm.stopPrank(); + + // Advance 4 blocks at middle rate (6 ether/block) + helper.advanceBlocks(4); + + // Change rate back to zero + vm.prank(OPERATOR); + payments.modifyRailPayment(railId, 0, 0); + vm.stopPrank(); + + // Advance 3 blocks at zero rate again (no payment) + helper.advanceBlocks(3); + + // Calculate expected settlement: + // Phase 1 (blocks 1-2): 2 blocks at 0 ether/block = 0 ether + // Phase 2 (blocks 3-6): 4 blocks at 6 ether/block = 24 ether + // Phase 3 (blocks 7-9): 3 blocks at 0 ether/block = 0 ether + // Total expected: 0 + 24 + 0 = 24 ether + uint256 expectedAmount = (0 * 2) + (middleRate * 4) + (0 * 3); + + // Settle and verify + RailSettlementHelpers.SettlementResult memory result = + 
settlementHelper.settleRailAndVerify(railId, block.number, expectedAmount, block.number); + + console.log("Zero -> Non-zero -> Zero settlement note:", result.note); + } + + function testPartialSettleOfZeroSegment() public { + uint256 rateOn = 1; + uint256 rateOff = 0; + scaffoldPartialSettleOfSegment(rateOn, rateOff); + } + + function testPartialSettleOfNonZeroSegment() public { + uint256 rateOn = 2; + uint256 rateOff = 1; + scaffoldPartialSettleOfSegment(rateOn, rateOff); + } + + function scaffoldPartialSettleOfSegment(uint256 rateOn, uint256 rateOff) public { + helper.setupOperatorApproval(USER1, OPERATOR, 1000 ether, 100000 ether, MAX_LOCKUP_PERIOD); + + uint256 railId = helper.setupRailWithParameters( + USER1, + USER2, + OPERATOR, + rateOn, + 0, // No lockup period + 0, // No fixed lockup + address(0), // No arbiter + SERVICE_FEE_RECIPIENT // operator commision receiver + ); + + /* + | rate == 1 | rate == 0 | rate == 1 | + | 100 blocks | 100 blocks | 100 blocks | + X^ Y^ + First settle Second settle + */ + // Advance 100 blocks and turn rate off + // This adds a rate == 1, untilEpoch == 100 segment to the queue + helper.advanceBlocks(100); + vm.prank(OPERATOR); + payments.modifyRailPayment(railId, rateOff, 0); + vm.stopPrank(); + + // Advance 100 blocks and turn rate on + // This adds a rate == 0, untilEpoch == 200 segment to the queue + helper.advanceBlocks(100); + vm.prank(OPERATOR); + payments.modifyRailPayment(railId, rateOn, 0); + vm.stopPrank(); + + // Advance 100 blocks and turn rate off + // This adds a final rate == 1, untilEpoch == 300 segment to the queue + helper.advanceBlocks(100); + vm.prank(OPERATOR); + payments.modifyRailPayment(railId, rateOff, 0); + vm.stopPrank(); + + // Settle partway through the second segment + settlementHelper.settleRailAndVerify(railId, 151, 100 * rateOn + 50 * rateOff, 151); + + // Settle the whole rail, we should see another 100 tokens transferred + settlementHelper.settleRailAndVerify(railId, 301, 50 * rateOff + 100 * 
rateOn, 301); + } + + function testModifyTerminatedRailBeyondEndEpoch() public { + // Create a rail with standard parameters including fixed lockup + uint256 rate = 10 ether; + uint256 lockupPeriod = 5; + uint256 fixedLockup = 10 ether; // Add fixed lockup for one-time payment tests + uint256 railId = helper.setupRailWithParameters( + USER1, + USER2, + OPERATOR, + rate, + lockupPeriod, + fixedLockup, + address(0), // No validator + SERVICE_FEE_RECIPIENT + ); + + // Advance and settle to ensure the rail is active + helper.advanceBlocks(3); + vm.prank(USER1); + payments.settleRail(railId, block.number); + + // Terminate the rail + vm.prank(OPERATOR); + payments.terminateRail(railId); + + // Get the rail's end epoch + Payments.RailView memory rail = payments.getRail(railId); + uint256 endEpoch = rail.endEpoch; + + // Advance blocks to reach the end epoch + uint256 blocksToAdvance = endEpoch - block.number; + helper.advanceBlocks(blocksToAdvance); + + // Now we're at the end epoch - try to modify rate + vm.prank(OPERATOR); + vm.expectRevert( + abi.encodeWithSelector( + Errors.CannotModifyTerminatedRailBeyondEndEpoch.selector, railId, endEpoch, block.number + ) + ); + payments.modifyRailPayment(railId, 5 ether, 0); + + // Also try to make a one-time payment + vm.prank(OPERATOR); + vm.expectRevert( + abi.encodeWithSelector( + Errors.CannotModifyTerminatedRailBeyondEndEpoch.selector, railId, endEpoch, block.number + ) + ); + payments.modifyRailPayment(railId, rate, 1 ether); + + // Advance one more block to go beyond the end epoch + helper.advanceBlocks(1); + + // Try to modify rate again - should still revert + vm.prank(OPERATOR); + vm.expectRevert( + abi.encodeWithSelector( + Errors.CannotModifyTerminatedRailBeyondEndEpoch.selector, railId, endEpoch, block.number + ) + ); + payments.modifyRailPayment(railId, 5 ether, 0); + + // Try to make both rate change and one-time payment + vm.prank(OPERATOR); + vm.expectRevert( + abi.encodeWithSelector( + 
Errors.CannotModifyTerminatedRailBeyondEndEpoch.selector, railId, endEpoch, block.number + ) + ); + payments.modifyRailPayment(railId, 5 ether, 1 ether); + } +} diff --git a/packages/pay/test/RateChangeQueue.t.sol b/packages/pay/test/RateChangeQueue.t.sol new file mode 100644 index 00000000..34c2adbb --- /dev/null +++ b/packages/pay/test/RateChangeQueue.t.sol @@ -0,0 +1,217 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.27; + +import {Test} from "forge-std/Test.sol"; +import {RateChangeQueue} from "../src/RateChangeQueue.sol"; + +contract RateChangeQueueTest is Test { + using RateChangeQueue for RateChangeQueue.Queue; + + struct TestQueueContainer { + RateChangeQueue.Queue queue; + } + + TestQueueContainer private queueContainer; + + function queue() internal view returns (RateChangeQueue.Queue storage) { + return queueContainer.queue; + } + + function createEmptyQueue() internal { + // Clear any existing data + RateChangeQueue.Queue storage q = queue(); + while (!q.isEmpty()) { + q.dequeue(); + } + } + + function createSingleItemQueue(uint256 rate, uint256 untilEpoch) + internal + returns (RateChangeQueue.RateChange memory) + { + createEmptyQueue(); + RateChangeQueue.enqueue(queue(), rate, untilEpoch); + assertEq(RateChangeQueue.size(queue()), 1); + return RateChangeQueue.RateChange(rate, untilEpoch); + } + + function createMultiItemQueue(uint256[] memory rates, uint256[] memory untilEpochs) + internal + returns (RateChangeQueue.RateChange[] memory) + { + require(rates.length == untilEpochs.length, "Input arrays must have same length"); + + createEmptyQueue(); + + RateChangeQueue.RateChange[] memory items = new RateChangeQueue.RateChange[](rates.length); + + for (uint256 i = 0; i < rates.length; i++) { + RateChangeQueue.enqueue(queue(), rates[i], untilEpochs[i]); + items[i] = RateChangeQueue.RateChange(rates[i], untilEpochs[i]); + } + + assertEq(RateChangeQueue.size(queue()), rates.length); + return items; + } + + function 
createQueueWithAdvancedIndices(uint256 cycles) internal { + createEmptyQueue(); + + // Create cycles of filling and emptying + for (uint256 i = 0; i < cycles; i++) { + // Fill with 3 items + RateChangeQueue.enqueue(queue(), 100 + i, 5 + i); + RateChangeQueue.enqueue(queue(), 200 + i, 6 + i); + RateChangeQueue.enqueue(queue(), 300 + i, 7 + i); + + // Empty + RateChangeQueue.dequeue(queue()); + RateChangeQueue.dequeue(queue()); + RateChangeQueue.dequeue(queue()); + } + + // Queue should be empty now but with advanced indices + assertTrue(RateChangeQueue.isEmpty(queue())); + } + + function assertRateChangeEq( + RateChangeQueue.RateChange memory actual, + RateChangeQueue.RateChange memory expected, + string memory message + ) internal pure { + assertEq(actual.rate, expected.rate, string.concat(message, " - rate mismatch")); + assertEq(actual.untilEpoch, expected.untilEpoch, string.concat(message, " - untilEpoch mismatch")); + } + + function testBasicQueueOperations() public { + createEmptyQueue(); + + RateChangeQueue.enqueue(queue(), 100, 5); + assertEq(RateChangeQueue.size(queue()), 1); + RateChangeQueue.enqueue(queue(), 200, 10); + RateChangeQueue.enqueue(queue(), 300, 15); + assertEq(RateChangeQueue.size(queue()), 3); + + // Verify peek (head) and peekTail operations + RateChangeQueue.RateChange memory head = RateChangeQueue.peek(queue()); + assertRateChangeEq(head, RateChangeQueue.RateChange(100, 5), "Head should match first enqueued item"); + + RateChangeQueue.RateChange memory tail = RateChangeQueue.peekTail(queue()); + assertRateChangeEq(tail, RateChangeQueue.RateChange(300, 15), "Tail should match last enqueued item"); + + // Size should remain unchanged after peek operations + assertEq(RateChangeQueue.size(queue()), 3); + + // Dequeue and verify FIFO order + RateChangeQueue.RateChange memory first = RateChangeQueue.dequeue(queue()); + assertRateChangeEq(first, RateChangeQueue.RateChange(100, 5), "First dequeued item mismatch"); + 
assertEq(RateChangeQueue.size(queue()), 2); + + RateChangeQueue.RateChange memory second = RateChangeQueue.dequeue(queue()); + assertRateChangeEq(second, RateChangeQueue.RateChange(200, 10), "Second dequeued item mismatch"); + assertEq(RateChangeQueue.size(queue()), 1); + + RateChangeQueue.RateChange memory third = RateChangeQueue.dequeue(queue()); + assertRateChangeEq(third, RateChangeQueue.RateChange(300, 15), "Third dequeued item mismatch"); + + // Queue should now be empty + assertTrue(RateChangeQueue.isEmpty(queue())); + assertEq(RateChangeQueue.size(queue()), 0); + } + + /// forge-config: default.allow_internal_expect_revert = true + function testEmptyQueueDequeue() public { + createEmptyQueue(); + + // Test dequeue on empty queue + vm.expectRevert("Queue is empty"); + RateChangeQueue.dequeue(queue()); + } + + /// forge-config: default.allow_internal_expect_revert = true + function testEmptyQueuePeek() public { + createEmptyQueue(); + + // Test peek on empty queue + vm.expectRevert("Queue is empty"); + RateChangeQueue.peek(queue()); + } + + /// forge-config: default.allow_internal_expect_revert = true + function testEmptyQueuePeekTail() public { + createEmptyQueue(); + + // Test peekTail on empty queue + vm.expectRevert("Queue is empty"); + RateChangeQueue.peekTail(queue()); + } + + function testBoundaryValues() public { + // Test with zero values + RateChangeQueue.RateChange memory zeroItem = createSingleItemQueue(0, 0); + RateChangeQueue.RateChange memory peekedZero = RateChangeQueue.peek(queue()); + assertRateChangeEq(peekedZero, zeroItem, "Zero values not stored correctly"); + RateChangeQueue.dequeue(queue()); + + // Test with max uint values + uint256 maxUint = type(uint256).max; + RateChangeQueue.RateChange memory maxItem = createSingleItemQueue(maxUint, maxUint); + RateChangeQueue.RateChange memory peekedMax = RateChangeQueue.peek(queue()); + assertRateChangeEq(peekedMax, maxItem, "Max values not stored correctly"); + } + + function 
testQueueReusability() public { + // Test emptying and reusing a queue + createSingleItemQueue(100, 5); + RateChangeQueue.dequeue(queue()); + assertTrue(RateChangeQueue.isEmpty(queue())); + + // Reuse after emptying + RateChangeQueue.enqueue(queue(), 200, 10); + assertEq(RateChangeQueue.size(queue()), 1); + + RateChangeQueue.RateChange memory peeked = RateChangeQueue.peek(queue()); + assertRateChangeEq(peeked, RateChangeQueue.RateChange(200, 10), "Queue reuse failed"); + + // Test with advanced indices + RateChangeQueue.dequeue(queue()); + createQueueWithAdvancedIndices(10); + + // Verify queue still functions correctly after index cycling + RateChangeQueue.enqueue(queue(), 999, 999); + assertEq(RateChangeQueue.size(queue()), 1); + + peeked = RateChangeQueue.peek(queue()); + assertRateChangeEq(peeked, RateChangeQueue.RateChange(999, 999), "Queue with advanced indices failed"); + } + + function testMixedOperations() public { + createEmptyQueue(); + + // Series of mixed enqueue/dequeue operations + RateChangeQueue.enqueue(queue(), 100, 5); + RateChangeQueue.enqueue(queue(), 200, 10); + + RateChangeQueue.RateChange memory first = RateChangeQueue.dequeue(queue()); + assertRateChangeEq(first, RateChangeQueue.RateChange(100, 5), "First dequeue failed"); + + RateChangeQueue.enqueue(queue(), 300, 15); + RateChangeQueue.enqueue(queue(), 400, 20); + + assertEq(RateChangeQueue.size(queue()), 3, "Queue size incorrect after mixed operations"); + + // Verify peek at both ends + RateChangeQueue.RateChange memory head = RateChangeQueue.peek(queue()); + assertRateChangeEq(head, RateChangeQueue.RateChange(200, 10), "Head incorrect after mixed operations"); + + RateChangeQueue.RateChange memory tail = RateChangeQueue.peekTail(queue()); + assertRateChangeEq(tail, RateChangeQueue.RateChange(400, 20), "Tail incorrect after mixed operations"); + + // Empty the queue + RateChangeQueue.dequeue(queue()); + RateChangeQueue.dequeue(queue()); + RateChangeQueue.dequeue(queue()); + + 
assertTrue(RateChangeQueue.isEmpty(queue()), "Queue should be empty after all dequeues"); + } +} diff --git a/packages/pay/test/WithdrawExtraFeeToken.t.sol b/packages/pay/test/WithdrawExtraFeeToken.t.sol new file mode 100644 index 00000000..6f99270d --- /dev/null +++ b/packages/pay/test/WithdrawExtraFeeToken.t.sol @@ -0,0 +1,122 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.27; + +import {ExtraFeeToken} from "./mocks/ExtraFeeToken.sol"; +import {Errors} from "../src/Errors.sol"; +import {Payments} from "../src/Payments.sol"; +import {Test} from "forge-std/Test.sol"; + +contract WithdrawExtraFeeTokenTest is Test { + function testWithdrawFeeToken() public { + Payments payments = new Payments(); + uint256 transferFee = 10 ** 18; + ExtraFeeToken feeToken = new ExtraFeeToken(transferFee); + address user1 = vm.addr(0x1111); + address user2 = vm.addr(0x2222); + feeToken.mint(user1, 10 ** 24); + feeToken.mint(user2, 10 ** 24); + + vm.prank(user1); + feeToken.approve(address(payments), 10 ** 24); + + vm.prank(user2); + feeToken.approve(address(payments), 10 ** 24); + + vm.prank(user1); + vm.expectRevert(); + payments.deposit(feeToken, user1, 10 ** 24); + + vm.prank(user1); + payments.deposit(feeToken, user1, 10 ** 23); + + assertEq(feeToken.balanceOf(address(payments)), 10 ** 23); + (uint256 deposit,,,) = payments.accounts(feeToken, user1); + assertEq(deposit, 10 ** 23); + + vm.prank(user1); + vm.expectRevert(); + payments.withdraw(feeToken, 10 ** 23); + + vm.prank(user2); + payments.deposit(feeToken, user2, 10 ** 23); + (deposit,,,) = payments.accounts(feeToken, user2); + assertEq(deposit, 10 ** 23); + + assertEq(feeToken.balanceOf(address(payments)), 2 * 10 ** 23); + + // the other user's deposit should not allow the withdrawal + vm.prank(user1); + vm.expectRevert(); + payments.withdraw(feeToken, 10 ** 23); + + // users can still withdraw their balance + (deposit,,,) = payments.accounts(feeToken, user1); + assertEq(deposit, 10 ** 23); + 
vm.prank(user1); + payments.withdraw(feeToken, deposit - transferFee); + (deposit,,,) = payments.accounts(feeToken, user1); + assertEq(deposit, 0); + + (deposit,,,) = payments.accounts(feeToken, user2); + assertEq(deposit, 10 ** 23); + vm.prank(user2); + payments.withdraw(feeToken, deposit - transferFee); + (deposit,,,) = payments.accounts(feeToken, user2); + assertEq(deposit, 0); + + assertEq(feeToken.balanceOf(address(payments)), 0); + } + + function testWithdrawLockup() public { + Payments payments = new Payments(); + uint256 transferFee = 10 ** 18; + ExtraFeeToken feeToken = new ExtraFeeToken(transferFee); + address user1 = vm.addr(0x1111); + address user2 = vm.addr(0x1112); + feeToken.mint(user1, 10 ** 24); + feeToken.mint(user2, 10 ** 24); + + vm.prank(user1); + feeToken.approve(address(payments), 10 ** 24); + vm.prank(user1); + payments.deposit(feeToken, user1, 10 ** 24 - transferFee); + + vm.prank(user2); + feeToken.approve(address(payments), 10 ** 24); + vm.prank(user2); + payments.deposit(feeToken, user2, 10 ** 24 - transferFee); + + (uint256 deposit,,,) = payments.accounts(feeToken, user1); + assertEq(deposit, 10 ** 24 - transferFee); + + address operator = vm.addr(0x2222); + + vm.prank(user1); + payments.setOperatorApproval(feeToken, operator, true, deposit, deposit, deposit); + vm.prank(operator); + uint256 railId = payments.createRail(feeToken, user1, operator, address(0), 0, address(0)); + + uint256 lockup = 10 ** 17; + vm.prank(operator); + payments.modifyRailLockup(railId, 0, lockup); + + vm.prank(user1); + vm.expectRevert(abi.encodeWithSelector(Errors.InsufficientUnlockedFunds.selector, deposit - lockup, deposit)); + payments.withdraw(feeToken, deposit); + + vm.prank(user1); + vm.expectRevert( + abi.encodeWithSelector( + Errors.InsufficientUnlockedFunds.selector, deposit - lockup, deposit - lockup + transferFee + ) + ); + payments.withdraw(feeToken, deposit - lockup); + + vm.prank(user1); + 
vm.expectRevert(abi.encodeWithSelector(Errors.InsufficientUnlockedFunds.selector, deposit - lockup, deposit)); + payments.withdraw(feeToken, deposit - transferFee); + + vm.prank(user1); + payments.withdraw(feeToken, deposit - transferFee - lockup); + } +} diff --git a/packages/pay/test/helpers/BaseTestHelper.sol b/packages/pay/test/helpers/BaseTestHelper.sol new file mode 100644 index 00000000..c8449b61 --- /dev/null +++ b/packages/pay/test/helpers/BaseTestHelper.sol @@ -0,0 +1,26 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.27; + +import {Test} from "forge-std/Test.sol"; + +contract BaseTestHelper is Test { + uint256 internal ownerSk = 0x01; + uint256 internal user1Sk = 0x11; + uint256 internal user2Sk = 0x12; + uint256 internal user3Sk = 0x13; + uint256 internal operatorSk = 0x21; + uint256 internal operator2Sk = 0x22; + uint256 internal validatorSk = 0x31; + uint256 internal serviceFeeRecipientSk = 0x41; + uint256 internal relayerSk = 0x51; + + address public immutable OWNER = vm.addr(ownerSk); + address public immutable USER1 = vm.addr(user1Sk); + address public immutable USER2 = vm.addr(user2Sk); + address public immutable USER3 = vm.addr(user3Sk); + address public immutable OPERATOR = vm.addr(operatorSk); + address public immutable OPERATOR2 = vm.addr(operator2Sk); + address public immutable VALIDATOR = vm.addr(validatorSk); + address public immutable SERVICE_FEE_RECIPIENT = vm.addr(serviceFeeRecipientSk); + address public immutable RELAYER = vm.addr(relayerSk); +} diff --git a/packages/pay/test/helpers/PaymentsTestHelpers.sol b/packages/pay/test/helpers/PaymentsTestHelpers.sol new file mode 100644 index 00000000..110c5d53 --- /dev/null +++ b/packages/pay/test/helpers/PaymentsTestHelpers.sol @@ -0,0 +1,956 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.27; + +import {Test} from "forge-std/Test.sol"; +import {Payments} from "../../src/Payments.sol"; +import {MockERC20} from "../mocks/MockERC20.sol"; +import 
{BaseTestHelper} from "./BaseTestHelper.sol"; +import {IERC20} from "@openzeppelin/contracts/token/ERC20/IERC20.sol"; +import {console} from "forge-std/console.sol"; +import {MessageHashUtils} from "@openzeppelin/contracts/utils/cryptography/MessageHashUtils.sol"; +import {Errors} from "../../src/Errors.sol"; + +contract PaymentsTestHelpers is Test, BaseTestHelper { + // Common constants + uint256 public constant INITIAL_BALANCE = 1000 ether; + uint256 public constant DEPOSIT_AMOUNT = 100 ether; + uint256 internal constant MAX_LOCKUP_PERIOD = 100; + + Payments public payments; + MockERC20 public testToken; + IERC20 private constant NATIVE_TOKEN = IERC20(address(0)); + + // Standard test environment setup with common addresses and token + function setupStandardTestEnvironment() public { + vm.startPrank(OWNER); + payments = new Payments(); + vm.stopPrank(); + + // Setup test token and assign to common users + address[] memory users = new address[](6); + users[0] = OWNER; + users[1] = USER1; + users[2] = USER2; + users[3] = OPERATOR; + users[4] = OPERATOR2; + users[5] = VALIDATOR; + + vm.deal(USER1, INITIAL_BALANCE); + vm.deal(USER2, INITIAL_BALANCE); + + testToken = setupTestToken("Test Token", "TEST", users, INITIAL_BALANCE, address(payments)); + } + + function setupTestToken( + string memory name, + string memory symbol, + address[] memory users, + uint256 initialBalance, + address paymentsContract + ) public returns (MockERC20) { + MockERC20 newToken = new MockERC20(name, symbol); + + // Mint tokens to users + for (uint256 i = 0; i < users.length; i++) { + newToken.mint(users[i], initialBalance); + + // Approve payments contract to spend tokens (i.e. 
allowance) + vm.startPrank(users[i]); + newToken.approve(paymentsContract, type(uint256).max); + vm.stopPrank(); + } + + return newToken; + } + + function getPermitSignature(uint256 privateKey, address owner, address spender, uint256 value, uint256 deadline) + public + view + returns (uint8 v, bytes32 r, bytes32 s) + { + uint256 nonce = MockERC20(testToken).nonces(owner); + bytes32 domainSeparator = MockERC20(testToken).DOMAIN_SEPARATOR(); + + bytes32 structHash = keccak256( + abi.encode( + keccak256("Permit(address owner,address spender,uint256 value,uint256 nonce,uint256 deadline)"), + owner, + spender, + value, + nonce, + deadline + ) + ); + + bytes32 digest = MessageHashUtils.toTypedDataHash(domainSeparator, structHash); + + // Sign the exact digest that `permit` expects using the provided private key + (v, r, s) = vm.sign(privateKey, digest); + } + + function makeDepositWithPermit(uint256 fromPrivateKey, address to, uint256 amount) public { + address from = vm.addr(fromPrivateKey); + uint256 deadline = block.timestamp + 1 hours; + + // Capture pre-deposit balances and state + uint256 fromBalanceBefore = _balanceOf(from, false); + uint256 paymentsBalanceBefore = _balanceOf(address(payments), false); + Payments.Account memory toAccountBefore = _getAccountData(to, false); + + // get signature for permit + (uint8 v, bytes32 r, bytes32 s) = getPermitSignature(fromPrivateKey, from, address(payments), amount, deadline); + + // Execute deposit with permit + vm.startPrank(from); + + payments.depositWithPermit(testToken, to, amount, deadline, v, r, s); + + vm.stopPrank(); + + // Capture post-deposit balances and state + uint256 fromBalanceAfter = _balanceOf(from, false); + uint256 paymentsBalanceAfter = _balanceOf(address(payments), false); + Payments.Account memory toAccountAfter = _getAccountData(to, false); + + // Asserts / Checks + _assertDepositBalances( + fromBalanceBefore, + fromBalanceAfter, + paymentsBalanceBefore, + paymentsBalanceAfter, + toAccountBefore, + 
toAccountAfter, + amount + ); + } + + function _assertDepositBalances( + uint256 fromBalanceBefore, + uint256 fromBalanceAfter, + uint256 paymentsBalanceBefore, + uint256 paymentsBalanceAfter, + Payments.Account memory toAccountBefore, + Payments.Account memory toAccountAfter, + uint256 amount + ) public pure { + assertEq(fromBalanceAfter, fromBalanceBefore - amount, "Sender's balance not reduced correctly"); + assertEq( + paymentsBalanceAfter, paymentsBalanceBefore + amount, "Payments contract balance not increased correctly" + ); + + assertEq( + paymentsBalanceAfter, paymentsBalanceBefore + amount, "Payments contract balance not increased correctly" + ); + + assertEq( + toAccountAfter.funds, toAccountBefore.funds + amount, "Recipient's account balance not increased correctly" + ); + } + + function getAccountData(address user) public view returns (Payments.Account memory) { + return _getAccountData(user, false); + } + + function getNativeAccountData(address user) public view returns (Payments.Account memory) { + return _getAccountData(user, true); + } + + function _getAccountData(address user, bool useNativeToken) public view returns (Payments.Account memory) { + IERC20 token = useNativeToken ? 
NATIVE_TOKEN : testToken; + (uint256 funds, uint256 lockupCurrent, uint256 lockupRate, uint256 lockupLastSettledAt) = + payments.accounts(token, user); + + return Payments.Account({ + funds: funds, + lockupCurrent: lockupCurrent, + lockupRate: lockupRate, + lockupLastSettledAt: lockupLastSettledAt + }); + } + + function makeDeposit(address from, address to, uint256 amount) public { + _performDeposit(from, to, amount, false); + } + + function makeNativeDeposit(address from, address to, uint256 amount) public { + _performDeposit(from, to, amount, true); + } + + function _performDeposit(address from, address to, uint256 amount, bool useNativeToken) public { + // Capture pre-deposit balances + uint256 fromBalanceBefore = _balanceOf(from, useNativeToken); + uint256 paymentsBalanceBefore = _balanceOf(address(payments), useNativeToken); + Payments.Account memory toAccountBefore = _getAccountData(to, useNativeToken); + + // Make the deposit + vm.startPrank(from); + + uint256 value = 0; + IERC20 token = testToken; + if (useNativeToken) { + value = amount; + token = NATIVE_TOKEN; + } + + payments.deposit{value: value}(token, to, amount); + vm.stopPrank(); + + // Verify token balances + uint256 fromBalanceAfter = _balanceOf(from, useNativeToken); + uint256 paymentsBalanceAfter = _balanceOf(address(payments), useNativeToken); + Payments.Account memory toAccountAfter = _getAccountData(to, useNativeToken); + + // Verify balances + assertEq(fromBalanceAfter, fromBalanceBefore - amount, "Sender's balance not reduced correctly"); + assertEq( + paymentsBalanceAfter, paymentsBalanceBefore + amount, "Payments contract balance not increased correctly" + ); + assertEq( + toAccountAfter.funds, toAccountBefore.funds + amount, "Recipient's account balance not increased correctly" + ); + console.log("toAccountAfter.funds", toAccountAfter.funds); + } + + function makeWithdrawal(address from, uint256 amount) public { + _performWithdrawal( + from, + from, // recipient is the same as sender + 
amount, + true, // use the standard withdraw function + false // use ERC20 token + ); + } + + function makeNativeWithdrawal(address from, uint256 amount) public { + _performWithdrawal( + from, + from, // recipient is the same as sender + amount, + true, // use the standard withdraw function + true // use native token + ); + } + + function expectWithdrawalToFail(address from, uint256 available, uint256 requested) public { + vm.startPrank(from); + vm.expectRevert(abi.encodeWithSelector(Errors.InsufficientUnlockedFunds.selector, available, requested)); + payments.withdraw(testToken, requested); + vm.stopPrank(); + } + + function makeWithdrawalTo(address from, address to, uint256 amount) public { + _performWithdrawal( + from, + to, + amount, + false, // use the withdrawTo function + false // use erc20 token + ); + } + + function makeNativeWithdrawalTo(address from, address to, uint256 amount) public { + _performWithdrawal( + from, + to, + amount, + false, // use the withdrawTo function + true // use native token + ); + } + + function _balanceOf(address addr, bool useNativeToken) public view returns (uint256) { + if (useNativeToken) { + return addr.balance; + } else { + return testToken.balanceOf(addr); + } + } + + function _performWithdrawal( + address from, + address to, + uint256 amount, + bool isStandardWithdrawal, + bool useNativeToken + ) private { + IERC20 token = useNativeToken ? 
NATIVE_TOKEN : testToken; + + // Capture pre-withdrawal balances + uint256 fromAccountBalanceBefore = _getAccountData(from, useNativeToken).funds; + uint256 recipientBalanceBefore = _balanceOf(to, useNativeToken); + uint256 paymentsBalanceBefore = _balanceOf(address(payments), useNativeToken); + + // Make the withdrawal + vm.startPrank(from); + if (isStandardWithdrawal) { + payments.withdraw(token, amount); + } else { + payments.withdrawTo(token, to, amount); + } + vm.stopPrank(); + + // Verify balances + uint256 fromAccountBalanceAfter = _getAccountData(from, useNativeToken).funds; + uint256 recipientBalanceAfter = _balanceOf(to, useNativeToken); + uint256 paymentsBalanceAfter = _balanceOf(address(payments), useNativeToken); + + // Assert balances changed correctly + assertEq( + fromAccountBalanceAfter, + fromAccountBalanceBefore - amount, + "Sender's account balance not decreased correctly" + ); + assertEq(recipientBalanceAfter, recipientBalanceBefore + amount, "Recipient's balance not increased correctly"); + assertEq( + paymentsBalanceAfter, paymentsBalanceBefore - amount, "Payments contract balance not decreased correctly" + ); + } + + function createRail(address from, address to, address railOperator, address validator, address serviceFeeRecipient) + public + returns (uint256) + { + vm.startPrank(railOperator); + uint256 railId = payments.createRail( + testToken, + from, + to, + validator, + 0, // commissionRateBps + serviceFeeRecipient // serviceFeeRecipient + ); + vm.stopPrank(); + + // Verify rail was created with the correct parameters + Payments.RailView memory rail = payments.getRail(railId); + assertEq(address(rail.token), address(testToken), "Rail token address mismatch"); + assertEq(rail.from, from, "Rail sender address mismatch"); + assertEq(rail.to, to, "Rail recipient address mismatch"); + assertEq(rail.validator, validator, "Rail validator address mismatch"); + assertEq(rail.operator, railOperator, "Rail operator address mismatch"); + 
assertEq(rail.serviceFeeRecipient, serviceFeeRecipient, "Rail service fee recipient address mismatch"); + + return railId; + } + + function setupRailWithParameters( + address from, + address to, + address railOperator, + uint256 paymentRate, + uint256 lockupPeriod, + uint256 lockupFixed, + address validator, + address serviceFeeRecipient + ) public returns (uint256 railId) { + // Calculate required allowances for the rail + uint256 requiredRateAllowance = paymentRate; + uint256 requiredLockupAllowance = lockupFixed + (paymentRate * lockupPeriod); + + // Get current operator allowances + (bool isApproved, uint256 rateAllowance, uint256 lockupAllowance,,,) = + payments.operatorApprovals(testToken, from, railOperator); + + // Ensure operator has sufficient allowances before creating the rail + if (!isApproved || rateAllowance < requiredRateAllowance || lockupAllowance < requiredLockupAllowance) { + vm.startPrank(from); + payments.setOperatorApproval( + testToken, + railOperator, + true, + requiredRateAllowance > rateAllowance ? requiredRateAllowance : rateAllowance, + requiredLockupAllowance > lockupAllowance ? 
requiredLockupAllowance : lockupAllowance, + MAX_LOCKUP_PERIOD + ); + vm.stopPrank(); + } + + railId = createRail(from, to, railOperator, validator, serviceFeeRecipient); + + // Get operator usage before modifications + (,,, uint256 rateUsageBefore, uint256 lockupUsageBefore,) = + payments.operatorApprovals(testToken, from, railOperator); + + // Get rail parameters before modifications to accurately calculate expected usage changes + Payments.RailView memory railBefore; + try payments.getRail(railId) returns (Payments.RailView memory railData) { + railBefore = railData; + } catch { + // If this is a new rail, all values will be zero + railBefore.paymentRate = 0; + railBefore.lockupPeriod = 0; + railBefore.lockupFixed = 0; + } + + // Set payment rate and lockup parameters + vm.startPrank(railOperator); + payments.modifyRailPayment(railId, paymentRate, 0); + payments.modifyRailLockup(railId, lockupPeriod, lockupFixed); + vm.stopPrank(); + + // Verify rail parameters were set correctly + Payments.RailView memory rail = payments.getRail(railId); + assertEq(rail.paymentRate, paymentRate, "Rail payment rate mismatch"); + assertEq(rail.lockupPeriod, lockupPeriod, "Rail lockup period mismatch"); + assertEq(rail.lockupFixed, lockupFixed, "Rail fixed lockup mismatch"); + assertEq(rail.validator, validator, "Rail validator address mismatch"); + + // Get operator usage after modifications + (,,, uint256 rateUsageAfter, uint256 lockupUsageAfter,) = + payments.operatorApprovals(testToken, from, railOperator); + + // Calculate expected change in rate usage + int256 expectedRateChange; + if (paymentRate > railBefore.paymentRate) { + expectedRateChange = int256(paymentRate - railBefore.paymentRate); + } else { + expectedRateChange = -int256(railBefore.paymentRate - paymentRate); + } + + // Calculate old and new lockup values to determine the change + uint256 oldLockupTotal = railBefore.lockupFixed + (railBefore.paymentRate * railBefore.lockupPeriod); + uint256 newLockupTotal = 
lockupFixed + (paymentRate * lockupPeriod); + int256 expectedLockupChange; + + if (newLockupTotal > oldLockupTotal) { + expectedLockupChange = int256(newLockupTotal - oldLockupTotal); + } else { + expectedLockupChange = -int256(oldLockupTotal - newLockupTotal); + } + + // Verify operator usage has been updated correctly + if (expectedRateChange > 0) { + assertEq( + rateUsageAfter, + rateUsageBefore + uint256(expectedRateChange), + "Operator rate usage not increased correctly" + ); + } else { + assertEq( + rateUsageBefore, + rateUsageAfter + uint256(-expectedRateChange), + "Operator rate usage not decreased correctly" + ); + } + + if (expectedLockupChange > 0) { + assertEq( + lockupUsageAfter, + lockupUsageBefore + uint256(expectedLockupChange), + "Operator lockup usage not increased correctly" + ); + } else { + assertEq( + lockupUsageBefore, + lockupUsageAfter + uint256(-expectedLockupChange), + "Operator lockup usage not decreased correctly" + ); + } + + return railId; + } + + function setupOperatorApproval( + address from, + address operator, + uint256 rateAllowance, + uint256 lockupAllowance, + uint256 maxLockupPeriod + ) public { + // Get initial usage values for verification + (,,, uint256 initialRateUsage, uint256 initialLockupUsage,) = + payments.operatorApprovals(testToken, from, operator); + + // Set approval + vm.startPrank(from); + payments.setOperatorApproval(testToken, operator, true, rateAllowance, lockupAllowance, maxLockupPeriod); + vm.stopPrank(); + + // Verify operator allowances after setting them + verifyOperatorAllowances( + from, + operator, + true, // isApproved + rateAllowance, // rateAllowance + lockupAllowance, // lockupAllowance + initialRateUsage, // rateUsage shouldn't change + initialLockupUsage, // lockupUsage shouldn't change + maxLockupPeriod // maxLockupPeriod + ); + } + + function revokeOperatorApprovalAndVerify(address from, address operator) public { + // Get current values for verification + ( + , + uint256 rateAllowance, + 
uint256 lockupAllowance, + uint256 rateUsage, + uint256 lockupUsage, + uint256 maxLockupPeriod + ) = payments.operatorApprovals(testToken, from, operator); + + // Revoke approval + vm.startPrank(from); + payments.setOperatorApproval(testToken, operator, false, rateAllowance, lockupAllowance, maxLockupPeriod); + vm.stopPrank(); + + // Verify operator allowances after revoking + verifyOperatorAllowances( + from, + operator, + false, // isApproved should be false + rateAllowance, // rateAllowance should remain the same + lockupAllowance, // lockupAllowance should remain the same + rateUsage, // rateUsage shouldn't change + lockupUsage, // lockupUsage shouldn't change, + maxLockupPeriod // maxLockupPeriod should remain the same + ); + } + + function advanceBlocks(uint256 blocks) public { + vm.roll(block.number + blocks); + } + + function assertAccountState( + address user, + uint256 expectedFunds, + uint256 expectedLockup, + uint256 expectedRate, + uint256 expectedLastSettled + ) public view { + Payments.Account memory account = getAccountData(user); + assertEq(account.funds, expectedFunds, "Account funds incorrect"); + assertEq(account.lockupCurrent, expectedLockup, "Account lockup incorrect"); + assertEq(account.lockupRate, expectedRate, "Account lockup rate incorrect"); + assertEq(account.lockupLastSettledAt, expectedLastSettled, "Account last settled at incorrect"); + } + + function verifyOperatorAllowances( + address client, + address operator, + bool expectedIsApproved, + uint256 expectedRateAllowance, + uint256 expectedLockupAllowance, + uint256 expectedRateUsage, + uint256 expectedLockupUsage, + uint256 expectedMaxLockupPeriod + ) public view { + ( + bool isApproved, + uint256 rateAllowance, + uint256 lockupAllowance, + uint256 rateUsage, + uint256 lockupUsage, + uint256 maxLockupPeriod + ) = payments.operatorApprovals(testToken, client, operator); + + assertEq(isApproved, expectedIsApproved, "Operator approval status mismatch"); + assertEq(rateAllowance, 
expectedRateAllowance, "Rate allowance mismatch"); + assertEq(lockupAllowance, expectedLockupAllowance, "Lockup allowance mismatch"); + assertEq(rateUsage, expectedRateUsage, "Rate usage mismatch"); + assertEq(lockupUsage, expectedLockupUsage, "Lockup usage mismatch"); + assertEq(maxLockupPeriod, expectedMaxLockupPeriod, "Max lockup period mismatch"); + } + + // Get current operator allowance and usage + function getOperatorAllowanceAndUsage(address client, address operator) + public + view + returns ( + bool isApproved, + uint256 rateAllowance, + uint256 lockupAllowance, + uint256 rateUsage, + uint256 lockupUsage, + uint256 maxLockupPeriod + ) + { + return payments.operatorApprovals(testToken, client, operator); + } + + function executeOneTimePayment(uint256 railId, address operatorAddress, uint256 oneTimeAmount) public { + Payments.RailView memory railBefore = payments.getRail(railId); + address railClient = railBefore.from; + address railRecipient = railBefore.to; + + // Get initial balances + Payments.Account memory clientBefore = getAccountData(railClient); + Payments.Account memory recipientBefore = getAccountData(railRecipient); + Payments.Account memory operatorBefore = getAccountData(operatorAddress); + + // Get operator allowance and usage before payment + (,, uint256 lockupAllowanceBefore,, uint256 lockupUsageBefore,) = + payments.operatorApprovals(testToken, railClient, operatorAddress); + + // Make one-time payment + vm.startPrank(operatorAddress); + payments.modifyRailPayment(railId, railBefore.paymentRate, oneTimeAmount); + vm.stopPrank(); + + // Verify balance changes + Payments.Account memory clientAfter = getAccountData(railClient); + Payments.Account memory recipientAfter = getAccountData(railRecipient); + Payments.Account memory operatorAfter = getAccountData(operatorAddress); + + assertEq( + clientAfter.funds, + clientBefore.funds - oneTimeAmount, + "Client funds not reduced correctly after one-time payment" + ); + + uint256 networkFee = 
oneTimeAmount * payments.NETWORK_FEE_NUMERATOR() / payments.NETWORK_FEE_DENOMINATOR(); + // Get commission rate from rail + uint256 commissionRate = railBefore.commissionRateBps; + uint256 operatorCommission = 0; + + if (commissionRate > 0) { + operatorCommission = ((oneTimeAmount - networkFee) * commissionRate) / payments.COMMISSION_MAX_BPS(); + // Verify operator commission is non-zero when commission rate is non-zero + assertGt(operatorCommission, 0, "Operator commission should be non-zero when commission rate is non-zero"); + } + + uint256 netPayeeAmount = oneTimeAmount - networkFee - operatorCommission; + + assertEq( + recipientAfter.funds, + recipientBefore.funds + netPayeeAmount, + "Recipient funds not increased correctly after one-time payment" + ); + + // Verify fixed lockup was reduced + Payments.RailView memory railAfter = payments.getRail(railId); + assertEq( + railAfter.lockupFixed, + railBefore.lockupFixed - oneTimeAmount, + "Fixed lockup not reduced by one-time payment amount" + ); + + // Verify operator account is credited with commission + if (operatorCommission > 0) { + assertEq( + operatorAfter.funds, + operatorBefore.funds + operatorCommission, + "Operator funds not increased correctly with commission amount" + ); + } + + // Verify account lockup is also reduced + assertEq( + clientAfter.lockupCurrent, + clientBefore.lockupCurrent - oneTimeAmount, + "Client lockup not reduced correctly after one-time payment" + ); + + // Verify operator lockup allowance and usage are both reduced + (,, uint256 lockupAllowanceAfter,, uint256 lockupUsageAfter,) = + payments.operatorApprovals(testToken, railClient, operatorAddress); + + assertEq( + lockupAllowanceBefore - oneTimeAmount, + lockupAllowanceAfter, + "Operator lockup allowance not reduced correctly after one-time payment" + ); + + assertEq( + lockupUsageBefore - oneTimeAmount, + lockupUsageAfter, + "Operator lockup usage not reduced correctly after one-time payment" + ); + } + + function 
expectcreateRailToRevertWithoutOperatorApproval() public { + vm.startPrank(OPERATOR); + vm.expectRevert(abi.encodeWithSelector(Errors.OperatorNotApproved.selector, USER1, OPERATOR)); + payments.createRail( + testToken, + USER1, + USER2, + address(0), + 0, + SERVICE_FEE_RECIPIENT // operator commission receiver + ); + } + + function expectExpiredPermitToRevert(uint256 senderSk, address to, uint256 amount) public { + address from = vm.addr(senderSk); + uint256 futureDeadline = block.timestamp + 1 hours; + (uint8 v, bytes32 r, bytes32 s) = getPermitSignature(senderSk, from, address(payments), amount, futureDeadline); + vm.warp(futureDeadline + 10); + vm.startPrank(from); + vm.expectRevert(abi.encodeWithSignature("ERC2612ExpiredSignature(uint256)", futureDeadline)); + payments.depositWithPermit(testToken, to, amount, futureDeadline, v, r, s); + vm.stopPrank(); + } + + function expectNativeTokenDepositWithPermitToRevert(uint256 senderSk, address to, uint256 amount) public { + uint256 deadline = block.timestamp + 1 hours; + address from = vm.addr(senderSk); + vm.startPrank(from); + vm.expectRevert(Errors.NativeTokenNotSupported.selector); + payments.depositWithPermit( + NATIVE_TOKEN, // Native token is not allowed + to, + amount, + deadline, + 0, // v + bytes32(0), // r + bytes32(0) // s + ); + vm.stopPrank(); + } + + function expectInvalidPermitToRevert(uint256 senderSk, address to, uint256 amount) public { + uint256 deadline = block.timestamp + 1 hours; + + uint256 notSenderSk = senderSk == user1Sk ? 
user2Sk : user1Sk; + address from = vm.addr(senderSk); + + // Make permit signature from notFromSk, but call from 'from' + (uint8 v, bytes32 r, bytes32 s) = getPermitSignature(notSenderSk, from, address(payments), amount, deadline); + + vm.startPrank(from); + + // Expect custom error: ERC2612InvalidSigner(wrongRecovered, expectedOwner) + vm.expectRevert(abi.encodeWithSignature("ERC2612InvalidSigner(address,address)", vm.addr(notSenderSk), from)); + payments.depositWithPermit(testToken, to, amount, deadline, v, r, s); + vm.stopPrank(); + } + + function makeDepositWithPermitAndOperatorApproval( + uint256 fromPrivateKey, + uint256 amount, + address operator, + uint256 rateAllowance, + uint256 lockupAllowance, + uint256 maxLockupPeriod + ) public { + address from = vm.addr(fromPrivateKey); + address to = from; + uint256 deadline = block.timestamp + 1 hours; + + // Capture pre-deposit balances and state + uint256 fromBalanceBefore = _balanceOf(from, false); + uint256 paymentsBalanceBefore = _balanceOf(address(payments), false); + Payments.Account memory toAccountBefore = _getAccountData(to, false); + + // get signature for permit + (uint8 v, bytes32 r, bytes32 s) = getPermitSignature(fromPrivateKey, from, address(payments), amount, deadline); + + // Execute deposit with permit + vm.startPrank(from); + + payments.depositWithPermitAndApproveOperator( + testToken, from, amount, deadline, v, r, s, operator, rateAllowance, lockupAllowance, maxLockupPeriod + ); + + vm.stopPrank(); + + // Capture post-deposit balances and state + uint256 fromBalanceAfter = _balanceOf(from, false); + uint256 paymentsBalanceAfter = _balanceOf(address(payments), false); + Payments.Account memory toAccountAfter = _getAccountData(to, false); + + // Asserts / Checks + _assertDepositBalances( + fromBalanceBefore, + fromBalanceAfter, + paymentsBalanceBefore, + paymentsBalanceAfter, + toAccountBefore, + toAccountAfter, + amount + ); + + verifyOperatorAllowances(from, operator, true, rateAllowance, 
lockupAllowance, 0, 0, maxLockupPeriod); + } + + function expectInvalidPermitAndOperatorApprovalToRevert( + uint256 senderSk, + uint256 amount, + address operator, + uint256 rateAllowance, + uint256 lockupAllowance, + uint256 maxLockupPeriod + ) public { + uint256 deadline = block.timestamp + 1 hours; + address to = vm.addr(senderSk); // Use the sender's address as recipient + + uint256 notSenderSk = senderSk == user1Sk ? user2Sk : user1Sk; + address from = vm.addr(senderSk); + + // Make permit signature from notFromSk, but call from 'from' + (uint8 v, bytes32 r, bytes32 s) = getPermitSignature(notSenderSk, from, address(payments), amount, deadline); + + // Capture pre-deposit balances and state + uint256 fromBalanceBefore = _balanceOf(from, false); + uint256 paymentsBalanceBefore = _balanceOf(address(payments), false); + Payments.Account memory toAccountBefore = _getAccountData(to, false); + + vm.startPrank(from); + + // Expect custom error: ERC2612InvalidSigner(wrongRecovered, expectedOwner) + vm.expectRevert(abi.encodeWithSignature("ERC2612InvalidSigner(address,address)", vm.addr(notSenderSk), from)); + payments.depositWithPermitAndApproveOperator( + testToken, from, amount, deadline, v, r, s, operator, rateAllowance, lockupAllowance, maxLockupPeriod + ); + vm.stopPrank(); + + // Capture post-deposit balances and state + uint256 fromBalanceAfter = _balanceOf(from, false); + uint256 paymentsBalanceAfter = _balanceOf(address(payments), false); + Payments.Account memory toAccountAfter = _getAccountData(to, false); + + // Asserts / Checks + _assertDepositBalances( + fromBalanceBefore, + fromBalanceAfter, + paymentsBalanceBefore, + paymentsBalanceAfter, + toAccountBefore, + toAccountAfter, + 0 // No funds should have been transferred due to revert + ); + + verifyOperatorAllowances(from, operator, false, 0, 0, 0, 0, 0); // No values should have been set due to revert - expect defaults + } + + function makeDepositWithPermitToAnotherUser(uint256 senderSk, address 
depositer, uint256 amount) public { + address to = vm.addr(senderSk); + uint256 deadline = block.timestamp + 1 hours; + + // Get permit signature for 'to' address + (uint8 v, bytes32 r, bytes32 s) = getPermitSignature(senderSk, to, address(payments), amount, deadline); + + vm.startPrank(depositer); + payments.depositWithPermit(testToken, to, amount, deadline, v, r, s); + vm.stopPrank(); + } + + // keccak256("ReceiveWithAuthorization(address from,address to,uint256 value,uint256 validAfter,uint256 validBefore,bytes32 nonce)") + bytes32 private constant RECEIVE_WITH_AUTHORIZATION_TYPEHASH = keccak256( + "ReceiveWithAuthorization(address from,address to,uint256 value,uint256 validAfter,uint256 validBefore,bytes32 nonce)" + ); // as per EIP-3009 + + function getReceiveWithAuthorizationSignature( + uint256 privateKey, + IERC20 token, + address from, + address to, + uint256 value, + uint256 validAfter, + uint256 validBefore, + bytes32 nonce + ) public view returns (uint8 v, bytes32 r, bytes32 s) { + // EIP-712 domain for ERC-3009 (MockERC20 defines its own domainSeparator unrelated to ERC2612) + bytes32 domainSeparator = MockERC20(address(token)).domainSeparator(); + + bytes32 structHash = + keccak256(abi.encode(RECEIVE_WITH_AUTHORIZATION_TYPEHASH, from, to, value, validAfter, validBefore, nonce)); + + bytes32 digest = MessageHashUtils.toTypedDataHash(domainSeparator, structHash); + + (v, r, s) = vm.sign(privateKey, digest); + } + + function depositWithAuthorizationInsufficientBalance(uint256 fromPrivateKey) public { + address from = vm.addr(fromPrivateKey); + address to = from; + uint256 validAfter = 0; + uint256 validBefore = block.timestamp + 300; + uint256 amount = INITIAL_BALANCE + 1; + bytes32 nonce = keccak256(abi.encodePacked("auth-nonce", from, to, amount, block.number)); + + (uint8 v, bytes32 r, bytes32 s) = getReceiveWithAuthorizationSignature( + fromPrivateKey, testToken, from, address(payments), amount, validAfter, validBefore, nonce + ); + + 
vm.startPrank(from); + // Since signature is valid but balance is insufficient, MockERC20 will revert with ERC20InsufficientBalance + vm.expectRevert( + abi.encodeWithSignature("ERC20InsufficientBalance(address,uint256,uint256)", from, INITIAL_BALANCE, amount) + ); + payments.depositWithAuthorization(testToken, to, amount, validAfter, validBefore, nonce, v, r, s); + vm.stopPrank(); + } + + function depositWithAuthorizationAndOperatorApproval( + uint256 fromPrivateKey, + uint256 amount, + uint256 validForSeconds, + address operator, + uint256 rateAllowance, + uint256 lockupAllowance, + uint256 maxLockupPeriod + ) public returns (bytes32 nonce) { + address from = vm.addr(fromPrivateKey); + address to = from; + + // Windows + uint256 validAfter = 0; // valid immediately + uint256 validBefore = block.timestamp + validForSeconds; + + // Unique nonce + nonce = keccak256(abi.encodePacked("auth-nonce", from, to, amount, block.number)); + + // Pre-state capture + uint256 fromBalanceBefore = _balanceOf(from, false); + uint256 paymentsBalanceBefore = _balanceOf(address(payments), false); + Payments.Account memory toAccountBefore = _getAccountData(to, false); + + // Build signature + (uint8 v, bytes32 r, bytes32 s) = getReceiveWithAuthorizationSignature( + fromPrivateKey, + testToken, + from, + address(payments), // pay to Payments contract + amount, + validAfter, + validBefore, + nonce + ); + + // Execute deposit via authorization + vm.startPrank(from); + + payments.depositWithAuthorizationAndApproveOperator( + testToken, + to, + amount, + validAfter, + validBefore, + nonce, + v, + r, + s, + operator, + rateAllowance, + lockupAllowance, + maxLockupPeriod + ); + + vm.stopPrank(); + + // Post-state capture + uint256 fromBalanceAfter = _balanceOf(from, false); + uint256 paymentsBalanceAfter = _balanceOf(address(payments), false); + Payments.Account memory toAccountAfter = _getAccountData(from, false); + + // Assertions + _assertDepositBalances( + fromBalanceBefore, + 
fromBalanceAfter, + paymentsBalanceBefore, + paymentsBalanceAfter, + toAccountBefore, + toAccountAfter, + amount + ); + + // Verify authorization is consumed on the token + bool used = testToken.authorizationState(from, nonce); + assertTrue(used); + + verifyOperatorAllowances(from, operator, true, rateAllowance, lockupAllowance, 0, 0, maxLockupPeriod); + } +} diff --git a/packages/pay/test/helpers/RailSettlementHelpers.sol b/packages/pay/test/helpers/RailSettlementHelpers.sol new file mode 100644 index 00000000..4338a168 --- /dev/null +++ b/packages/pay/test/helpers/RailSettlementHelpers.sol @@ -0,0 +1,299 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.27; + +import {Test} from "forge-std/Test.sol"; +import {Payments} from "../../src/Payments.sol"; +import {MockValidator} from "../mocks/MockValidator.sol"; +import {PaymentsTestHelpers} from "./PaymentsTestHelpers.sol"; +import {console} from "forge-std/console.sol"; + +contract RailSettlementHelpers is Test { + PaymentsTestHelpers public baseHelper; + Payments public payments; + + constructor() { + baseHelper = new PaymentsTestHelpers(); + } + + function initialize(Payments _payments, PaymentsTestHelpers _baseHelper) public { + payments = _payments; + baseHelper = _baseHelper; + } + + struct SettlementResult { + uint256 totalAmount; + uint256 netPayeeAmount; + uint256 operatorCommission; + uint256 settledUpto; + string note; + } + + function setupRailWithValidatorAndRateChangeQueue( + address from, + address to, + address operator, + address validator, + uint256[] memory rates, + uint256 lockupPeriod, + uint256 lockupFixed, + uint256 maxLokkupPeriod, + address serviceFeeRecipient + ) public returns (uint256) { + require(validator != address(0), "RailSettlementHelpers: validator cannot be zero address"); + + // Setup operator approval with sufficient allowances + uint256 maxRate = 0; + for (uint256 i = 0; i < rates.length; i++) { + if (rates[i] > maxRate) { + maxRate = rates[i]; + } + } + + 
// Calculate total lockup needed + uint256 totalLockupAllowance = lockupFixed + (maxRate * lockupPeriod); + + // Setup operator approval with the necessary allowances + baseHelper.setupOperatorApproval( + from, + operator, + maxRate, // Rate allowance + totalLockupAllowance, // Lockup allowance + maxLokkupPeriod // Max lockup period + ); + + // Create rail with parameters + uint256 railId = baseHelper.setupRailWithParameters( + from, + to, + operator, + rates[0], // Initial rate + lockupPeriod, + lockupFixed, + validator, + serviceFeeRecipient + ); + + // Apply rate changes for the rest of the rates + vm.startPrank(operator); + for (uint256 i = 1; i < rates.length; i++) { + // Each change will enqueue the previous rate + payments.modifyRailPayment(railId, rates[i], 0); + + // Advance one block to ensure the changes are at different epochs + baseHelper.advanceBlocks(1); + } + vm.stopPrank(); + + return railId; + } + + function createInDebtRail( + address from, + address to, + address operator, + uint256 paymentRate, + uint256 lockupPeriod, + uint256 fundAmount, + uint256 fixedLockup, + address serviceFeeRecipient + ) public returns (uint256) { + baseHelper.makeDeposit(from, from, fundAmount); + + // Create a rail with specified parameters + uint256 railId = baseHelper.setupRailWithParameters( + from, to, operator, paymentRate, lockupPeriod, fixedLockup, address(0), serviceFeeRecipient + ); + + // Advance blocks past the lockup period to force the rail into debt + baseHelper.advanceBlocks(lockupPeriod + 1); + + return railId; + } + + function deployMockValidator(MockValidator.ValidatorMode mode) public returns (MockValidator) { + return new MockValidator(mode); + } + + function settleRailAndVerify(uint256 railId, uint256 untilEpoch, uint256 expectedAmount, uint256 expectedUpto) + public + returns (SettlementResult memory result) + { + console.log("settleRailAndVerify"); + // Get the rail details to identify payer and payee + Payments.RailView memory rail = 
payments.getRail(railId); + address payer = rail.from; + address payee = rail.to; + + // Get balances before settlement + Payments.Account memory payerAccountBefore = baseHelper.getAccountData(payer); + Payments.Account memory payeeAccountBefore = baseHelper.getAccountData(payee); + + console.log("payerFundsBefore", payerAccountBefore.funds); + console.log("payerLockupBefore", payerAccountBefore.lockupCurrent); + console.log("payeeFundsBefore", payeeAccountBefore.funds); + console.log("payeeLockupBefore", payeeAccountBefore.lockupCurrent); + + uint256 settlementAmount; + uint256 netPayeeAmount; + uint256 operatorCommission; + uint256 networkFee; + uint256 settledUpto; + string memory note; + + vm.startPrank(payer); + (settlementAmount, netPayeeAmount, operatorCommission, networkFee, settledUpto, note) = + payments.settleRail(railId, untilEpoch); + vm.stopPrank(); + + console.log("settlementAmount", settlementAmount); + console.log("netPayeeAmount", netPayeeAmount); + console.log("operatorCommission", operatorCommission); + console.log("networkFee", networkFee); + console.log("settledUpto", settledUpto); + console.log("note", note); + + // Verify results + assertEq(settlementAmount, expectedAmount, "Settlement amount doesn't match expected"); + assertEq(settledUpto, expectedUpto, "Settled upto doesn't match expected"); + + // Verify payer and payee balance changes + Payments.Account memory payerAccountAfter = baseHelper.getAccountData(payer); + Payments.Account memory payeeAccountAfter = baseHelper.getAccountData(payee); + console.log("payerFundsAfter", payerAccountAfter.funds); + console.log("payeeFundsAfter", payeeAccountAfter.funds); + + assertEq( + payerAccountBefore.funds - payerAccountAfter.funds, + settlementAmount, + "Payer's balance reduction doesn't match settlement amount" + ); + assertEq( + payeeAccountAfter.funds - payeeAccountBefore.funds, + netPayeeAmount, + "Payee's balance increase doesn't match net payee amount" + ); + + rail = 
payments.getRail(railId); + assertEq(rail.settledUpTo, expectedUpto, "Rail settled upto incorrect"); + + return SettlementResult(settlementAmount, netPayeeAmount, operatorCommission, settledUpto, note); + } + + function terminateAndSettleRail(uint256 railId, uint256 expectedAmount, uint256 expectedUpto) + public + returns (SettlementResult memory result) + { + // Get rail details to extract client and operator addresses + Payments.RailView memory rail = payments.getRail(railId); + address client = rail.from; + address operator = rail.operator; + + // Terminate the rail as operator + vm.prank(operator); + payments.terminateRail(railId); + + // Verify rail was properly terminated + rail = payments.getRail(railId); + (,,, uint256 lockupLastSettledAt) = payments.accounts(baseHelper.testToken(), client); + assertTrue(rail.endEpoch > 0, "Rail should be terminated"); + assertEq( + rail.endEpoch, + lockupLastSettledAt + rail.lockupPeriod, + "Rail end epoch should be account lockup last settled at + rail lockup period" + ); + + return settleRailAndVerify(railId, block.number, expectedAmount, expectedUpto); + } + + function modifyRailSettingsAndVerify( + Payments paymentsContract, + uint256 railId, + address operator, + uint256 newRate, + uint256 newLockupPeriod, + uint256 newFixedLockup + ) public { + Payments.RailView memory railBefore = paymentsContract.getRail(railId); + address client = railBefore.from; + + // Get operator allowance usage before modifications + (,,, uint256 rateUsageBefore, uint256 lockupUsageBefore,) = + paymentsContract.operatorApprovals(baseHelper.testToken(), client, operator); + + // Calculate current lockup total + uint256 oldLockupTotal = railBefore.lockupFixed + (railBefore.paymentRate * railBefore.lockupPeriod); + + // Calculate new lockup total + uint256 newLockupTotal = newFixedLockup + (newRate * newLockupPeriod); + + // Modify rail settings + vm.startPrank(operator); + + // First modify rate if needed + if (newRate != 
railBefore.paymentRate) { + paymentsContract.modifyRailPayment(railId, newRate, 0); + } + + // Then modify lockup parameters + if (newLockupPeriod != railBefore.lockupPeriod || newFixedLockup != railBefore.lockupFixed) { + paymentsContract.modifyRailLockup(railId, newLockupPeriod, newFixedLockup); + } + + vm.stopPrank(); + + // Verify changes + Payments.RailView memory railAfter = paymentsContract.getRail(railId); + + assertEq(railAfter.paymentRate, newRate, "Rail payment rate not updated correctly"); + + assertEq(railAfter.lockupPeriod, newLockupPeriod, "Rail lockup period not updated correctly"); + + assertEq(railAfter.lockupFixed, newFixedLockup, "Rail fixed lockup not updated correctly"); + + // Get operator allowance usage after modifications + (,,, uint256 rateUsageAfter, uint256 lockupUsageAfter,) = + paymentsContract.operatorApprovals(baseHelper.testToken(), client, operator); + + // Verify rate usage changes correctly + if (newRate > railBefore.paymentRate) { + // Rate increased + assertEq( + rateUsageAfter, + rateUsageBefore + (newRate - railBefore.paymentRate), + "Rate usage not increased correctly after rate increase" + ); + } else if (newRate < railBefore.paymentRate) { + // Rate decreased + assertEq( + rateUsageBefore, + rateUsageAfter + (railBefore.paymentRate - newRate), + "Rate usage not decreased correctly after rate decrease" + ); + } else { + // Rate unchanged + assertEq(rateUsageBefore, rateUsageAfter, "Rate usage changed unexpectedly when rate was not modified"); + } + + // Verify lockup usage changes correctly + if (newLockupTotal > oldLockupTotal) { + // Lockup increased + assertEq( + lockupUsageAfter, + lockupUsageBefore + (newLockupTotal - oldLockupTotal), + "Lockup usage not increased correctly after lockup increase" + ); + } else if (newLockupTotal < oldLockupTotal) { + // Lockup decreased + assertEq( + lockupUsageBefore, + lockupUsageAfter + (oldLockupTotal - newLockupTotal), + "Lockup usage not decreased correctly after lockup 
decrease" + ); + } else { + // Lockup unchanged + assertEq( + lockupUsageBefore, lockupUsageAfter, "Lockup usage changed unexpectedly when lockup was not modified" + ); + } + } +} diff --git a/packages/pay/test/mocks/ExtraFeeToken.sol b/packages/pay/test/mocks/ExtraFeeToken.sol new file mode 100644 index 00000000..7b80f3cb --- /dev/null +++ b/packages/pay/test/mocks/ExtraFeeToken.sol @@ -0,0 +1,37 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.27; + +import {ERC20} from "@openzeppelin/contracts/token/ERC20/ERC20.sol"; + +/** + * This token decreases the sender balance by more than the value parameter + */ +contract ExtraFeeToken is ERC20 { + address private constant FEE_RECIPIENT = 0x0FeefeefeEFeeFeefeEFEEFEEfEeFEeFeeFeEfEe; + uint256 public transferFee; + + constructor(uint256 _transferFee) ERC20("FeeToken", "FEE") { + transferFee = _transferFee; + } + + function setFeeBips(uint256 bips) public { + transferFee = bips; + } + + function mint(address to, uint256 value) public { + _mint(to, value); + } + + function transfer(address to, uint256 value) public override returns (bool) { + _transfer(msg.sender, to, value); + _transfer(msg.sender, FEE_RECIPIENT, transferFee); + return true; + } + + function transferFrom(address from, address to, uint256 value) public override returns (bool) { + _spendAllowance(from, msg.sender, value); + _transfer(from, to, value); + _transfer(from, FEE_RECIPIENT, transferFee); + return true; + } +} diff --git a/packages/pay/test/mocks/MockERC20.sol b/packages/pay/test/mocks/MockERC20.sol new file mode 100644 index 00000000..3900c8bb --- /dev/null +++ b/packages/pay/test/mocks/MockERC20.sol @@ -0,0 +1,171 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.27; + +import {ERC20Permit} from "@openzeppelin/contracts/token/ERC20/extensions/ERC20Permit.sol"; +import {ECDSA} from "@openzeppelin/contracts/utils/cryptography/ECDSA.sol"; +import {IERC3009} from "../../src/interfaces/IERC3009.sol"; +import 
{ERC20} from "@openzeppelin/contracts/token/ERC20/ERC20.sol"; + +/** + * @title MockERC20 + * @dev A mock ERC20 token with permit (ERC-2612) and transferWithAuthorization (ERC-3009) functionality for testing purposes. + */ +contract MockERC20 is ERC20, ERC20Permit, IERC3009 { + // --- ERC-3009 State and Constants --- + mapping(address => mapping(bytes32 => bool)) private _authorizationStates; + + bytes32 private constant _TRANSFER_WITH_AUTHORIZATION_TYPEHASH = keccak256( + "TransferWithAuthorization(address from,address to,uint256 value,uint256 validAfter,uint256 validBefore,bytes32 nonce)" + ); + bytes32 private constant _RECEIVE_WITH_AUTHORIZATION_TYPEHASH = keccak256( + "ReceiveWithAuthorization(address from,address to,uint256 value,uint256 validAfter,uint256 validBefore,bytes32 nonce)" + ); + + bytes32 private immutable _HASHED_NAME; + bytes32 private constant _HASHED_VERSION = keccak256("1"); + + // keccak256("Permit(address owner,address spender,uint256 value,uint256 nonce,uint256 deadline)"); + bytes32 private constant _PERMIT_TYPEHASH = 0x6e71edae12b1b97f4d1f60370fef10105fa2faae0126114a169c64845d6126c9; + // keccak256("EIP712Domain(string name,string version,uint256 chainId,address verifyingContract)"); + bytes32 private constant _TYPE_HASH = 0x8b73c3c69bb8fe3d512ecc4cf759cc79239f7b179b0ffacaa9a75d522b39400f; + + uint256 private immutable _CACHED_CHAIN_ID; + bytes32 private immutable _CACHED_DOMAIN_SEPARATOR; + + // --- ERC-3009 Event --- + event AuthorizationUsed(address indexed authorizer, bytes32 indexed nonce); + + constructor(string memory name, string memory symbol) ERC20(name, symbol) ERC20Permit(name) { + _HASHED_NAME = keccak256(abi.encode(name)); + _CACHED_CHAIN_ID = block.chainid; + _CACHED_DOMAIN_SEPARATOR = _buildDomainSeparator(_TYPE_HASH, _HASHED_NAME, _HASHED_VERSION); + } + + // Mint tokens for testing + function mint(address to, uint256 amount) public { + _mint(to, amount); + } + + // --- ERC-3009 Implementation --- + + /** + * @notice 
Execute a transfer with a signed authorization + * @param from Payer's address (Authorizer) + * @param to Payee's address + * @param value Amount to be transferred + * @param validAfter The time after which this is valid (unix time) + * @param validBefore The time before which this is valid (unix time) + * @param nonce Unique nonce + * @param v v of the signature + * @param r r of the signature + * @param s s of the signature + */ + function transferWithAuthorization( + address from, + address to, + uint256 value, + uint256 validAfter, + uint256 validBefore, + bytes32 nonce, + uint8 v, + bytes32 r, + bytes32 s + ) external { + require(block.timestamp > validAfter, "EIP3009: authorization not yet valid"); + require(block.timestamp < validBefore, "EIP3009: authorization expired"); + require(!_authorizationStates[from][nonce], "EIP3009: authorization already used"); + + bytes32 structHash = keccak256( + abi.encode(_TRANSFER_WITH_AUTHORIZATION_TYPEHASH, from, to, value, validAfter, validBefore, nonce) + ); + + bytes32 digest = _hashTypedDataV4(structHash); + address signer = ECDSA.recover(digest, v, r, s); + require(signer == from, "Invalid signature"); + + _authorizationStates[from][nonce] = true; + emit AuthorizationUsed(from, nonce); + + _transfer(from, to, value); + } + + /** + * @notice Receive a transfer with a signed authorization from the payer + * @dev This has an additional check to ensure that the payee's address matches + * the caller of this function to prevent front-running attacks. 
(See security + * considerations) + * @param _from Payer's address (Authorizer) + * @param _to Payee's address + * @param _value Amount to be transferred + * @param _validAfter The time after which this is valid (unix time) + * @param _validBefore The time before which this is valid (unix time) + * @param _nonce Unique nonce + * @param _v v of the signature + * @param _r r of the signature + * @param _s s of the signature + */ + function receiveWithAuthorization( + address _from, + address _to, + uint256 _value, + uint256 _validAfter, + uint256 _validBefore, + bytes32 _nonce, + uint8 _v, + bytes32 _r, + bytes32 _s + ) external { + require(_to == msg.sender, "EIP3009: caller must be the recipient"); + require(block.timestamp > _validAfter, "EIP3009: authorization not yet valid"); + require(block.timestamp < _validBefore, "EIP3009: authorization expired"); + require(!_authorizationStates[_from][_nonce], "EIP3009: authorization already used"); + _requireValidRecipient(_to); + + address recoveredAddress = _recover( + _v, + _r, + _s, + abi.encode(_RECEIVE_WITH_AUTHORIZATION_TYPEHASH, _from, _to, _value, _validAfter, _validBefore, _nonce) + ); + require(recoveredAddress == _from, "EIP3009: invalid signature"); + + _authorizationStates[_from][_nonce] = true; + emit AuthorizationUsed(_from, _nonce); + + _transfer(_from, _to, _value); + } + + function authorizationState(address authorizer, bytes32 nonce) external view returns (bool) { + return _authorizationStates[authorizer][nonce]; + } + + function _requireValidRecipient(address _recipient) internal view { + require( + _recipient != address(0) && _recipient != address(this), + "DebtToken: Cannot transfer tokens directly to the Debt token contract or the zero address" + ); + } + + function _recover(uint8 _v, bytes32 _r, bytes32 _s, bytes memory _typeHashAndData) + internal + view + returns (address) + { + bytes32 digest = keccak256(abi.encodePacked("\x19\x01", domainSeparator(), keccak256(_typeHashAndData))); + address 
recovered = ecrecover(digest, _v, _r, _s); + require(recovered != address(0), "EIP712: invalid signature"); + return recovered; + } + + function domainSeparator() public view returns (bytes32) { + if (block.chainid == _CACHED_CHAIN_ID) { + return _CACHED_DOMAIN_SEPARATOR; + } else { + return _buildDomainSeparator(_TYPE_HASH, _HASHED_NAME, _HASHED_VERSION); + } + } + + function _buildDomainSeparator(bytes32 _typeHash, bytes32 _name, bytes32 _version) private view returns (bytes32) { + return keccak256(abi.encode(_typeHash, _name, _version, block.chainid, address(this))); + } +} diff --git a/packages/pay/test/mocks/MockFeeOnTransferTokenWithPermit.sol b/packages/pay/test/mocks/MockFeeOnTransferTokenWithPermit.sol new file mode 100644 index 00000000..0ec8e326 --- /dev/null +++ b/packages/pay/test/mocks/MockFeeOnTransferTokenWithPermit.sol @@ -0,0 +1,44 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.27; + +import {ERC20, ERC20Permit} from "@openzeppelin/contracts/token/ERC20/extensions/ERC20Permit.sol"; + +contract MockFeeOnTransferTokenWithPermit is ERC20Permit { + uint256 public feePercentage; // Fee in basis points (100 = 1%) + + constructor(string memory name, string memory symbol, uint256 _feePercentage) + ERC20(name, symbol) + ERC20Permit(name) + { + feePercentage = _feePercentage; + } + + function mint(address to, uint256 amount) public { + _mint(to, amount); + } + + function setFeePercentage(uint256 _feePercentage) public { + feePercentage = _feePercentage; + } + + function transfer(address to, uint256 amount) public override returns (bool) { + return _transferWithFee(_msgSender(), to, amount); + } + + function transferFrom(address from, address to, uint256 amount) public override returns (bool) { + address spender = _msgSender(); + _spendAllowance(from, spender, amount); + return _transferWithFee(from, to, amount); + } + + function _transferWithFee(address from, address to, uint256 amount) internal returns (bool) { + uint256 fee = 
(amount * feePercentage) / 10000; + uint256 actualAmount = amount - fee; + + // Burn the fee (simulating fee-on-transfer) + _transfer(from, address(0xdead), fee); + _transfer(from, to, actualAmount); + + return true; + } +} diff --git a/packages/pay/test/mocks/MockValidator.sol b/packages/pay/test/mocks/MockValidator.sol new file mode 100644 index 00000000..c9090b52 --- /dev/null +++ b/packages/pay/test/mocks/MockValidator.sol @@ -0,0 +1,101 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.27; + +import {IValidator} from "../../src/Payments.sol"; + +contract MockValidator is IValidator { + enum ValidatorMode { + STANDARD, // Approves all payments as proposed + REDUCE_AMOUNT, // Reduces payment amount by a percentage + REDUCE_DURATION, // Settles for fewer epochs than requested + CUSTOM_RETURN, // Returns specific values set by the test + MALICIOUS // Returns invalid values + + } + + ValidatorMode public mode = ValidatorMode.STANDARD; // Default to STANDARD mode + uint256 public modificationFactor; // Percentage (0-100) for reductions + uint256 public customAmount; + uint256 public customUpto; + string public customNote; + + // Storage for railTerminated calls + uint256 public lastTerminatedRailId; + address public lastTerminator; + uint256 public lastEndEpoch; + bool public railTerminatedCalled; + + constructor(ValidatorMode _mode) { + mode = _mode; + modificationFactor = 100; // 100% = no modification by default + } + + function configure(uint256 _modificationFactor) external { + require(_modificationFactor <= 100, "Factor must be between 0-100"); + modificationFactor = _modificationFactor; + } + + // Set custom return values for CUSTOM_RETURN mode + function setCustomValues(uint256 _amount, uint256 _upto, string calldata _note) external { + customAmount = _amount; + customUpto = _upto; + customNote = _note; + } + + // Change the validator's mode + function setMode(ValidatorMode _mode) external { + mode = _mode; + } + + function 
validatePayment( + uint256, /* railId */ + uint256 proposedAmount, + uint256 fromEpoch, + uint256 toEpoch, + uint256 /* rate */ + ) external view override returns (ValidationResult memory result) { + if (mode == ValidatorMode.STANDARD) { + return ValidationResult({ + modifiedAmount: proposedAmount, + settleUpto: toEpoch, + note: "Standard approved payment" + }); + } else if (mode == ValidatorMode.REDUCE_AMOUNT) { + uint256 reducedAmount = (proposedAmount * modificationFactor) / 100; + return ValidationResult({ + modifiedAmount: reducedAmount, + settleUpto: toEpoch, + note: "Validator reduced payment amount" + }); + } else if (mode == ValidatorMode.REDUCE_DURATION) { + uint256 totalEpochs = toEpoch - fromEpoch; + uint256 reducedEpochs = (totalEpochs * modificationFactor) / 100; + uint256 reducedEndEpoch = fromEpoch + reducedEpochs; + + // Calculate reduced amount proportionally + uint256 reducedAmount = (proposedAmount * reducedEpochs) / totalEpochs; + + return ValidationResult({ + modifiedAmount: reducedAmount, + settleUpto: reducedEndEpoch, + note: "Validator reduced settlement duration" + }); + } else if (mode == ValidatorMode.CUSTOM_RETURN) { + return ValidationResult({modifiedAmount: customAmount, settleUpto: customUpto, note: customNote}); + } else { + // Malicious mode attempts to return invalid values + return ValidationResult({ + modifiedAmount: proposedAmount * 2, // Try to double the payment + settleUpto: toEpoch + 10, // Try to settle beyond the requested range + note: "Malicious validator attempting to manipulate payment" + }); + } + } + + function railTerminated(uint256 railId, address terminator, uint256 endEpoch) external override { + lastTerminatedRailId = railId; + lastTerminator = terminator; + lastEndEpoch = endEpoch; + railTerminatedCalled = true; + } +} diff --git a/packages/pay/tools/README.md b/packages/pay/tools/README.md new file mode 100644 index 00000000..00706e6a --- /dev/null +++ b/packages/pay/tools/README.md @@ -0,0 +1,70 @@ +# 
Filecoin Payment Services Tools + +A place for all tools related to deploying, upgrading, and managing the Payments contract. + +## Tools + +### Available Tools + +- **Deployment Script**: `deploy.sh` (all networks) + +### Deployment Script + +#### deploy.sh +This script deploys the Payments contract to the specified network. Usage: + +```bash +./tools/deploy.sh +# Example: 314159 (calibnet), 314 (mainnet), 12345 (devnet) +``` +- Uses `PAYMENTS_PATH` if set, otherwise defaults to `src/Payments.sol:Payments`. +- Sets a default `RPC_URL` if not provided, based on `CHAIN_ID`. +- Outputs the Payments Contract Address (proxy) and Implementation Address. + +### Environment Variables + +To use these scripts, set the following environment variables: +- `RPC_URL` - The RPC URL for the network. For Calibration Testnet (314159) and Mainnet (314), a default is set if not provided. For devnet or any custom CHAIN_ID, you must set `RPC_URL` explicitly. +- `KEYSTORE` - Path to the keystore file +- `PASSWORD` - Password for the keystore +- `PAYMENTS_PATH` - Path to the implementation contract (e.g., "src/Payments.sol:Payments") + +### Make Targets + +```bash +# Deployment +make deploy-devnet # Deploy to local devnet +make deploy-calibnet # Deploy to Calibration Testnet +make deploy-mainnet # Deploy to Mainnet +``` + +--- + +### Direct Script Usage (without Make) + +You can run all scripts directly from the `tools/` directory without using Makefile targets. +Set the required environment variables as shown below, then invoke the scripts with the appropriate arguments. + +**Note:** +- For Calibration Testnet (314159) and Mainnet (314), the script sets a default `RPC_URL` if not provided. +- For devnet or any custom `CHAIN_ID`, you must set `RPC_URL` explicitly or the script will exit with an error. +- You can always inspect each script for more details on required and optional environment variables. 
+ +#### Deploy + +```bash +export KEYSTORE="/path/to/keystore" +export PASSWORD="your-password" +# Optionally set PAYMENTS_PATH and RPC_URL +./tools/deploy.sh +# Example: ./tools/deploy.sh 314159 +``` + +### Example Usage + +```bash +# Deploy to calibnet +export KEYSTORE="/path/to/keystore" +export PASSWORD="your-password" +make deploy-calibnet +``` diff --git a/packages/pay/tools/deploy.sh b/packages/pay/tools/deploy.sh new file mode 100755 index 00000000..b956af24 --- /dev/null +++ b/packages/pay/tools/deploy.sh @@ -0,0 +1,56 @@ +#! /bin/bash +# deploy.sh deploys the FilecoinPayV1 contract to the specified network +# Usage: ./tools/deploy.sh +# Example: ./tools/deploy.sh 314159 (calibnet) +# ./tools/deploy.sh 314 (mainnet) +# ./tools/deploy.sh 31415926 (devnet) +# +if [ -f ".env" ]; then + export $(grep -v '^#' .env | xargs) +fi +set -euo pipefail + +CHAIN_ID=${1:-314159} # Default to calibnet + +# Set default RPC_URL if not set +if [ -z "${RPC_URL:-}" ]; then + if [ "$CHAIN_ID" = "314159" ]; then + export RPC_URL="https://api.calibration.node.glif.io/rpc/v1" + elif [ "$CHAIN_ID" = "314" ]; then + export RPC_URL="https://api.node.glif.io/rpc/v1" + else + echo "Error: RPC_URL must be set for CHAIN_ID $CHAIN_ID" + exit 1 + fi +fi + +if [ -z "${KEYSTORE:-}" ]; then + echo "Error: KEYSTORE is not set" + exit 1 +fi +if [ -z "${PASSWORD:-}" ]; then + echo "Error: PASSWORD is not set" + exit 1 +fi + +ADDR=$(cast wallet address --keystore "$KEYSTORE" --password "$PASSWORD") +echo "Deploying FilecoinPayV1 from address $ADDR to chain $CHAIN_ID" +NONCE="$(cast nonce --rpc-url "$RPC_URL" "$ADDR")" + +# Use PAYMENTS_PATH if set, otherwise default +if [ -z "${PAYMENTS_PATH:-}" ]; then + PAYMENTS_PATH="src/FilecoinPayV1.sol:FilecoinPayV1" +fi + +echo "Deploying FilecoinPayV1 implementation ($PAYMENTS_PATH)" +export PAYMENTS_CONTRACT_ADDRESS=$(forge create --rpc-url "$RPC_URL" --keystore "$KEYSTORE" --password "$PASSWORD" --broadcast --nonce $NONCE --chain-id $CHAIN_ID 
$PAYMENTS_PATH | grep "Deployed to" | awk '{print $3}') +if [ -z "$PAYMENTS_CONTRACT_ADDRESS" ]; then + echo "Error: Failed to extract FilecoinPayV1 implementation contract address" + exit 1 +fi +echo "FilecoinPayV1 Address: $PAYMENTS_CONTRACT_ADDRESS" + +echo "" +echo "=== DEPLOYMENT SUMMARY ===" +echo "FilecoinPayV1 Contract Address: $PAYMENTS_CONTRACT_ADDRESS" +echo "==========================" diff --git a/packages/pdp/package.json b/packages/pdp/package.json new file mode 100644 index 00000000..1421f753 --- /dev/null +++ b/packages/pdp/package.json @@ -0,0 +1,30 @@ +{ + "name": "@filoz/pdp", + "version": "1.0.0", + "description": "Filecoin PDP (Proof of Data Possession) - Smart contracts for data possession verification", + "main": "src/index.js", + "files": [ + "src/**/*.sol", + "abi/**/*.json" + ], + "scripts": { + "build": "echo 'Use root forge build'", + "test": "echo 'Use root forge test'", + "clean": "echo 'Use root forge clean'", + "lint": "echo 'Use root forge fmt --check'", + "lint:fix": "echo 'Use root forge fmt'" + }, + "repository": { + "type": "git", + "url": "https://github.com/FilOzone/pdp.git" + }, + "keywords": [ + "filecoin", + "pdp", + "proof-of-data-possession", + "solidity", + "smart-contracts" + ], + "author": "FilOzone", + "license": "Apache-2.0 OR MIT" +} diff --git a/packages/pdp/src/BitOps.sol b/packages/pdp/src/BitOps.sol new file mode 100644 index 00000000..ddc8a3fb --- /dev/null +++ b/packages/pdp/src/BitOps.sol @@ -0,0 +1,97 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.20; + +// Library for bit operations. +library BitOps { + // Calculates the number of leading zeros in binary representation. 
+ function clz(uint256 x) internal pure returns (uint256) { + uint256 n = 256; + uint256 y; + + y = x >> 128; + if (y != 0) { + n -= 128; + x = y; + } + y = x >> 64; + if (y != 0) { + n -= 64; + x = y; + } + y = x >> 32; + if (y != 0) { + n -= 32; + x = y; + } + y = x >> 16; + if (y != 0) { + n -= 16; + x = y; + } + y = x >> 8; + if (y != 0) { + n -= 8; + x = y; + } + y = x >> 4; + if (y != 0) { + n -= 4; + x = y; + } + y = x >> 2; + if (y != 0) { + n -= 2; + x = y; + } + y = x >> 1; + if (y != 0) return n - 2; + return n - x; + } + + int256 constant MASK128 = 0x00000000000000000000000000000000FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF; + int256 constant MASK64 = 0x0000000000000000FFFFFFFFFFFFFFFF0000000000000000FFFFFFFFFFFFFFFF; + int256 constant MASK32 = 0x00000000FFFFFFFF00000000FFFFFFFF00000000FFFFFFFF00000000FFFFFFFF; + int256 constant MASK16 = 0x0000FFFF0000FFFF0000FFFF0000FFFF0000FFFF0000FFFF0000FFFF0000FFFF; + int256 constant MASK8 = 0x00FF00FF00FF00FF00FF00FF00FF00FF00FF00FF00FF00FF00FF00FF00FF00FF; + int256 constant MASK4 = 0x0F0F0F0F0F0F0F0F0F0F0F0F0F0F0F0F0F0F0F0F0F0F0F0F0F0F0F0F0F0F0F0F; + int256 constant MASK2 = 0x3333333333333333333333333333333333333333333333333333333333333333; + int256 constant MASK1 = 0x5555555555555555555555555555555555555555555555555555555555555555; + + // Calculates the number of trailing zeros in binary representation. 
+ function ctz(uint256 x) internal pure returns (uint256) { + require(x <= uint256(type(int256).max), "Input exceeds maximum int256 value"); + uint256 c = 256; + + int256 v = -int256(x); + v = v & int256(x); + if (v != 0) { + c--; + } + if (v & MASK128 != 0) { + c -= 128; + } + if (v & MASK64 != 0) { + c -= 64; + } + if (v & MASK32 != 0) { + c -= 32; + } + if (v & MASK16 != 0) { + c -= 16; + } + if (v & MASK8 != 0) { + c -= 8; + } + if (v & MASK4 != 0) { + c -= 4; + } + if (v & MASK2 != 0) { + c -= 2; + } + if (v & MASK1 != 0) { + c -= 1; + } + + return c; + } +} diff --git a/packages/pdp/src/Cids.sol b/packages/pdp/src/Cids.sol new file mode 100644 index 00000000..5321fe23 --- /dev/null +++ b/packages/pdp/src/Cids.sol @@ -0,0 +1,147 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.20; + +library Cids { + // 0x01 0x55 0x9120 + // (cidv1) (raw) (fr32-sha2-256-trunc254-padded-binary-tree) + bytes4 public constant COMMP_V2_PREFIX = hex"01559120"; + + // A helper struct for events + getter functions to display digests as CommpV2 CIDs + struct Cid { + bytes data; + } + + // Returns the last 32 bytes of a CID payload as a bytes32. + function digestFromCid(Cid memory cid) internal pure returns (bytes32) { + require(cid.data.length >= 32, "Cid data is too short"); + bytes memory dataSlice = new bytes(32); + for (uint256 i = 0; i < 32; i++) { + dataSlice[i] = cid.data[cid.data.length - 32 + i]; + } + return bytes32(dataSlice); + } + + // Returns the height of the tree from the CID. + function heightFromCid(Cid memory cid) internal pure returns (uint8) { + require(cid.data.length >= 33, "Cid data is too short"); + return uint8(cid.data[cid.data.length - 32 - 1]); + } + + // Checks that CID is CommPv2 and decomposes it into its components. 
+ // See: https://github.com/filecoin-project/FIPs/blob/master/FRCs/frc-0069.md + function validateCommPv2(Cid memory cid) + internal + pure + returns (uint256 padding, uint8 height, uint256 digestOffset) + { + for (uint256 i = 0; i < 4; i++) { + if (cid.data[i] != COMMP_V2_PREFIX[i]) { + revert("Cid must be CommPv2"); + } + } + uint256 offset = 4; + uint256 mhLength; + (mhLength, offset) = _readUvarint(cid.data, offset); + require(mhLength >= 34, "CommPv2 multihash length must be at least 34"); + if (mhLength + offset != cid.data.length) { + revert("CommPv2 multihash length does not match data length"); + } + (padding, offset) = _readUvarint(cid.data, offset); + + height = uint8(cid.data[offset]); + offset++; + + return (padding, height, offset); + } + + // isPaddingExcessive checks if the padding size exceeds the size of the tree + function isPaddingExcessive(uint256 padding, uint8 height) internal pure returns (bool) { + return (128 * padding) / 127 >= 1 << (height + 5); + } + + // pieceSize returns the size of the data defined by amount of padding and height of the tree + // this is after the Fr32 expansion, if 1 bit of actual data spills into padding byte, the whole byte is counted as data + // as the padding is specified as before expansion + function pieceSize(uint256 padding, uint8 height) internal pure returns (uint256) { + // 2^height * 32 - padding + // we can fold the 32 into height + return (1 << (uint256(height) + 5)) - (128 * padding) / 127; + } + + // leafCount returns the number of 32b leaves that contain any amount of data + function leafCount(uint256 padding, uint8 height) internal pure returns (uint256) { + // the padding itself is # of bytes before Fr32 expansion + // so we need to expand it by factor 128/127 + // then we divide by 32 with a floor to get the number of leaves that are fully padding + uint256 paddingLeafs = (128 * padding) / 127 >> 5; + // 1<= 0x80) { + data[offset++] = bytes1(uint8(value) | 0x80); + value >>= 7; + } + 
data[offset++] = bytes1(uint8(value)); + return offset; + } + + // Helper function to calculate the length of a uvarint + function _uvarintLength(uint256 value) internal pure returns (uint256) { + uint256 length = 1; + while (value >= 0x80) { + value >>= 7; + length++; + } + return length; + } + + // Helper function reading uvarints <= 256 bits + // returns (value, offset) with offset advanced to the following byte + function _readUvarint(bytes memory data, uint256 offset) internal pure returns (uint256, uint256) { + uint256 i = 0; + uint256 value = uint256(uint8(data[offset])) & 0x7F; + while (data[offset + i] >= 0x80) { + i++; + value = value | uint256(uint8(data[offset + i]) & 0x7F) << (i * 7); + } + i++; + return (value, offset + i); + } +} diff --git a/packages/pdp/src/ERC1967Proxy.sol b/packages/pdp/src/ERC1967Proxy.sol new file mode 100644 index 00000000..e9296f3c --- /dev/null +++ b/packages/pdp/src/ERC1967Proxy.sol @@ -0,0 +1,12 @@ +// SPDX-License-Identifier: MIT +// OpenZeppelin Contracts (last updated v5.0.0) (proxy/ERC1967/ERC1967Proxy.sol) + +pragma solidity ^0.8.20; + +import {ERC1967Proxy} from "@openzeppelin/contracts/proxy/ERC1967/ERC1967Proxy.sol"; + +// This contract is a thin wrapper around the OpenZeppelin ERC1967Proxy. +// It exists for ease of deployment of PDP contracts. +contract MyERC1967Proxy is ERC1967Proxy { + constructor(address _implementation, bytes memory _data) ERC1967Proxy(_implementation, _data) {} +} diff --git a/packages/pdp/src/Fees.sol b/packages/pdp/src/Fees.sol new file mode 100644 index 00000000..8b80600a --- /dev/null +++ b/packages/pdp/src/Fees.sol @@ -0,0 +1,37 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.20; + +/// @title PDPFees +/// @notice A library for calculating fees for the PDP. 
library PDPFees {
    uint256 constant ATTO_FIL = 1;
    uint256 constant FIL_TO_ATTO_FIL = 1e18 * ATTO_FIL;

    // 0.1 FIL
    uint256 constant SYBIL_FEE = FIL_TO_ATTO_FIL / 10;

    // Default FIL-based proof fee: 0.00023 FIL per TiB (used for initialization)
    // Based on: 0.00067 USD per TiB / 2.88 USD per FIL = 0.00023 FIL per TiB
    uint96 constant DEFAULT_FEE_PER_TIB = 230000 gwei; // 0.00023 FIL in attoFIL

    // 1 TiB in bytes (2^40)
    uint256 constant TIB_IN_BYTES = 2 ** 40;

    /// @notice Calculates the proof fee based on the dataset size and a provided per-TiB fee.
    /// @param rawSize The raw size of the proof in bytes.
    /// @param feePerTiB The fee rate per TiB in AttoFIL (source of truth lives in PDPVerifier).
    /// @return proof fee in AttoFIL
    /// @dev The proof fee is calculated as: fee_perTiB * datasetSize_in_TiB
    function calculateProofFee(uint256 rawSize, uint96 feePerTiB) internal pure returns (uint256) {
        require(rawSize > 0, "failed to validate: raw size must be greater than 0");

        // Widen feePerTiB before multiplying; dividing by TIB_IN_BYTES (2**40)
        // is equivalent to the shift-by-40 formulation for unsigned values.
        uint256 scaledFee = uint256(feePerTiB) * rawSize;
        return scaledFee / TIB_IN_BYTES;
    }

    // sybil fee adds cost to adding state to the pdp verifier contract to prevent
    // wasteful state growth.
0.1 FIL + function sybilFee() internal pure returns (uint256) { + return SYBIL_FEE; + } +} diff --git a/packages/pdp/src/IPDPProvingSchedule.sol b/packages/pdp/src/IPDPProvingSchedule.sol new file mode 100644 index 00000000..cc6ee1dd --- /dev/null +++ b/packages/pdp/src/IPDPProvingSchedule.sol @@ -0,0 +1,30 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.20; + +/// @title IPDPProvingSchedule +/// @notice Interface for PDP Service SLA specifications +interface IPDPProvingSchedule { + /** + * @notice Returns PDP configuration values + * @return maxProvingPeriod Maximum number of epochs between proofs + * @return challengeWindow Number of epochs for the challenge window + * @return challengesPerProof Number of challenges required per proof + * @return initChallengeWindowStart Initial challenge window start for new data sets assuming proving period starts now + */ + function getPDPConfig() + external + view + returns ( + uint64 maxProvingPeriod, + uint256 challengeWindow, + uint256 challengesPerProof, + uint256 initChallengeWindowStart + ); + + /** + * @notice Returns the start of the next challenge window for a data set + * @param setId The ID of the data set + * @return The block number when the next challenge window starts + */ + function nextPDPChallengeWindowStart(uint256 setId) external view returns (uint256); +} diff --git a/packages/pdp/src/PDPVerifier.sol b/packages/pdp/src/PDPVerifier.sol new file mode 100644 index 00000000..8def51ef --- /dev/null +++ b/packages/pdp/src/PDPVerifier.sol @@ -0,0 +1,914 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.20; + +import {BitOps} from "./BitOps.sol"; +import {Cids} from "./Cids.sol"; +import {MerkleVerify} from "./Proofs.sol"; +import {PDPFees} from "./Fees.sol"; +import {ERC1967Utils} from "@openzeppelin/contracts/proxy/ERC1967/ERC1967Utils.sol"; +import {Initializable} from "@openzeppelin/contracts-upgradeable/proxy/utils/Initializable.sol"; +import {UUPSUpgradeable} 
from "@openzeppelin/contracts-upgradeable/proxy/utils/UUPSUpgradeable.sol"; +import {OwnableUpgradeable} from "@openzeppelin/contracts-upgradeable/access/OwnableUpgradeable.sol"; +import {FVMPay} from "fvm-solidity/FVMPay.sol"; +import {FVMRandom} from "fvm-solidity/FVMRandom.sol"; +import {IPDPTypes} from "./interfaces/IPDPTypes.sol"; + +/// @title PDPListener +/// @notice Interface for PDP Service applications managing data storage. +/// @dev This interface exists to provide an extensible hook for applications to use the PDP verification contract +/// to implement data storage applications. +interface PDPListener { + function dataSetCreated(uint256 dataSetId, address creator, bytes calldata extraData) external; + function dataSetDeleted(uint256 dataSetId, uint256 deletedLeafCount, bytes calldata extraData) external; + function piecesAdded(uint256 dataSetId, uint256 firstAdded, Cids.Cid[] memory pieceData, bytes calldata extraData) + external; + function piecesScheduledRemove(uint256 dataSetId, uint256[] memory pieceIds, bytes calldata extraData) external; + // Note: extraData not included as proving messages conceptually always originate from the SP + function possessionProven(uint256 dataSetId, uint256 challengedLeafCount, uint256 seed, uint256 challengeCount) + external; + function nextProvingPeriod(uint256 dataSetId, uint256 challengeEpoch, uint256 leafCount, bytes calldata extraData) + external; + /// @notice Called when data set storage provider is changed in PDPVerifier. 
+ function storageProviderChanged( + uint256 dataSetId, + address oldStorageProvider, + address newStorageProvider, + bytes calldata extraData + ) external; +} + +uint256 constant NEW_DATA_SET_SENTINEL = 0; + +contract PDPVerifier is Initializable, UUPSUpgradeable, OwnableUpgradeable { + // Constants + uint256 public constant MAX_PIECE_SIZE_LOG2 = 50; + uint256 public constant MAX_ENQUEUED_REMOVALS = 2000; + uint256 public constant EXTRA_DATA_MAX_SIZE = 2048; + uint256 public constant NO_CHALLENGE_SCHEDULED = 0; + uint256 public constant NO_PROVEN_EPOCH = 0; + + // Events + event DataSetCreated(uint256 indexed setId, address indexed storageProvider); + event StorageProviderChanged( + uint256 indexed setId, address indexed oldStorageProvider, address indexed newStorageProvider + ); + event DataSetDeleted(uint256 indexed setId, uint256 deletedLeafCount); + event DataSetEmpty(uint256 indexed setId); + + event PiecesAdded(uint256 indexed setId, uint256[] pieceIds, Cids.Cid[] pieceCids); + event PiecesRemoved(uint256 indexed setId, uint256[] pieceIds); + + event ProofFeePaid(uint256 indexed setId, uint256 fee); + event FeeUpdateProposed(uint256 currentFee, uint256 newFee, uint256 effectiveTime); + + event PossessionProven(uint256 indexed setId, IPDPTypes.PieceIdAndOffset[] challenges); + event NextProvingPeriod(uint256 indexed setId, uint256 challengeEpoch, uint256 leafCount); + + // Types + // State fields + /* + A data set is the metadata required for tracking data for proof of possession. + It maintains a list of CIDs of data to be proven and metadata needed to + add and remove data to the set and prove possession efficiently. 
+ + ** logical structure of the data set** + /* + struct DataSet { + Cid[] pieces; + uint256[] leafCounts; + uint256[] sumTree; + uint256 leafCount; + address storageProvider; + address proposed storageProvider; + nextPieceID uint64; + nextChallengeEpoch: uint64; + listenerAddress: address; + challengeRange: uint256 + enqueuedRemovals: uint256[] + } + ** PDP Verifier contract tracks many possible data sets ** + []DataSet dataSets + + To implement this logical structure in the solidity data model we have + arrays tracking the singleton fields and two dimensional arrays + tracking linear data set data. The first index is the data set id + and the second index if any is the index of the data in the array. + + Invariant: pieceCids.length == pieceLeafCount.length == sumTreeCounts.length + */ + + // Network epoch delay between last proof of possession and next + // randomness sampling for challenge generation. + // + // The purpose of this delay is to prevent SPs from biasing randomness by running forking attacks. + // Given a small enough challengeFinality an SP can run several trials of challenge sampling and + // fork around samples that don't suit them, grinding the challenge randomness. + // For the filecoin L1, a safe value is 150 using the same analysis setting 150 epochs between + // PoRep precommit and PoRep provecommit phases. + // + // We keep this around for future portability to a variety of environments with different assumptions + // behind their challenge randomness sampling methods. + uint256 challengeFinality; + + // TODO PERF: https://github.com/FILCAT/pdp/issues/16#issuecomment-2329838769 + uint64 nextDataSetId; + // The CID of each piece. Pieces and all their associated data can be appended and removed but not modified. + mapping(uint256 => mapping(uint256 => Cids.Cid)) pieceCids; + // The leaf count of each piece + mapping(uint256 => mapping(uint256 => uint256)) pieceLeafCounts; + // The sum tree array for finding the piece id of a given leaf index. 
+ mapping(uint256 => mapping(uint256 => uint256)) sumTreeCounts; + mapping(uint256 => uint256) nextPieceId; + // The number of leaves (32 byte chunks) in the data set when tallying up all pieces. + // This includes the leaves in pieces that have been added but are not yet eligible for proving. + mapping(uint256 => uint256) dataSetLeafCount; + // The epoch for which randomness is sampled for challenge generation while proving possession this proving period. + mapping(uint256 => uint256) nextChallengeEpoch; + // Each data set notifies a configurable listener to implement extensible applications managing data storage. + mapping(uint256 => address) dataSetListener; + // The first index that is not challenged in prove possession calls this proving period. + // Updated to include the latest added leaves when starting the next proving period. + mapping(uint256 => uint256) challengeRange; + // Enqueued piece ids for removal when starting the next proving period + mapping(uint256 => uint256[]) scheduledRemovals; + // storage provider of data set is initialized upon creation to create message sender + // storage provider has exclusive permission to add and remove pieces and delete the data set + mapping(uint256 => address) storageProvider; + mapping(uint256 => address) dataSetProposedStorageProvider; + mapping(uint256 => uint256) dataSetLastProvenEpoch; + + // Packed fee status + struct FeeStatus { + uint96 currentFeePerTiB; + uint96 nextFeePerTiB; + uint64 transitionTime; + } + + FeeStatus private feeStatus; + + // Methods + + /// @custom:oz-upgrades-unsafe-allow constructor + constructor() { + _disableInitializers(); + } + + function initialize(uint256 _challengeFinality) public initializer { + __Ownable_init(msg.sender); + __UUPSUpgradeable_init(); + challengeFinality = _challengeFinality; + nextDataSetId = 1; // Data sets start at 1 + feeStatus.nextFeePerTiB = PDPFees.DEFAULT_FEE_PER_TIB; + } + + string public constant VERSION = "2.2.1"; + + event ContractUpgraded(string 
version, address implementation); + + function migrate() external onlyOwner reinitializer(2) { + emit ContractUpgraded(VERSION, ERC1967Utils.getImplementation()); + } + + function _authorizeUpgrade(address newImplementation) internal override onlyOwner {} + + function burnFee(uint256 amount) internal { + require(msg.value >= amount, "Incorrect fee amount"); + bool success = FVMPay.burn(amount); + require(success, "Burn failed"); + } + + // Validates msg.value meets sybil fee requirement and burns the fee. + // Returns the sybil fee amount for later refund calculation. + function _validateAndBurnSybilFee() internal returns (uint256 sybilFee) { + sybilFee = PDPFees.sybilFee(); + require(msg.value >= sybilFee, "sybil fee not met"); + burnFee(sybilFee); + } + + // Refunds any amount sent over the sybil fee back to msg.sender. + // Must be called after all state changes to avoid re-entrancy issues. + function _refundExcessSybilFee(uint256 sybilFee) internal { + if (msg.value > sybilFee) { + (bool success,) = msg.sender.call{value: msg.value - sybilFee}(""); + require(success, "Transfer failed."); + } + } + + // Returns the current challenge finality value + function getChallengeFinality() public view returns (uint256) { + return challengeFinality; + } + + // Returns the next data set ID + function getNextDataSetId() public view returns (uint64) { + return nextDataSetId; + } + + // Returns false if the data set is 1) not yet created 2) deleted + function dataSetLive(uint256 setId) public view returns (bool) { + return setId < nextDataSetId && storageProvider[setId] != address(0); + } + + // Returns false if the data set is not live or if the piece id is 1) not yet created 2) deleted + function pieceLive(uint256 setId, uint256 pieceId) public view returns (bool) { + return dataSetLive(setId) && pieceId < nextPieceId[setId] && pieceLeafCounts[setId][pieceId] > 0; + } + + // Returns false if the piece is not live or if the piece id is not yet in challenge range + function 
pieceChallengable(uint256 setId, uint256 pieceId) public view returns (bool) { + uint256 top = 256 - BitOps.clz(nextPieceId[setId]); + IPDPTypes.PieceIdAndOffset memory ret = findOnePieceId(setId, challengeRange[setId] - 1, top); + require( + ret.offset == pieceLeafCounts[setId][ret.pieceId] - 1, + "challengeRange -1 should align with the very last leaf of a piece" + ); + return pieceLive(setId, pieceId) && pieceId <= ret.pieceId; + } + + // Returns the leaf count of a data set + function getDataSetLeafCount(uint256 setId) public view returns (uint256) { + require(dataSetLive(setId), "Data set not live"); + return dataSetLeafCount[setId]; + } + + // Returns the next piece ID for a data set + function getNextPieceId(uint256 setId) public view returns (uint256) { + require(dataSetLive(setId), "Data set not live"); + return nextPieceId[setId]; + } + + // Returns the next challenge epoch for a data set + function getNextChallengeEpoch(uint256 setId) public view returns (uint256) { + require(dataSetLive(setId), "Data set not live"); + return nextChallengeEpoch[setId]; + } + + // Returns the listener address for a data set + function getDataSetListener(uint256 setId) public view returns (address) { + require(dataSetLive(setId), "Data set not live"); + return dataSetListener[setId]; + } + + // Returns the storage provider of a data set and the proposed storage provider if any + function getDataSetStorageProvider(uint256 setId) public view returns (address, address) { + require(dataSetLive(setId), "Data set not live"); + return (storageProvider[setId], dataSetProposedStorageProvider[setId]); + } + + function getDataSetLastProvenEpoch(uint256 setId) public view returns (uint256) { + require(dataSetLive(setId), "Data set not live"); + return dataSetLastProvenEpoch[setId]; + } + + // Returns the piece CID for a given data set and piece ID + function getPieceCid(uint256 setId, uint256 pieceId) public view returns (Cids.Cid memory) { + require(dataSetLive(setId), "Data set not 
live"); + return pieceCids[setId][pieceId]; + } + + // Returns the piece leaf count for a given data set and piece ID + function getPieceLeafCount(uint256 setId, uint256 pieceId) public view returns (uint256) { + require(dataSetLive(setId), "Data set not live"); + return pieceLeafCounts[setId][pieceId]; + } + + // Returns the index of the most recently added leaf that is challengeable in the current proving period + function getChallengeRange(uint256 setId) public view returns (uint256) { + require(dataSetLive(setId), "Data set not live"); + return challengeRange[setId]; + } + + // Returns the piece ids of the pieces scheduled for removal at the start of the next proving period + function getScheduledRemovals(uint256 setId) public view returns (uint256[] memory) { + require(dataSetLive(setId), "Data set not live"); + uint256[] storage removals = scheduledRemovals[setId]; + uint256[] memory result = new uint256[](removals.length); + for (uint256 i = 0; i < removals.length; i++) { + result[i] = removals[i]; + } + return result; + } + + /** + * @notice Returns the count of active pieces (non-zero leaf count) for a data set + * @param setId The data set ID + * @return activeCount The number of active pieces in the data set + */ + function getActivePieceCount(uint256 setId) public view returns (uint256 activeCount) { + require(dataSetLive(setId), "Data set not live"); + + uint256 maxPieceId = nextPieceId[setId]; + for (uint256 i = 0; i < maxPieceId; i++) { + if (pieceLeafCounts[setId][i] > 0) { + activeCount++; + } + } + } + + /** + * @notice Returns active pieces (non-zero leaf count) for a data set with pagination + * @param setId The data set ID + * @param offset Starting index for pagination (0-based) + * @param limit Maximum number of pieces to return + * @return pieces Array of active piece CIDs + * @return pieceIds Array of corresponding piece IDs + * @return rawSizes Array of raw sizes for each piece (in bytes) + * @return hasMore True if there are more pieces 
beyond this page + */ + function getActivePieces(uint256 setId, uint256 offset, uint256 limit) + public + view + returns (Cids.Cid[] memory pieces, uint256[] memory pieceIds, uint256[] memory rawSizes, bool hasMore) + { + require(dataSetLive(setId), "Data set not live"); + require(limit > 0, "Limit must be greater than 0"); + + // Single pass: collect data and check for more + uint256 maxPieceId = nextPieceId[setId]; + + // Over-allocate arrays to limit size + Cids.Cid[] memory tempPieces = new Cids.Cid[](limit); + uint256[] memory tempPieceIds = new uint256[](limit); + uint256[] memory tempRawSizes = new uint256[](limit); + + uint256 activeCount = 0; + uint256 resultIndex = 0; + + for (uint256 i = 0; i < maxPieceId; i++) { + if (pieceLeafCounts[setId][i] > 0) { + if (activeCount >= offset && resultIndex < limit) { + tempPieces[resultIndex] = pieceCids[setId][i]; + tempPieceIds[resultIndex] = i; + tempRawSizes[resultIndex] = pieceLeafCounts[setId][i] * 32; + resultIndex++; + } else if (activeCount >= offset + limit) { + // Found at least one more active piece beyond our limit + hasMore = true; + break; + } + activeCount++; + } + } + + // Handle case where we found fewer items than limit + if (resultIndex == 0) { + // No items found + return (new Cids.Cid[](0), new uint256[](0), new uint256[](0), false); + } else if (resultIndex < limit) { + // Found fewer items than limit - need to resize arrays + pieces = new Cids.Cid[](resultIndex); + pieceIds = new uint256[](resultIndex); + rawSizes = new uint256[](resultIndex); + + for (uint256 i = 0; i < resultIndex; i++) { + pieces[i] = tempPieces[i]; + pieceIds[i] = tempPieceIds[i]; + rawSizes[i] = tempRawSizes[i]; + } + } else { + // Found exactly limit items - use temp arrays directly + pieces = tempPieces; + pieceIds = tempPieceIds; + rawSizes = tempRawSizes; + } + } + + // storage provider proposes new storage provider. 
If the storage provider proposes themself delete any outstanding proposed storage provider + function proposeDataSetStorageProvider(uint256 setId, address newStorageProvider) public { + require(dataSetLive(setId), "Data set not live"); + address currentStorageProvider = storageProvider[setId]; + require( + currentStorageProvider == msg.sender, "Only the current storage provider can propose a new storage provider" + ); + if (currentStorageProvider == newStorageProvider) { + // If the storage provider proposes themself delete any outstanding proposed storage provider + delete dataSetProposedStorageProvider[setId]; + } else { + dataSetProposedStorageProvider[setId] = newStorageProvider; + } + } + + function claimDataSetStorageProvider(uint256 setId, bytes calldata extraData) public { + require(dataSetLive(setId), "Data set not live"); + require( + dataSetProposedStorageProvider[setId] == msg.sender, + "Only the proposed storage provider can claim storage provider role" + ); + address oldStorageProvider = storageProvider[setId]; + storageProvider[setId] = msg.sender; + delete dataSetProposedStorageProvider[setId]; + emit StorageProviderChanged(setId, oldStorageProvider, msg.sender); + address listenerAddr = dataSetListener[setId]; + if (listenerAddr != address(0)) { + PDPListener(listenerAddr).storageProviderChanged(setId, oldStorageProvider, msg.sender, extraData); + } + } + + // Internal helper to create a new data set and initialize its state. + // Returns the newly created data set ID. 
+ function _createDataSet(address listenerAddr, bytes memory extraData) internal returns (uint256) { + uint256 setId = nextDataSetId++; + dataSetLeafCount[setId] = 0; + nextChallengeEpoch[setId] = NO_CHALLENGE_SCHEDULED; // initialized on first call to NextProvingPeriod + storageProvider[setId] = msg.sender; + dataSetListener[setId] = listenerAddr; + dataSetLastProvenEpoch[setId] = NO_PROVEN_EPOCH; + + if (listenerAddr != address(0)) { + PDPListener(listenerAddr).dataSetCreated(setId, msg.sender, extraData); + } + emit DataSetCreated(setId, msg.sender); + + return setId; + } + + // Creates a new empty data set with no pieces. Pieces can be added later via addPieces(). + // This is the simpler alternative to creating and adding pieces atomically via addPieces(NEW_DATA_SET_SENTINEL, ...). + // + // Parameters: + // - listenerAddr: Address of PDPListener contract to receive callbacks (can be address(0) for no listener) + // - extraData: Arbitrary bytes passed to listener's dataSetCreated callback + // - msg.value: Must include sybil fee (PDPFees.sybilFee()), excess is refunded + // + // Returns: The newly created data set ID + // + // Only the storage provider (msg.sender) can call this function. + function createDataSet(address listenerAddr, bytes calldata extraData) public payable returns (uint256) { + require(extraData.length <= EXTRA_DATA_MAX_SIZE, "Extra data too large"); + uint256 sybilFee = _validateAndBurnSybilFee(); + + uint256 setId = _createDataSet(listenerAddr, extraData); + + _refundExcessSybilFee(sybilFee); + return setId; + } + + // Removes a data set. Must be called by the storage provider. 
    function deleteDataSet(uint256 setId, bytes calldata extraData) public {
        require(extraData.length <= EXTRA_DATA_MAX_SIZE, "Extra data too large");
        if (setId >= nextDataSetId) {
            revert("data set id out of bounds");
        }

        require(storageProvider[setId] == msg.sender, "Only the storage provider can delete data sets");
        // Capture the leaf count before zeroing state; it is reported to the
        // listener and event below. Zeroing storageProvider marks the set not live.
        uint256 deletedLeafCount = dataSetLeafCount[setId];
        dataSetLeafCount[setId] = 0;
        storageProvider[setId] = address(0);
        nextChallengeEpoch[setId] = 0;
        dataSetLastProvenEpoch[setId] = NO_PROVEN_EPOCH;

        // External listener callback runs after all state updates.
        address listenerAddr = dataSetListener[setId];
        if (listenerAddr != address(0)) {
            PDPListener(listenerAddr).dataSetDeleted(setId, deletedLeafCount, extraData);
        }
        emit DataSetDeleted(setId, deletedLeafCount);
    }

    // Appends pieces to a data set. Optionally creates a new data set if setId == 0.
    // These pieces won't be challenged until the next proving period is started by calling nextProvingPeriod.
    //
    // Two modes of operation:
    // 1. Add to existing data set:
    //    - setId: ID of the existing data set
    //    - listenerAddr: must be address(0)
    //    - extraData: arbitrary bytes passed to the listener's piecesAdded callback
    //    - msg.value: must be 0 (no fee required)
    //    - Returns: first piece ID added
    //
    // 2. Create new data set and add pieces (atomic operation):
    //    - setId: must be NEW_DATA_SET_SENTINEL (0)
    //    - listenerAddr: listener contract address (required, cannot be address(0))
    //    - pieceData: array of pieces to add (can be empty to create empty data set)
    //    - extraData: abi.encode(bytes createPayload, bytes addPayload) where:
    //      - createPayload: passed to listener's dataSetCreated callback
    //      - addPayload: passed to listener's piecesAdded callback (if pieces added)
    //    - msg.value: must include sybil fee (PDPFees.sybilFee()), excess is refunded
    //    - Returns: the newly created data set ID
    //
    // Only the storage provider can call this function.
    function addPieces(uint256 setId, address listenerAddr, Cids.Cid[] calldata pieceData, bytes calldata extraData)
        public
        payable
        returns (uint256)
    {
        if (setId == NEW_DATA_SET_SENTINEL) {
            // Create-and-add mode: extraData carries two payloads, one for each callback.
            (bytes memory createPayload, bytes memory addPayload) = abi.decode(extraData, (bytes, bytes));

            require(createPayload.length <= EXTRA_DATA_MAX_SIZE, "Extra data too large");
            uint256 sybilFee = _validateAndBurnSybilFee();

            require(listenerAddr != address(0), "listener required for new dataset");
            uint256 newSetId = _createDataSet(listenerAddr, createPayload);

            // Add pieces to the newly created data set (if any)
            if (pieceData.length > 0) {
                _addPiecesToDataSet(newSetId, pieceData, addPayload);
            }

            _refundExcessSybilFee(sybilFee);
            return newSetId;
        } else {
            // Adding to an existing set; no fee should be sent and listenerAddr must be zero
            require(listenerAddr == address(0), "listener must be zero for existing dataset");
            require(msg.value == 0, "no fee on add to existing dataset");

            require(dataSetLive(setId), "Data set not live");
            require(storageProvider[setId] == msg.sender, "Only the storage provider can add pieces");

            return _addPiecesToDataSet(setId, pieceData, extraData);
        }
    }

    // Internal function to add pieces to a data set and handle events/listeners.
    // Returns the piece ID assigned to the first piece in the batch; the batch
    // occupies consecutive IDs starting there.
    function _addPiecesToDataSet(uint256 setId, Cids.Cid[] calldata pieceData, bytes memory extraData)
        internal
        returns (uint256 firstAdded)
    {
        require(extraData.length <= EXTRA_DATA_MAX_SIZE, "Extra data too large");
        uint256 nPieces = pieceData.length;
        require(nPieces > 0, "Must add at least one piece");

        firstAdded = nextPieceId[setId];
        uint256[] memory pieceIds = new uint256[](nPieces);
        Cids.Cid[] memory pieceCidsAdded = new Cids.Cid[](nPieces);

        for (uint256 i = 0; i < nPieces; i++) {
            addOnePiece(setId, i, pieceData[i]);
            pieceIds[i] = firstAdded + i;
            pieceCidsAdded[i] = pieceData[i];
        }

        emit PiecesAdded(setId, pieceIds, pieceCidsAdded);

        // Listener callback runs after all state updates and the event.
        address listenerAddr = dataSetListener[setId];
        if (listenerAddr != address(0)) {
            PDPListener(listenerAddr).piecesAdded(setId, firstAdded, pieceData, extraData);
        }
    }

    // Error carrying the index of the offending piece within a batched call.
    error IndexedError(uint256 idx, string msg);

    // Validates a single piece CID and records it: updates the sum tree, the
    // per-piece CID and leaf-count maps, and the data set's total leaf count.
    // callIdx is only used to attribute a revert to the batch position.
    function addOnePiece(uint256 setId, uint256 callIdx, Cids.Cid calldata piece) internal returns (uint256) {
        (uint256 padding, uint8 height,) = Cids.validateCommPv2(piece);
        if (Cids.isPaddingExcessive(padding, height)) {
            revert IndexedError(callIdx, "Padding is too large");
        }
        if (height > MAX_PIECE_SIZE_LOG2) {
            revert IndexedError(callIdx, "Piece size must be less than 2^50");
        }

        uint256 leafCount = Cids.leafCount(padding, height);
        uint256 pieceId = nextPieceId[setId]++;
        sumTreeAdd(setId, leafCount, pieceId);
        pieceCids[setId][pieceId] = piece;
        pieceLeafCounts[setId][pieceId] = leafCount;
        dataSetLeafCount[setId] += leafCount;
        return pieceId;
    }

    // schedulePieceDeletions schedules deletion of a batch of pieces from a data set for the start of the next
    // proving period. It must be called by the storage provider.
    function schedulePieceDeletions(uint256 setId, uint256[] calldata pieceIds, bytes calldata extraData) public {
        require(extraData.length <= EXTRA_DATA_MAX_SIZE, "Extra data too large");
        require(dataSetLive(setId), "Data set not live");
        require(storageProvider[setId] == msg.sender, "Only the storage provider can schedule removal of pieces");
        // Cap the queue so nextProvingPeriod's drain loop stays bounded.
        require(
            pieceIds.length + scheduledRemovals[setId].length <= MAX_ENQUEUED_REMOVALS,
            "Too many removals wait for next proving period to schedule"
        );

        for (uint256 i = 0; i < pieceIds.length; i++) {
            require(pieceIds[i] < nextPieceId[setId], "Can only schedule removal of existing pieces");
            scheduledRemovals[setId].push(pieceIds[i]);
        }

        address listenerAddr = dataSetListener[setId];
        if (listenerAddr != address(0)) {
            PDPListener(listenerAddr).piecesScheduledRemove(setId, pieceIds, extraData);
        }
    }

    // Verifies and records that the provider proved possession of the
    // data set Merkle pieces at some epoch. The challenge seed is determined
    // by the epoch of the previous proof of possession.
    function provePossession(uint256 setId, IPDPTypes.Proof[] calldata proofs) public payable {
        uint256 nProofs = proofs.length;
        require(msg.sender == storageProvider[setId], "Only the storage provider can prove possession");
        require(nProofs > 0, "empty proof");
        {
            uint256 challengeEpoch = nextChallengeEpoch[setId];
            require(block.number >= challengeEpoch, "premature proof");
            require(challengeEpoch != NO_CHALLENGE_SCHEDULED, "no challenge scheduled");
        }

        IPDPTypes.PieceIdAndOffset[] memory challenges = new IPDPTypes.PieceIdAndOffset[](proofs.length);

        uint256 seed = drawChallengeSeed(setId);
        {
            uint256 leafCount = challengeRange[setId];
            uint256 sumTreeTop = 256 - BitOps.clz(nextPieceId[setId]);
            for (uint64 i = 0; i < nProofs; i++) {
                // Hash (SHA3) the seed, data set id, and proof index to create challenge.
                // Note -- there is a slight deviation here from the uniform distribution.
                // Some leaves are challenged with probability p and some have probability p + deviation.
                // This deviation is bounded by leafCount / 2^256 given a 256 bit hash.
                // Deviation grows with data set leaf count.
                // Assuming a 1000EiB = 1 ZiB network size ~ 2^70 bytes of data or 2^65 leaves
                // This deviation is bounded by 2^65 / 2^256 = 2^-191 which is negligible.
                // If modifying this code to use a hash function with smaller output size
                // this deviation will increase and caution is advised.
                // To remove this deviation we could use the standard solution of rejection sampling
                // This is complicated and slightly more costly at one more hash on average for maximally misaligned data sets
                // and comes at no practical benefit given how small the deviation is.
                bytes memory payload = abi.encodePacked(seed, setId, i);
                uint256 challengeIdx = uint256(keccak256(payload)) % leafCount;

                // Find the piece that has this leaf, and the offset of the leaf within that piece.
                challenges[i] = findOnePieceId(setId, challengeIdx, sumTreeTop);
                Cids.Cid memory pieceCid = getPieceCid(setId, challenges[i].pieceId);
                bytes32 pieceHash = Cids.digestFromCid(pieceCid);
                uint8 pieceHeight = Cids.heightFromCid(pieceCid) + 1; // because MerkleVerify.verify assumes that base layer is 1
                bool ok =
                    MerkleVerify.verify(proofs[i].proof, pieceHash, proofs[i].leaf, challenges[i].offset, pieceHeight);
                require(ok, "proof did not verify");
            }
        }

        // Note: We don't want to include gas spent on the listener call in the fee calculation
        // to only account for proof verification fees and avoid gamability by getting the listener
        // to do extraneous work just to inflate the gas fee.
        //
        // NOTE(review): the remark below about adding 32 bytes to `callDataSize` appears
        // stale — no callDataSize variable exists in this function; confirm and remove.
        // (add 32 bytes to the `callDataSize` to also account for the `setId` calldata param)
        uint256 refund = calculateAndBurnProofFee(setId);

        {
            address listenerAddr = dataSetListener[setId];
            if (listenerAddr != address(0)) {
                PDPListener(listenerAddr).possessionProven(setId, dataSetLeafCount[setId], seed, proofs.length);
            }
        }

        dataSetLastProvenEpoch[setId] = block.number;
        emit PossessionProven(setId, challenges);

        // Return the overpayment after doing everything else to avoid re-entrancy issues (all state has been updated by this point). If this
        // call fails, the entire operation reverts.
        if (refund > 0) {
            bool success = FVMPay.pay(msg.sender, refund);
            require(success, "Transfer failed.");
        }
    }

    // View helper: the proof fee for a set's current challenge range
    // (32 bytes per challengeable leaf).
    function calculateProofFee(uint256 setId) public view returns (uint256) {
        uint256 rawSize = 32 * challengeRange[setId];
        return calculateProofFeeForSize(rawSize);
    }

    function calculateProofFeeForSize(uint256 rawSize) public view returns (uint256) {
        require(rawSize > 0, "failed to validate: raw size must be greater than 0");
        return PDPFees.calculateProofFee(rawSize, _currentFeePerTiB());
    }

    // Burns the proof fee out of msg.value and returns the remainder as a refund.
    function calculateAndBurnProofFee(uint256 setId) internal returns (uint256 refund) {
        uint256 rawSize = 32 * challengeRange[setId];
        uint256 proofFee = calculateProofFeeForSize(rawSize);

        burnFee(proofFee);
        emit ProofFeePaid(setId, proofFee);

        return msg.value - proofFee; // burnFee asserts that proofFee <= msg.value;
    }

    // Effective fee: the proposed fee once its transition time has passed,
    // otherwise the current fee.
    function _currentFeePerTiB() internal view returns (uint96) {
        return block.timestamp >= feeStatus.transitionTime ? feeStatus.nextFeePerTiB : feeStatus.currentFeePerTiB;
    }

    // Public getters for packed fee status
    function feePerTiB() public view returns (uint96) {
        return _currentFeePerTiB();
    }

    function proposedFeePerTiB() public view returns (uint96) {
        return feeStatus.nextFeePerTiB;
    }

    function feeEffectiveTime() public view returns (uint64) {
        return feeStatus.transitionTime;
    }

    function getRandomness(uint256 epoch) public view returns (uint256) {
        // Call the precompile
        return FVMRandom.getBeaconRandomness(epoch);
    }

    // Seed for challenge generation: beacon randomness at the scheduled challenge epoch.
    function drawChallengeSeed(uint256 setId) internal view returns (uint256) {
        return getRandomness(nextChallengeEpoch[setId]);
    }

    // Roll over to the next proving period
    //
    // This method updates the collection of provable pieces in the data set by
    // 1. Actually removing the pieces that have been scheduled for removal
    // 2. Updating the challenge range to now include leaves added in the last proving period
    // So after this method is called pieces scheduled for removal are no longer eligible for challenging
    // and can be deleted. And pieces added in the last proving period must be available for challenging.
    //
    // Additionally this method forces sampling of a new challenge. It enforces that the new
    // challenge epoch is at least `challengeFinality` epochs in the future.
    //
    // Note that this method can be called at any time but the pdpListener will likely consider it
    // a "fault" or other penalizeable behavior to call this method before calling provePossesion.
    function nextProvingPeriod(uint256 setId, uint256 challengeEpoch, bytes calldata extraData) public {
        require(extraData.length <= EXTRA_DATA_MAX_SIZE, "Extra data too large");
        require(msg.sender == storageProvider[setId], "only the storage provider can move to next proving period");
        require(dataSetLeafCount[setId] > 0, "can only start proving once leaves are added");

        // First call for this set: anchor the last-proven epoch at now.
        if (dataSetLastProvenEpoch[setId] == NO_PROVEN_EPOCH) {
            dataSetLastProvenEpoch[setId] = block.number;
        }

        // Take removed pieces out of proving set
        uint256[] storage removals = scheduledRemovals[setId];
        uint256 nRemovals = removals.length;
        if (nRemovals > 0) {
            uint256[] memory removalsToProcess = new uint256[](nRemovals);

            // Drain the queue back-to-front so each pop stays O(1).
            for (uint256 i = 0; i < nRemovals; i++) {
                removalsToProcess[i] = removals[removals.length - 1];
                removals.pop();
            }

            removePieces(setId, removalsToProcess);
            emit PiecesRemoved(setId, removalsToProcess);
        }

        // Bring added pieces into proving set
        challengeRange[setId] = dataSetLeafCount[setId];
        if (challengeEpoch < block.number + challengeFinality) {
            revert("challenge epoch must be at least challengeFinality epochs in the future");
        }
        nextChallengeEpoch[setId] = challengeEpoch;

        // Clear next challenge epoch if the set is now empty.
        // It will be re-set after new data is added and nextProvingPeriod is called.
        if (dataSetLeafCount[setId] == 0) {
            emit DataSetEmpty(setId);
            dataSetLastProvenEpoch[setId] = NO_PROVEN_EPOCH;
            nextChallengeEpoch[setId] = NO_CHALLENGE_SCHEDULED;
        }

        // Listener receives the post-reset nextChallengeEpoch value.
        address listenerAddr = dataSetListener[setId];
        if (listenerAddr != address(0)) {
            PDPListener(listenerAddr).nextProvingPeriod(
                setId, nextChallengeEpoch[setId], dataSetLeafCount[setId], extraData
            );
        }
        // NOTE(review): this event reports the caller-supplied challengeEpoch even when
        // the empty-set branch above reset nextChallengeEpoch to NO_CHALLENGE_SCHEDULED,
        // so event and listener can disagree — confirm this asymmetry is intentional.
        emit NextProvingPeriod(setId, challengeEpoch, dataSetLeafCount[setId]);
    }

    // removes pieces from a data set's state.
    function removePieces(uint256 setId, uint256[] memory pieceIds) internal {
        require(dataSetLive(setId), "Data set not live");
        // Accumulate per-piece leaf deltas and apply the total once.
        uint256 totalDelta = 0;
        for (uint256 i = 0; i < pieceIds.length; i++) {
            totalDelta += removeOnePiece(setId, pieceIds[i]);
        }
        dataSetLeafCount[setId] -= totalDelta;
    }

    // removeOnePiece removes a piece's array entries from the data sets state and returns
    // the number of leafs by which to reduce the total data set leaf count.
    function removeOnePiece(uint256 setId, uint256 pieceId) internal returns (uint256) {
        uint256 delta = pieceLeafCounts[setId][pieceId];
        sumTreeRemove(setId, pieceId, delta);
        delete pieceLeafCounts[setId][pieceId];
        delete pieceCids[setId][pieceId];
        return delta;
    }

    /* Sum tree functions */
    /*
    A sumtree is a variant of a Fenwick or binary indexed tree. It is a binary
    tree where each node is the sum of its children. It is designed to support
    efficient query and update operations on a base array of integers. Here
    the base array is the pieces leaf count array. Asymptotically the sum tree
    has logarithmic search and update functions. Each slot of the sum tree is
    logically a node in a binary tree.

    The node’s height from the leaf depth is defined as -1 + the ruler function
    (https://oeis.org/A001511 [0,1,0,2,0,1,0,3,0,1,0,2,0,1,0,4,...]) applied to
    the slot’s index + 1, i.e. the number of trailing 0s in the binary representation
    of the index + 1. Each slot in the sum tree array contains the sum of a range
    of the base array. The size of this range is defined by the height assigned
    to this slot in the binary tree structure of the sum tree, i.e. the value of
    the ruler function applied to the slot’s index. The range for height d and
    current index j is [j + 1 - 2^d : j] inclusive. For example if the node’s
    height is 0 its value is set to the base array’s value at the same index and
    if the node’s height is 3 then its value is set to the sum of the last 2^3 = 8
    values of the base array. The reason to do things with recursive partial sums
    is to accommodate O(log len(base array)) updates for add and remove operations
    on the base array.
    */

    // Perform sumtree addition
    //
    function sumTreeAdd(uint256 setId, uint256 count, uint256 pieceId) internal {
        uint256 index = pieceId;
        uint256 h = heightFromIndex(index);

        uint256 sum = count;
        // Sum BaseArray[j - 2^i] for i in [0, h)
        for (uint256 i = 0; i < h; i++) {
            uint256 j = index - (1 << i);
            sum += sumTreeCounts[setId][j];
        }
        sumTreeCounts[setId][pieceId] = sum;
    }

    // Perform sumtree removal
    //
    function sumTreeRemove(uint256 setId, uint256 index, uint256 delta) internal {
        uint256 top = uint256(256 - BitOps.clz(nextPieceId[setId]));
        uint256 h = uint256(heightFromIndex(index));

        // Deletion traversal either terminates at
        // 1) the top of the tree or
        // 2) the highest node right of the removal index
        while (h <= top && index < nextPieceId[setId]) {
            sumTreeCounts[setId][index] -= delta;
            index += 1 << h;
            h = heightFromIndex(index);
        }
    }

    // Perform sumtree find
    // Maps a global leaf index to the piece containing it plus the leaf's
    // offset within that piece. `top` is the tree depth to start the descent from.
    function findOnePieceId(uint256 setId, uint256 leafIndex, uint256 top)
        internal
        view
        returns (IPDPTypes.PieceIdAndOffset memory)
    {
        require(leafIndex < dataSetLeafCount[setId], "Leaf index out of bounds");
        uint256 searchPtr = (1 << top) - 1;
        uint256 acc = 0;

        // Binary search until we find the index of the sumtree leaf covering the index range
        uint256 candidate;
        for (uint256 h = top; h > 0; h--) {
            // Search has taken us past the end of the sumtree
            // Only option is to go left
            if (searchPtr >= nextPieceId[setId]) {
                searchPtr -= 1 << (h - 1);
                continue;
            }

            candidate = acc + sumTreeCounts[setId][searchPtr];
            // Go right
            if (candidate <= leafIndex) {
                acc += sumTreeCounts[setId][searchPtr];
                searchPtr += 1 << (h - 1);
            } else {
                // Go left
                searchPtr -= 1 << (h - 1);
            }
        }
        candidate = acc + sumTreeCounts[setId][searchPtr];
        if (candidate <= leafIndex) {
            // Choose right
            return IPDPTypes.PieceIdAndOffset(searchPtr + 1, leafIndex - candidate);
        } // Choose left
        return IPDPTypes.PieceIdAndOffset(searchPtr, leafIndex - acc);
    }

    // findPieceIds is a batched version of findOnePieceId
    function findPieceIds(uint256 setId, uint256[] calldata leafIndexs)
        public
        view
        returns (IPDPTypes.PieceIdAndOffset[] memory)
    {
        // The top of the sumtree is the largest power of 2 less than the number of pieces
        uint256 top = 256 - BitOps.clz(nextPieceId[setId]);
        IPDPTypes.PieceIdAndOffset[] memory result = new IPDPTypes.PieceIdAndOffset[](leafIndexs.length);
        for (uint256 i = 0; i < leafIndexs.length; i++) {
            result[i] = findOnePieceId(setId, leafIndexs[i], top);
        }
        return result;
    }

    // Return height of sumtree node at given index
    // Calculated by taking the trailing zeros of 1 plus the index
    function heightFromIndex(uint256 index) internal pure returns (uint256) {
        return BitOps.ctz(index + 1);
    }

    /// @notice Proposes a new proof fee with 7-day delay
    /// @param newFeePerTiB The new fee per TiB in AttoFIL
    function updateProofFee(uint256 newFeePerTiB) external onlyOwner {
        // Auto-commit any pending update that has reached its transition time
        if (block.timestamp >= feeStatus.transitionTime) {
            feeStatus.currentFeePerTiB = feeStatus.nextFeePerTiB;
        }
        feeStatus.nextFeePerTiB = uint96(newFeePerTiB);
        feeStatus.transitionTime = uint64(block.timestamp + 7 days);
        emit FeeUpdateProposed(feeStatus.currentFeePerTiB, newFeePerTiB, feeStatus.transitionTime);
    }
}
diff --git a/packages/pdp/src/Proofs.sol b/packages/pdp/src/Proofs.sol
new file mode 100644
index 00000000..0e15f92b
--- /dev/null
+++ b/packages/pdp/src/Proofs.sol
@@ -0,0 +1,217 @@
// SPDX-License-Identifier: MIT
// The verification functions are adapted from OpenZeppelin Contracts (last updated v5.0.0) (utils/cryptography/MerkleProof.sol)

pragma solidity ^0.8.20;

import {BitOps} from "./BitOps.sol";

/**
 * Functions for the generation and verification of Merkle proofs.
 * These are specialised to the hash function of SHA254 and implicitly balanced trees.
 *
 * Note that only the verification functions are intended to execute on-chain.
 * The commitment and proof generation functions are co-located for convenience and to function
 * as a specification for off-chain operations.
 */
library MerkleVerify {
    /**
     * Returns true if a `leaf` can be proved to be a part of a Merkle tree
     * defined by `root` at `position`. For this, a `proof` must be provided, containing
     * sibling hashes on the branch from the leaf to the root of the tree.
     *
     * Will only return true if the leaf is at the bottom of the tree for the given tree height
     *
     * This version handles proofs in memory.
     */
    function verify(bytes32[] memory proof, bytes32 root, bytes32 leaf, uint256 position, uint256 treeHeight)
        internal
        view
        returns (bool)
    {
        // Tree height includes root, proof does not
        require(proof.length == treeHeight - 1, "proof length does not match tree height");
        return processInclusionProofMemory(proof, leaf, position) == root;
    }

    /**
     * Returns the rebuilt hash obtained by traversing a Merkle tree up
     * from `leaf` at `position` using `proof`. A `proof` is valid if and only if the rebuilt
     * hash matches the root of the tree.
     *
     * This version handles proofs in memory.
     */
    function processInclusionProofMemory(bytes32[] memory proof, bytes32 leaf, uint256 position)
        internal
        view
        returns (bytes32)
    {
        bytes32 computedHash = leaf;
        for (uint256 i = 0; i < proof.length; i++) {
            // If position is even, the leaf/node is on the left and sibling is on the right.
            bytes32 sibling = proof[i];
            if (position % 2 == 0) {
                computedHash = Hashes.orderedHash(computedHash, sibling);
            } else {
                computedHash = Hashes.orderedHash(sibling, computedHash);
            }
            // Move up one level: the parent's position is the child's halved.
            position /= 2;
        }
        return computedHash;
    }

    /**
     * Returns the root of a Merkle tree of all zero leaves and specified height.
     * A height of zero returns zero (the leaf value).
     * A height of 1 returns the hash of two zero leaves.
     * A height of n returns the hash of two nodes of height n-1.
     * Height must be <= 50 (representing 2^50 leaves or 32EiB).
     */
    function zeroRoot(uint256 height) internal pure returns (bytes32) {
        require(height <= 50, "Height must be <= 50");
        // These roots were generated by code in Proots.t.sol.
        uint256[51] memory zeroRoots = [
            0x0000000000000000000000000000000000000000000000000000000000000000,
            0xf5a5fd42d16a20302798ef6ed309979b43003d2320d9f0e8ea9831a92759fb0b,
            0x3731bb99ac689f66eef5973e4a94da188f4ddcae580724fc6f3fd60dfd488333,
            0x642a607ef886b004bf2c1978463ae1d4693ac0f410eb2d1b7a47fe205e5e750f,
            0x57a2381a28652bf47f6bef7aca679be4aede5871ab5cf3eb2c08114488cb8526,
            0x1f7ac9595510e09ea41c460b176430bb322cd6fb412ec57cb17d989a4310372f,
            0xfc7e928296e516faade986b28f92d44a4f24b935485223376a799027bc18f833,
            0x08c47b38ee13bc43f41b915c0eed9911a26086b3ed62401bf9d58b8d19dff624,
            0xb2e47bfb11facd941f62af5c750f3ea5cc4df517d5c4f16db2b4d77baec1a32f,
            0xf9226160c8f927bfdcc418cdf203493146008eaefb7d02194d5e548189005108,
            0x2c1a964bb90b59ebfe0f6da29ad65ae3e417724a8f7c11745a40cac1e5e74011,
            0xfee378cef16404b199ede0b13e11b624ff9d784fbbed878d83297e795e024f02,
            0x8e9e2403fa884cf6237f60df25f83ee40dca9ed879eb6f6352d15084f5ad0d3f,
            0x752d9693fa167524395476e317a98580f00947afb7a30540d625a9291cc12a07,
            0x7022f60f7ef6adfa17117a52619e30cea82c68075adf1c667786ec506eef2d19,
            0xd99887b973573a96e11393645236c17b1f4c7034d723c7a99f709bb4da61162b,
            0xd0b530dbb0b4f25c5d2f2a28dfee808b53412a02931f18c499f5a254086b1326,
            0x84c0421ba0685a01bf795a2344064fe424bd52a9d24377b394ff4c4b4568e811,
            0x65f29e5d98d246c38b388cfc06db1f6b021303c5a289000bdce832a9c3ec421c,
            0xa2247508285850965b7e334b3127b0c042b1d046dc54402137627cd8799ce13a,
            0xdafdab6da9364453c26d33726b9fefe343be8f81649ec009aad3faff50617508,
            0xd941d5e0d6314a995c33ffbd4fbe69118d73d4e5fd2cd31f0f7c86ebdd14e706,
            0x514c435c3d04d349a5365fbd59ffc713629111785991c1a3c53af22079741a2f,
            0xad06853969d37d34ff08e09f56930a4ad19a89def60cbfee7e1d3381c1e71c37,
            0x39560e7b13a93b07a243fd2720ffa7cb3e1d2e505ab3629e79f46313512cda06,
            0xccc3c012f5b05e811a2bbfdd0f6833b84275b47bf229c0052a82484f3c1a5b3d,
            0x7df29b69773199e8f2b40b77919d048509eed768e2c7297b1f1437034fc3c62c,
            0x66ce05a3667552cf45c02bcc4e8392919bdeac35de2ff56271848e9f7b675107,
            0xd8610218425ab5e95b1ca6239d29a2e420d706a96f373e2f9c9a91d759d19b01,
            0x6d364b1ef846441a5a4a68862314acc0a46f016717e53443e839eedf83c2853c,
            0x077e5fde35c50a9303a55009e3498a4ebedff39c42b710b730d8ec7ac7afa63e,
            0xe64005a6bfe3777953b8ad6ef93f0fca1049b2041654f2a411f7702799cece02,
            0x259d3d6b1f4d876d1185e1123af6f5501af0f67cf15b5216255b7b178d12051d,
            0x3f9a4d411da4ef1b36f35ff0a195ae392ab23fee7967b7c41b03d1613fc29239,
            0xfe4ef328c61aa39cfdb2484eaa32a151b1fe3dfd1f96dd8c9711fd86d6c58113,
            0xf55d68900e2d8381eccb8164cb9976f24b2de0dd61a31b97ce6eb23850d5e819,
            0xaaaa8c4cb40aacee1e02dc65424b2a6c8e99f803b72f7929c4101d7fae6bff32,
            0xc91a84c057fd4afcc209c3b482360cf7493b9129fa164cd1fe6b045a683b5322,
            0x64a2c1df312ecb443b431946c02fe701514b5291091b888f03189bee8ea11416,
            0x739953434ead6e24f1d1bf5b68ca823b2692b3000a7806d08c76640da98c3526,
            0x771f5b63af6f7d1d515d134084d535f5f4d8ab8529b2c3f581f143f8cc38be2f,
            0x9031a15bf51550a85db1f64f4db739e01125478a50ee332bc2b4f6462214b20b,
            0xc83ba84710b74413f3be84a5466aff2d7f0c5472248ffbeb2266466a92ac4f12,
            0x2fe598945de393714c10f447cec237039b5944077a78e0a9811cf5f7a45abe1b,
            0x395355ae44754a5cde74898a3f2ef60d5871ab35019c610fc413a62d57646501,
            0x4bd4712084416c77eec00cab23416eda8c8dbf681c8ccd0b96c0be980a40d818,
            0xf6eeae7dee22146564155ebe4bdf633333401de68da4aa2a6e946c2363807a34,
            0x8b43a114ba1c1bb80781e85f87b0bbee11c69fdbbd2ed81d6c9b4c7859c04e34,
            0xf74dc344ee4fa47f07fb2732ad9443d94892ca8b53d006c9891a32ef2b74491e,
            0x6f5246ae0f965e5424162403d3ab81ef8d15439c5f3a49038488e3640ef98718,
            0x0b5b44ccf91ff135af58d2cf694b2ac99f22f5264863d6b9272b6155956aa10e
        ];
        return bytes32(zeroRoots[height]);
    }
}

library MerkleProve {
    // Builds a merkle tree from an array of leaves.
    // The tree is an array of arrays of bytes32.
    // The last array is the leaves, and each prior array is the result of the hash of pairs in the previous array.
    // An unpaired element is paired with the root of a tree of the same height with zero leaves.
    // The first element of the first array is the root.
    function buildTree(bytes32[] memory leaves) internal view returns (bytes32[][] memory) {
        require(leaves.length > 0, "Leaves array must not be empty");

        // Number of hashing levels above the leaves: ceil(log2(len)).
        uint256 levels = 256 - BitOps.clz(leaves.length - 1);
        bytes32[][] memory tree = new bytes32[][](levels + 1);
        tree[levels] = leaves;

        for (uint256 i = levels; i > 0; i--) {
            bytes32[] memory currentLevel = tree[i];
            uint256 nextLevelSize = (currentLevel.length + 1) / 2;
            tree[i - 1] = new bytes32[](nextLevelSize);

            for (uint256 j = 0; j < nextLevelSize; j++) {
                if (2 * j + 1 < currentLevel.length) {
                    tree[i - 1][j] = Hashes.orderedHash(currentLevel[2 * j], currentLevel[2 * j + 1]);
                } else {
                    // Pair final odd node with a zero-tree of same height.
                    tree[i - 1][j] = Hashes.orderedHash(currentLevel[2 * j], MerkleVerify.zeroRoot(levels - i));
                }
            }
        }

        return tree;
    }

    // Gets an inclusion proof from a Merkle tree for a leaf at a given index.
    // The proof is constructed by traversing up the tree to the root, and the sibling of each node is appended to the proof.
    // A final unpaired element in any level is paired with the zero-tree of the same height.
    // Every proof thus has length equal to the height of the tree minus 1.
    function buildProof(bytes32[][] memory tree, uint256 index) internal pure returns (bytes32[] memory) {
        require(index < tree[tree.length - 1].length, "Index out of bounds");

        bytes32[] memory proof = new bytes32[](tree.length - 1);
        uint256 proofIndex = 0;

        for (uint256 i = tree.length - 1; i > 0; i--) {
            uint256 levelSize = tree[i].length;
            uint256 pairIndex = index ^ 1; // XOR with 1 to get the pair index

            if (pairIndex < levelSize) {
                proof[proofIndex] = tree[i][pairIndex];
            } else {
                // Pair final odd node with zero-tree of same height.
                proof[proofIndex] = MerkleVerify.zeroRoot(tree.length - 1 - i);
            }
            proofIndex++;
            index /= 2; // Move to the parent node
        }
        return proof;
    }
}

library Hashes {
    // "The Sha254 functions are identical to Sha256 except that the last two bits of the Sha256 256-bit digest are zeroed out."
    // The bytes of uint256 are arranged in big-endian order, MSB first in memory.
    // The bits in each byte are arranged in little-endian order.
    // Thus, the "last two bits" are the first two bits of the last byte.
    uint256 constant SHA254_MASK = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF3F;

    /**
     * Order-dependent hash of pair of bytes32.
     */
    function orderedHash(bytes32 a, bytes32 b) internal view returns (bytes32) {
        return _efficientSHA254(a, b);
    }

    /**
     * Implementation equivalent to using sha256(abi.encode(a, b)) that doesn't allocate or expand memory.
     */
    function _efficientSHA254(bytes32 a, bytes32 b) private view returns (bytes32 value) {
        assembly ("memory-safe") {
            // Use scratch space (0x00-0x3f) for the two input words.
            mstore(0x00, a)
            mstore(0x20, b)

            // Call the SHA256 precompile
            if iszero(staticcall(gas(), 0x2, 0x00, 0x40, 0x00, 0x20)) { revert(0, 0) }

            value := mload(0x00)
            // SHA254 hash for compatibility with Filecoin piece commitments.
            value := and(value, SHA254_MASK)
        }
    }
}
diff --git a/packages/pdp/src/SimplePDPService.sol b/packages/pdp/src/SimplePDPService.sol
new file mode 100644
index 00000000..436caeaa
--- /dev/null
+++ b/packages/pdp/src/SimplePDPService.sol
@@ -0,0 +1,299 @@
// SPDX-License-Identifier: Apache-2.0 OR MIT
pragma solidity ^0.8.20;

import {PDPListener} from "./PDPVerifier.sol";
import {Initializable} from "@openzeppelin/contracts-upgradeable/proxy/utils/Initializable.sol";
import {UUPSUpgradeable} from "@openzeppelin/contracts-upgradeable/proxy/utils/UUPSUpgradeable.sol";
import {OwnableUpgradeable} from "@openzeppelin/contracts-upgradeable/access/OwnableUpgradeable.sol";
import {Cids} from "./Cids.sol";
import {IPDPProvingSchedule} from "./IPDPProvingSchedule.sol";

// PDPRecordKeeper tracks PDP operations. It is used as a base contract for PDPListeners
// in order to give users the capability to consume events async.
/// @title PDPRecordKeeper
/// @dev This contract is unused by the SimplePDPService as it is too expensive.
/// we've kept it here for future reference and testing.
contract PDPRecordKeeper {
    // Kind of PDP operation being recorded.
    enum OperationType {
        NONE,
        CREATE,
        DELETE,
        ADD,
        REMOVE_SCHEDULED,
        PROVE_POSSESSION,
        NEXT_PROVING_PERIOD
    }

    // Struct to store event details
    struct EventRecord {
        uint64 epoch;
        uint256 dataSetId;
        OperationType operationType;
        bytes extraData;
    }

    // Eth event emitted when a new record is added
    event RecordAdded(uint256 indexed dataSetId, uint64 epoch, OperationType operationType);

    // Mapping to store events for each data set
    mapping(uint256 => EventRecord[]) public dataSetEvents;

    // Appends an EventRecord (stamped with the current block number) to the
    // data set's log and returns its index within that log.
    function receiveDataSetEvent(uint256 dataSetId, OperationType operationType, bytes memory extraData)
        internal
        returns (uint256)
    {
        uint64 epoch = uint64(block.number);
        EventRecord memory newRecord =
            EventRecord({epoch: epoch, dataSetId: dataSetId, operationType: operationType, extraData: extraData});
        dataSetEvents[dataSetId].push(newRecord);
        emit RecordAdded(dataSetId, epoch, operationType);
        return dataSetEvents[dataSetId].length - 1;
    }

    // Function to get the number of events for a data set
    function getEventCount(uint256 dataSetId) external view returns (uint256) {
        return dataSetEvents[dataSetId].length;
    }

    // Function to get a specific event for a data set
    function getEvent(uint256 dataSetId, uint256 eventIndex) external view returns (EventRecord memory) {
        require(eventIndex < dataSetEvents[dataSetId].length, "Event index out of bounds");
        return dataSetEvents[dataSetId][eventIndex];
    }

    // Function to get all events for a data set
    function listEvents(uint256 dataSetId) external view returns (EventRecord[] memory) {
        return dataSetEvents[dataSetId];
    }
}

/// @title SimplePDPService
/// @notice A default implementation of a PDP Listener.
/// @dev This contract only supports one PDP service caller, set in the constructor,
/// The primary purpose of this contract is to
/// 1. Enforce a proof count of 5 proofs per data set proving period.
/// 2.
Provide a reliable way to report faults to users. +contract SimplePDPService is PDPListener, IPDPProvingSchedule, Initializable, UUPSUpgradeable, OwnableUpgradeable { + event FaultRecord(uint256 indexed dataSetId, uint256 periodsFaulted, uint256 deadline); + + uint256 public constant NO_CHALLENGE_SCHEDULED = 0; + uint256 public constant NO_PROVING_DEADLINE = 0; + + // The address of the PDP verifier contract that is allowed to call this contract + address public pdpVerifierAddress; + mapping(uint256 => uint256) public provingDeadlines; + mapping(uint256 => bool) public provenThisPeriod; + + /// @custom:oz-upgrades-unsafe-allow constructor + constructor() { + _disableInitializers(); + } + + function initialize(address _pdpVerifierAddress) public initializer { + __Ownable_init(msg.sender); + __UUPSUpgradeable_init(); + require(_pdpVerifierAddress != address(0), "PDP verifier address cannot be zero"); + pdpVerifierAddress = _pdpVerifierAddress; + } + + function _authorizeUpgrade(address newImplementation) internal override onlyOwner {} + + // Modifier to ensure only the PDP verifier contract can call certain functions + modifier onlyPDPVerifier() { + require(msg.sender == pdpVerifierAddress, "Caller is not the PDP verifier"); + _; + } + + // SLA specification functions setting values for PDP service providers + // Max number of epochs between two consecutive proofs + function getMaxProvingPeriod() public pure returns (uint64) { + return 2880; + } + + // Number of epochs at the end of a proving period during which a + // proof of possession can be submitted + function challengeWindow() public pure returns (uint256) { + return 60; + } + + // Initial value for challenge window start + // Can be used for first call to nextProvingPeriod + function initChallengeWindowStart() public view returns (uint256) { + return block.number + getMaxProvingPeriod() - challengeWindow(); + } + + // The start of the challenge window for the current proving period + function 
thisChallengeWindowStart(uint256 setId) public view returns (uint256) { + if (provingDeadlines[setId] == NO_PROVING_DEADLINE) { + revert("Proving period not yet initialized"); + } + + uint256 periodsSkipped; + // Proving period is open 0 skipped periods + if (block.number <= provingDeadlines[setId]) { + periodsSkipped = 0; + } else { + // Proving period has closed possibly some skipped periods + periodsSkipped = 1 + (block.number - (provingDeadlines[setId] + 1)) / getMaxProvingPeriod(); + } + return provingDeadlines[setId] + periodsSkipped * getMaxProvingPeriod() - challengeWindow(); + } + + // The start of the NEXT OPEN proving period's challenge window + // Useful for querying before nextProvingPeriod to determine challengeEpoch to submit for nextProvingPeriod + function nextChallengeWindowStart(uint256 setId) public view returns (uint256) { + if (provingDeadlines[setId] == NO_PROVING_DEADLINE) { + revert("Proving period not yet initialized"); + } + // If the current period is open this is the next period's challenge window + if (block.number <= provingDeadlines[setId]) { + return thisChallengeWindowStart(setId) + getMaxProvingPeriod(); + } + // If the current period is not yet open this is the current period's challenge window + return thisChallengeWindowStart(setId); + } + + // Challenges / merkle inclusion proofs provided per data set + function getChallengesPerProof() public pure returns (uint64) { + return 5; + } + + /** + * @notice Returns PDP configuration values (for IPDPProvingSchedule interface) + * @return maxProvingPeriod Maximum number of epochs between proofs + * @return challengeWindow_ Number of epochs for the challenge window + * @return challengesPerProof Number of challenges required per proof + * @return initChallengeWindowStart_ Initial challenge window start for new data sets + */ + function getPDPConfig() + external + view + override + returns ( + uint64 maxProvingPeriod, + uint256 challengeWindow_, + uint256 challengesPerProof, + uint256 
initChallengeWindowStart_ + ) + { + maxProvingPeriod = getMaxProvingPeriod(); + challengeWindow_ = challengeWindow(); + challengesPerProof = getChallengesPerProof(); + initChallengeWindowStart_ = initChallengeWindowStart(); + } + + /** + * @notice Returns the start of the next challenge window for a data set (for IPDPProvingSchedule interface) + * @param setId The ID of the data set + * @return The block number when the next challenge window starts + */ + function nextPDPChallengeWindowStart(uint256 setId) external view override returns (uint256) { + return nextChallengeWindowStart(setId); + } + + // Listener interface methods + // Note many of these are noops as they are not important for the SimplePDPService's functionality + // of enforcing proof contraints and reporting faults. + // Note we generally just drop the user defined extraData as this contract has no use for it + function dataSetCreated(uint256 dataSetId, address creator, bytes calldata) external onlyPDPVerifier {} + + function dataSetDeleted(uint256 dataSetId, uint256 deletedLeafCount, bytes calldata) external onlyPDPVerifier {} + + function piecesAdded(uint256 dataSetId, uint256 firstAdded, Cids.Cid[] memory pieceData, bytes calldata) + external + onlyPDPVerifier + {} + + function piecesScheduledRemove(uint256 dataSetId, uint256[] memory pieceIds, bytes calldata) + external + onlyPDPVerifier + {} + + function storageProviderChanged(uint256, address, address, bytes calldata) external override onlyPDPVerifier {} + + // possession proven checks for correct challenge count and reverts if too low + // it also checks that proofs are not late and emits a fault record if so + function possessionProven( + uint256 dataSetId, + uint256, /*challengedLeafCount*/ + uint256, /*seed*/ + uint256 challengeCount + ) external onlyPDPVerifier { + if (provenThisPeriod[dataSetId]) { + revert("Only one proof of possession allowed per proving period. 
Open a new proving period."); + } + if (challengeCount < getChallengesPerProof()) { + revert("Invalid challenge count < 5"); + } + if (provingDeadlines[dataSetId] == NO_PROVING_DEADLINE) { + revert("Proving not yet started"); + } + // check for proof outside of challenge window + if (provingDeadlines[dataSetId] < block.number) { + revert("Current proving period passed. Open a new proving period."); + } + + if (provingDeadlines[dataSetId] - challengeWindow() > block.number) { + revert("Too early. Wait for challenge window to open"); + } + provenThisPeriod[dataSetId] = true; + } + + // nextProvingPeriod checks for unsubmitted proof in which case it emits a fault event + // Additionally it enforces constraints on the update of its state: + // 1. One update per proving period. + // 2. Next challenge epoch must fall within the challenge window in the last challengeWindow() + // epochs of the proving period. + function nextProvingPeriod(uint256 dataSetId, uint256 challengeEpoch, uint256, /*leafCount*/ bytes calldata) + external + onlyPDPVerifier + { + // initialize state for new data set + if (provingDeadlines[dataSetId] == NO_PROVING_DEADLINE) { + uint256 firstDeadline = block.number + getMaxProvingPeriod(); + if (challengeEpoch < firstDeadline - challengeWindow() || challengeEpoch > firstDeadline) { + revert("Next challenge epoch must fall within the next challenge window"); + } + provingDeadlines[dataSetId] = firstDeadline; + provenThisPeriod[dataSetId] = false; + return; + } + + // Revert when proving period not yet open + // Can only get here if calling nextProvingPeriod multiple times within the same proving period + uint256 prevDeadline = provingDeadlines[dataSetId] - getMaxProvingPeriod(); + if (block.number <= prevDeadline) { + revert("One call to nextProvingPeriod allowed per proving period"); + } + + uint256 periodsSkipped; + // Proving period is open 0 skipped periods + if (block.number <= provingDeadlines[dataSetId]) { + periodsSkipped = 0; + } else { + // 
Proving period has closed possibly some skipped periods + periodsSkipped = (block.number - (provingDeadlines[dataSetId] + 1)) / getMaxProvingPeriod(); + } + + uint256 nextDeadline; + // the data set has become empty and provingDeadline is set inactive + if (challengeEpoch == NO_CHALLENGE_SCHEDULED) { + nextDeadline = NO_PROVING_DEADLINE; + } else { + nextDeadline = provingDeadlines[dataSetId] + getMaxProvingPeriod() * (periodsSkipped + 1); + if (challengeEpoch < nextDeadline - challengeWindow() || challengeEpoch > nextDeadline) { + revert("Next challenge epoch must fall within the next challenge window"); + } + } + uint256 faultPeriods = periodsSkipped; + if (!provenThisPeriod[dataSetId]) { + // include previous unproven period + faultPeriods += 1; + } + if (faultPeriods > 0) { + emit FaultRecord(dataSetId, faultPeriods, provingDeadlines[dataSetId]); + } + provingDeadlines[dataSetId] = nextDeadline; + provenThisPeriod[dataSetId] = false; + } +} diff --git a/packages/pdp/src/interfaces/IPDPEvents.sol b/packages/pdp/src/interfaces/IPDPEvents.sol new file mode 100644 index 00000000..0e0cb92d --- /dev/null +++ b/packages/pdp/src/interfaces/IPDPEvents.sol @@ -0,0 +1,23 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.20; + +import {Cids} from "../Cids.sol"; +import {IPDPTypes} from "./IPDPTypes.sol"; + +/// @title IPDPEvents +/// @notice Shared events for PDP contracts and consumers +interface IPDPEvents { + event DataSetCreated(uint256 indexed setId, address indexed storageProvider); + event StorageProviderChanged( + uint256 indexed setId, address indexed oldStorageProvider, address indexed newStorageProvider + ); + event DataSetDeleted(uint256 indexed setId, uint256 deletedLeafCount); + event DataSetEmpty(uint256 indexed setId); + event PiecesAdded(uint256 indexed setId, uint256[] pieceIds, Cids.Cid[] pieceCids); + event PiecesRemoved(uint256 indexed setId, uint256[] pieceIds); + event ProofFeePaid(uint256 indexed setId, uint256 fee); + event 
FeeUpdateProposed(uint256 currentFee, uint256 newFee, uint256 effectiveTime); + event PossessionProven(uint256 indexed setId, IPDPTypes.PieceIdAndOffset[] challenges); + event NextProvingPeriod(uint256 indexed setId, uint256 challengeEpoch, uint256 leafCount); + event ContractUpgraded(string version, address newImplementation); +} diff --git a/packages/pdp/src/interfaces/IPDPTypes.sol b/packages/pdp/src/interfaces/IPDPTypes.sol new file mode 100644 index 00000000..63939f18 --- /dev/null +++ b/packages/pdp/src/interfaces/IPDPTypes.sol @@ -0,0 +1,16 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.20; + +/// @title IPDPTypes +/// @notice Shared types for PDP contracts and consumers +interface IPDPTypes { + struct Proof { + bytes32 leaf; + bytes32[] proof; + } + + struct PieceIdAndOffset { + uint256 pieceId; + uint256 offset; + } +} diff --git a/packages/pdp/src/interfaces/IPDPVerifier.sol b/packages/pdp/src/interfaces/IPDPVerifier.sol new file mode 100644 index 00000000..5f0b0994 --- /dev/null +++ b/packages/pdp/src/interfaces/IPDPVerifier.sol @@ -0,0 +1,46 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.20; + +import {Cids} from "../Cids.sol"; +import {IPDPTypes} from "./IPDPTypes.sol"; +import {IPDPEvents} from "./IPDPEvents.sol"; + +/// @title IPDPVerifier +/// @notice Main interface for the PDPVerifier contract +interface IPDPVerifier is IPDPEvents { + // View functions + function getChallengeFinality() external view returns (uint256); + function getNextDataSetId() external view returns (uint64); + function dataSetLive(uint256 setId) external view returns (bool); + function pieceLive(uint256 setId, uint256 pieceId) external view returns (bool); + function pieceChallengable(uint256 setId, uint256 pieceId) external view returns (bool); + function getDataSetLeafCount(uint256 setId) external view returns (uint256); + function getNextPieceId(uint256 setId) external view returns (uint256); + function getNextChallengeEpoch(uint256 setId) external view 
returns (uint256); + function getDataSetListener(uint256 setId) external view returns (address); + function getDataSetStorageProvider(uint256 setId) external view returns (address, address); + function getDataSetLastProvenEpoch(uint256 setId) external view returns (uint256); + function getPieceCid(uint256 setId, uint256 pieceId) external view returns (bytes memory); + function getPieceLeafCount(uint256 setId, uint256 pieceId) external view returns (uint256); + function getChallengeRange(uint256 setId) external view returns (uint256); + function getScheduledRemovals(uint256 setId) external view returns (uint256[] memory); + + // State-changing functions + function proposeDataSetStorageProvider(uint256 setId, address newStorageProvider) external; + function claimDataSetStorageProvider(uint256 setId, bytes calldata extraData) external; + function createDataSet(address listenerAddr, bytes calldata extraData) external payable returns (uint256); + function deleteDataSet(uint256 setId, bytes calldata extraData) external; + function addPieces(uint256 setId, Cids.Cid[] calldata pieceData, bytes calldata extraData) + external + returns (uint256); + function schedulePieceDeletions(uint256 setId, uint256[] calldata pieceIds, bytes calldata extraData) external; + function provePossession(uint256 setId, IPDPTypes.Proof[] calldata proofs) external payable; + function nextProvingPeriod(uint256 setId, uint256 challengeEpoch, bytes calldata extraData) external; + function findPieceIds(uint256 setId, uint256[] calldata leafIndexs) + external + view + returns (IPDPTypes.PieceIdAndOffset[] memory); + + // Fee view: returns the current effective fee per TiB + function feePerTiB() external view returns (uint96); +} diff --git a/packages/pdp/test/BitOps.t.sol b/packages/pdp/test/BitOps.t.sol new file mode 100644 index 00000000..4039ffb5 --- /dev/null +++ b/packages/pdp/test/BitOps.t.sol @@ -0,0 +1,75 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.13; + +import 
{Test} from "forge-std/Test.sol"; +import {BitOps} from "../src/BitOps.sol"; + +contract BitOpsTest is Test { + function testClzZero() public pure { + uint256 result = BitOps.clz(0); + assertEq(result, 256, "CLZ of 0 should be 256"); + } + + function testClzOne() public pure { + uint256 result = BitOps.clz(1); + assertEq(result, 255, "CLZ of 1 should be 255"); + } + + function testClzMaxUint256() public pure { + uint256 result = BitOps.clz(type(uint256).max); + assertEq(result, 0, "CLZ of max uint256 should be 0"); + } + + function testClzPowersOfTwo() public pure { + for (uint16 i = 0; i < 256; i++) { + uint256 input = 1 << i; + uint256 result = BitOps.clz(input); + assertEq( + result, + 255 - i, + string(abi.encodePacked("CLZ of 2^", vm.toString(i), " should be ", vm.toString(255 - i))) + ); + } + } + + function testClzSelectValues() public pure { + assertEq(BitOps.clz(0x000F), 252, "CLZ of 0x000F should be 252"); + assertEq(BitOps.clz(0x00FF), 248, "CLZ of 0x00FF should be 248"); + assertEq(BitOps.clz(0x0100), 247, "CLZ of 0x0100 should be 247"); + assertEq(BitOps.clz(0xFFFF), 240, "CLZ of 0xFFFF should be 240"); + assertEq(BitOps.clz(0x8000), 240, "CLZ of 0x8000 should be 240"); + assertEq(BitOps.clz(0x80000000), 56 * 4, "CLZ of 0x80000000 should be 56*4"); + assertEq(BitOps.clz(0x8FFFFFFF), 56 * 4, "CLZ of 0x8FFFFFFF should be 56*4"); + assertEq(BitOps.clz(0x8000000000000000), 48 * 4, "CLZ of 0x8000000000000000 should be 48*4"); + } + + function testCtzZero() public pure { + uint256 result = BitOps.ctz(0); + assertEq(result, 256, "CTZ of 0 should be 256"); + } + + function testCtz1LShift255() public pure { + uint256 result = BitOps.ctz(1 << 254); + assertEq(result, 254, "CTZ of 2^254 should be 254"); + } + + /// forge-config: default.allow_internal_expect_revert = true + function testCtzInputExceedsMaxInt256() public { + // Setup + uint256 maxInt256 = uint256(type(int256).max); + uint256 exceedingValue = maxInt256 + 1; + + // Expect the call to revert + 
vm.expectRevert("Input exceeds maximum int256 value"); + + // Call ctz with a value exceeding max int256 + BitOps.ctz(exceedingValue); + } + + function testCtzSelectValues() public pure { + assertEq(BitOps.ctz(0x000F), 0, "CTZ of 0x000F should be 0"); + assertEq(BitOps.ctz(0xFF00), 8, "CTZ of 0xFF00 should be 2"); + assertEq(BitOps.ctz(0x8000), 15, "CTZ of 0x8000 should be 15"); + assertEq(BitOps.ctz(0x80000000), 31, "CLZ of 0x80000000 should be 56*4"); + } +} diff --git a/packages/pdp/test/Cids.t.sol b/packages/pdp/test/Cids.t.sol new file mode 100644 index 00000000..5e10796a --- /dev/null +++ b/packages/pdp/test/Cids.t.sol @@ -0,0 +1,145 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.13; + +import {Test} from "forge-std/Test.sol"; +import {Cids} from "../src/Cids.sol"; + +contract CidsTest is Test { + function testDigestRoundTrip() public pure { + bytes32 digest = 0xbeadcafefacedeedfeedbabedeadbeefbeadcafefacedeedfeedbabedeadbeef; + Cids.Cid memory c = Cids.CommPv2FromDigest(0, 10, digest); + assertEq(c.data.length, 39); + bytes32 foundDigest = Cids.digestFromCid(c); + assertEq(foundDigest, digest, "digest equal"); + + (uint256 padding, uint8 height, uint256 digestOffset) = Cids.validateCommPv2(c); + assertEq(padding, 0, "padding"); + assertEq(height, 10, "height"); + + // assert that digest is same at digestOffset + for (uint256 i = 0; i < 32; i++) { + assertEq(bytes1(digest[i]), c.data[digestOffset + i], "bytes"); + } + } + + function testPieceSize() public pure { + assertEq(Cids.pieceSize(0, 30), 1 << (30 + 5)); + assertEq(Cids.pieceSize(127, 30), (1 << (30 + 5)) - 128); + assertEq(Cids.pieceSize(128, 30), (1 << (30 + 5)) - 129); + } + + function testLeafCount() public pure { + assertEq(Cids.leafCount(0, 30), 1 << 30); + assertEq(Cids.leafCount(127, 30), (1 << 30) - 4); + assertEq(Cids.leafCount(128, 30), (1 << 30) - 4); + } + + /// forge-config: default.allow_internal_expect_revert = true + function testDigestTooShort() public { + 
bytes memory byteArray = new bytes(31); + for (uint256 i = 0; i < 31; i++) { + byteArray[i] = bytes1(uint8(i)); + } + Cids.Cid memory c = Cids.Cid(byteArray); + vm.expectRevert("Cid data is too short"); + Cids.digestFromCid(c); + } + + function testUvarintLength() public pure { + assertEq(Cids._uvarintLength(0), 1); + assertEq(Cids._uvarintLength(1), 1); + assertEq(Cids._uvarintLength(127), 1); + assertEq(Cids._uvarintLength(128), 2); + assertEq(Cids._uvarintLength(16383), 2); + assertEq(Cids._uvarintLength(16384), 3); + assertEq(Cids._uvarintLength(2097151), 3); + assertEq(Cids._uvarintLength(2097152), 4); + assertEq(Cids._uvarintLength(type(uint256).max), 37); + } + + function testUvarintRoundTrip() public pure { + uint256[] memory values = new uint256[](7); + values[0] = 0; + values[1] = 1; + values[2] = 127; + values[3] = 128; + values[4] = 16384; + values[5] = 2097152; + values[6] = type(uint256).max; + + uint256 totalLength = 0; + for (uint256 i = 0; i < values.length; i++) { + totalLength += Cids._uvarintLength(values[i]); + } + bytes memory buffer = new bytes(totalLength); + uint256 offset = 0; + + // Write all values + for (uint256 i = 0; i < values.length; i++) { + offset = Cids._writeUvarint(buffer, offset, values[i]); + } + + // Read all values and verify + uint256 currentOffset = 0; + for (uint256 i = 0; i < values.length; i++) { + (uint256 readValue, uint256 newOffset) = Cids._readUvarint(buffer, currentOffset); + assertEq(readValue, values[i], "Uvarint round trip failed"); + currentOffset = newOffset; + } + } + + /// forge-config: default.allow_internal_expect_revert = true + function testReadUvarintIncomplete() public { + // Test reading an incomplete uvarint that should revert + bytes memory incompleteUvarint = hex"80"; // A single byte indicating more to come, but nothing follows + vm.expectRevert(); // Expect any revert, specifically index out of bounds + Cids._readUvarint(incompleteUvarint, 0); + } + + /// forge-config: 
default.allow_internal_expect_revert = true + function testReadUvarintMSBSetOnLastByte() public { + bytes memory incompleteUvarint2 = hex"ff81"; // MSB set on last byte. + vm.expectRevert(); + Cids._readUvarint(incompleteUvarint2, 0); + } + + function testReadUvarintWithOffset() public pure { + // Test reading with an offset + bytes memory bufferWithOffset = hex"00010203040506078001"; // Value 128 (8001) at offset 8 + (uint256 readValue, uint256 newOffset) = Cids._readUvarint(bufferWithOffset, 8); + assertEq(readValue, 128, "Read uvarint with offset failed"); + assertEq(newOffset, 10, "Offset after reading with offset incorrect"); + } + + function testValidateCommPv2FRC0069() public pure { + // The values are taken from FRC-0069 specification + // Test vector 1: height=4, padding=0 + bytes memory cidData1 = hex"01559120220004496dae0cc9e265efe5a006e80626a5dc5c409e5d3155c13984caf6c8d5cfd605"; + Cids.Cid memory cid1 = Cids.Cid(cidData1); + (uint256 padding1, uint8 height1, uint256 digestOffset1) = Cids.validateCommPv2(cid1); + assertEq(padding1, 0, "CID 1 padding"); + assertEq(height1, 4, "CID 1 height"); + + // Test vector 2: height=2, padding=0 + bytes memory cidData2 = hex"015591202200023731bb99ac689f66eef5973e4a94da188f4ddcae580724fc6f3fd60dfd488333"; + Cids.Cid memory cid2 = Cids.Cid(cidData2); + (uint256 padding2, uint8 height2, uint256 digestOffset2) = Cids.validateCommPv2(cid2); + assertEq(padding2, 0, "CID 2 padding"); + assertEq(height2, 2, "CID 2 height"); + + // Test vector 3: height=5, padding=504 + bytes memory cidData3 = hex"0155912023f80305de6815dcb348843215a94de532954b60be550a4bec6e74555665e9a5ec4e0f3c"; + Cids.Cid memory cid3 = Cids.Cid(cidData3); + (uint256 padding3, uint8 height3, uint256 digestOffset3) = Cids.validateCommPv2(cid3); + assertEq(padding3, 504, "CID 3 padding"); + assertEq(height3, 5, "CID 3 height"); + + // Verify that digestOffset points to valid data by checking a few bytes from the digest + // For CID 1 + 
assertEq(cid1.data[digestOffset1], bytes1(0x49), "CID 1 digest first byte"); + // For CID 2 + assertEq(cid2.data[digestOffset2], bytes1(0x37), "CID 2 digest first byte"); + // For CID 3 + assertEq(cid3.data[digestOffset3], bytes1(0xde), "CID 3 digest first byte"); + } +} diff --git a/packages/pdp/test/ERC1967Proxy.t.sol b/packages/pdp/test/ERC1967Proxy.t.sol new file mode 100644 index 00000000..be2db7b3 --- /dev/null +++ b/packages/pdp/test/ERC1967Proxy.t.sol @@ -0,0 +1,98 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.20; + +import {Test} from "forge-std/Test.sol"; +import {PDPVerifier} from "../src/PDPVerifier.sol"; +import {ERC1967Proxy} from "@openzeppelin/contracts/proxy/ERC1967/ERC1967Proxy.sol"; +import {MyERC1967Proxy} from "../src/ERC1967Proxy.sol"; + +contract ERC1967ProxyTest is Test { + PDPVerifier public implementation; + PDPVerifier public proxy; + address owner = address(0x123); + + function setUp() public { + // Set owner for testing + vm.startPrank(owner); + // Deploy implementation contract + implementation = new PDPVerifier(); + + // Deploy proxy pointing to implementation + bytes memory initData = abi.encodeWithSelector( + PDPVerifier.initialize.selector, + uint256(150) // challengeFinality + ); + + ERC1967Proxy proxyContract = new MyERC1967Proxy(address(implementation), initData); + + // Get PDPVerifier interface on proxy address + proxy = PDPVerifier(address(proxyContract)); + } + + function testInitialSetup() public view { + assertEq(proxy.getChallengeFinality(), 150); + assertEq(proxy.owner(), owner); + } + + function assertImplementationEquals(address checkImpl) public view { + bytes32 implementationSlot = 0x360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc; + assertEq(address(uint160(uint256(vm.load(address(proxy), implementationSlot)))), address(checkImpl)); + } + + function testUpgradeImplementation() public { + assertImplementationEquals(address(implementation)); + + // Deploy new implementation + 
PDPVerifier newImplementation = new PDPVerifier(); + + // Upgrade proxy to new implementation + proxy.upgradeToAndCall(address(newImplementation), ""); + + // Verify upgrade was successful + assertImplementationEquals(address(newImplementation)); + assertEq(proxy.getChallengeFinality(), 150); // State is preserved + assertEq(proxy.owner(), owner); // Owner is preserved + } + + function testUpgradeFromNonOwnerNoGood() public { + PDPVerifier newImplementation = new PDPVerifier(); + + vm.stopPrank(); + vm.startPrank(address(0xdead)); + + vm.expectRevert(); + proxy.upgradeToAndCall(address(newImplementation), ""); + assertEq(proxy.getChallengeFinality(), 150); // State is preserved + assertEq(proxy.owner(), owner); // Owner is preserved + } + + function testOwnershipTransfer() public { + vm.stopPrank(); + vm.startPrank(owner); + // Verify initial owner + assertEq(proxy.owner(), owner); + + address newOwner = address(0x123); + + // Transfer ownership + proxy.transferOwnership(newOwner); + + // Verify ownership changed + assertEq(proxy.owner(), newOwner); + } + + function testTransferFromNonOwneNoGood() public { + // Switch to non-owner account + vm.stopPrank(); + vm.startPrank(address(0xdead)); + + address newOwner = address(0x123); + + // Attempt transfer should fail + vm.expectRevert(); + proxy.transferOwnership(newOwner); + + // Verify owner unchanged + assertEq(proxy.owner(), owner); + } +} diff --git a/packages/pdp/test/Fees.t.sol b/packages/pdp/test/Fees.t.sol new file mode 100644 index 00000000..8e6804fa --- /dev/null +++ b/packages/pdp/test/Fees.t.sol @@ -0,0 +1,48 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.13; + +import {Test} from "forge-std/Test.sol"; +import {PDPFees} from "../src/Fees.sol"; + +contract PDPFeesTest is Test { + function testCalculateProofFee() public pure { + uint256 rawSize = PDPFees.TIB_IN_BYTES; // 1 TiB + uint256 expectedFee = PDPFees.DEFAULT_FEE_PER_TIB; + uint256 actualFee = 
PDPFees.calculateProofFee(rawSize, PDPFees.DEFAULT_FEE_PER_TIB); + + assertEq(actualFee, expectedFee, "Fee for 1 TiB should equal FEE_PER_TIB"); + } + + function testCalculateProofFeeHalfTiB() public pure { + uint256 rawSize = PDPFees.TIB_IN_BYTES / 2; // 0.5 TiB + uint256 expectedFee = PDPFees.DEFAULT_FEE_PER_TIB / 2; + uint256 actualFee = PDPFees.calculateProofFee(rawSize, PDPFees.DEFAULT_FEE_PER_TIB); + + assertEq(actualFee, expectedFee, "Fee for 0.5 TiB should be half of FEE_PER_TIB"); + } + + function testCalculateProofFeeMultipleTiB() public pure { + uint256 rawSize = PDPFees.TIB_IN_BYTES * 10; // 10 TiB + uint256 expectedFee = PDPFees.DEFAULT_FEE_PER_TIB * 10; + uint256 actualFee = PDPFees.calculateProofFee(rawSize, PDPFees.DEFAULT_FEE_PER_TIB); + + assertEq(actualFee, expectedFee, "Fee for 10 TiB should be 10x FEE_PER_TIB"); + } + + /// forge-config: default.allow_internal_expect_revert = true + function testCalculateProofFeeZeroRawSize() public { + vm.expectRevert("failed to validate: raw size must be greater than 0"); + PDPFees.calculateProofFee(0, PDPFees.DEFAULT_FEE_PER_TIB); + } + + function testFeePerTiBConstant() public pure { + // Verify the fee constant is set to 0.00023 FIL + uint256 expectedFee = 0.00023 ether; // 0.00023 FIL in attoFIL + assertEq(PDPFees.DEFAULT_FEE_PER_TIB, expectedFee, "DEFAULT_FEE_PER_TIB should be 0.00023 FIL"); + } + + function testSybilFee() public pure { + uint256 fee = PDPFees.sybilFee(); + assertEq(fee, PDPFees.SYBIL_FEE, "Sybil fee should match the constant"); + } +} diff --git a/packages/pdp/test/PDPVerifier.t.sol b/packages/pdp/test/PDPVerifier.t.sol new file mode 100644 index 00000000..8431bab2 --- /dev/null +++ b/packages/pdp/test/PDPVerifier.t.sol @@ -0,0 +1,2033 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.13; + +import {MockFVMTest} from "fvm-solidity/mocks/MockFVMTest.sol"; +import {Test} from "forge-std/Test.sol"; +import {UUPSUpgradeable} from 
"@openzeppelin/contracts-upgradeable/proxy/utils/UUPSUpgradeable.sol"; +import {Cids} from "../src/Cids.sol"; +import {PDPVerifier, PDPListener} from "../src/PDPVerifier.sol"; +import {MyERC1967Proxy} from "../src/ERC1967Proxy.sol"; +import {ProofUtil} from "./ProofUtil.sol"; +import {PDPFees} from "../src/Fees.sol"; +import {PDPRecordKeeper} from "../src/SimplePDPService.sol"; +import {IPDPTypes} from "../src/interfaces/IPDPTypes.sol"; +import {IPDPEvents} from "../src/interfaces/IPDPEvents.sol"; +import {PieceHelper} from "./PieceHelper.t.sol"; +import {ProofBuilderHelper} from "./ProofBuilderHelper.t.sol"; +import {NEW_DATA_SET_SENTINEL} from "../src/PDPVerifier.sol"; + +contract PDPVerifierDataSetCreateDeleteTest is MockFVMTest, PieceHelper { + TestingRecordKeeperService listener; + PDPVerifier pdpVerifier; + bytes empty = new bytes(0); + + function setUp() public override { + super.setUp(); + PDPVerifier pdpVerifierImpl = new PDPVerifier(); + uint256 challengeFinality = 2; + bytes memory initializeData = abi.encodeWithSelector(PDPVerifier.initialize.selector, challengeFinality); + MyERC1967Proxy proxy = new MyERC1967Proxy(address(pdpVerifierImpl), initializeData); + pdpVerifier = PDPVerifier(address(proxy)); + listener = new TestingRecordKeeperService(); + } + + function testCreateDataSet() public { + Cids.Cid memory zeroPiece; + + vm.expectEmit(true, true, false, false); + emit IPDPEvents.DataSetCreated(1, address(this)); + + uint256 setId = pdpVerifier.createDataSet{value: PDPFees.sybilFee()}(address(listener), empty); + assertEq(setId, 1, "First data set ID should be 1"); + assertEq(pdpVerifier.getDataSetLeafCount(setId), 0, "Data set leaf count should be 0"); + + (address currentStorageProvider, address proposedStorageProvider) = pdpVerifier.getDataSetStorageProvider(setId); + assertEq(currentStorageProvider, address(this), "Data set storage provider should be the constructor sender"); + assertEq( + proposedStorageProvider, + address(0), + "Data set 
proposed storage provider should be initialized to zero address" + ); + + assertEq(pdpVerifier.getNextChallengeEpoch(setId), 0, "Data set challenge epoch should be zero"); + assertEq(pdpVerifier.pieceLive(setId, 0), false, "Data set piece should not be live"); + assertEq(pdpVerifier.getPieceCid(setId, 0).data, zeroPiece.data, "Uninitialized piece should be empty"); + assertEq(pdpVerifier.getPieceLeafCount(setId, 0), 0, "Uninitialized piece should have zero leaves"); + assertEq(pdpVerifier.getNextChallengeEpoch(setId), 0, "Data set challenge epoch should be zero"); + assertEq( + pdpVerifier.getDataSetListener(setId), + address(listener), + "Data set listener should be the constructor listener" + ); + } + + function testDeleteDataSet() public { + vm.expectEmit(true, true, false, false); + emit IPDPEvents.DataSetCreated(1, address(this)); + uint256 setId = pdpVerifier.createDataSet{value: PDPFees.sybilFee()}(address(listener), empty); + vm.expectEmit(true, true, false, false); + emit IPDPEvents.DataSetDeleted(setId, 0); + pdpVerifier.deleteDataSet(setId, empty); + vm.expectRevert("Data set not live"); + pdpVerifier.getDataSetLeafCount(setId); + } + + function testOnlyStorageProviderCanDeleteDataSet() public { + vm.expectEmit(true, true, false, false); + emit IPDPEvents.DataSetCreated(1, address(this)); + uint256 setId = pdpVerifier.createDataSet{value: PDPFees.sybilFee()}(address(listener), empty); + // Create a new address to act as a non-storage-provider + address nonStorageProvider = address(0x1234); + // Expect revert when non-storage-provider tries to delete the data set + vm.prank(nonStorageProvider); + vm.expectRevert("Only the storage provider can delete data sets"); + pdpVerifier.deleteDataSet(setId, empty); + + // Now verify the storage provider can delete the data set + vm.expectEmit(true, true, false, false); + emit IPDPEvents.DataSetDeleted(setId, 0); + pdpVerifier.deleteDataSet(setId, empty); + vm.expectRevert("Data set not live"); + 
pdpVerifier.getDataSetStorageProvider(setId); + } + + // TODO: once we have addPieces we should test deletion of a non empty data set + function testCannotDeleteNonExistentDataSet() public { + // Test with data set ID 0 (which is never valid since IDs start from 1) + vm.expectRevert("Only the storage provider can delete data sets"); + pdpVerifier.deleteDataSet(0, empty); + + // Test with a data set ID that hasn't been created yet + vm.expectRevert("data set id out of bounds"); + pdpVerifier.deleteDataSet(999, empty); + } + + function testMethodsOnDeletedDataSetFails() public { + vm.expectEmit(true, true, false, false); + emit IPDPEvents.DataSetCreated(1, address(this)); + uint256 setId = pdpVerifier.createDataSet{value: PDPFees.sybilFee()}(address(listener), empty); + + vm.expectEmit(true, true, false, false); + emit IPDPEvents.DataSetDeleted(setId, 0); + pdpVerifier.deleteDataSet(setId, empty); + vm.expectRevert("Only the storage provider can delete data sets"); + pdpVerifier.deleteDataSet(setId, empty); + vm.expectRevert("Data set not live"); + pdpVerifier.getDataSetStorageProvider(setId); + vm.expectRevert("Data set not live"); + pdpVerifier.getDataSetLeafCount(setId); + vm.expectRevert("Data set not live"); + pdpVerifier.getDataSetListener(setId); + vm.expectRevert("Data set not live"); + pdpVerifier.getPieceCid(setId, 0); + vm.expectRevert("Data set not live"); + pdpVerifier.getPieceLeafCount(setId, 0); + vm.expectRevert("Data set not live"); + pdpVerifier.getNextChallengeEpoch(setId); + vm.expectRevert("Data set not live"); + pdpVerifier.addPieces(setId, address(0), new Cids.Cid[](1), empty); + } + + function testGetDataSetID() public { + vm.expectEmit(true, true, false, false); + emit IPDPEvents.DataSetCreated(1, address(this)); + pdpVerifier.createDataSet{value: PDPFees.sybilFee()}(address(listener), empty); + vm.expectEmit(true, true, false, false); + emit IPDPEvents.DataSetCreated(2, address(this)); + pdpVerifier.createDataSet{value: 
PDPFees.sybilFee()}(address(listener), empty); + assertEq(3, pdpVerifier.getNextDataSetId(), "Next data set ID should be 3"); + assertEq(3, pdpVerifier.getNextDataSetId(), "Next data set ID should be 3"); + } + + receive() external payable {} + + function testDataSetIdsStartFromOne() public { + // Test that data set IDs start from 1, not 0 + assertEq(pdpVerifier.getNextDataSetId(), 1, "Next data set ID should start at 1"); + + uint256 firstSetId = pdpVerifier.createDataSet{value: PDPFees.sybilFee()}(address(listener), empty); + assertEq(firstSetId, 1, "First data set ID should be 1, not 0"); + + uint256 secondSetId = pdpVerifier.createDataSet{value: PDPFees.sybilFee()}(address(listener), empty); + assertEq(secondSetId, 2, "Second data set ID should be 2"); + + assertEq(pdpVerifier.getNextDataSetId(), 3, "Next data set ID should be 3 after creating two data sets"); + } + + function testCreateDataSetFeeHandling() public { + uint256 sybilFee = PDPFees.sybilFee(); + + // Test 1: Fails when sending not enough for sybil fee + vm.expectRevert("sybil fee not met"); + pdpVerifier.createDataSet{value: sybilFee - 1}(address(listener), empty); + + // Test 2: Returns funds over the sybil fee back to the sender + uint256 excessAmount = 1 ether; + uint256 initialBalance = address(this).balance; + + uint256 setId = pdpVerifier.createDataSet{value: sybilFee + excessAmount}(address(listener), empty); + + uint256 finalBalance = address(this).balance; + uint256 refundedAmount = finalBalance - (initialBalance - sybilFee - excessAmount); + assertEq(refundedAmount, excessAmount, "Excess amount should be refunded"); + + // Additional checks to ensure the data set was created correctly + assertEq(pdpVerifier.getDataSetLeafCount(setId), 0, "Data set leaf count should be 0"); + (address currentStorageProvider, address proposedStorageProvider) = pdpVerifier.getDataSetStorageProvider(setId); + assertEq(currentStorageProvider, address(this), "Data set storage provider should be the constructor 
sender"); + assertEq( + proposedStorageProvider, + address(0), + "Data set proposed storage provider should be initialized to zero address" + ); + } + + function testCombinedCreateDataSetAndAddPieces() public { + uint256 sybilFee = PDPFees.sybilFee(); + bytes memory combinedExtraData = abi.encode(empty, empty); + + Cids.Cid[] memory pieces = new Cids.Cid[](2); + pieces[0] = makeSamplePiece(64); + pieces[1] = makeSamplePiece(128); + + vm.expectEmit(true, true, false, false); + emit IPDPEvents.DataSetCreated(1, address(this)); + + vm.expectEmit(true, true, false, false); + uint256[] memory expectedPieceIds = new uint256[](2); + expectedPieceIds[0] = 1; + expectedPieceIds[1] = 2; + emit IPDPEvents.PiecesAdded(1, expectedPieceIds, pieces); + + uint256 firstAdded = + pdpVerifier.addPieces{value: sybilFee}(NEW_DATA_SET_SENTINEL, address(listener), pieces, combinedExtraData); + + // Verify the data set was created correctly + assertEq(firstAdded, 1, "First piece ID should be 1"); + assertEq(pdpVerifier.getDataSetLeafCount(firstAdded), 192, "Data set leaf count should be 64 + 128"); + assertEq(pdpVerifier.getNextPieceId(firstAdded), 2, "Next piece ID should be 2"); + assertEq(pdpVerifier.getDataSetListener(firstAdded), address(listener), "Listener should be set correctly"); + + // Verify pieces were added correctly + assertTrue(pdpVerifier.pieceLive(firstAdded, 0), "First piece should be live"); + assertTrue(pdpVerifier.pieceLive(firstAdded, 1), "Second piece should be live"); + assertEq(pdpVerifier.getPieceLeafCount(firstAdded, 0), 64, "First piece leaf count should be 64"); + assertEq(pdpVerifier.getPieceLeafCount(firstAdded, 1), 128, "Second piece leaf count should be 128"); + } + + function testNewDataSetSentinelValue() public { + assertEq(NEW_DATA_SET_SENTINEL, 0, "Sentinel value should be 0"); + + uint256 sybilFee = PDPFees.sybilFee(); + bytes memory combinedExtraData = abi.encode(empty, empty); + Cids.Cid[] memory pieces = new Cids.Cid[](0); + + uint256 firstAdded = 
+ pdpVerifier.addPieces{value: sybilFee}(NEW_DATA_SET_SENTINEL, address(listener), pieces, combinedExtraData); + + assertEq(firstAdded, 1, "First piece ID should be 1"); + assertEq(pdpVerifier.getDataSetLeafCount(firstAdded), 0, "Data set leaf count should be 0"); + } +} + +contract PDPVerifierStorageProviderTest is MockFVMTest, PieceHelper { + PDPVerifier pdpVerifier; + TestingRecordKeeperService listener; + address public storageProvider; + address public nextStorageProvider; + address public nonStorageProvider; + bytes empty = new bytes(0); + + function setUp() public override { + super.setUp(); + PDPVerifier pdpVerifierImpl = new PDPVerifier(); + bytes memory initializeData = abi.encodeWithSelector(PDPVerifier.initialize.selector, 2); + MyERC1967Proxy proxy = new MyERC1967Proxy(address(pdpVerifierImpl), initializeData); + pdpVerifier = PDPVerifier(address(proxy)); + listener = new TestingRecordKeeperService(); + + storageProvider = address(this); + nextStorageProvider = address(0x1234); + nonStorageProvider = address(0xffff); + } + + function testStorageProviderTransfer() public { + uint256 setId = pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(listener), new Cids.Cid[](0), abi.encode(empty, empty) + ); + pdpVerifier.proposeDataSetStorageProvider(setId, nextStorageProvider); + (address currentStorageProviderStart, address proposedStorageProviderStart) = + pdpVerifier.getDataSetStorageProvider(setId); + assertEq( + currentStorageProviderStart, storageProvider, "Data set storage provider should be the constructor sender" + ); + assertEq( + proposedStorageProviderStart, + nextStorageProvider, + "Data set proposed storage provider should make the one proposed" + ); + vm.prank(nextStorageProvider); + + vm.expectEmit(true, true, false, false); + emit IPDPEvents.StorageProviderChanged(setId, storageProvider, nextStorageProvider); + pdpVerifier.claimDataSetStorageProvider(setId, empty); + (address currentStorageProviderEnd, address 
proposedStorageProviderEnd) = + pdpVerifier.getDataSetStorageProvider(setId); + assertEq( + currentStorageProviderEnd, nextStorageProvider, "Data set storage provider should be the next provider" + ); + assertEq(proposedStorageProviderEnd, address(0), "Data set proposed storage provider should be zero address"); + } + + function testStorageProviderProposalReset() public { + uint256 setId = pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(listener), new Cids.Cid[](0), abi.encode(empty, empty) + ); + pdpVerifier.proposeDataSetStorageProvider(setId, nextStorageProvider); + pdpVerifier.proposeDataSetStorageProvider(setId, storageProvider); + (address currentStorageProviderEnd, address proposedStorageProviderEnd) = + pdpVerifier.getDataSetStorageProvider(setId); + assertEq( + currentStorageProviderEnd, storageProvider, "Data set storage provider should be the constructor sender" + ); + assertEq(proposedStorageProviderEnd, address(0), "Data set proposed storage provider should be zero address"); + } + + function testStorageProviderPermissionsRequired() public { + uint256 setId = pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(listener), new Cids.Cid[](0), abi.encode(empty, empty) + ); + vm.prank(nonStorageProvider); + vm.expectRevert("Only the current storage provider can propose a new storage provider"); + pdpVerifier.proposeDataSetStorageProvider(setId, nextStorageProvider); + + // Now send proposal from actual storage provider + pdpVerifier.proposeDataSetStorageProvider(setId, nextStorageProvider); + + // Proposed storage provider has no extra permissions + vm.prank(nextStorageProvider); + vm.expectRevert("Only the current storage provider can propose a new storage provider"); + pdpVerifier.proposeDataSetStorageProvider(setId, nonStorageProvider); + + vm.prank(nonStorageProvider); + vm.expectRevert("Only the proposed storage provider can claim storage provider role"); + 
pdpVerifier.claimDataSetStorageProvider(setId, empty); + } + + function testScheduleRemovePiecesOnlyStorageProvider() public { + uint256 setId = pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(listener), new Cids.Cid[](0), abi.encode(empty, empty) + ); + Cids.Cid[] memory pieceDataArray = new Cids.Cid[](1); + pieceDataArray[0] = makeSamplePiece(100); + pdpVerifier.addPieces(setId, address(0), pieceDataArray, empty); + + uint256[] memory pieceIdsToRemove = new uint256[](1); + pieceIdsToRemove[0] = 0; + + vm.prank(nonStorageProvider); + vm.expectRevert("Only the storage provider can schedule removal of pieces"); + pdpVerifier.schedulePieceDeletions(setId, pieceIdsToRemove, empty); + } +} + +contract PDPVerifierDataSetMutateTest is MockFVMTest, PieceHelper { + uint256 constant CHALLENGE_FINALITY_DELAY = 2; + + PDPVerifier pdpVerifier; + TestingRecordKeeperService listener; + bytes empty = new bytes(0); + + function setUp() public override { + super.setUp(); + PDPVerifier pdpVerifierImpl = new PDPVerifier(); + bytes memory initializeData = abi.encodeWithSelector(PDPVerifier.initialize.selector, CHALLENGE_FINALITY_DELAY); + MyERC1967Proxy proxy = new MyERC1967Proxy(address(pdpVerifierImpl), initializeData); + pdpVerifier = PDPVerifier(address(proxy)); + listener = new TestingRecordKeeperService(); + } + + function testAddPiece() public { + vm.expectEmit(true, true, false, false); + emit IPDPEvents.DataSetCreated(1, address(this)); + uint256 setId = pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(listener), new Cids.Cid[](0), abi.encode(empty, empty) + ); + + Cids.Cid[] memory pieces = new Cids.Cid[](1); + uint256 leafCount = 64; + pieces[0] = makeSamplePiece(leafCount); + + vm.expectEmit(true, true, false, false); + emit IPDPEvents.PiecesAdded(setId, new uint256[](0), new Cids.Cid[](0)); + uint256 pieceId = pdpVerifier.addPieces(setId, address(0), pieces, empty); + 
assertEq(pdpVerifier.getChallengeRange(setId), 0); + + // flush add + vm.expectEmit(true, true, false, false); + emit IPDPEvents.NextProvingPeriod(setId, vm.getBlockNumber() + CHALLENGE_FINALITY_DELAY, 2); + pdpVerifier.nextProvingPeriod(setId, vm.getBlockNumber() + CHALLENGE_FINALITY_DELAY, empty); + + assertEq(pdpVerifier.getDataSetLeafCount(setId), leafCount); + assertEq(pdpVerifier.getNextChallengeEpoch(setId), vm.getBlockNumber() + CHALLENGE_FINALITY_DELAY); + assertEq(pdpVerifier.getChallengeRange(setId), leafCount); + + assertTrue(pdpVerifier.pieceLive(setId, pieceId)); + assertEq(pdpVerifier.getPieceCid(setId, pieceId).data, pieces[0].data); + assertEq(pdpVerifier.getPieceLeafCount(setId, pieceId), leafCount); + + assertEq(pdpVerifier.getNextPieceId(setId), 1); + } + + function testAddPiecesToExistingDataSetWithFee() public { + uint256 setId = pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(listener), new Cids.Cid[](0), abi.encode(empty, empty) + ); + + Cids.Cid[] memory pieces = new Cids.Cid[](1); + pieces[0] = makeSamplePiece(64); + bytes memory addPayload = abi.encode("add", "data"); + + vm.expectRevert("no fee on add to existing dataset"); + pdpVerifier.addPieces{value: 1 ether}(setId, address(0), pieces, addPayload); + } + + function testAddPiecesToNonExistentDataSet() public { + Cids.Cid[] memory pieces = new Cids.Cid[](1); + pieces[0] = makeSamplePiece(64); + bytes memory addPayload = abi.encode("add", "data"); + + vm.expectRevert("Data set not live"); + pdpVerifier.addPieces( + 999, // Non-existent data set ID + address(0), + pieces, + addPayload + ); + } + + function testAddPiecesToExistingDataSetWrongStorageProvider() public { + uint256 setId = pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(listener), new Cids.Cid[](0), abi.encode(empty, empty) + ); + + Cids.Cid[] memory pieces = new Cids.Cid[](1); + pieces[0] = makeSamplePiece(64); + bytes memory addPayload = abi.encode("add", 
"data"); + + // Try to add pieces as a different address + address otherAddress = address(0x1234); + vm.prank(otherAddress); + vm.expectRevert("Only the storage provider can add pieces"); + pdpVerifier.addPieces(setId, address(0), pieces, addPayload); + } + + function testAddMultiplePieces() public { + vm.expectEmit(true, true, false, false); + emit IPDPEvents.DataSetCreated(1, address(this)); + uint256 setId = pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(listener), new Cids.Cid[](0), abi.encode(empty, empty) + ); + Cids.Cid[] memory pieces = new Cids.Cid[](2); + pieces[0] = makeSamplePiece(64); + pieces[1] = makeSamplePiece(128); + + vm.expectEmit(true, true, false, false); + uint256[] memory pieceIds = new uint256[](2); + pieceIds[0] = 0; + pieceIds[1] = 1; + Cids.Cid[] memory pieceCids = new Cids.Cid[](2); + pieceCids[0] = pieces[0]; + pieceCids[1] = pieces[1]; + emit IPDPEvents.PiecesAdded(setId, pieceIds, pieceCids); + uint256 firstId = pdpVerifier.addPieces(setId, address(0), pieces, empty); + assertEq(firstId, 0); + // flush add + vm.expectEmit(true, true, true, false); + emit IPDPEvents.NextProvingPeriod(setId, vm.getBlockNumber() + CHALLENGE_FINALITY_DELAY, 6); + pdpVerifier.nextProvingPeriod(setId, vm.getBlockNumber() + CHALLENGE_FINALITY_DELAY, empty); + + uint256 expectedLeafCount = 64 + 128; + assertEq(pdpVerifier.getDataSetLeafCount(setId), expectedLeafCount); + assertEq(pdpVerifier.getNextChallengeEpoch(setId), vm.getBlockNumber() + CHALLENGE_FINALITY_DELAY); + + assertTrue(pdpVerifier.pieceLive(setId, firstId)); + assertTrue(pdpVerifier.pieceLive(setId, firstId + 1)); + assertEq(pdpVerifier.getPieceCid(setId, firstId).data, pieces[0].data); + assertEq(pdpVerifier.getPieceCid(setId, firstId + 1).data, pieces[1].data); + + assertEq(pdpVerifier.getPieceLeafCount(setId, firstId), 64); + assertEq(pdpVerifier.getPieceLeafCount(setId, firstId + 1), 128); + assertEq(pdpVerifier.getNextPieceId(setId), 2); + } + + 
function expectIndexedError(uint256 index, string memory expectedMessage) internal { + vm.expectRevert(abi.encodeWithSelector(PDPVerifier.IndexedError.selector, index, expectedMessage)); + } + + function testAddBadPiece() public { + uint256 setId = pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(listener), new Cids.Cid[](0), abi.encode(empty, empty) + ); + Cids.Cid[] memory pieces = new Cids.Cid[](1); + + pieces[0] = makeSamplePiece(0); + expectIndexedError(0, "Padding is too large"); + pdpVerifier.addPieces(setId, address(0), pieces, empty); + + // Fail when piece size is too large + pieces[0] = makeSamplePiece(1 << pdpVerifier.MAX_PIECE_SIZE_LOG2() + 1); + expectIndexedError(0, "Piece size must be less than 2^50"); + pdpVerifier.addPieces(setId, address(0), pieces, empty); + + // Fail when not adding any pieces; + Cids.Cid[] memory emptyPieces = new Cids.Cid[](0); + vm.expectRevert("Must add at least one piece"); + pdpVerifier.addPieces(setId, address(0), emptyPieces, empty); + + // Fail when data set is no longer live + pieces[0] = makeSamplePiece(1); + pdpVerifier.deleteDataSet(setId, empty); + vm.expectRevert("Data set not live"); + pdpVerifier.addPieces(setId, address(0), pieces, empty); + } + + function testAddBadPiecesBatched() public { + // Add one bad piece, message fails on bad index + uint256 setId = pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(listener), new Cids.Cid[](0), abi.encode(empty, empty) + ); + Cids.Cid[] memory pieces = new Cids.Cid[](4); + pieces[0] = makeSamplePiece(1); + pieces[1] = makeSamplePiece(1); + pieces[2] = makeSamplePiece(1); + pieces[3] = makeSamplePiece(0); + + expectIndexedError(3, "Padding is too large"); + pdpVerifier.addPieces(setId, address(0), pieces, empty); + + // Add multiple bad pieces, message fails on first bad index + pieces[0] = makeSamplePiece(0); + expectIndexedError(0, "Padding is too large"); + pdpVerifier.addPieces(setId, address(0), 
pieces, empty); + } + + function testRemovePiece() public { + // Add one piece + uint256 setId = pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(listener), new Cids.Cid[](0), abi.encode(empty, empty) + ); + Cids.Cid[] memory pieces = new Cids.Cid[](1); + pieces[0] = makeSamplePiece(2); + pdpVerifier.addPieces(setId, address(0), pieces, empty); + assertEq(pdpVerifier.getNextChallengeEpoch(setId), pdpVerifier.NO_CHALLENGE_SCHEDULED()); // Not updated on first add anymore + pdpVerifier.nextProvingPeriod(setId, vm.getBlockNumber() + CHALLENGE_FINALITY_DELAY, empty); + assertEq(pdpVerifier.getNextChallengeEpoch(setId), vm.getBlockNumber() + CHALLENGE_FINALITY_DELAY); + + // Remove piece + uint256[] memory toRemove = new uint256[](1); + toRemove[0] = 0; + pdpVerifier.schedulePieceDeletions(setId, toRemove, empty); + + vm.expectEmit(true, true, false, false); + emit IPDPEvents.PiecesRemoved(setId, toRemove); + pdpVerifier.nextProvingPeriod(setId, vm.getBlockNumber() + CHALLENGE_FINALITY_DELAY, empty); // flush + + assertEq(pdpVerifier.getNextChallengeEpoch(setId), pdpVerifier.NO_CHALLENGE_SCHEDULED()); + assertEq(pdpVerifier.pieceLive(setId, 0), false); + assertEq(pdpVerifier.getNextPieceId(setId), 1); + assertEq(pdpVerifier.getDataSetLeafCount(setId), 0); + bytes memory emptyCidData = new bytes(0); + assertEq(pdpVerifier.getPieceCid(setId, 0).data, emptyCidData); + assertEq(pdpVerifier.getPieceLeafCount(setId, 0), 0); + } + + function testCannotScheduleRemovalOnNonLiveDataSet() public { + // Create a data set + uint256 setId = pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(listener), new Cids.Cid[](0), abi.encode(empty, empty) + ); + + // Add a piece to the data set + Cids.Cid[] memory pieces = new Cids.Cid[](1); + pieces[0] = makeSamplePiece(2); + pdpVerifier.addPieces(setId, address(0), pieces, empty); + + // Delete the data set + pdpVerifier.deleteDataSet(setId, empty); + + // Attempt to schedule 
removal of the piece, which should fail + uint256[] memory pieceIds = new uint256[](1); + pieceIds[0] = 0; + vm.expectRevert("Data set not live"); + pdpVerifier.schedulePieceDeletions(setId, pieceIds, empty); + } + + function testRemovePieceBatch() public { + uint256 setId = pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(listener), new Cids.Cid[](0), abi.encode(empty, empty) + ); + Cids.Cid[] memory pieces = new Cids.Cid[](3); + pieces[0] = makeSamplePiece(2); + pieces[1] = makeSamplePiece(2); + pieces[2] = makeSamplePiece(2); + pdpVerifier.addPieces(setId, address(0), pieces, empty); + uint256[] memory toRemove = new uint256[](2); + toRemove[0] = 0; + toRemove[1] = 2; + pdpVerifier.schedulePieceDeletions(setId, toRemove, empty); + + vm.expectEmit(true, true, false, false); + emit IPDPEvents.PiecesRemoved(setId, toRemove); + pdpVerifier.nextProvingPeriod(setId, vm.getBlockNumber() + CHALLENGE_FINALITY_DELAY, empty); // flush + + assertEq(pdpVerifier.pieceLive(setId, 0), false); + assertEq(pdpVerifier.pieceLive(setId, 1), true); + assertEq(pdpVerifier.pieceLive(setId, 2), false); + + assertEq(pdpVerifier.getNextPieceId(setId), 3); + assertEq(pdpVerifier.getDataSetLeafCount(setId), 64 / 32); + assertEq(pdpVerifier.getNextChallengeEpoch(setId), vm.getBlockNumber() + CHALLENGE_FINALITY_DELAY); + + bytes memory emptyCidData = new bytes(0); + assertEq(pdpVerifier.getPieceCid(setId, 0).data, emptyCidData); + assertEq(pdpVerifier.getPieceCid(setId, 1).data, pieces[1].data); + assertEq(pdpVerifier.getPieceCid(setId, 2).data, emptyCidData); + + assertEq(pdpVerifier.getPieceLeafCount(setId, 0), 0); + assertEq(pdpVerifier.getPieceLeafCount(setId, 1), 64 / 32); + assertEq(pdpVerifier.getPieceLeafCount(setId, 2), 0); + } + + function testRemoveFuturePieces() public { + uint256 setId = pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(listener), new Cids.Cid[](0), abi.encode(empty, empty) + ); + Cids.Cid[] 
memory pieces = new Cids.Cid[](1); + pieces[0] = makeSamplePiece(2); + pdpVerifier.addPieces(setId, address(0), pieces, empty); + assertEq(true, pdpVerifier.pieceLive(setId, 0)); + assertEq(false, pdpVerifier.pieceLive(setId, 1)); + uint256[] memory toRemove = new uint256[](2); + + // Scheduling an un-added piece for removal should fail + toRemove[0] = 0; // current piece + toRemove[1] = 1; // future piece + vm.expectRevert("Can only schedule removal of existing pieces"); + pdpVerifier.schedulePieceDeletions(setId, toRemove, empty); + // Actual removal does not fail + pdpVerifier.nextProvingPeriod(setId, vm.getBlockNumber() + CHALLENGE_FINALITY_DELAY, empty); + + // Scheduling both unchallengeable and challengeable pieces for removal succeeds + // scheduling duplicate ids in both cases succeeds + uint256[] memory toRemove2 = new uint256[](4); + pdpVerifier.addPieces(setId, address(0), pieces, empty); + toRemove2[0] = 0; // current challengeable piece + toRemove2[1] = 1; // current unchallengeable piece + toRemove2[2] = 0; // duplicate challengeable + toRemove2[3] = 1; // duplicate unchallengeable + // state exists for both pieces + assertEq(true, pdpVerifier.pieceLive(setId, 0)); + assertEq(true, pdpVerifier.pieceLive(setId, 1)); + // only piece 0 is challengeable + assertEq(true, pdpVerifier.pieceChallengable(setId, 0)); + assertEq(false, pdpVerifier.pieceChallengable(setId, 1)); + pdpVerifier.schedulePieceDeletions(setId, toRemove2, empty); + pdpVerifier.nextProvingPeriod(setId, vm.getBlockNumber() + CHALLENGE_FINALITY_DELAY, empty); + + assertEq(false, pdpVerifier.pieceLive(setId, 0)); + assertEq(false, pdpVerifier.pieceLive(setId, 1)); + } + + function testExtraDataMaxSizeLimit() public { + // Generate extra data that exceeds the max size (2KB) + bytes memory tooLargeExtraData = new bytes(2049); // 2KB + 1 byte + for (uint256 i = 0; i < tooLargeExtraData.length; i++) { + tooLargeExtraData[i] = 0x41; // ASCII 'A' + } + + // First test createDataSet with too 
large extra data + vm.expectRevert("Extra data too large"); + pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(listener), new Cids.Cid[](0), abi.encode(tooLargeExtraData, empty) + ); + + // Now create data set + uint256 setId = pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(listener), new Cids.Cid[](0), abi.encode(empty, empty) + ); + Cids.Cid[] memory pieces = new Cids.Cid[](1); + + // Test addPieces with too large extra data + pieces[0] = makeSamplePiece(2); + vm.expectRevert("Extra data too large"); + pdpVerifier.addPieces(setId, address(0), pieces, tooLargeExtraData); + + // Now actually add piece id 0 + pdpVerifier.addPieces(setId, address(0), pieces, empty); + + // Test schedulePieceDeletions with too large extra data + uint256[] memory pieceIds = new uint256[](1); + pieceIds[0] = 0; + vm.expectRevert("Extra data too large"); + pdpVerifier.schedulePieceDeletions(setId, pieceIds, tooLargeExtraData); + + // Test nextProvingPeriod with too large extra data + vm.expectRevert("Extra data too large"); + pdpVerifier.nextProvingPeriod(setId, vm.getBlockNumber() + 10, tooLargeExtraData); + + // Test deleteDataSet with too large extra data + vm.expectRevert("Extra data too large"); + pdpVerifier.deleteDataSet(setId, tooLargeExtraData); + } + + function testOnlyStorageProviderCanModifyDataSet() public { + // Setup a piece we can add + uint256 setId = pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(listener), new Cids.Cid[](0), abi.encode(empty, empty) + ); + Cids.Cid[] memory pieces = new Cids.Cid[](1); + pieces[0] = makeSamplePiece(2); + + // First add a piece as the storage provider so we can test removal + pdpVerifier.addPieces(setId, address(0), pieces, empty); + + address nonStorageProvider = address(0xC0FFEE); + // Try to add pieces as non-storage-provider + vm.prank(nonStorageProvider); + vm.expectRevert("Only the storage provider can add pieces"); + 
pdpVerifier.addPieces(setId, address(0), pieces, empty); + + // Try to delete data set as non-storage-provider + vm.prank(nonStorageProvider); + vm.expectRevert("Only the storage provider can delete data sets"); + pdpVerifier.deleteDataSet(setId, empty); + + // Try to schedule removals as non-storage-provider + uint256[] memory pieceIds = new uint256[](1); + pieceIds[0] = 0; + vm.prank(nonStorageProvider); + vm.expectRevert("Only the storage provider can schedule removal of pieces"); + pdpVerifier.schedulePieceDeletions(setId, pieceIds, empty); + + // Try to provePossession as non-storage-provider + vm.prank(nonStorageProvider); + IPDPTypes.Proof[] memory proofs = new IPDPTypes.Proof[](1); + proofs[0] = IPDPTypes.Proof(bytes32(abi.encodePacked("test")), new bytes32[](0)); + vm.expectRevert("Only the storage provider can prove possession"); + pdpVerifier.provePossession(setId, proofs); + + // Try to call nextProvingPeriod as non-storage-provider + vm.prank(nonStorageProvider); + vm.expectRevert("only the storage provider can move to next proving period"); + pdpVerifier.nextProvingPeriod(setId, vm.getBlockNumber() + 10, empty); + } + + function testNextProvingPeriodChallengeEpochTooSoon() public { + uint256 setId = pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(listener), new Cids.Cid[](0), abi.encode(empty, empty) + ); + // Add a piece to the data set (otherwise nextProvingPeriod fails waiting for leaves) + Cids.Cid[] memory pieces = new Cids.Cid[](1); + pieces[0] = makeSamplePiece(2); + pdpVerifier.addPieces(setId, address(0), pieces, empty); + + // Current block number + uint256 currentBlock = vm.getBlockNumber(); + + // Try to call nextProvingPeriod with a challenge epoch that is not at least + // challengeFinality epochs in the future + uint256 tooSoonEpoch = currentBlock + CHALLENGE_FINALITY_DELAY - 1; + + // Expect revert with the specific error message + vm.expectRevert("challenge epoch must be at least challengeFinality 
epochs in the future"); + pdpVerifier.nextProvingPeriod(setId, tooSoonEpoch, ""); + + // Set challenge epoch to exactly challengeFinality epochs in the future + // This should work (not revert) + uint256 validEpoch = currentBlock + CHALLENGE_FINALITY_DELAY; + + // This call should succeed + pdpVerifier.nextProvingPeriod(setId, validEpoch, ""); + + // Verify the challenge epoch was set correctly + assertEq(pdpVerifier.getNextChallengeEpoch(setId), validEpoch); + } + + function testNextProvingPeriodWithNoData() public { + // Get the NO_CHALLENGE_SCHEDULED constant value for clarity + uint256 noChallenge = pdpVerifier.NO_CHALLENGE_SCHEDULED(); + + uint256 setId = pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(listener), new Cids.Cid[](0), abi.encode(empty, empty) + ); + + // Initial state should be NO_CHALLENGE + assertEq( + pdpVerifier.getNextChallengeEpoch(setId), noChallenge, "Initial state should be NO_CHALLENGE_SCHEDULED" + ); + + // Try to set next proving period with various values + vm.expectRevert("can only start proving once leaves are added"); + pdpVerifier.nextProvingPeriod(setId, vm.getBlockNumber() + 100, empty); + + vm.expectRevert("can only start proving once leaves are added"); + pdpVerifier.nextProvingPeriod(setId, vm.getBlockNumber() + CHALLENGE_FINALITY_DELAY, empty); + + vm.expectRevert("can only start proving once leaves are added"); + pdpVerifier.nextProvingPeriod(setId, type(uint256).max, empty); + } + + function testNextProvingPeriodRevertsOnEmptyDataSet() public { + // Create a new data set + uint256 setId = pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(listener), new Cids.Cid[](0), abi.encode(empty, empty) + ); + + // Try to call nextProvingPeriod on the empty data set + // Should revert because no leaves have been added yet + vm.expectRevert("can only start proving once leaves are added"); + pdpVerifier.nextProvingPeriod(setId, vm.getBlockNumber() + 
CHALLENGE_FINALITY_DELAY, empty); + } + + function testEmitDataSetEmptyEvent() public { + // Create a data set with one piece + uint256 setId = pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(listener), new Cids.Cid[](0), abi.encode(empty, empty) + ); + + Cids.Cid[] memory pieces = new Cids.Cid[](1); + pieces[0] = makeSamplePiece(2); + pdpVerifier.addPieces(setId, address(0), pieces, empty); + + // Schedule piece for removal + uint256[] memory toRemove = new uint256[](1); + toRemove[0] = 0; + pdpVerifier.schedulePieceDeletions(setId, toRemove, empty); + + // Expect DataSetEmpty event when calling nextProvingPeriod + vm.expectEmit(true, false, false, false); + emit IPDPEvents.DataSetEmpty(setId); + + // Call nextProvingPeriod which should remove the piece and emit the event + pdpVerifier.nextProvingPeriod(setId, vm.getBlockNumber() + CHALLENGE_FINALITY_DELAY, empty); + + // Verify the data set is indeed empty + assertEq(pdpVerifier.getDataSetLeafCount(setId), 0); + assertEq(pdpVerifier.getNextChallengeEpoch(setId), 0); + assertEq(pdpVerifier.getDataSetLastProvenEpoch(setId), 0); + } +} + +contract PDPVerifierPaginationTest is MockFVMTest, PieceHelper { + PDPVerifier pdpVerifier; + TestingRecordKeeperService listener; + bytes empty = new bytes(0); + + function setUp() public override { + super.setUp(); + PDPVerifier pdpVerifierImpl = new PDPVerifier(); + uint256 challengeFinality = 2; + bytes memory initializeData = abi.encodeWithSelector(PDPVerifier.initialize.selector, challengeFinality); + MyERC1967Proxy proxy = new MyERC1967Proxy(address(pdpVerifierImpl), initializeData); + pdpVerifier = PDPVerifier(address(proxy)); + listener = new TestingRecordKeeperService(); + } + + function testGetActivePiecesEmpty() public { + // Create empty data set and test + uint256 setId = pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(listener), new Cids.Cid[](0), abi.encode(empty, empty) + ); + + (Cids.Cid[] 
memory pieces, uint256[] memory ids, uint256[] memory sizes, bool hasMore) = + pdpVerifier.getActivePieces(setId, 0, 10); + + assertEq(pieces.length, 0, "Should return empty array for empty data set"); + assertEq(ids.length, 0, "Should return empty IDs array"); + assertEq(sizes.length, 0, "Should return empty sizes array"); + assertEq(hasMore, false, "Should not have more items"); + + // Also verify with getActivePieceCount + assertEq(pdpVerifier.getActivePieceCount(setId), 0, "Empty data set should have 0 active pieces"); + } + + function testGetActivePiecesPagination() public { + uint256 setId = pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(listener), new Cids.Cid[](0), abi.encode(empty, empty) + ); + + // Add 15 pieces + Cids.Cid[] memory testPieces = new Cids.Cid[](15); + for (uint256 i = 0; i < 15; i++) { + testPieces[i] = makeSamplePiece(1024 / 32 * (i + 1)); + } + + uint256 firstPieceId = pdpVerifier.addPieces(setId, address(0), testPieces, empty); + assertEq(firstPieceId, 0, "First piece ID should be 0"); + + // Verify total count + assertEq(pdpVerifier.getActivePieceCount(setId), 15, "Should have 15 active pieces"); + + // Test first page + (Cids.Cid[] memory pieces1, uint256[] memory ids1, uint256[] memory sizes1, bool hasMore1) = + pdpVerifier.getActivePieces(setId, 0, 5); + assertEq(pieces1.length, 5, "First page should have 5 pieces"); + assertEq(ids1.length, 5, "First page should have 5 IDs"); + assertEq(sizes1.length, 5, "First page should have 5 sizes"); + assertEq(hasMore1, true, "Should have more items after first page"); + assertEq(sizes1[0], 1024, "First piece size should be 1024"); + assertEq(ids1[0], 0, "First piece ID should be 0"); + + // Test second page + (Cids.Cid[] memory pieces2, uint256[] memory ids2, uint256[] memory sizes2, bool hasMore2) = + pdpVerifier.getActivePieces(setId, 5, 5); + assertEq(pieces2.length, 5, "Second page should have 5 pieces"); + assertEq(hasMore2, true, "Should have more 
items after second page"); + assertEq(ids2[0], 5, "First piece ID on second page should be 5"); + assertEq(sizes2[0], 6144, "First piece size on second page should be 6144 (1024 * 6)"); + + // Test last page + (Cids.Cid[] memory pieces3, uint256[] memory ids3, /*uint256[] memory sizes3*/, bool hasMore3) = + pdpVerifier.getActivePieces(setId, 10, 5); + assertEq(pieces3.length, 5, "Last page should have 5 pieces"); + assertEq(hasMore3, false, "Should not have more items after last page"); + assertEq(ids3[0], 10, "First piece ID on last page should be 10"); + } + + function testGetActivePiecesWithDeleted() public { + uint256 setId = pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(listener), new Cids.Cid[](0), abi.encode(empty, empty) + ); + + // Add pieces + Cids.Cid[] memory testPieces = new Cids.Cid[](10); + for (uint256 i = 0; i < 10; i++) { + testPieces[i] = makeSamplePiece(1024 / 32); + } + uint256 firstPieceId = pdpVerifier.addPieces(setId, address(0), testPieces, empty); + + // Schedule removal of pieces 2, 4, 6 (indices 1, 3, 5) + uint256[] memory toRemove = new uint256[](3); + toRemove[0] = firstPieceId + 1; // Piece at index 1 + toRemove[1] = firstPieceId + 3; // Piece at index 3 + toRemove[2] = firstPieceId + 5; // Piece at index 5 + pdpVerifier.schedulePieceDeletions(setId, toRemove, empty); + + // Move to next proving period to make removals effective + uint256 challengeFinality = pdpVerifier.getChallengeFinality(); + pdpVerifier.nextProvingPeriod(setId, vm.getBlockNumber() + challengeFinality, empty); + + // Should return only 7 active pieces + (Cids.Cid[] memory pieces, uint256[] memory ids, /*uint256[] memory sizes*/, bool hasMore) = + pdpVerifier.getActivePieces(setId, 0, 10); + assertEq(pieces.length, 7, "Should have 7 active pieces after deletions"); + assertEq(hasMore, false, "Should not have more items"); + + // Verify count matches + assertEq(pdpVerifier.getActivePieceCount(setId), 7, "Should have 7 active 
pieces count"); + + // Verify the correct pieces are returned (0, 2, 4, 6, 7, 8, 9) + assertEq(ids[0], 0, "First active piece should be 0"); + assertEq(ids[1], 2, "Second active piece should be 2"); + assertEq(ids[2], 4, "Third active piece should be 4"); + assertEq(ids[3], 6, "Fourth active piece should be 6"); + assertEq(ids[4], 7, "Fifth active piece should be 7"); + } + + function testGetActivePiecesEdgeCases() public { + uint256 setId = pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(listener), new Cids.Cid[](0), abi.encode(empty, empty) + ); + + // Add 5 pieces + Cids.Cid[] memory testPieces = new Cids.Cid[](5); + for (uint256 i = 0; i < 5; i++) { + testPieces[i] = makeSamplePiece(1024 / 32); + } + pdpVerifier.addPieces(setId, address(0), testPieces, empty); + + // Verify count + assertEq(pdpVerifier.getActivePieceCount(setId), 5, "Should have 5 active pieces"); + + // Test offset beyond range + (Cids.Cid[] memory pieces1, /*uint256[] memory ids1*/, /*uint256[] memory sizes1*/, bool hasMore1) = + pdpVerifier.getActivePieces(setId, 10, 5); + assertEq(pieces1.length, 0, "Should return empty when offset beyond range"); + assertEq(hasMore1, false, "Should not have more items"); + + // Test limit 0 - should revert now + vm.expectRevert("Limit must be greater than 0"); + pdpVerifier.getActivePieces(setId, 0, 0); + + // Test limit exceeding available + (Cids.Cid[] memory pieces3, uint256[] memory ids3, /*uint256[] memory sizes3*/, bool hasMore3) = + pdpVerifier.getActivePieces(setId, 3, 10); + assertEq(pieces3.length, 2, "Should return only 2 pieces from offset 3"); + assertEq(hasMore3, false, "Should not have more items"); + assertEq(ids3[0], 3, "First ID should be 3"); + assertEq(ids3[1], 4, "Second ID should be 4"); + } + + function testGetActivePiecesNotLive() public { + // Test with invalid data set ID + vm.expectRevert("Data set not live"); + pdpVerifier.getActivePieces(999, 0, 10); + + // Also test getActivePieceCount + 
vm.expectRevert("Data set not live"); + pdpVerifier.getActivePieceCount(999); + } + + function testGetActivePiecesHasMore() public { + uint256 setId = pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(listener), new Cids.Cid[](0), abi.encode(empty, empty) + ); + + // Add exactly 10 pieces + Cids.Cid[] memory testPieces = new Cids.Cid[](10); + for (uint256 i = 0; i < 10; i++) { + testPieces[i] = makeSamplePiece(1024 / 32); + } + pdpVerifier.addPieces(setId, address(0), testPieces, empty); + + // Test exact boundary - requesting exactly all items + (,,, bool hasMore1) = pdpVerifier.getActivePieces(setId, 0, 10); + assertEq(hasMore1, false, "Should not have more when requesting exactly all items"); + + // Test one less than total - should have more + (,,, bool hasMore2) = pdpVerifier.getActivePieces(setId, 0, 9); + assertEq(hasMore2, true, "Should have more when requesting less than total"); + + // Test at offset with remaining items + (,,, bool hasMore3) = pdpVerifier.getActivePieces(setId, 5, 4); + assertEq(hasMore3, true, "Should have more when 1 item remains"); + + // Test at offset with no remaining items + (,,, bool hasMore4) = pdpVerifier.getActivePieces(setId, 5, 5); + assertEq(hasMore4, false, "Should not have more when requesting exactly remaining items"); + } + + function testGetActivePiecesLargeSet() public { + uint256 setId = pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(listener), new Cids.Cid[](0), abi.encode(empty, empty) + ); + + // Add 100 pieces + Cids.Cid[] memory testPieces = new Cids.Cid[](100); + for (uint256 i = 0; i < 100; i++) { + testPieces[i] = makeSamplePiece(1024 / 32 * (i + 1)); + } + pdpVerifier.addPieces(setId, address(0), testPieces, empty); + + // Verify total count + assertEq(pdpVerifier.getActivePieceCount(setId), 100, "Should have 100 active pieces"); + + // Test pagination through the entire set + uint256 totalRetrieved = 0; + uint256 offset = 0; + uint256 
pageSize = 20; + + while (offset < 100) { + (Cids.Cid[] memory pieces, uint256[] memory ids, uint256[] memory sizes, bool hasMore) = + pdpVerifier.getActivePieces(setId, offset, pageSize); + + if (offset + pageSize < 100) { + assertEq(hasMore, true, "Should have more pages"); + assertEq(pieces.length, pageSize, "Should return full page"); + } else { + assertEq(hasMore, false, "Should not have more pages"); + assertEq(pieces.length, 100 - offset, "Should return remaining pieces"); + } + + // Verify IDs are sequential + for (uint256 i = 0; i < pieces.length; i++) { + assertEq(ids[i], offset + i, "IDs should be sequential"); + assertEq(sizes[i], 1024 * (offset + i + 1), "Sizes should match pattern"); + } + + totalRetrieved += pieces.length; + offset += pageSize; + } + + assertEq(totalRetrieved, 100, "Should have retrieved all 100 pieces"); + } +} + +// TestingRecordKeeperService is a PDPListener that allows any amount of proof challenges +// to help with more flexible testing. +contract TestingRecordKeeperService is PDPListener, PDPRecordKeeper { + // Implement the new storageProviderChanged hook + /// @notice Called when data set storage provider role is changed in PDPVerifier. 
+ function storageProviderChanged(uint256, address, address, bytes calldata) external override {} + + function dataSetCreated(uint256 dataSetId, address creator, bytes calldata) external override { + receiveDataSetEvent(dataSetId, PDPRecordKeeper.OperationType.CREATE, abi.encode(creator)); + } + + function dataSetDeleted(uint256 dataSetId, uint256 deletedLeafCount, bytes calldata) external override { + receiveDataSetEvent(dataSetId, PDPRecordKeeper.OperationType.DELETE, abi.encode(deletedLeafCount)); + } + + function piecesAdded(uint256 dataSetId, uint256 firstAdded, Cids.Cid[] calldata pieceData, bytes calldata) + external + override + { + receiveDataSetEvent(dataSetId, PDPRecordKeeper.OperationType.ADD, abi.encode(firstAdded, pieceData)); + } + + function piecesScheduledRemove(uint256 dataSetId, uint256[] calldata pieceIds, bytes calldata) external override { + receiveDataSetEvent(dataSetId, PDPRecordKeeper.OperationType.REMOVE_SCHEDULED, abi.encode(pieceIds)); + } + + function possessionProven(uint256 dataSetId, uint256 challengedLeafCount, uint256 seed, uint256 challengeCount) + external + override + { + receiveDataSetEvent( + dataSetId, + PDPRecordKeeper.OperationType.PROVE_POSSESSION, + abi.encode(challengedLeafCount, seed, challengeCount) + ); + } + + function nextProvingPeriod(uint256 dataSetId, uint256 challengeEpoch, uint256 leafCount, bytes calldata) + external + override + { + receiveDataSetEvent( + dataSetId, PDPRecordKeeper.OperationType.NEXT_PROVING_PERIOD, abi.encode(challengeEpoch, leafCount) + ); + } +} + +contract SumTreeInternalTestPDPVerifier is PDPVerifier { + constructor() {} + + function getTestHeightFromIndex(uint256 index) public pure returns (uint256) { + return heightFromIndex(index); + } + + function getSumTreeCounts(uint256 setId, uint256 pieceId) public view returns (uint256) { + return sumTreeCounts[setId][pieceId]; + } +} + +contract SumTreeHeightTest is MockFVMTest { + SumTreeInternalTestPDPVerifier pdpVerifier; + + function 
setUp() public override { + super.setUp(); + PDPVerifier pdpVerifierImpl = new SumTreeInternalTestPDPVerifier(); + bytes memory initializeData = abi.encodeWithSelector(PDPVerifier.initialize.selector, 2); + MyERC1967Proxy proxy = new MyERC1967Proxy(address(pdpVerifierImpl), initializeData); + pdpVerifier = SumTreeInternalTestPDPVerifier(address(proxy)); + } + + function testHeightFromIndex() public view { + // https://oeis.org/A001511 + uint8[105] memory oeisA001511 = [ + 1, + 2, + 1, + 3, + 1, + 2, + 1, + 4, + 1, + 2, + 1, + 3, + 1, + 2, + 1, + 5, + 1, + 2, + 1, + 3, + 1, + 2, + 1, + 4, + 1, + 2, + 1, + 3, + 1, + 2, + 1, + 6, + 1, + 2, + 1, + 3, + 1, + 2, + 1, + 4, + 1, + 2, + 1, + 3, + 1, + 2, + 1, + 5, + 1, + 2, + 1, + 3, + 1, + 2, + 1, + 4, + 1, + 2, + 1, + 3, + 1, + 2, + 1, + 7, + 1, + 2, + 1, + 3, + 1, + 2, + 1, + 4, + 1, + 2, + 1, + 3, + 1, + 2, + 1, + 5, + 1, + 2, + 1, + 3, + 1, + 2, + 1, + 4, + 1, + 2, + 1, + 3, + 1, + 2, + 1, + 6, + 1, + 2, + 1, + 3, + 1, + 2, + 1, + 4, + 1 + ]; + for (uint256 i = 0; i < 105; i++) { + assertEq( + uint256(oeisA001511[i]), + pdpVerifier.getTestHeightFromIndex(i) + 1, + "Heights from index 0 to 104 should match OEIS A001511" + ); + } + } +} + +contract SumTreeAddTest is MockFVMTest, PieceHelper { + SumTreeInternalTestPDPVerifier pdpVerifier; + TestingRecordKeeperService listener; + uint256 testSetId; + uint256 constant CHALLENGE_FINALITY_DELAY = 100; + bytes empty = new bytes(0); + + function setUp() public override { + super.setUp(); + PDPVerifier pdpVerifierImpl = new SumTreeInternalTestPDPVerifier(); + bytes memory initializeData = abi.encodeWithSelector(PDPVerifier.initialize.selector, CHALLENGE_FINALITY_DELAY); + MyERC1967Proxy proxy = new MyERC1967Proxy(address(pdpVerifierImpl), initializeData); + pdpVerifier = SumTreeInternalTestPDPVerifier(address(proxy)); + listener = new TestingRecordKeeperService(); + testSetId = pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(listener), new 
Cids.Cid[](0), abi.encode(empty, empty) + ); + } + + function testMultiAdd() public { + uint256[] memory counts = new uint256[](8); + counts[0] = 1; + counts[1] = 2; + counts[2] = 3; + counts[3] = 5; + counts[4] = 8; + counts[5] = 13; + counts[6] = 21; + counts[7] = 34; + + Cids.Cid[] memory pieceDataArray = new Cids.Cid[](8); + + for (uint256 i = 0; i < counts.length; i++) { + pieceDataArray[i] = makeSamplePiece(counts[i]); + } + pdpVerifier.addPieces(testSetId, address(0), pieceDataArray, empty); + assertEq(pdpVerifier.getDataSetLeafCount(testSetId), 87, "Incorrect final data set leaf count"); + assertEq(pdpVerifier.getNextPieceId(testSetId), 8, "Incorrect next piece ID"); + assertEq(pdpVerifier.getSumTreeCounts(testSetId, 7), 87, "Incorrect sum tree count"); + assertEq(pdpVerifier.getPieceLeafCount(testSetId, 7), 34, "Incorrect piece leaf count"); + Cids.Cid memory expectedCid = pieceDataArray[3]; + Cids.Cid memory actualCid = pdpVerifier.getPieceCid(testSetId, 3); + assertEq(actualCid.data, expectedCid.data, "Incorrect piece CID"); + } + + function setUpTestingArray() public returns (uint256[] memory counts, uint256[] memory expectedSumTreeCounts) { + counts = new uint256[](8); + counts[0] = 200; + counts[1] = 100; + counts[2] = 1; // Remove + counts[3] = 30; + counts[4] = 50; + counts[5] = 1; // Remove + counts[6] = 400; + counts[7] = 40; + + // Correct sum tree values assuming that pieceIdsToRemove are deleted + expectedSumTreeCounts = new uint256[](8); + expectedSumTreeCounts[0] = 200; + expectedSumTreeCounts[1] = 300; + expectedSumTreeCounts[2] = 0; + expectedSumTreeCounts[3] = 330; + expectedSumTreeCounts[4] = 50; + expectedSumTreeCounts[5] = 50; + expectedSumTreeCounts[6] = 400; + expectedSumTreeCounts[7] = 820; + + uint256[] memory pieceIdsToRemove = new uint256[](2); + pieceIdsToRemove[0] = 2; + pieceIdsToRemove[1] = 5; + + // Add all + for (uint256 i = 0; i < counts.length; i++) { + Cids.Cid[] memory pieceDataArray = new Cids.Cid[](1); + 
pieceDataArray[0] = makeSamplePiece(counts[i]); + pdpVerifier.addPieces(testSetId, address(0), pieceDataArray, empty); + // Assert the piece was added correctly + assertEq(pdpVerifier.getPieceCid(testSetId, i).data, pieceDataArray[0].data, "Piece not added correctly"); + } + + // Delete some + // Remove pieces in batch + pdpVerifier.schedulePieceDeletions(testSetId, pieceIdsToRemove, empty); + // flush adds and removals + pdpVerifier.nextProvingPeriod(testSetId, vm.getBlockNumber() + CHALLENGE_FINALITY_DELAY, empty); + for (uint256 i = 0; i < pieceIdsToRemove.length; i++) { + bytes memory zeroBytes; + assertEq(pdpVerifier.getPieceCid(testSetId, pieceIdsToRemove[i]).data, zeroBytes); + assertEq(pdpVerifier.getPieceLeafCount(testSetId, pieceIdsToRemove[i]), 0, "Piece size should be 0"); + } + } + + function testSumTree() public { + (uint256[] memory counts, uint256[] memory expectedSumTreeCounts) = setUpTestingArray(); + // Assert that the sum tree count is correct + for (uint256 i = 0; i < counts.length; i++) { + assertEq(pdpVerifier.getSumTreeCounts(testSetId, i), expectedSumTreeCounts[i], "Incorrect sum tree size"); + } + + // Assert final data set leaf count + assertEq(pdpVerifier.getDataSetLeafCount(testSetId), 820, "Incorrect final data set leaf count"); + } + + function testFindPieceId() public { + setUpTestingArray(); + + // Test findPieceId for various positions + assertFindPieceAndOffset(testSetId, 0, 0, 0); + assertFindPieceAndOffset(testSetId, 199, 0, 199); + assertFindPieceAndOffset(testSetId, 200, 1, 0); + assertFindPieceAndOffset(testSetId, 299, 1, 99); + assertFindPieceAndOffset(testSetId, 300, 3, 0); + assertFindPieceAndOffset(testSetId, 329, 3, 29); + assertFindPieceAndOffset(testSetId, 330, 4, 0); + assertFindPieceAndOffset(testSetId, 379, 4, 49); + assertFindPieceAndOffset(testSetId, 380, 6, 0); + assertFindPieceAndOffset(testSetId, 779, 6, 399); + assertFindPieceAndOffset(testSetId, 780, 7, 0); + assertFindPieceAndOffset(testSetId, 819, 7, 39); + 
+ // Test edge cases + vm.expectRevert("Leaf index out of bounds"); + uint256[] memory outOfBounds = new uint256[](1); + outOfBounds[0] = 820; + pdpVerifier.findPieceIds(testSetId, outOfBounds); + + vm.expectRevert("Leaf index out of bounds"); + outOfBounds[0] = 1000; + pdpVerifier.findPieceIds(testSetId, outOfBounds); + } + + function testBatchFindPieceId() public { + setUpTestingArray(); + uint256[] memory searchIndexes = new uint256[](12); + searchIndexes[0] = 0; + searchIndexes[1] = 199; + searchIndexes[2] = 200; + searchIndexes[3] = 299; + searchIndexes[4] = 300; + searchIndexes[5] = 329; + searchIndexes[6] = 330; + searchIndexes[7] = 379; + searchIndexes[8] = 380; + searchIndexes[9] = 779; + searchIndexes[10] = 780; + searchIndexes[11] = 819; + + uint256[] memory expectedPieces = new uint256[](12); + expectedPieces[0] = 0; + expectedPieces[1] = 0; + expectedPieces[2] = 1; + expectedPieces[3] = 1; + expectedPieces[4] = 3; + expectedPieces[5] = 3; + expectedPieces[6] = 4; + expectedPieces[7] = 4; + expectedPieces[8] = 6; + expectedPieces[9] = 6; + expectedPieces[10] = 7; + expectedPieces[11] = 7; + + uint256[] memory expectedOffsets = new uint256[](12); + expectedOffsets[0] = 0; + expectedOffsets[1] = 199; + expectedOffsets[2] = 0; + expectedOffsets[3] = 99; + expectedOffsets[4] = 0; + expectedOffsets[5] = 29; + expectedOffsets[6] = 0; + expectedOffsets[7] = 49; + expectedOffsets[8] = 0; + expectedOffsets[9] = 399; + expectedOffsets[10] = 0; + expectedOffsets[11] = 39; + + assertFindPiecesAndOffsets(testSetId, searchIndexes, expectedPieces, expectedOffsets); + } + + error TestingFindError(uint256 expected, uint256 actual, string msg); + + function assertFindPieceAndOffset(uint256 setId, uint256 searchIndex, uint256 expectPieceId, uint256 expectOffset) + internal + view + { + uint256[] memory searchIndices = new uint256[](1); + searchIndices[0] = searchIndex; + IPDPTypes.PieceIdAndOffset[] memory result = pdpVerifier.findPieceIds(setId, searchIndices); + if 
(result[0].pieceId != expectPieceId) { + revert TestingFindError(expectPieceId, result[0].pieceId, "unexpected piece"); + } + if (result[0].offset != expectOffset) { + revert TestingFindError(expectOffset, result[0].offset, "unexpected offset"); + } + } + + // The batched version of assertFindPieceAndOffset + function assertFindPiecesAndOffsets( + uint256 setId, + uint256[] memory searchIndices, + uint256[] memory expectPieceIds, + uint256[] memory expectOffsets + ) internal view { + IPDPTypes.PieceIdAndOffset[] memory result = pdpVerifier.findPieceIds(setId, searchIndices); + for (uint256 i = 0; i < searchIndices.length; i++) { + assertEq(result[i].pieceId, expectPieceIds[i], "unexpected piece"); + assertEq(result[i].offset, expectOffsets[i], "unexpected offset"); + } + } + + function testFindPieceIdTraverseOffTheEdgeAndBack() public { + uint256[] memory sizes = new uint256[](5); + sizes[0] = 1; // Remove + sizes[1] = 1; // Remove + sizes[2] = 1; // Remove + sizes[3] = 1; + sizes[4] = 1; + + uint256[] memory pieceIdsToRemove = new uint256[](3); + pieceIdsToRemove[0] = 0; + pieceIdsToRemove[1] = 1; + pieceIdsToRemove[2] = 2; + + for (uint256 i = 0; i < sizes.length; i++) { + Cids.Cid[] memory pieceDataArray = new Cids.Cid[](1); + pieceDataArray[0] = makeSamplePiece(sizes[i]); + pdpVerifier.addPieces(testSetId, address(0), pieceDataArray, empty); + } + pdpVerifier.schedulePieceDeletions(testSetId, pieceIdsToRemove, empty); + pdpVerifier.nextProvingPeriod(testSetId, vm.getBlockNumber() + CHALLENGE_FINALITY_DELAY, empty); //flush removals + + assertFindPieceAndOffset(testSetId, 0, 3, 0); + assertFindPieceAndOffset(testSetId, 1, 4, 0); + } +} + +contract BadListener is PDPListener { + PDPRecordKeeper.OperationType public badOperation; + + function setBadOperation(PDPRecordKeeper.OperationType operationType) external { + badOperation = operationType; + } + + function storageProviderChanged(uint256, address, address, bytes calldata) external override {} + + function 
dataSetCreated(uint256 dataSetId, address creator, bytes calldata) external view override { + receiveDataSetEvent(dataSetId, PDPRecordKeeper.OperationType.CREATE, abi.encode(creator)); + } + + function dataSetDeleted(uint256 dataSetId, uint256 deletedLeafCount, bytes calldata) external view override { + receiveDataSetEvent(dataSetId, PDPRecordKeeper.OperationType.DELETE, abi.encode(deletedLeafCount)); + } + + function piecesAdded(uint256 dataSetId, uint256 firstAdded, Cids.Cid[] calldata pieceData, bytes calldata) + external + view + override + { + receiveDataSetEvent(dataSetId, PDPRecordKeeper.OperationType.ADD, abi.encode(firstAdded, pieceData)); + } + + function piecesScheduledRemove(uint256 dataSetId, uint256[] calldata pieceIds, bytes calldata) + external + view + override + { + receiveDataSetEvent(dataSetId, PDPRecordKeeper.OperationType.REMOVE_SCHEDULED, abi.encode(pieceIds)); + } + + function possessionProven(uint256 dataSetId, uint256 challengedLeafCount, uint256 seed, uint256 challengeCount) + external + view + override + { + receiveDataSetEvent( + dataSetId, + PDPRecordKeeper.OperationType.PROVE_POSSESSION, + abi.encode(challengedLeafCount, seed, challengeCount) + ); + } + + function nextProvingPeriod(uint256 dataSetId, uint256 challengeEpoch, uint256 leafCount, bytes calldata) + external + view + override + { + receiveDataSetEvent( + dataSetId, PDPRecordKeeper.OperationType.NEXT_PROVING_PERIOD, abi.encode(challengeEpoch, leafCount) + ); + } + + function receiveDataSetEvent(uint256, PDPRecordKeeper.OperationType operationType, bytes memory) internal view { + if (operationType == badOperation) { + revert("Failing operation"); + } + } +} + +contract PDPListenerIntegrationTest is MockFVMTest, PieceHelper { + PDPVerifier pdpVerifier; + BadListener badListener; + uint256 constant CHALLENGE_FINALITY_DELAY = 2; + bytes empty = new bytes(0); + + function setUp() public override { + super.setUp(); + PDPVerifier pdpVerifierImpl = new PDPVerifier(); + bytes memory 
initializeData = abi.encodeWithSelector(PDPVerifier.initialize.selector, CHALLENGE_FINALITY_DELAY); + MyERC1967Proxy proxy = new MyERC1967Proxy(address(pdpVerifierImpl), initializeData); + pdpVerifier = PDPVerifier(address(proxy)); + badListener = new BadListener(); + } + + function testListenerPropagatesErrors() public { + badListener.setBadOperation(PDPRecordKeeper.OperationType.CREATE); + vm.expectRevert("Failing operation"); + pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(badListener), new Cids.Cid[](0), abi.encode(empty, empty) + ); + + badListener.setBadOperation(PDPRecordKeeper.OperationType.NONE); + uint256 setId = pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(badListener), new Cids.Cid[](0), abi.encode(empty, empty) + ); + + badListener.setBadOperation(PDPRecordKeeper.OperationType.ADD); + Cids.Cid[] memory pieces = new Cids.Cid[](1); + pieces[0] = makeSamplePiece(1); + vm.expectRevert("Failing operation"); + pdpVerifier.addPieces(setId, address(0), pieces, empty); + + badListener.setBadOperation(PDPRecordKeeper.OperationType.NONE); + pdpVerifier.addPieces(setId, address(0), pieces, empty); + + badListener.setBadOperation(PDPRecordKeeper.OperationType.REMOVE_SCHEDULED); + uint256[] memory pieceIds = new uint256[](1); + pieceIds[0] = 0; + vm.expectRevert("Failing operation"); + pdpVerifier.schedulePieceDeletions(setId, pieceIds, empty); + + badListener.setBadOperation(PDPRecordKeeper.OperationType.NONE); + pdpVerifier.schedulePieceDeletions(setId, pieceIds, empty); + + badListener.setBadOperation(PDPRecordKeeper.OperationType.NEXT_PROVING_PERIOD); + vm.expectRevert("Failing operation"); + pdpVerifier.nextProvingPeriod(setId, vm.getBlockNumber() + CHALLENGE_FINALITY_DELAY, empty); + + badListener.setBadOperation(PDPRecordKeeper.OperationType.NONE); + pdpVerifier.nextProvingPeriod(setId, vm.getBlockNumber() + CHALLENGE_FINALITY_DELAY, empty); + } +} + +contract ExtraDataListener is 
PDPListener { + mapping(uint256 => mapping(PDPRecordKeeper.OperationType => bytes)) public extraDataBySetId; + + function storageProviderChanged(uint256, address, address, bytes calldata) external override {} + + function dataSetCreated(uint256 dataSetId, address, bytes calldata extraData) external override { + extraDataBySetId[dataSetId][PDPRecordKeeper.OperationType.CREATE] = extraData; + } + + function dataSetDeleted(uint256 dataSetId, uint256, bytes calldata extraData) external override { + extraDataBySetId[dataSetId][PDPRecordKeeper.OperationType.DELETE] = extraData; + } + + function piecesAdded(uint256 dataSetId, uint256, Cids.Cid[] calldata, bytes calldata extraData) external override { + extraDataBySetId[dataSetId][PDPRecordKeeper.OperationType.ADD] = extraData; + } + + function piecesScheduledRemove(uint256 dataSetId, uint256[] calldata, bytes calldata extraData) external override { + extraDataBySetId[dataSetId][PDPRecordKeeper.OperationType.REMOVE_SCHEDULED] = extraData; + } + + function possessionProven(uint256, uint256, uint256, uint256) external override {} + + function nextProvingPeriod(uint256 dataSetId, uint256, uint256, bytes calldata extraData) external override { + extraDataBySetId[dataSetId][PDPRecordKeeper.OperationType.NEXT_PROVING_PERIOD] = extraData; + } + + function getExtraData(uint256 dataSetId, PDPRecordKeeper.OperationType opType) + external + view + returns (bytes memory) + { + return extraDataBySetId[dataSetId][opType]; + } +} + +contract PDPVerifierExtraDataTest is MockFVMTest, PieceHelper { + PDPVerifier pdpVerifier; + ExtraDataListener extraDataListener; + uint256 constant CHALLENGE_FINALITY_DELAY = 2; + bytes empty = new bytes(0); + + function setUp() public override { + super.setUp(); + PDPVerifier pdpVerifierImpl = new PDPVerifier(); + bytes memory initializeData = abi.encodeWithSelector(PDPVerifier.initialize.selector, CHALLENGE_FINALITY_DELAY); + MyERC1967Proxy proxy = new MyERC1967Proxy(address(pdpVerifierImpl), 
initializeData); + pdpVerifier = PDPVerifier(address(proxy)); + extraDataListener = new ExtraDataListener(); + } + + function testExtraDataPropagation() public { + // Test CREATE operation + uint256 setId = pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(extraDataListener), new Cids.Cid[](0), abi.encode(empty, empty) + ); + assertEq( + extraDataListener.getExtraData(setId, PDPRecordKeeper.OperationType.CREATE), + empty, + "Extra data not propagated for CREATE" + ); + + // Test ADD operation + Cids.Cid[] memory pieces = new Cids.Cid[](1); + pieces[0] = makeSamplePiece(1); + pdpVerifier.addPieces(setId, address(0), pieces, empty); + assertEq( + extraDataListener.getExtraData(setId, PDPRecordKeeper.OperationType.ADD), + empty, + "Extra data not propagated for ADD" + ); + + // Test REMOVE_SCHEDULED operation + uint256[] memory pieceIds = new uint256[](1); + pieceIds[0] = 0; + pdpVerifier.schedulePieceDeletions(setId, pieceIds, empty); + assertEq( + extraDataListener.getExtraData(setId, PDPRecordKeeper.OperationType.REMOVE_SCHEDULED), + empty, + "Extra data not propagated for REMOVE_SCHEDULED" + ); + + // Test NEXT_PROVING_PERIOD operation + pdpVerifier.nextProvingPeriod(setId, vm.getBlockNumber() + CHALLENGE_FINALITY_DELAY, empty); + assertEq( + extraDataListener.getExtraData(setId, PDPRecordKeeper.OperationType.NEXT_PROVING_PERIOD), + empty, + "Extra data not propagated for NEXT_PROVING_PERIOD" + ); + } +} + +contract PDPVerifierE2ETest is MockFVMTest, ProofBuilderHelper, PieceHelper { + PDPVerifier pdpVerifier; + TestingRecordKeeperService listener; + uint256 constant CHALLENGE_FINALITY_DELAY = 2; + bytes empty = new bytes(0); + + function setUp() public override { + super.setUp(); + PDPVerifier pdpVerifierImpl = new PDPVerifier(); + bytes memory initializeData = abi.encodeWithSelector(PDPVerifier.initialize.selector, CHALLENGE_FINALITY_DELAY); + MyERC1967Proxy proxy = new MyERC1967Proxy(address(pdpVerifierImpl), initializeData); + 
pdpVerifier = PDPVerifier(address(proxy)); + listener = new TestingRecordKeeperService(); + vm.fee(1 gwei); + vm.deal(address(pdpVerifierImpl), 100 ether); + } + + receive() external payable {} + + function testCompleteProvingPeriodE2E() public { + // Step 1: Create a data set + uint256 setId = pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(listener), new Cids.Cid[](0), abi.encode(empty, empty) + ); + + // Step 2: Add data `A` in scope for the first proving period + // Note that the data in the first addPieces call is added to the first proving period + uint256[] memory leafCountsA = new uint256[](2); + leafCountsA[0] = 2; + leafCountsA[1] = 3; + bytes32[][][] memory treesA = new bytes32[][][](2); + for (uint256 i = 0; i < leafCountsA.length; i++) { + treesA[i] = ProofUtil.makeTree(leafCountsA[i]); + } + + Cids.Cid[] memory piecesProofPeriod1 = new Cids.Cid[](2); + piecesProofPeriod1[0] = makePiece(treesA[0], leafCountsA[0]); + piecesProofPeriod1[1] = makePiece(treesA[1], leafCountsA[1]); + pdpVerifier.addPieces(setId, address(0), piecesProofPeriod1, empty); + // flush the original addPieces call + pdpVerifier.nextProvingPeriod(setId, vm.getBlockNumber() + CHALLENGE_FINALITY_DELAY, empty); + + uint256 challengeRangeProofPeriod1 = pdpVerifier.getChallengeRange(setId); + assertEq( + challengeRangeProofPeriod1, + pdpVerifier.getDataSetLeafCount(setId), + "Last challenged leaf should be total leaf count - 1" + ); + + // Step 3: Now that first challenge is set for sampling add more data `B` only in scope for the second proving period + uint256[] memory leafCountsB = new uint256[](2); + leafCountsB[0] = 4; + leafCountsB[1] = 5; + bytes32[][][] memory treesB = new bytes32[][][](2); + for (uint256 i = 0; i < leafCountsB.length; i++) { + treesB[i] = ProofUtil.makeTree(leafCountsB[i]); + } + + Cids.Cid[] memory piecesProvingPeriod2 = new Cids.Cid[](2); + piecesProvingPeriod2[0] = makePiece(treesB[0], leafCountsB[0]); + 
piecesProvingPeriod2[1] = makePiece(treesB[1], leafCountsB[1]); + pdpVerifier.addPieces(setId, address(0), piecesProvingPeriod2, empty); + + assertEq( + pdpVerifier.getPieceLeafCount(setId, 0), + leafCountsA[0], + "sanity check: First piece leaf count should be correct" + ); + assertEq(pdpVerifier.getPieceLeafCount(setId, 1), leafCountsA[1], "Second piece leaf count should be correct"); + assertEq(pdpVerifier.getPieceLeafCount(setId, 2), leafCountsB[0], "Third piece leaf count should be correct"); + assertEq(pdpVerifier.getPieceLeafCount(setId, 3), leafCountsB[1], "Fourth piece leaf count should be correct"); + + // CHECK: last challenged leaf doesn't move + assertEq( + pdpVerifier.getChallengeRange(setId), challengeRangeProofPeriod1, "Last challenged leaf should not move" + ); + assertEq( + pdpVerifier.getDataSetLeafCount(setId), + leafCountsA[0] + leafCountsA[1] + leafCountsB[0] + leafCountsB[1], + "Leaf count should only include non-removed pieces" + ); + + // Step 5: schedule removal of first + second proving period data + uint256[] memory piecesToRemove = new uint256[](2); + piecesToRemove[0] = 1; // Remove the second piece from first proving period + piecesToRemove[1] = 3; // Remove the second piece from second proving period + pdpVerifier.schedulePieceDeletions(setId, piecesToRemove, empty); + assertEq( + pdpVerifier.getScheduledRemovals(setId), piecesToRemove, "Scheduled removals should match piecesToRemove" + ); + + // Step 7: complete proving period 1. + // Advance chain until challenge epoch. + vm.roll(pdpVerifier.getNextChallengeEpoch(setId)); + // Prepare proofs. 
+ // Proving trees for ProofPeriod1 are just treesA + IPDPTypes.Proof[] memory proofsProofPeriod1 = buildProofs(pdpVerifier, setId, 5, treesA, leafCountsA); + + RANDOMNESS_PRECOMPILE.mockBeaconRandomness( + pdpVerifier.getNextChallengeEpoch(setId), pdpVerifier.getNextChallengeEpoch(setId) + ); + + pdpVerifier.provePossession{value: 1e18}(setId, proofsProofPeriod1); + + pdpVerifier.nextProvingPeriod(setId, vm.getBlockNumber() + CHALLENGE_FINALITY_DELAY, empty); + // CHECK: leaf counts + assertEq( + pdpVerifier.getPieceLeafCount(setId, 0), + leafCountsA[0], + "First piece leaf count should be the set leaf count" + ); + assertEq(pdpVerifier.getPieceLeafCount(setId, 1), 0, "Second piece leaf count should be zeroed after removal"); + assertEq( + pdpVerifier.getPieceLeafCount(setId, 2), + leafCountsB[0], + "Third piece leaf count should be the set leaf count" + ); + assertEq(pdpVerifier.getPieceLeafCount(setId, 3), 0, "Fourth piece leaf count should be zeroed after removal"); + assertEq( + pdpVerifier.getDataSetLeafCount(setId), + leafCountsA[0] + leafCountsB[0], + "Leaf count should == size of non-removed pieces" + ); + assertEq( + pdpVerifier.getChallengeRange(setId), + leafCountsA[0] + leafCountsB[0], + "Last challenged leaf should be total leaf count" + ); + + // CHECK: scheduled removals are processed + assertEq(pdpVerifier.getScheduledRemovals(setId), new uint256[](0), "Scheduled removals should be processed"); + + // CHECK: the next challenge epoch has been updated + assertEq( + pdpVerifier.getNextChallengeEpoch(setId), + vm.getBlockNumber() + CHALLENGE_FINALITY_DELAY, + "Next challenge epoch should be updated" + ); + } +} + +contract PDPVerifierMigrateTest is Test { + PDPVerifier implementation; + PDPVerifier newImplementation; + MyERC1967Proxy proxy; + + function setUp() public { + bytes memory initializeData = abi.encodeWithSelector(PDPVerifier.initialize.selector, 2); + implementation = new PDPVerifier(); + newImplementation = new PDPVerifier(); + proxy = new 
MyERC1967Proxy(address(implementation), initializeData); + } + + function testMigrate() public { + vm.expectEmit(true, true, true, true); + emit IPDPEvents.ContractUpgraded(newImplementation.VERSION(), address(newImplementation)); + bytes memory migrationCall = abi.encodeWithSelector(PDPVerifier.migrate.selector); + UUPSUpgradeable(address(proxy)).upgradeToAndCall(address(newImplementation), migrationCall); + // Second call should fail because reinitializer(2) can only be called once + vm.expectRevert("InvalidInitialization()"); + UUPSUpgradeable(address(proxy)).upgradeToAndCall(address(newImplementation), migrationCall); + } +} + +contract PDPVerifierFeeTest is MockFVMTest, PieceHelper, ProofBuilderHelper { + PDPVerifier pdpVerifier; + uint256 constant CHALLENGE_FINALITY_DELAY = 2; + bytes empty = new bytes(0); + TestingRecordKeeperService listener; + + function setUp() public override { + super.setUp(); + PDPVerifier pdpVerifierImpl = new PDPVerifier(); + bytes memory initializeData = abi.encodeWithSelector(PDPVerifier.initialize.selector, CHALLENGE_FINALITY_DELAY); + MyERC1967Proxy proxy = new MyERC1967Proxy(address(pdpVerifierImpl), initializeData); + pdpVerifier = PDPVerifier(address(proxy)); + vm.fee(1 gwei); + listener = new TestingRecordKeeperService(); + } + + receive() external payable {} + + function testUpdateProofFeeWithDelayAutoApply() public { + uint256 current = pdpVerifier.feePerTiB(); + uint256 newFee = current + 1; + + // Propose update and verify state + pdpVerifier.updateProofFee(newFee); + assertEq(pdpVerifier.proposedFeePerTiB(), newFee, "proposed fee not recorded"); + uint256 eff = pdpVerifier.feeEffectiveTime(); + assertGt(eff, block.timestamp, "effective time must be in future"); + + // Warp to just before and ensure it hasn't taken effect yet + vm.warp(eff - 1); + assertEq(pdpVerifier.feePerTiB(), current, "fee should not change before effective time"); + + // Warp to effective time; fee auto-applies on read paths + vm.warp(eff); + 
assertEq(pdpVerifier.feePerTiB(), newFee, "feePerTiB not updated"); + } + + function testCalculateProofFeeForSizeBeforeAfterEffectiveTime() public { + // Use 1 TiB raw size + uint256 rawSize = PDPFees.TIB_IN_BYTES; + uint256 baseFee = pdpVerifier.calculateProofFeeForSize(rawSize); + + // Propose higher fee and check value remains the same before effective time + uint256 newFeePerTiB = pdpVerifier.feePerTiB() + 10; + pdpVerifier.updateProofFee(newFeePerTiB); + uint256 eff = pdpVerifier.feeEffectiveTime(); + + uint256 beforeFee = pdpVerifier.calculateProofFeeForSize(rawSize); + assertEq(beforeFee, baseFee, "fee should not change before effective time"); + + // After effective time the new fee should be used automatically + vm.warp(eff); + uint256 afterFee = pdpVerifier.calculateProofFeeForSize(rawSize); + assertEq(afterFee, (newFeePerTiB * rawSize) / PDPFees.TIB_IN_BYTES, "fee should reflect effective value"); + } + + function testProvePossessionBurnsExpectedFeeAndRefunds() public { + // Create set and add one small piece (leaf = 32 bytes per leaf) + // Build a concrete tree and piece so proof generation matches the piece + uint256 leafCount = 10; // 10 leaves + bytes32[][] memory tree = ProofUtil.makeTree(leafCount); + Cids.Cid[] memory pieces = new Cids.Cid[](1); + pieces[0] = makePiece(tree, leafCount); + + bytes memory combinedExtra = abi.encode(empty, empty); + + uint256 setId = pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(listener), pieces, combinedExtra + ); + + // Start proving period so piece becomes challengeable + uint256 challengeEpoch = vm.getBlockNumber() + CHALLENGE_FINALITY_DELAY; + pdpVerifier.nextProvingPeriod(setId, challengeEpoch, empty); + + // Roll to challenge epoch and mock randomness precompile to return epoch + vm.roll(challengeEpoch); + RANDOMNESS_PRECOMPILE.mockBeaconRandomness(challengeEpoch, challengeEpoch); + + // Build minimum valid proofs (3 challenges) + bytes32[][][] memory trees = new 
bytes32[][][](1); + trees[0] = tree; + uint256[] memory leafCounts = new uint256[](1); + leafCounts[0] = leafCount; + IPDPTypes.Proof[] memory proofs = buildProofs(pdpVerifier, setId, 3, trees, leafCounts); + + // Expected fee = feePerTiB * rawSize/TiB, where rawSize = 32 * challengeRange + uint256 rawSize = 32 * pdpVerifier.getChallengeRange(setId); + uint256 expectedFee = (pdpVerifier.feePerTiB() * rawSize) / PDPFees.TIB_IN_BYTES; + + // Send a known amount and verify refund equals msg.value - expectedFee + address sender = address(this); + uint256 startBalance = sender.balance; + uint256 sendValue = expectedFee + 1 ether; + + // fund test contract + vm.deal(sender, startBalance + sendValue); + + uint256 balBefore = sender.balance; + pdpVerifier.provePossession{value: sendValue}(setId, proofs); + uint256 balAfter = sender.balance; + + // net spent should be expectedFee + assertEq(balBefore - balAfter, expectedFee, "net spent should equal expected fee"); + } +} + +contract MockStorageProviderChangedListener is PDPListener { + uint256 public lastDataSetId; + address public lastOldStorageProvider; + address public lastNewStorageProvider; + bytes public lastExtraData; + bool public shouldRevert; + + function setShouldRevert(bool value) external { + shouldRevert = value; + } + + function storageProviderChanged( + uint256 dataSetId, + address oldStorageProvider, + address newStorageProvider, + bytes calldata extraData + ) external override { + if (shouldRevert) revert("MockStorageProviderChangedListener: forced revert"); + lastDataSetId = dataSetId; + lastOldStorageProvider = oldStorageProvider; + lastNewStorageProvider = newStorageProvider; + lastExtraData = extraData; + } + + function dataSetCreated(uint256, address, bytes calldata) external override {} + function dataSetDeleted(uint256, uint256, bytes calldata) external override {} + function piecesAdded(uint256, uint256, Cids.Cid[] calldata, bytes calldata) external override {} + function 
piecesScheduledRemove(uint256, uint256[] calldata, bytes calldata) external override {} + function possessionProven(uint256, uint256, uint256, uint256) external override {} + function nextProvingPeriod(uint256, uint256, uint256, bytes calldata) external override {} +} + +contract PDPVerifierStorageProviderListenerTest is MockFVMTest { + PDPVerifier pdpVerifier; + MockStorageProviderChangedListener listener; + address public storageProvider; + address public nextStorageProvider; + address public nonStorageProvider; + bytes empty = new bytes(0); + + function setUp() public override { + super.setUp(); + PDPVerifier pdpVerifierImpl = new PDPVerifier(); + bytes memory initializeData = abi.encodeWithSelector(PDPVerifier.initialize.selector, 2); + MyERC1967Proxy proxy = new MyERC1967Proxy(address(pdpVerifierImpl), initializeData); + pdpVerifier = PDPVerifier(address(proxy)); + listener = new MockStorageProviderChangedListener(); + storageProvider = address(this); + nextStorageProvider = address(0x1234); + nonStorageProvider = address(0xffff); + } + + function testStorageProviderChangedCalledOnStorageProviderTransfer() public { + uint256 setId = pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(listener), new Cids.Cid[](0), abi.encode(empty, empty) + ); + pdpVerifier.proposeDataSetStorageProvider(setId, nextStorageProvider); + vm.prank(nextStorageProvider); + pdpVerifier.claimDataSetStorageProvider(setId, empty); + assertEq(listener.lastDataSetId(), setId, "Data set ID mismatch"); + assertEq(listener.lastOldStorageProvider(), storageProvider, "Old storage provider mismatch"); + assertEq(listener.lastNewStorageProvider(), nextStorageProvider, "New storage provider mismatch"); + } + + function testListenerRevertDoesNotRevertMainTx() public { + uint256 setId = pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(listener), new Cids.Cid[](0), abi.encode(empty, empty) + ); + 
pdpVerifier.proposeDataSetStorageProvider(setId, nextStorageProvider); + listener.setShouldRevert(true); + vm.prank(nextStorageProvider); + vm.expectRevert("MockStorageProviderChangedListener: forced revert"); + pdpVerifier.claimDataSetStorageProvider(setId, empty); + } +} diff --git a/packages/pdp/test/PDPVerifierProofTest.t.sol b/packages/pdp/test/PDPVerifierProofTest.t.sol new file mode 100644 index 00000000..c9df30d7 --- /dev/null +++ b/packages/pdp/test/PDPVerifierProofTest.t.sol @@ -0,0 +1,452 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.13; + +import {MockFVMTest} from "fvm-solidity/mocks/MockFVMTest.sol"; +import {Cids} from "../src/Cids.sol"; +import {PDPVerifier} from "../src/PDPVerifier.sol"; +import {MyERC1967Proxy} from "../src/ERC1967Proxy.sol"; +import {ProofUtil} from "./ProofUtil.sol"; +import {PDPFees} from "../src/Fees.sol"; +import {IPDPTypes} from "../src/interfaces/IPDPTypes.sol"; +import {IPDPEvents} from "../src/interfaces/IPDPEvents.sol"; +import {PieceHelper} from "./PieceHelper.t.sol"; +import {ProofBuilderHelper} from "./ProofBuilderHelper.t.sol"; +import {TestingRecordKeeperService} from "./PDPVerifier.t.sol"; +import {NEW_DATA_SET_SENTINEL} from "../src/PDPVerifier.sol"; + +contract PDPVerifierProofTest is MockFVMTest, ProofBuilderHelper, PieceHelper { + uint256 constant CHALLENGE_FINALITY_DELAY = 2; + bytes empty = new bytes(0); + PDPVerifier pdpVerifier; + TestingRecordKeeperService listener; + + function setUp() public override { + super.setUp(); + PDPVerifier pdpVerifierImpl = new PDPVerifier(); + bytes memory initializeData = abi.encodeWithSelector(PDPVerifier.initialize.selector, CHALLENGE_FINALITY_DELAY); + MyERC1967Proxy proxy = new MyERC1967Proxy(address(pdpVerifierImpl), initializeData); + pdpVerifier = PDPVerifier(address(proxy)); + listener = new TestingRecordKeeperService(); + vm.fee(1 wei); + vm.deal(address(pdpVerifierImpl), 100 ether); + } + + function testProveSinglePiece() public { + uint256 
leafCount = 10; + (uint256 setId, bytes32[][] memory tree) = makeDataSetWithOnePiece(leafCount); + + // Advance chain until challenge epoch. + uint256 challengeEpoch = pdpVerifier.getNextChallengeEpoch(setId); + vm.roll(challengeEpoch); + + // Build a proof with multiple challenges to single tree. + uint256 challengeCount = 3; + IPDPTypes.Proof[] memory proofs = buildProofsForSingleton(setId, challengeCount, tree, leafCount); + + // Submit proof. + RANDOMNESS_PRECOMPILE.mockBeaconRandomness(challengeEpoch, challengeEpoch); + vm.expectEmit(true, true, false, false); + IPDPTypes.PieceIdAndOffset[] memory challenges = new IPDPTypes.PieceIdAndOffset[](challengeCount); + for (uint256 i = 0; i < challengeCount; i++) { + challenges[i] = IPDPTypes.PieceIdAndOffset(0, 0); + } + emit IPDPEvents.PossessionProven(setId, challenges); + pdpVerifier.provePossession{value: 1e18}(setId, proofs); + + // Verify the next challenge is in a subsequent epoch. + // Next challenge unchanged by prove + assertEq(pdpVerifier.getNextChallengeEpoch(setId), challengeEpoch); + + // Verify the next challenge is in a subsequent epoch after nextProvingPeriod + pdpVerifier.nextProvingPeriod(setId, vm.getBlockNumber() + CHALLENGE_FINALITY_DELAY, empty); + + assertEq(pdpVerifier.getNextChallengeEpoch(setId), vm.getBlockNumber() + CHALLENGE_FINALITY_DELAY); + } + + receive() external payable {} + + event Debug(string message, uint256 value); + + function testProveWithDifferentFeeAmounts() public { + vm.fee(0 gwei); + + address sender = makeAddr("sender"); + vm.deal(sender, 1000 ether); + vm.startPrank(sender); + + uint256 leafCount = 10; + (uint256 setId, bytes32[][] memory tree) = makeDataSetWithOnePiece(leafCount); + + // Advance chain until challenge epoch. + uint256 challengeEpoch = pdpVerifier.getNextChallengeEpoch(setId); + vm.roll(challengeEpoch); + + RANDOMNESS_PRECOMPILE.mockBeaconRandomness(challengeEpoch, challengeEpoch); + + // Build a proof with multiple challenges to single tree. 
+ uint256 challengeCount = 3; + IPDPTypes.Proof[] memory proofs = buildProofsForSingleton(setId, challengeCount, tree, leafCount); + + // Mock block.number to 2881 + vm.roll(2881); + + // Determine the correct fee. + uint256 correctFee; + { + uint256 snapshotId = vm.snapshotState(); + uint256 balanceBefore = sender.balance; + pdpVerifier.provePossession{value: sender.balance}(setId, proofs); + uint256 balanceAfter = sender.balance; + correctFee = balanceBefore - balanceAfter; + vm.revertToStateAndDelete(snapshotId); + } + + // Test 1: Sending less than the required fee + vm.expectRevert("Incorrect fee amount"); + pdpVerifier.provePossession{value: correctFee - 1}(setId, proofs); + + // Test 2: Sending more than the required fee + RANDOMNESS_PRECOMPILE.mockBeaconRandomness(challengeEpoch, challengeEpoch); + pdpVerifier.provePossession{value: correctFee + 1}(setId, proofs); + + // Verify that the proof was accepted + assertEq( + pdpVerifier.getNextChallengeEpoch(setId), + challengeEpoch, + "Next challenge epoch should remain unchanged after prove" + ); + } + + function testDataSetLastProvenEpochOnPieceRemoval() public { + // Create a data set and verify initial lastProvenEpoch is 0 + uint256 setId = pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(listener), new Cids.Cid[](0), abi.encode(empty, empty) + ); + assertEq(pdpVerifier.getDataSetLastProvenEpoch(setId), 0, "Initial lastProvenEpoch should be 0"); + + // Mock block.number to 2881 + uint256 blockNumber = 2881; + vm.roll(blockNumber); + // Add a piece and verify lastProvenEpoch is set to current block number + Cids.Cid[] memory pieces = new Cids.Cid[](1); + pieces[0] = makeSamplePiece(2); + + pdpVerifier.addPieces(setId, address(0), pieces, empty); + pdpVerifier.nextProvingPeriod(setId, blockNumber + CHALLENGE_FINALITY_DELAY, empty); + assertEq( + pdpVerifier.getDataSetLastProvenEpoch(setId), + blockNumber, + "lastProvenEpoch should be set to block.number after first proving 
period piece" + ); + + // Schedule piece removal + uint256[] memory piecesToRemove = new uint256[](1); + piecesToRemove[0] = 0; + pdpVerifier.schedulePieceDeletions(setId, piecesToRemove, empty); + + // Call nextProvingPeriod and verify lastProvenEpoch is reset to 0 + pdpVerifier.nextProvingPeriod(setId, blockNumber + CHALLENGE_FINALITY_DELAY, empty); + assertEq( + pdpVerifier.getDataSetLastProvenEpoch(setId), + 0, + "lastProvenEpoch should be reset to 0 after removing last piece" + ); + } + + function testLateProofAccepted() public { + uint256 leafCount = 10; + (uint256 setId, bytes32[][] memory tree) = makeDataSetWithOnePiece(leafCount); + + // Advance chain short of challenge epoch + uint256 challengeEpoch = pdpVerifier.getNextChallengeEpoch(setId); + vm.roll(challengeEpoch + 100); + + // Build a proof. + IPDPTypes.Proof[] memory proofs = buildProofsForSingleton(setId, 3, tree, leafCount); + + // Submit proof. + RANDOMNESS_PRECOMPILE.mockBeaconRandomness(challengeEpoch, challengeEpoch); + pdpVerifier.provePossession{value: 1e18}(setId, proofs); + } + + function testProvePossesionSmall() public { + uint256 leafCount = 3; + (uint256 setId, bytes32[][] memory tree) = makeDataSetWithOnePiece(leafCount); + + // Advance chain short of challenge epoch + uint256 challengeEpoch = pdpVerifier.getNextChallengeEpoch(setId); + vm.roll(challengeEpoch); + + // Build a proof. + IPDPTypes.Proof[] memory proofs = buildProofsForSingleton(setId, 3, tree, leafCount); + + // Submit proof. + RANDOMNESS_PRECOMPILE.mockBeaconRandomness(challengeEpoch, challengeEpoch); + pdpVerifier.provePossession{value: 1e18}(setId, proofs); + } + + function testEarlyProofRejected() public { + uint256 leafCount = 10; + (uint256 setId, bytes32[][] memory tree) = makeDataSetWithOnePiece(leafCount); + + // Advance chain short of challenge epoch + uint256 challengeEpoch = pdpVerifier.getNextChallengeEpoch(setId); + vm.roll(challengeEpoch - 1); + + // Build a proof. 
+ IPDPTypes.Proof[] memory proofs = buildProofsForSingleton(setId, 3, tree, leafCount); + + // Submit proof. + vm.expectRevert(); + pdpVerifier.provePossession{value: 1e18}(setId, proofs); + } + + function testProvePossessionFailsWithNoScheduledChallenge() public { + uint256 setId = pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(listener), new Cids.Cid[](0), abi.encode(empty, empty) + ); + Cids.Cid[] memory pieces = new Cids.Cid[](1); + pieces[0] = makeSamplePiece(2); + pdpVerifier.addPieces(setId, address(0), pieces, empty); + + // Don't sample challenge (i.e. call nextProvingPeriod) + + // Create a dummy proof + IPDPTypes.Proof[] memory proofs = new IPDPTypes.Proof[](1); + proofs[0].leaf = bytes32(0); + proofs[0].proof = new bytes32[](1); + proofs[0].proof[0] = bytes32(0); + + // Try to prove possession without scheduling a challenge + // This should fail because nextChallengeEpoch is still NO_CHALLENGE_SCHEDULED (0) + vm.expectRevert("no challenge scheduled"); + pdpVerifier.provePossession{value: 1 ether}(setId, proofs); + } + + function testEmptyProofRejected() public { + uint256 setId = pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(listener), new Cids.Cid[](0), abi.encode(empty, empty) + ); + IPDPTypes.Proof[] memory emptyProof = new IPDPTypes.Proof[](0); + + // Rejected with no pieces + vm.expectRevert(); + pdpVerifier.provePossession{value: 1e18}(setId, emptyProof); + + addOnePiece(setId, 10); + + // Rejected with a piece + vm.expectRevert(); + pdpVerifier.provePossession{value: 1e18}(setId, emptyProof); + } + + function testBadChallengeRejected() public { + uint256 leafCount = 10; + (uint256 setId, bytes32[][] memory tree) = makeDataSetWithOnePiece(leafCount); + + // Make a proof that's good for this challenge epoch. 
+ uint256 challengeEpoch = pdpVerifier.getNextChallengeEpoch(setId); + vm.roll(challengeEpoch); + IPDPTypes.Proof[] memory proofs = buildProofsForSingleton(setId, 3, tree, leafCount); + + // Submit proof successfully, advancing the data set to a new challenge epoch. + RANDOMNESS_PRECOMPILE.mockBeaconRandomness(challengeEpoch, challengeEpoch); + + pdpVerifier.provePossession{value: 1e18}(setId, proofs); + pdpVerifier.nextProvingPeriod(setId, vm.getBlockNumber() + CHALLENGE_FINALITY_DELAY, empty); // resample + + uint256 nextChallengeEpoch = pdpVerifier.getNextChallengeEpoch(setId); + assertNotEq(nextChallengeEpoch, challengeEpoch); + vm.roll(nextChallengeEpoch); + + // The proof for the old challenge epoch should no longer be valid. + vm.expectRevert(); + pdpVerifier.provePossession{value: 1e18}(setId, proofs); + } + + function testBadPiecesRejected() public { + uint256[] memory leafCounts = new uint256[](2); + // Note: either co-prime leaf counts or a challenge count > 1 are required for this test to demonstrate the failing proof. + // With a challenge count == 1 and leaf counts e.g. 10 and 20 it just so happens that the first computed challenge index is the same + // (lying in the first piece) whether the tree has one or two pieces. + // This could be prevented if the challenge index calculation included some marker of data set contents, like + // a hash of all the pieces or an edit sequence number. + leafCounts[0] = 7; + leafCounts[1] = 13; + bytes32[][][] memory trees = new bytes32[][][](2); + // Make data set initially with one piece. + (uint256 setId, bytes32[][] memory tree) = makeDataSetWithOnePiece(leafCounts[0]); + trees[0] = tree; + // Add another piece before submitting the proof. + uint256 newPieceId; + (trees[1], newPieceId) = addOnePiece(setId, leafCounts[1]); + + // Make a proof that's good for the single piece. 
+ uint256 challengeEpoch = pdpVerifier.getNextChallengeEpoch(setId); + vm.roll(challengeEpoch); + IPDPTypes.Proof[] memory proofsOneRoot = buildProofsForSingleton(setId, 3, trees[0], leafCounts[0]); + + // The proof for one piece should be invalid against the set with two. + RANDOMNESS_PRECOMPILE.mockBeaconRandomness(challengeEpoch, challengeEpoch); + vm.expectRevert(); + pdpVerifier.provePossession{value: 1e18}(setId, proofsOneRoot); + + // Remove a piece and resample + uint256[] memory removePieces = new uint256[](1); + removePieces[0] = newPieceId; + pdpVerifier.schedulePieceDeletions(setId, removePieces, empty); + // flush removes + pdpVerifier.nextProvingPeriod(setId, vm.getBlockNumber() + CHALLENGE_FINALITY_DELAY, empty); + + // Make a new proof that is valid with two pieces + challengeEpoch = pdpVerifier.getNextChallengeEpoch(setId); + vm.roll(challengeEpoch); + IPDPTypes.Proof[] memory proofsTwoRoots = buildProofs(pdpVerifier, setId, 10, trees, leafCounts); + + // A proof for two pieces should be invalid against the set with one. + proofsTwoRoots = buildProofs(pdpVerifier, setId, 10, trees, leafCounts); // regen as removal forced resampling challenge seed + RANDOMNESS_PRECOMPILE.mockBeaconRandomness(challengeEpoch, challengeEpoch); + vm.expectRevert(); + pdpVerifier.provePossession{value: 1e18}(setId, proofsTwoRoots); + + // But the single piece proof is now good again. + proofsOneRoot = buildProofsForSingleton(setId, 1, trees[0], leafCounts[0]); // regen as removal forced resampling challenge seed + RANDOMNESS_PRECOMPILE.mockBeaconRandomness(challengeEpoch, challengeEpoch); + pdpVerifier.provePossession{value: 1e18}(setId, proofsOneRoot); + } + + function testProveManyPieces() public { + uint256[] memory leafCounts = new uint256[](3); + // Pick a distinct size for each tree (up to some small maximum size). 
+ for (uint256 i = 0; i < leafCounts.length; i++) { + leafCounts[i] = uint256(sha256(abi.encode(i))) % 64; + } + + (uint256 setId, bytes32[][][] memory trees) = makeDataSetWithPieces(leafCounts); + + // Advance chain until challenge epoch. + uint256 challengeEpoch = pdpVerifier.getNextChallengeEpoch(setId); + vm.roll(challengeEpoch); + + // Build a proof with multiple challenges to span the pieces. + uint256 challengeCount = 11; + IPDPTypes.Proof[] memory proofs = buildProofs(pdpVerifier, setId, challengeCount, trees, leafCounts); + // Submit proof. + RANDOMNESS_PRECOMPILE.mockBeaconRandomness(challengeEpoch, challengeEpoch); + pdpVerifier.provePossession{value: 1e18}(setId, proofs); + } + + function testNextProvingPeriodFlexibleScheduling() public { + // Create data set and add initial piece + uint256 leafCount = 10; + (uint256 setId, bytes32[][] memory tree) = makeDataSetWithOnePiece(leafCount); + + // Set challenge sampling far in the future + uint256 farFutureBlock = vm.getBlockNumber() + 1000; + pdpVerifier.nextProvingPeriod(setId, farFutureBlock, empty); + assertEq( + pdpVerifier.getNextChallengeEpoch(setId), farFutureBlock, "Challenge epoch should be set to far future" + ); + + // Reset to a closer block + uint256 nearerBlock = vm.getBlockNumber() + CHALLENGE_FINALITY_DELAY; + pdpVerifier.nextProvingPeriod(setId, nearerBlock, empty); + assertEq( + pdpVerifier.getNextChallengeEpoch(setId), nearerBlock, "Challenge epoch should be reset to nearer block" + ); + + // Verify we can still prove possession at the new block + vm.roll(nearerBlock); + + IPDPTypes.Proof[] memory proofs = buildProofsForSingleton(setId, 5, tree, 10); + RANDOMNESS_PRECOMPILE.mockBeaconRandomness( + pdpVerifier.getNextChallengeEpoch(setId), pdpVerifier.getNextChallengeEpoch(setId) + ); + pdpVerifier.provePossession{value: 1e18}(setId, proofs); + } + + function testProveSingleFake() public { + uint256 leafCount = 10; + (uint256 setId, bytes32[][] memory tree) = 
makeDataSetWithOnePiece(leafCount); + + // Advance chain until challenge epoch. + uint256 challengeEpoch = pdpVerifier.getNextChallengeEpoch(setId); + vm.roll(challengeEpoch); + + uint256 challengeCount = 3; + // build fake proofs + IPDPTypes.Proof[] memory proofs = new IPDPTypes.Proof[](5); + for (uint256 i = 0; i < 5; i++) { + proofs[i] = IPDPTypes.Proof(tree[0][0], new bytes32[](0)); + } + + // Submit proof. + RANDOMNESS_PRECOMPILE.mockBeaconRandomness(challengeEpoch, challengeEpoch); + IPDPTypes.PieceIdAndOffset[] memory challenges = new IPDPTypes.PieceIdAndOffset[](challengeCount); + for (uint256 i = 0; i < challengeCount; i++) { + challenges[i] = IPDPTypes.PieceIdAndOffset(0, 0); + } + vm.expectRevert("proof length does not match tree height"); + pdpVerifier.provePossession{value: 1e18}(setId, proofs); + } + + ///// Helpers ///// + + // Initializes a new data set, generates trees of specified sizes, and adds pieces to the set. + function makeDataSetWithPieces(uint256[] memory leafCounts) internal returns (uint256, bytes32[][][] memory) { + // Create trees and their pieces. + bytes32[][][] memory trees = new bytes32[][][](leafCounts.length); + Cids.Cid[] memory pieces = new Cids.Cid[](leafCounts.length); + for (uint256 i = 0; i < leafCounts.length; i++) { + // Generate a uniquely-sized tree for each piece (up to some small maximum size). + if (leafCounts[i] < 4) { + trees[i] = ProofUtil.makeTree(4); + pieces[i] = makePieceBytes(trees[i], leafCounts[i] * 32); + } else { + trees[i] = ProofUtil.makeTree(leafCounts[i]); + pieces[i] = makePiece(trees[i], leafCounts[i]); + } + } + + // Create new data set and add pieces. 
+ uint256 setId = pdpVerifier.addPieces{value: PDPFees.sybilFee()}( + NEW_DATA_SET_SENTINEL, address(listener), new Cids.Cid[](0), abi.encode(empty, empty) + ); + pdpVerifier.addPieces(setId, address(0), pieces, empty); + pdpVerifier.nextProvingPeriod(setId, vm.getBlockNumber() + CHALLENGE_FINALITY_DELAY, empty); // flush adds + return (setId, trees); + } + + // Initializes a new data set and adds a single generated tree. + function makeDataSetWithOnePiece(uint256 leafCount) internal returns (uint256, bytes32[][] memory) { + uint256[] memory leafCounts = new uint256[](1); + leafCounts[0] = leafCount; + (uint256 setId, bytes32[][][] memory trees) = makeDataSetWithPieces(leafCounts); + return (setId, trees[0]); + } + + // Creates a tree and adds it to a data set. + // Returns the Merkle tree and piece. + function addOnePiece(uint256 setId, uint256 leafCount) internal returns (bytes32[][] memory, uint256) { + bytes32[][] memory tree = ProofUtil.makeTree(leafCount); + Cids.Cid[] memory pieces = new Cids.Cid[](1); + pieces[0] = makePiece(tree, leafCount); + uint256 pieceId = pdpVerifier.addPieces(setId, address(0), pieces, empty); + pdpVerifier.nextProvingPeriod(setId, vm.getBlockNumber() + CHALLENGE_FINALITY_DELAY, empty); // flush adds + return (tree, pieceId); + } + + // Builds a proof of posesesion for a data set with a single piece. 
+ function buildProofsForSingleton(uint256 setId, uint256 challengeCount, bytes32[][] memory tree, uint256 leafCount) + internal + view + returns (IPDPTypes.Proof[] memory) + { + bytes32[][][] memory trees = new bytes32[][][](1); + trees[0] = tree; + uint256[] memory leafCounts = new uint256[](1); + leafCounts[0] = leafCount; + IPDPTypes.Proof[] memory proofs = buildProofs(pdpVerifier, setId, challengeCount, trees, leafCounts); + return proofs; + } +} diff --git a/packages/pdp/test/PieceHelper.t.sol b/packages/pdp/test/PieceHelper.t.sol new file mode 100644 index 00000000..80053471 --- /dev/null +++ b/packages/pdp/test/PieceHelper.t.sol @@ -0,0 +1,115 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.13; + +import {Test, console} from "forge-std/Test.sol"; +import {Cids} from "../src/Cids.sol"; +import {BitOps} from "../src/BitOps.sol"; + +contract PieceHelper is Test { + // Constructs a PieceData structure for a Merkle tree. + function makePiece(bytes32[][] memory tree, uint256 leafCount) internal pure returns (Cids.Cid memory) { + if (leafCount == 0) { + return Cids.CommPv2FromDigest(127, 2, tree[0][0]); + } + uint8 height = uint8(256 - BitOps.clz(leafCount - 1)); + require(1 << height >= leafCount, "makePiece: height not enough to hold leaf count"); + uint256 paddingLeaves = (1 << height) - leafCount; + uint256 padding = (paddingLeaves * 32 * 127 + 127) / 128; + + console.log("leafCount", leafCount); + console.log("height", height); + console.log("paddingLeaves", paddingLeaves); + console.log("padding", padding); + assertEq(Cids.leafCount(padding, height), leafCount, "makePiece: leaf count mismatch"); + return Cids.CommPv2FromDigest(padding, height, tree[0][0]); + } + + function makePieceBytes(bytes32[][] memory tree, uint256 count) internal pure returns (Cids.Cid memory) { + if (count == 0) { + return Cids.CommPv2FromDigest(127, 2, tree[0][0]); + } + if (count == 1) { + // piece with just 1 data byte doesn't exist + // it is either 0 data 
bytes or two + count = 2; + } + + uint256 leafCount = (count + 31) / 32; + uint8 height = uint8(256 - BitOps.clz(leafCount - 1)); + if (height < 2) { + height = 2; + } + + require(1 << (height + 5) >= count, "makeSamplePieceBytes: height not enough to hold count"); + uint256 padding = (1 << (height + 5)) - count; + padding = (padding * 127 + 127) / 128; + + console.log("count", count); + console.log("leafCount", leafCount); + console.log("height", height); + console.log("padding", padding); + assertEq(Cids.leafCount(padding, height), leafCount, "makeSamplePieceBytes: leaf count mismatch"); + assertEq(Cids.pieceSize(padding, height), count, "makeSamplePieceBytes: piece size mismatch"); + return Cids.CommPv2FromDigest(padding, height, tree[0][0]); + } + + function makeSamplePiece(uint256 leafCount) internal pure returns (Cids.Cid memory) { + bytes32[][] memory tree = new bytes32[][](1); + tree[0] = new bytes32[](1); + tree[0][0] = bytes32(abi.encodePacked(leafCount)); + return makePiece(tree, leafCount); + } + + // count here is bytes after Fr32 padding + function makeSamplePieceBytes(uint256 count) internal pure returns (Cids.Cid memory) { + bytes32[][] memory tree = new bytes32[][](1); + tree[0] = new bytes32[](1); + tree[0][0] = bytes32(abi.encodePacked(count)); + return makePieceBytes(tree, count); + } +} + +contract PieceHelperTest is Test, PieceHelper { + function testMakePiece() public pure { + bytes32[][] memory tree = new bytes32[][](1); + tree[0] = new bytes32[](10); + Cids.Cid memory piece = makePiece(tree, 10); + Cids.validateCommPv2(piece); + } + + function testMakeSamplePiece() public pure { + makeSamplePiece(0); + Cids.Cid memory piece = makeSamplePiece(1); + Cids.validateCommPv2(piece); + piece = makeSamplePiece(2); + Cids.validateCommPv2(piece); + piece = makeSamplePiece(3); + Cids.validateCommPv2(piece); + piece = makeSamplePiece(4); + Cids.validateCommPv2(piece); + piece = makeSamplePiece(10); + Cids.validateCommPv2(piece); + piece = 
makeSamplePiece(127); + Cids.validateCommPv2(piece); + piece = makeSamplePiece(128); + Cids.validateCommPv2(piece); + piece = makeSamplePiece(1024); + Cids.validateCommPv2(piece); + } + + function testMakeSamplePieceBytes() public pure { + Cids.Cid memory piece = makeSamplePieceBytes(0); + piece = makeSamplePieceBytes(1); + Cids.validateCommPv2(piece); + piece = makeSamplePieceBytes(2); + Cids.validateCommPv2(piece); + piece = makeSamplePieceBytes(32); + Cids.validateCommPv2(piece); + piece = makeSamplePieceBytes(31); + Cids.validateCommPv2(piece); + piece = makeSamplePieceBytes(127); + Cids.validateCommPv2(piece); + piece = makeSamplePieceBytes(128); + Cids.validateCommPv2(piece); + } +} diff --git a/packages/pdp/test/ProofBuilderHelper.t.sol b/packages/pdp/test/ProofBuilderHelper.t.sol new file mode 100644 index 00000000..a0d17fbb --- /dev/null +++ b/packages/pdp/test/ProofBuilderHelper.t.sol @@ -0,0 +1,56 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.13; + +import {Test} from "forge-std/Test.sol"; +import {PDPVerifier} from "../src/PDPVerifier.sol"; +import {MerkleProve} from "../src/Proofs.sol"; +import {IPDPTypes} from "../src/interfaces/IPDPTypes.sol"; + +contract ProofBuilderHelper is Test { + // Builds a proof of possession for a data set + function buildProofs( + PDPVerifier pdpVerifier, + uint256 setId, + uint256 challengeCount, + bytes32[][][] memory trees, + uint256[] memory leafCounts + ) internal view returns (IPDPTypes.Proof[] memory) { + uint256 challengeEpoch = pdpVerifier.getNextChallengeEpoch(setId); + uint256 seed = challengeEpoch; // Seed is (temporarily) the challenge epoch + uint256 totalLeafCount = 0; + for (uint256 i = 0; i < leafCounts.length; ++i) { + totalLeafCount += leafCounts[i]; + } + + IPDPTypes.Proof[] memory proofs = new IPDPTypes.Proof[](challengeCount); + for (uint256 challengeIdx = 0; challengeIdx < challengeCount; challengeIdx++) { + // Compute challenge index + bytes memory payload = 
abi.encodePacked(seed, setId, uint64(challengeIdx)); + uint256 challengeOffset = uint256(keccak256(payload)) % totalLeafCount; + + uint256 treeIdx = 0; + uint256 treeOffset = 0; + for (uint256 i = 0; i < leafCounts.length; ++i) { + if (leafCounts[i] > challengeOffset) { + treeIdx = i; + treeOffset = challengeOffset; + break; + } else { + challengeOffset -= leafCounts[i]; + } + } + + bytes32[][] memory tree = trees[treeIdx]; + bytes32[] memory path = MerkleProve.buildProof(tree, treeOffset); + proofs[challengeIdx] = IPDPTypes.Proof(tree[tree.length - 1][treeOffset], path); + + // console.log("Leaf", vm.toString(proofs[0].leaf)); + // console.log("Proof"); + // for (uint j = 0; j < proofs[0].proof.length; j++) { + // console.log(vm.toString(j), vm.toString(proofs[0].proof[j])); + // } + } + + return proofs; + } +} diff --git a/packages/pdp/test/ProofUtil.sol b/packages/pdp/test/ProofUtil.sol new file mode 100644 index 00000000..aef16b8f --- /dev/null +++ b/packages/pdp/test/ProofUtil.sol @@ -0,0 +1,29 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.20; + +import {MerkleProve} from "../src/Proofs.sol"; + +// Methods for committing to data and generating proofs. +// These are only used in tests (which verify proofs). +// These functions provide a spec for the operations which providers should perform off-chain. +library ProofUtil { + /** + * Builds a Merkle tree over data that is a sequence of distinct leaf values. + */ + function makeTree(uint256 leafCount) internal view returns (bytes32[][] memory) { + bytes32[] memory data = generateLeaves(leafCount); + bytes32[][] memory tree = MerkleProve.buildTree(data); + return tree; + } + + /** + * Generates an array of leaves with distinct values. 
+ */ + function generateLeaves(uint256 count) internal pure returns (bytes32[] memory) { + bytes32[] memory result = new bytes32[](count); + for (uint256 i = 0; i < count; i++) { + result[i] = bytes32(i); + } + return result; + } +} diff --git a/packages/pdp/test/Proofs.t.sol b/packages/pdp/test/Proofs.t.sol new file mode 100644 index 00000000..8d766309 --- /dev/null +++ b/packages/pdp/test/Proofs.t.sol @@ -0,0 +1,432 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.13; + +import {Test, console} from "forge-std/Test.sol"; +import {BitOps} from "../src/BitOps.sol"; +import {Hashes, MerkleProve, MerkleVerify} from "../src/Proofs.sol"; +import {ProofUtil} from "./ProofUtil.sol"; + +contract MerkleProveTest is Test { + function testVerifyEmptyProof() public view { + bytes32 root = sha256("hello"); + bytes32[] memory proof = new bytes32[](0); + bool result = MerkleVerify.verify(proof, root, root, 0, 1); + assertEq(result, true, "Verify should return true"); + } + + function testVerifyTreeTwoLeaves() public view { + bytes32[] memory leaves = ProofUtil.generateLeaves(2); + bytes32[][] memory tree = MerkleProve.buildTree(leaves); + bytes32 root = tree[0][0]; + + for (uint256 i = 0; i < leaves.length; i++) { + bytes32[] memory proof = MerkleProve.buildProof(tree, i); + assertTrue( + MerkleVerify.verify(proof, root, leaves[i], i, tree.length), + string.concat("Invalid proof ", vm.toString(i)) + ); + assertFalse( + MerkleVerify.verify(proof, root, leaves[i], i + 1, tree.length), + string.concat("False proof ", vm.toString(i)) + ); + } + } + + function testVerifyTreeThreeLeaves() public view { + bytes32[] memory leaves = ProofUtil.generateLeaves(3); + bytes32[][] memory tree = MerkleProve.buildTree(leaves); + bytes32 root = tree[0][0]; + + for (uint256 i = 0; i < leaves.length; i++) { + bytes32[] memory proof = MerkleProve.buildProof(tree, i); + assertTrue( + MerkleVerify.verify(proof, root, leaves[i], i, tree.length), + string.concat("Invalid proof ", 
vm.toString(i)) + ); + // Ensure the proof is invalid for every other index within range + for (uint256 j = 0; j < leaves.length; j++) { + if (j != i) { + assertFalse( + MerkleVerify.verify(proof, root, leaves[i], j, tree.length), + string.concat("False proof ", vm.toString(i)) + ); + } + } + } + } + + function testVerifyTreesManyLeaves() public { + bytes32[] memory leaves; + bytes32[][] memory tree; + bytes32[] memory proof; + vm.pauseGasMetering(); + for (uint256 width = 4; width < 60; width++) { + leaves = ProofUtil.generateLeaves(width); + tree = MerkleProve.buildTree(leaves); + bytes32 root = tree[0][0]; + + // Verify proof for each leaf + for (uint256 i = 0; i < leaves.length; i++) { + proof = MerkleProve.buildProof(tree, i); + assertTrue( + MerkleVerify.verify(proof, root, leaves[i], i, tree.length), + string.concat("Invalid proof ", vm.toString(i)) + ); + // Ensure the proof is invalid for every other index within range + for (uint256 j = 0; j < leaves.length; j++) { + if (j != i) { + assertFalse( + MerkleVerify.verify(proof, root, leaves[i], j, tree.length), + string.concat("False proof ", vm.toString(i)) + ); + } + } + } + } + vm.resumeGasMetering(); + } + + // Tests that the merkle root of a tree committing to known data (all zeros) matches the + // externally-known Filecoin piece commitment for the same data. + // Note that this is only testing a balanced tree (power-of-two payload). + function testFilecoinCommPEquivalance() public view { + // Known value for CommP of a 2KiB zero payload copied from built-in actors code. 
+ uint8[32] memory zeroCommP2KiB = [ + 252, + 126, + 146, + 130, + 150, + 229, + 22, + 250, + 173, + 233, + 134, + 178, + 143, + 146, + 212, + 74, + 79, + 36, + 185, + 53, + 72, + 82, + 35, + 55, + 106, + 121, + 144, + 39, + 188, + 24, + 248, + 51 + ]; + + bytes32 expected = loadDigest(zeroCommP2KiB); + + // Build payload of of 2KiB of zeros, packed into bytes32 words + bytes32[] memory payload = new bytes32[](2048 / 32); + + bytes32[][] memory tree = MerkleProve.buildTree(payload); + assertEq(tree[0][0], expected); + } + + // Tests that the zero roots returned by the merkle library match the values computed for them here. + function testZeroRootsComputed() public view { + bytes32[] memory expected = buildZeroPaddingStack(51); + // console.log("Zero roots:"); + // for (uint i = 0; i < zeroRoots.length; i++) { + // console.log(vm.toString(i), vm.toString(zeroRoots[i])); + // } + for (uint256 height = 0; height <= 50; height++) { + assertEq(MerkleVerify.zeroRoot(height), expected[height]); + } + } + + // Tests some zero roots against known values for Filecoin sector sizes. + // The target digets are copied directly from built-in actors code. 
+ function testZeroRootFilecoinEquivalence() public pure { + assertEq(MerkleVerify.zeroRoot(0), 0); + // 2 KiB / 32 = 64 leaves = 2^6 + assertEq( + MerkleVerify.zeroRoot(6), + loadDigest( + [ + 252, + 126, + 146, + 130, + 150, + 229, + 22, + 250, + 173, + 233, + 134, + 178, + 143, + 146, + 212, + 74, + 79, + 36, + 185, + 53, + 72, + 82, + 35, + 55, + 106, + 121, + 144, + 39, + 188, + 24, + 248, + 51 + ] + ) + ); + // 8 MiB = 256Ki leaves = 2^8 * 2^10 + assertEq( + MerkleVerify.zeroRoot(18), + loadDigest( + [ + 101, + 242, + 158, + 93, + 152, + 210, + 70, + 195, + 139, + 56, + 140, + 252, + 6, + 219, + 31, + 107, + 2, + 19, + 3, + 197, + 162, + 137, + 0, + 11, + 220, + 232, + 50, + 169, + 195, + 236, + 66, + 28 + ] + ) + ); + // 512 MiB = 16Mi leaves = 2^4 * 2^20 + assertEq( + MerkleVerify.zeroRoot(24), + loadDigest( + [ + 57, + 86, + 14, + 123, + 19, + 169, + 59, + 7, + 162, + 67, + 253, + 39, + 32, + 255, + 167, + 203, + 62, + 29, + 46, + 80, + 90, + 179, + 98, + 158, + 121, + 244, + 99, + 19, + 81, + 44, + 218, + 6 + ] + ) + ); + // 32 GiB = 1Gi leaves = 2^30 + assertEq( + MerkleVerify.zeroRoot(30), + loadDigest( + [ + 7, + 126, + 95, + 222, + 53, + 197, + 10, + 147, + 3, + 165, + 80, + 9, + 227, + 73, + 138, + 78, + 190, + 223, + 243, + 156, + 66, + 183, + 16, + 183, + 48, + 216, + 236, + 122, + 199, + 175, + 166, + 62 + ] + ) + ); + // 64 GiB = 2 * 1Gi leaves = 2^1 * 2^30 + assertEq( + MerkleVerify.zeroRoot(31), + loadDigest( + [ + 230, + 64, + 5, + 166, + 191, + 227, + 119, + 121, + 83, + 184, + 173, + 110, + 249, + 63, + 15, + 202, + 16, + 73, + 178, + 4, + 22, + 84, + 242, + 164, + 17, + 247, + 112, + 39, + 153, + 206, + 206, + 2 + ] + ) + ); + } + + // Tests that trees with explicit zero leaves produce known values for the root of the all-zero tree. 
+ function testZeroTreeFilecoinEquivalence() public view { + for (uint256 i = 1; i <= 16; i++) { + bytes32[] memory leaves = new bytes32[](i); + bytes32[][] memory tree = MerkleProve.buildTree(leaves); + uint256 height = 256 - BitOps.clz(i - 1); + assertEq(tree[0][0], MerkleVerify.zeroRoot(height)); + } + } + + ///// Helper functions ///// + + // Returns an array of Merkle tree roots committing to all-zero data of increasing tree heights. + // The first entry is zero. + // The second entry is a node with two zero leaves. + // The third entry is a node with four zero leaves, etc. + function buildZeroPaddingStack(uint256 levels) public view returns (bytes32[] memory) { + bytes32[] memory result = new bytes32[](levels); + for (uint256 i = 1; i < levels; i++) { + result[i] = Hashes.orderedHash(result[i - 1], result[i - 1]); + } + + return result; + } + + // Loads a bytes32 hash digest from an array of 32 1-byte values. + function loadDigest(uint8[32] memory b) public pure returns (bytes32) { + bytes32 result; + for (uint256 i = 0; i < 32; i++) { + result |= bytes32(uint256(b[i]) << (8 * (31 - i))); + } + return result; + } + + function printTree(bytes32[][] memory tree) internal pure { + console.log("Tree:"); + for (uint256 i = 0; i < tree.length; i++) { + console.log("Level ", i, ":"); + for (uint256 j = 0; j < tree[i].length; j++) { + console.log(vm.toString(j), vm.toString(tree[i][j])); + } + } + console.log(); + } + + function printProof(bytes32[] memory proof) internal pure { + console.log("Proof: "); + for (uint256 j = 0; j < proof.length; j++) { + console.log(vm.toString(j), vm.toString(proof[j])); + } + } +} + +contract HashesTest is Test { + // Tests that the efficient hash function returns the same result as the expected hash function. 
+ function testHash() public view { + bytes32 a = bytes32(0x0000000000000000000000000000000000000000000000000000000000000000); + bytes32 b = bytes32(0x0000000000000000000000000000000000000000000000000000000000000001); + verifyHash(a, a); + verifyHash(a, b); + verifyHash(b, a); + } + + function verifyHash(bytes32 a, bytes32 b) internal view { + bytes32 expected = expectedHash(a, b); + bytes32 result = Hashes.orderedHash(a, b); + assertEq(result, expected, "Hashes.commutativeHash should return the expected hash"); + } + + // Implements SHA254 hash of pairs via the standard sha256(abi.encode(a, b)). + function expectedHash(bytes32 a, bytes32 b) internal pure returns (bytes32) { + bytes memory payload = abi.encodePacked(a, b); + bytes32 digest = sha256(payload); + digest = bytes32((uint256(digest) & Hashes.SHA254_MASK)); + return digest; + } +} diff --git a/packages/pdp/test/SimplePDPService.t.sol b/packages/pdp/test/SimplePDPService.t.sol new file mode 100644 index 00000000..94598f7c --- /dev/null +++ b/packages/pdp/test/SimplePDPService.t.sol @@ -0,0 +1,428 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.13; + +import {Test} from "forge-std/Test.sol"; +import {SimplePDPService} from "../src/SimplePDPService.sol"; +import {MyERC1967Proxy} from "../src/ERC1967Proxy.sol"; +import {Cids} from "../src/Cids.sol"; + +contract SimplePDPServiceTest is Test { + SimplePDPService public pdpService; + address public pdpVerifierAddress; + bytes empty = new bytes(0); + uint256 public dataSetId; + uint256 public leafCount; + uint256 public seed; + + function setUp() public { + pdpVerifierAddress = address(this); + SimplePDPService pdpServiceImpl = new SimplePDPService(); + bytes memory initializeData = + abi.encodeWithSelector(SimplePDPService.initialize.selector, address(pdpVerifierAddress)); + MyERC1967Proxy pdpServiceProxy = new MyERC1967Proxy(address(pdpServiceImpl), initializeData); + pdpService = SimplePDPService(address(pdpServiceProxy)); + dataSetId = 
1; + leafCount = 100; + seed = 12345; + } + + function testInitialState() public view { + assertEq(pdpService.pdpVerifierAddress(), pdpVerifierAddress, "PDP verifier address should be set correctly"); + } + + function testOnlyPDPVerifierCanAddRecord() public { + vm.prank(address(0xdead)); + vm.expectRevert("Caller is not the PDP verifier"); + pdpService.dataSetCreated(dataSetId, address(this), empty); + } + + function testGetMaxProvingPeriod() public view { + uint64 maxPeriod = pdpService.getMaxProvingPeriod(); + assertEq(maxPeriod, 2880, "Max proving period should be 2880"); + } + + function testGetChallengesPerProof() public view { + uint64 challenges = pdpService.getChallengesPerProof(); + assertEq(challenges, 5, "Challenges per proof should be 5"); + } + + function testInitialProvingPeriodHappyPath() public { + pdpService.piecesAdded(dataSetId, 0, new Cids.Cid[](0), empty); + uint256 challengeEpoch = pdpService.initChallengeWindowStart(); + + pdpService.nextProvingPeriod(dataSetId, challengeEpoch, leafCount, empty); + + assertEq( + pdpService.provingDeadlines(dataSetId), + block.number + pdpService.getMaxProvingPeriod(), + "Deadline should be set to current block + max period" + ); + assertFalse(pdpService.provenThisPeriod(dataSetId)); + } + + function testInitialProvingPeriodInvalidChallengeEpoch() public { + pdpService.piecesAdded(dataSetId, 0, new Cids.Cid[](0), empty); + uint256 firstDeadline = block.number + pdpService.getMaxProvingPeriod(); + + // Test too early + uint256 tooEarly = firstDeadline - pdpService.challengeWindow() - 1; + vm.expectRevert("Next challenge epoch must fall within the next challenge window"); + pdpService.nextProvingPeriod(dataSetId, tooEarly, leafCount, empty); + + // Test too late + uint256 tooLate = firstDeadline + 1; + vm.expectRevert("Next challenge epoch must fall within the next challenge window"); + pdpService.nextProvingPeriod(dataSetId, tooLate, leafCount, empty); + } + + function testProveBeforeInitialization() public { 
+ // Create a simple mock proof + vm.expectRevert("Proving not yet started"); + pdpService.possessionProven(dataSetId, leafCount, seed, 5); + } + + function testInactivateDataSetHappyPath() public { + // Setup initial state + pdpService.piecesAdded(dataSetId, 0, new Cids.Cid[](0), empty); + pdpService.nextProvingPeriod(dataSetId, pdpService.initChallengeWindowStart(), leafCount, empty); + + // Prove possession in first period + vm.roll(block.number + pdpService.getMaxProvingPeriod() - pdpService.challengeWindow()); + pdpService.possessionProven(dataSetId, leafCount, seed, 5); + + // Inactivate the data set + pdpService.nextProvingPeriod(dataSetId, pdpService.NO_CHALLENGE_SCHEDULED(), leafCount, empty); + + assertEq( + pdpService.provingDeadlines(dataSetId), + pdpService.NO_PROVING_DEADLINE(), + "Proving deadline should be set to NO_PROVING_DEADLINE" + ); + assertEq(pdpService.provenThisPeriod(dataSetId), false, "Proven this period should now be false"); + } +} + +contract SimplePDPServiceFaultsTest is Test { + SimplePDPService public pdpService; + address public pdpVerifierAddress; + uint256 public dataSetId; + uint256 public leafCount; + uint256 public seed; + uint256 public challengeCount; + bytes empty = new bytes(0); + + function setUp() public { + pdpVerifierAddress = address(this); + SimplePDPService pdpServiceImpl = new SimplePDPService(); + bytes memory initializeData = + abi.encodeWithSelector(SimplePDPService.initialize.selector, address(pdpVerifierAddress)); + MyERC1967Proxy pdpServiceProxy = new MyERC1967Proxy(address(pdpServiceImpl), initializeData); + pdpService = SimplePDPService(address(pdpServiceProxy)); + dataSetId = 1; + leafCount = 100; + seed = 12345; + challengeCount = 5; + } + + function testPossessionProvenOnTime() public { + // Set up the proving deadline + pdpService.piecesAdded(dataSetId, 0, new Cids.Cid[](0), empty); + pdpService.nextProvingPeriod(dataSetId, pdpService.initChallengeWindowStart(), leafCount, empty); + vm.roll(block.number 
+ pdpService.getMaxProvingPeriod() - pdpService.challengeWindow()); + pdpService.possessionProven(dataSetId, leafCount, seed, challengeCount); + assertTrue(pdpService.provenThisPeriod(dataSetId)); + + pdpService.nextProvingPeriod(dataSetId, pdpService.nextChallengeWindowStart(dataSetId), leafCount, empty); + vm.roll(block.number + pdpService.getMaxProvingPeriod()); + pdpService.possessionProven(dataSetId, leafCount, seed, challengeCount); + } + + function testNextProvingPeriodCalledLastMinuteOK() public { + pdpService.piecesAdded(dataSetId, 0, new Cids.Cid[](0), empty); + pdpService.nextProvingPeriod(dataSetId, pdpService.initChallengeWindowStart(), leafCount, empty); + vm.roll(block.number + pdpService.getMaxProvingPeriod()); + pdpService.possessionProven(dataSetId, leafCount, seed, challengeCount); + + // wait until almost the end of proving period 2 + // this should all work fine + vm.roll(block.number + pdpService.getMaxProvingPeriod()); + pdpService.nextProvingPeriod(dataSetId, pdpService.nextChallengeWindowStart(dataSetId), leafCount, empty); + pdpService.possessionProven(dataSetId, leafCount, seed, challengeCount); + } + + function testFirstEpochLateToProve() public { + pdpService.piecesAdded(dataSetId, 0, new Cids.Cid[](0), empty); + pdpService.nextProvingPeriod(dataSetId, pdpService.initChallengeWindowStart(), leafCount, empty); + vm.roll(block.number + pdpService.getMaxProvingPeriod() + 1); + vm.expectRevert("Current proving period passed. 
Open a new proving period."); + pdpService.possessionProven(dataSetId, leafCount, seed, challengeCount); + } + + function testNextProvingPeriodTwiceFails() public { + // Set up the proving deadline + pdpService.piecesAdded(dataSetId, 0, new Cids.Cid[](0), empty); + pdpService.nextProvingPeriod(dataSetId, pdpService.initChallengeWindowStart(), leafCount, empty); + vm.roll(block.number + pdpService.getMaxProvingPeriod() - pdpService.challengeWindow()); + pdpService.possessionProven(dataSetId, leafCount, seed, challengeCount); + uint256 deadline1 = pdpService.provingDeadlines(dataSetId); + assertTrue(pdpService.provenThisPeriod(dataSetId)); + + assertEq( + pdpService.provingDeadlines(dataSetId), + deadline1, + "Proving deadline should not change until nextProvingPeriod." + ); + uint256 challengeEpoch = pdpService.nextChallengeWindowStart(dataSetId); + pdpService.nextProvingPeriod(dataSetId, challengeEpoch, leafCount, empty); + assertEq( + pdpService.provingDeadlines(dataSetId), + deadline1 + pdpService.getMaxProvingPeriod(), + "Proving deadline should be updated" + ); + assertFalse(pdpService.provenThisPeriod(dataSetId)); + + vm.expectRevert("One call to nextProvingPeriod allowed per proving period"); + pdpService.nextProvingPeriod(dataSetId, challengeEpoch, leafCount, empty); + } + + function testFaultWithinOpenPeriod() public { + pdpService.piecesAdded(dataSetId, 0, new Cids.Cid[](0), empty); + pdpService.nextProvingPeriod(dataSetId, pdpService.initChallengeWindowStart(), leafCount, empty); + + // Move to open proving period + vm.roll(block.number + pdpService.getMaxProvingPeriod() - 100); + + // Expect fault event when calling nextProvingPeriod without proof + vm.expectEmit(true, true, true, true); + emit SimplePDPService.FaultRecord(dataSetId, 1, pdpService.provingDeadlines(dataSetId)); + pdpService.nextProvingPeriod(dataSetId, pdpService.nextChallengeWindowStart(dataSetId), leafCount, empty); + } + + function testFaultAfterPeriodOver() public { + 
pdpService.piecesAdded(dataSetId, 0, new Cids.Cid[](0), empty); + pdpService.nextProvingPeriod(dataSetId, pdpService.initChallengeWindowStart(), leafCount, empty); + + // Move past proving period + vm.roll(block.number + pdpService.getMaxProvingPeriod() + 1); + + // Expect fault event when calling nextProvingPeriod without proof + vm.expectEmit(true, true, true, true); + emit SimplePDPService.FaultRecord(dataSetId, 1, pdpService.provingDeadlines(dataSetId)); + pdpService.nextProvingPeriod(dataSetId, pdpService.nextChallengeWindowStart(dataSetId), leafCount, empty); + } + + function testNextProvingPeriodWithoutProof() public { + // Set up the proving deadline without marking as proven + pdpService.piecesAdded(dataSetId, 0, new Cids.Cid[](0), empty); + pdpService.nextProvingPeriod(dataSetId, pdpService.initChallengeWindowStart(), leafCount, empty); + // Move to the next period + vm.roll(block.number + pdpService.getMaxProvingPeriod() + 1); + // Expect a fault event + vm.expectEmit(); + emit SimplePDPService.FaultRecord(dataSetId, 1, pdpService.provingDeadlines(dataSetId)); + pdpService.nextProvingPeriod(dataSetId, pdpService.nextChallengeWindowStart(dataSetId), leafCount, empty); + assertFalse(pdpService.provenThisPeriod(dataSetId)); + } + + function testInvalidChallengeCount() public { + uint256 invalidChallengeCount = 4; // Less than required + + pdpService.piecesAdded(dataSetId, 0, new Cids.Cid[](0), empty); + pdpService.nextProvingPeriod(dataSetId, pdpService.initChallengeWindowStart(), leafCount, empty); + vm.expectRevert("Invalid challenge count < 5"); + pdpService.possessionProven(dataSetId, leafCount, seed, invalidChallengeCount); + } + + function testMultiplePeriodsLate() public { + // Set up the proving deadline + pdpService.piecesAdded(dataSetId, 0, new Cids.Cid[](0), empty); + pdpService.nextProvingPeriod(dataSetId, pdpService.initChallengeWindowStart(), leafCount, empty); + // Warp to 3 periods after the deadline + vm.roll(block.number + 
pdpService.getMaxProvingPeriod() * 3 + 1); + // unable to prove possession + vm.expectRevert("Current proving period passed. Open a new proving period."); + pdpService.possessionProven(dataSetId, leafCount, seed, challengeCount); + + vm.expectEmit(true, true, true, true); + emit SimplePDPService.FaultRecord(dataSetId, 3, pdpService.provingDeadlines(dataSetId)); + pdpService.nextProvingPeriod(dataSetId, pdpService.nextChallengeWindowStart(dataSetId), leafCount, empty); + } + + function testMultiplePeriodsLateWithInitialProof() public { + // Set up the proving deadline + pdpService.piecesAdded(dataSetId, 0, new Cids.Cid[](0), empty); + + pdpService.nextProvingPeriod(dataSetId, pdpService.initChallengeWindowStart(), leafCount, empty); + // Move to first open proving period + vm.roll(block.number + pdpService.getMaxProvingPeriod() - pdpService.challengeWindow()); + + // Submit valid proof in first period + pdpService.possessionProven(dataSetId, leafCount, seed, challengeCount); + assertTrue(pdpService.provenThisPeriod(dataSetId)); + + // Warp to 3 periods after the deadline + vm.roll(block.number + pdpService.getMaxProvingPeriod() * 3 + 1); + + // Should emit fault record for 2 periods (current period not counted since not yet expired) + vm.expectEmit(true, true, true, true); + emit SimplePDPService.FaultRecord(dataSetId, 2, pdpService.provingDeadlines(dataSetId)); + pdpService.nextProvingPeriod(dataSetId, pdpService.nextChallengeWindowStart(dataSetId), leafCount, empty); + } + + function testCanOnlyProveOncePerPeriod() public { + pdpService.piecesAdded(dataSetId, 0, new Cids.Cid[](0), empty); + pdpService.nextProvingPeriod(dataSetId, pdpService.initChallengeWindowStart(), leafCount, empty); + + // We're in the previous deadline so we fail to prove until we roll forward into challenge window + vm.expectRevert("Too early. 
Wait for challenge window to open"); + pdpService.possessionProven(dataSetId, leafCount, seed, 5); + vm.roll(block.number + pdpService.getMaxProvingPeriod() - pdpService.challengeWindow() - 1); + // We're one before the challenge window so we should still fail + vm.expectRevert("Too early. Wait for challenge window to open"); + pdpService.possessionProven(dataSetId, leafCount, seed, 5); + // now we succeed + vm.roll(block.number + 1); + pdpService.possessionProven(dataSetId, leafCount, seed, 5); + vm.expectRevert("Only one proof of possession allowed per proving period. Open a new proving period."); + pdpService.possessionProven(dataSetId, leafCount, seed, 5); + } + + function testCantProveBeforePeriodIsOpen() public { + pdpService.piecesAdded(dataSetId, 0, new Cids.Cid[](0), empty); + pdpService.nextProvingPeriod(dataSetId, pdpService.initChallengeWindowStart(), leafCount, empty); + vm.roll(block.number + pdpService.getMaxProvingPeriod() - pdpService.challengeWindow()); + pdpService.possessionProven(dataSetId, leafCount, seed, 5); + pdpService.nextProvingPeriod(dataSetId, pdpService.nextChallengeWindowStart(dataSetId), leafCount, empty); + vm.expectRevert("Too early. 
Wait for challenge window to open"); + pdpService.possessionProven(dataSetId, leafCount, seed, 5); + } + + function testMissChallengeWindow() public { + pdpService.piecesAdded(dataSetId, 0, new Cids.Cid[](0), empty); + pdpService.nextProvingPeriod(dataSetId, pdpService.initChallengeWindowStart(), leafCount, empty); + vm.roll(block.number + pdpService.getMaxProvingPeriod() - 100); + // Too early + uint256 tooEarly = pdpService.nextChallengeWindowStart(dataSetId) - 1; + vm.expectRevert("Next challenge epoch must fall within the next challenge window"); + pdpService.nextProvingPeriod(dataSetId, tooEarly, leafCount, empty); + // Too late + uint256 tooLate = pdpService.nextChallengeWindowStart(dataSetId) + pdpService.challengeWindow() + 1; + vm.expectRevert("Next challenge epoch must fall within the next challenge window"); + pdpService.nextProvingPeriod(dataSetId, tooLate, leafCount, empty); + + // Works right on the deadline + pdpService.nextProvingPeriod( + dataSetId, pdpService.nextChallengeWindowStart(dataSetId) + pdpService.challengeWindow(), leafCount, empty + ); + } + + function testMissChallengeWindowAfterFaults() public { + pdpService.piecesAdded(dataSetId, 0, new Cids.Cid[](0), empty); + pdpService.nextProvingPeriod(dataSetId, pdpService.initChallengeWindowStart(), leafCount, empty); + // Skip 2 proving periods + vm.roll(block.number + pdpService.getMaxProvingPeriod() * 3 - 100); + + // Too early + uint256 tooEarly = pdpService.nextChallengeWindowStart(dataSetId) - 1; + vm.expectRevert("Next challenge epoch must fall within the next challenge window"); + pdpService.nextProvingPeriod(dataSetId, tooEarly, leafCount, empty); + + // Too late + uint256 tooLate = pdpService.nextChallengeWindowStart(dataSetId) + pdpService.challengeWindow() + 1; + vm.expectRevert("Next challenge epoch must fall within the next challenge window"); + pdpService.nextProvingPeriod(dataSetId, tooLate, leafCount, empty); + + // Should emit fault record for 2 periods + vm.expectEmit(true, 
true, true, true); + emit SimplePDPService.FaultRecord(dataSetId, 2, pdpService.provingDeadlines(dataSetId)); + // Works right on the deadline + pdpService.nextProvingPeriod( + dataSetId, pdpService.nextChallengeWindowStart(dataSetId) + pdpService.challengeWindow(), leafCount, empty + ); + } + + function testInactivateWithCurrentPeriodFault() public { + // Setup initial state + pdpService.piecesAdded(dataSetId, 0, new Cids.Cid[](0), empty); + pdpService.nextProvingPeriod(dataSetId, pdpService.initChallengeWindowStart(), leafCount, empty); + + // Move to end of period without proving + vm.roll(block.number + pdpService.getMaxProvingPeriod()); + + // Expect fault event for the unproven period + vm.expectEmit(true, true, true, true); + emit SimplePDPService.FaultRecord(dataSetId, 1, pdpService.provingDeadlines(dataSetId)); + + pdpService.nextProvingPeriod(dataSetId, pdpService.NO_CHALLENGE_SCHEDULED(), leafCount, empty); + + assertEq( + pdpService.provingDeadlines(dataSetId), + pdpService.NO_PROVING_DEADLINE(), + "Proving deadline should be set to NO_PROVING_DEADLINE" + ); + } + + function testInactivateWithMultiplePeriodFaults() public { + // Setup initial state + pdpService.piecesAdded(dataSetId, 0, new Cids.Cid[](0), empty); + pdpService.nextProvingPeriod(dataSetId, pdpService.initChallengeWindowStart(), leafCount, empty); + + // Skip 3 proving periods without proving + vm.roll(block.number + pdpService.getMaxProvingPeriod() * 3 + 1); + + // Expect fault event for all missed periods + vm.expectEmit(true, true, true, true); + emit SimplePDPService.FaultRecord(dataSetId, 3, pdpService.provingDeadlines(dataSetId)); + + pdpService.nextProvingPeriod(dataSetId, pdpService.NO_CHALLENGE_SCHEDULED(), leafCount, empty); + + assertEq( + pdpService.provingDeadlines(dataSetId), + pdpService.NO_PROVING_DEADLINE(), + "Proving deadline should be set to NO_PROVING_DEADLINE" + ); + } + + function testGetPDPConfig() public view { + (uint64 maxProvingPeriod, uint256 challengeWindow, 
uint256 challengesPerProof, uint256 initChallengeWindowStart) + = pdpService.getPDPConfig(); + + assertEq(maxProvingPeriod, 2880, "Max proving period should be 2880"); + assertEq(challengeWindow, 60, "Challenge window should be 60"); + assertEq(challengesPerProof, 5, "Challenges per proof should be 5"); + assertEq( + initChallengeWindowStart, + block.number + 2880 - 60, + "Init challenge window start should be calculated correctly" + ); + } + + function testNextPDPChallengeWindowStart() public { + // Setup initial state + pdpService.piecesAdded(dataSetId, 0, new Cids.Cid[](0), empty); + pdpService.nextProvingPeriod(dataSetId, pdpService.initChallengeWindowStart(), leafCount, empty); + + // Test that nextPDPChallengeWindowStart returns the same as nextChallengeWindowStart + uint256 expected = pdpService.nextChallengeWindowStart(dataSetId); + uint256 actual = pdpService.nextPDPChallengeWindowStart(dataSetId); + assertEq(actual, expected, "nextPDPChallengeWindowStart should match nextChallengeWindowStart"); + + // Move to challenge window and prove + vm.roll(block.number + pdpService.getMaxProvingPeriod() - pdpService.challengeWindow()); + pdpService.possessionProven(dataSetId, leafCount, seed, 5); + + // Open next period + pdpService.nextProvingPeriod(dataSetId, pdpService.nextChallengeWindowStart(dataSetId), leafCount, empty); + + // Test again in new period + expected = pdpService.nextChallengeWindowStart(dataSetId); + actual = pdpService.nextPDPChallengeWindowStart(dataSetId); + assertEq(actual, expected, "nextPDPChallengeWindowStart should match nextChallengeWindowStart in new period"); + } + + function testNextPDPChallengeWindowStartNotInitialized() public { + // Test that it reverts when proving period not initialized + vm.expectRevert("Proving period not yet initialized"); + pdpService.nextPDPChallengeWindowStart(dataSetId); + } +} diff --git a/packages/pdp/tools/README.md b/packages/pdp/tools/README.md new file mode 100644 index 00000000..146b4fa2 --- 
/dev/null +++ b/packages/pdp/tools/README.md @@ -0,0 +1,40 @@ +A place for all tools related to running and developing the PDP contracts. When adding a tool please fill in a description. + +# Tools + +## Deployment Scripts + +### deploy-devnet.sh +Deploys PDPVerifier to a local filecoin devnet. Assumes lotus binary is in path and local devnet is running with eth API enabled. The keystore will be funded automatically from lotus default address. + +### deploy-calibnet.sh +Deploys PDPVerifier to Filecoin Calibration testnet. + +### deploy-mainnet.sh +Deploys PDPVerifier to Filecoin mainnet. + +### deploy-simple-pdp-service.sh ⚠️ DEPRECATED +**As of v2.0.0, SimplePDPService is deprecated.** This optional script allows deployment of SimplePDPService for reference/community use only. Requires an existing PDPVerifier deployment. See `DEPRECATION.md` for details. + +## Upgrade Scripts + +### upgrade-contract-calibnet.sh +Generic script for upgrading proxy contracts on Calibration testnet. + +### deploy-transfer-ownership-upgrade-calibnet.sh +Deploys, upgrades, and transfers ownership of PDPVerifier on Calibration testnet. + +## PDP Interaction Scripts +We have some scripts for interacting with the PDP service contract through ETH RPC API: +- add.sh +- remove.sh +- create_data_set.sh +- find.sh +- size.sh + +To use these scripts set the following environment variables: +- KEYSTORE +- PASSWORD +- RPC_URL + +with values corresponding to local geth keystore path, the password for the keystore and the RPC URL for the network where PDP service contract is deployed. diff --git a/packages/pdp/tools/add.sh b/packages/pdp/tools/add.sh new file mode 100755 index 00000000..aa97f2a1 --- /dev/null +++ b/packages/pdp/tools/add.sh @@ -0,0 +1,7 @@ +#! 
/bin/bash +# Usage: ./add.sh +# add-input-list is a comma separated list of tuples of the form ((bytes),uint256) +# Example: ./add.sh 0x067fd08940ba732C25c44423005D662BF95e6763 0 '[((0x000181E20392202070FB4C14254CE86AB762E0280E469AF4E01B34A1B4B08F75C258F197798EE33C),256)]' +addCallData=$(cast calldata "addPieces(uint256,((bytes),uint256)[])(uint256)" $2 $3) + +cast send --keystore $KEYSTORE --password "$PASSWORD" --rpc-url $RPC_URL $1 $addCallData diff --git a/packages/pdp/tools/check-contract-size.sh b/packages/pdp/tools/check-contract-size.sh new file mode 100644 index 00000000..32bebd5e --- /dev/null +++ b/packages/pdp/tools/check-contract-size.sh @@ -0,0 +1,96 @@ +#!/usr/bin/env bash +# +# This script checks if any Solidity contract/library in the `src/` folder +# exceeds the EIP-170 contract runtime size limit (24,576 bytes) +# and the EIP-3860 init code size limit (49,152 bytes). +# Intended for use in CI (e.g., GitHub Actions) with Foundry. +# Exits 1 and prints the list of exceeding contracts if violations are found. +# NOTE: This script requires Bash (not sh or dash) due to use of mapfile and [[ ... ]]. + +set -euo pipefail + +command -v jq >/dev/null 2>&1 || { echo >&2 "jq is required but not installed."; exit 1; } +command -v forge >/dev/null 2>&1 || { echo >&2 "forge is required but not installed."; exit 1; } + +# Gather contract and library names from src/ +# Only matches [A-Za-z0-9_] in contract/library names (no special characters) +if [[ -d src/ ]]; then + mapfile -t contracts < <(grep -rE '^(contract|library) ' src/ 2>/dev/null | sed -E 's/.*(contract|library) ([A-Za-z0-9_]+).*/\2/') +else + contracts=() +fi + +# Exit early if none found +if [[ ${#contracts[@]} -eq 0 ]]; then + echo "No contracts or libraries found in src/." + exit 0 +fi + +# Build the contracts, get size info as JSON (ignore non-zero exit to always parse output) +forge clean || true +forge build --sizes --json | jq . > contract_sizes.json || true + +# Validate JSON output +if ! 
jq empty contract_sizes.json 2>/dev/null; then + echo "forge build did not return valid JSON. Output:" + cat contract_sizes.json + exit 1 +fi + +if jq -e '. == {}' contract_sizes.json >/dev/null; then + echo "forge did not find any contracts. forge build:" + # This usually means build failure + forge build + exit 1 +fi + +json=$(cat contract_sizes.json) + +# Filter JSON: keep only contracts/libraries from src/ +json=$(echo "$json" | jq --argjson keys "$(printf '%s\n' "${contracts[@]}" | jq -R . | jq -s .)" ' + to_entries + | map(select(.key as $k | $keys | index($k))) + | from_entries +') + +# Find all that violate the EIP-170 runtime size limit (24,576 bytes) +exceeding_runtime=$(echo "$json" | jq -r ' + to_entries + | map(select(.value.runtime_size > 24576)) + | .[] + | "\(.key): \(.value.runtime_size) bytes (runtime size)"' +) + +# Find all that violate the EIP-3860 init code size limit (49,152 bytes) +exceeding_initcode=$(echo "$json" | jq -r ' + to_entries + | map(select(.value.init_size > 49152)) + | .[] + | "\(.key): \(.value.init_size) bytes (init code size)"' +) + +# Initialize status +status=0 + +if [[ -n "$exceeding_runtime" ]]; then + echo "ERROR: The following contracts exceed EIP-170 runtime size (24,576 bytes):" + echo "$exceeding_runtime" + status=1 +fi + +if [[ -n "$exceeding_initcode" ]]; then + echo "ERROR: The following contracts exceed EIP-3860 init code size (49,152 bytes):" + echo "$exceeding_initcode" + status=1 +fi + +if [[ $status -eq 0 ]]; then + echo "All contracts are within the EIP-170 runtime and EIP-3860 init code size limits." 
+fi + +# Clean up temporary file +rm -f contract_sizes.json + +# Exit with appropriate status +exit $status + diff --git a/packages/pdp/tools/claim-owner.sh b/packages/pdp/tools/claim-owner.sh new file mode 100755 index 00000000..d20528c2 --- /dev/null +++ b/packages/pdp/tools/claim-owner.sh @@ -0,0 +1,39 @@ +#!/bin/bash +# claim_ownership.sh - Script for claiming ownership of a data set + +# Check if correct number of arguments provided +if [ "$#" -ne 1 ]; then + echo "Usage: $0 " + exit 1 +fi + +# Get argument +DATA_SET_ID=$1 + +# Check required environment variables +if [ -z "$PASSWORD" ] || [ -z "$KEYSTORE" ] || [ -z "$RPC_URL" ] || [ -z "$CONTRACT_ADDRESS" ]; then + echo "Error: Missing required environment variables." + echo "Please set PASSWORD, KEYSTORE, RPC_URL, and CONTRACT_ADDRESS." + exit 1 +fi + +echo "Claiming ownership of data set ID: $DATA_SET_ID" + +# Get claimer's address from keystore +CLAIMER_ADDRESS=$(cast wallet address --keystore "$KEYSTORE") +echo "New owner address (claiming ownership): $CLAIMER_ADDRESS" + +# Construct calldata using cast calldata +CALLDATA=$(cast calldata "claimDataSetStorageProvider(uint256,bytes)" "$DATA_SET_ID" "0x") + +echo "Sending transaction..." + +# Send transaction +TX_HASH=$(cast send --rpc-url "$RPC_URL" \ + --keystore "$KEYSTORE" \ + --password "$PASSWORD" \ + "$CONTRACT_ADDRESS" \ + "$CALLDATA") + +echo "Transaction sent! Hash: $TX_HASH" +echo "Successfully claimed ownership of data set $DATA_SET_ID" \ No newline at end of file diff --git a/packages/pdp/tools/create_data_set.sh b/packages/pdp/tools/create_data_set.sh new file mode 100755 index 00000000..44f5cdb5 --- /dev/null +++ b/packages/pdp/tools/create_data_set.sh @@ -0,0 +1,22 @@ +#! /bin/bash +# Usage: ./create_data_set.sh + +# Check if required environment variables are set +if [ -z "$RPC_URL" ] || [ -z "$KEYSTORE" ] ; then + echo "Error: Please set RPC_URL, KEYSTORE, and PASSWORD environment variables." 
+# deploy-calibnet deploys the PDP verifier contract to the calibration network
+# deploy-devnet deploys the PDP service contract and all auxiliary contracts to a filecoin devnet
+sleep 5 ## Sleep for 5 seconds so funds are available and the actor is registered
+# deploy-mainnet deploys the PDP verifier contract to Filecoin mainnet
src/ERC1967Proxy.sol:MyERC1967Proxy --constructor-args $VERIFIER_IMPLEMENTATION_ADDRESS $INIT_DATA | grep "Deployed to" | awk '{print $3}') +echo "PDP verifier deployed at: $PDP_VERIFIER_ADDRESS" + +echo "" +echo "=================================================" +echo "DEPLOYMENT COMPLETE" +echo "=================================================" +echo "PDPVerifier Implementation: $VERIFIER_IMPLEMENTATION_ADDRESS" +echo "PDPVerifier Proxy: $PDP_VERIFIER_ADDRESS" +echo "" +echo "NOTE: SimplePDPService is no longer deployed by default as of v2.0.0." +echo " It remains available as a reference implementation in src/SimplePDPService.sol" +echo " For community use and learning purposes." +echo "" diff --git a/packages/pdp/tools/deploy-simple-pdp-service.sh b/packages/pdp/tools/deploy-simple-pdp-service.sh new file mode 100755 index 00000000..eb81ae6d --- /dev/null +++ b/packages/pdp/tools/deploy-simple-pdp-service.sh @@ -0,0 +1,102 @@ +#!/bin/bash +# deploy-simple-pdp-service.sh - Optional deployment script for SimplePDPService +# +# ⚠️ DEPRECATED as of v2.0.0 ⚠️ +# SimplePDPService is no longer actively maintained but remains available +# as a reference implementation for the community. +# +# This script deploys SimplePDPService to work with an existing PDPVerifier. +# +# Prerequisites: +# - PDPVerifier must already be deployed +# - Set PDP_VERIFIER_ADDRESS environment variable to the PDPVerifier proxy address +# - Set RPC_URL, KEYSTORE, PASSWORD environment variables +# +# Usage: +# export PDP_VERIFIER_ADDRESS=0x... +# export RPC_URL=https://... +# export KEYSTORE=/path/to/keystore +# export PASSWORD=your_password +# ./deploy-simple-pdp-service.sh + +echo "=================================================" +echo "⚠️ DEPRECATED: SimplePDPService Deployment ⚠️" +echo "=================================================" +echo "" +echo "SimplePDPService is no longer actively maintained as of v2.0.0." +echo "This script is provided for reference and community use only." 
+echo "" +echo "Consider implementing your own service layer using PDPVerifier directly." +echo "See src/SimplePDPService.sol as a reference implementation." +echo "" +read -p "Do you want to continue with SimplePDPService deployment? (y/N): " -n 1 -r +echo +if [[ ! $REPLY =~ ^[Yy]$ ]]; then + echo "Deployment cancelled." + exit 0 +fi + +echo "" +echo "Proceeding with SimplePDPService deployment..." + +# Validate required environment variables +if [ -z "$PDP_VERIFIER_ADDRESS" ]; then + echo "Error: PDP_VERIFIER_ADDRESS is not set" + echo "Please set it to your deployed PDPVerifier proxy address" + exit 1 +fi + +if [ -z "$RPC_URL" ]; then + echo "Error: RPC_URL is not set" + exit 1 +fi + +if [ -z "$KEYSTORE" ]; then + echo "Error: KEYSTORE is not set" + exit 1 +fi + +# Determine chain ID based on RPC URL +CHAIN_ID=314 # Default to mainnet +if [[ "$RPC_URL" == *"calibration"* ]]; then + CHAIN_ID=314159 +fi + +ADDR=$(cast wallet address --keystore "$KEYSTORE" --password "$PASSWORD") +echo "Deploying SimplePDPService from address $ADDR" +echo "Using PDPVerifier at: $PDP_VERIFIER_ADDRESS" + +NONCE="$(cast nonce --rpc-url "$RPC_URL" "$ADDR")" + +echo "Deploying SimplePDPService implementation..." +SERVICE_IMPLEMENTATION_ADDRESS=$(forge create --rpc-url "$RPC_URL" --keystore "$KEYSTORE" --password "$PASSWORD" --broadcast --nonce $NONCE --chain-id $CHAIN_ID src/SimplePDPService.sol:SimplePDPService | grep "Deployed to" | awk '{print $3}') + +if [ -z "$SERVICE_IMPLEMENTATION_ADDRESS" ]; then + echo "Error: Failed to extract SimplePDPService contract address" + exit 1 +fi + +echo "SimplePDPService implementation deployed at: $SERVICE_IMPLEMENTATION_ADDRESS" + +NONCE=$(expr $NONCE + "1") + +echo "Deploying SimplePDPService proxy..." 
+INIT_DATA=$(cast calldata "initialize(address)" $PDP_VERIFIER_ADDRESS) +PDP_SERVICE_ADDRESS=$(forge create --rpc-url "$RPC_URL" --keystore "$KEYSTORE" --password "$PASSWORD" --broadcast --nonce $NONCE --chain-id $CHAIN_ID src/ERC1967Proxy.sol:MyERC1967Proxy --constructor-args $SERVICE_IMPLEMENTATION_ADDRESS $INIT_DATA | grep "Deployed to" | awk '{print $3}') + +if [ -z "$PDP_SERVICE_ADDRESS" ]; then + echo "Error: Failed to deploy SimplePDPService proxy" + exit 1 +fi + +echo "" +echo "=================================================" +echo "SimplePDPService DEPLOYMENT COMPLETE" +echo "=================================================" +echo "SimplePDPService Implementation: $SERVICE_IMPLEMENTATION_ADDRESS" +echo "SimplePDPService Proxy: $PDP_SERVICE_ADDRESS" +echo "Connected to PDPVerifier: $PDP_VERIFIER_ADDRESS" +echo "" +echo "⚠️ Remember: SimplePDPService is deprecated and not actively maintained." +echo " Consider migrating to a custom service implementation." +echo "" \ No newline at end of file diff --git a/packages/pdp/tools/deploy-transfer-ownership-upgrade-calibnet.sh b/packages/pdp/tools/deploy-transfer-ownership-upgrade-calibnet.sh new file mode 100755 index 00000000..26cee5d9 --- /dev/null +++ b/packages/pdp/tools/deploy-transfer-ownership-upgrade-calibnet.sh @@ -0,0 +1,176 @@ +#!/usr/bin/env bash +set -euo pipefail + +##################################### +# Environment variables & defaults # +##################################### + +: "${FIL_CALIBNET_RPC_URL:?FIL_CALIBNET_RPC_URL not set. Please export it and rerun.}" +: "${FIL_CALIBNET_PRIVATE_KEY:?FIL_CALIBNET_PRIVATE_KEY not set. Please export it and rerun.}" +: "${NEW_OWNER:?NEW_OWNER not set. Please export it and rerun.}" + + +CHAIN_ID="${CHAIN_ID:-314159}" +COMPILER_VERSION="${COMPILER_VERSION:-0.8.22}" + +##################################### +# 1. Create INIT_DATA # +##################################### +echo "Generating calldata for initialize(uint256) with argument 150 ..." 
+INIT_DATA=$(cast calldata "initialize(uint256)" 150) +echo "INIT_DATA = $INIT_DATA" +echo + +##################################### +# 1. Get deployer address # +##################################### +echo "Deriving deployer address from private key ..." +DEPLOYER_ADDRESS=$(cast wallet address "$FIL_CALIBNET_PRIVATE_KEY") +NONCE="$(cast nonce --rpc-url "$FIL_CALIBNET_RPC_URL" "$DEPLOYER_ADDRESS")" +echo "Deployer address: $DEPLOYER_ADDRESS" +echo + +##################################### +# 2. Deploy PDPVerifier contract # +##################################### +echo "Deploying PDPVerifier contract ..." +DEPLOY_OUTPUT_VERIFIER=$( + forge create \ + --rpc-url "$FIL_CALIBNET_RPC_URL" \ + --private-key "$FIL_CALIBNET_PRIVATE_KEY" \ + --chain-id "$CHAIN_ID" \ + --broadcast \ + --nonce $NONCE \ + src/PDPVerifier.sol:PDPVerifier +) +NONCE=$(expr $NONCE + "1") + + +# Extract the deployed address from JSON output +PDP_VERIFIER_ADDRESS=$(echo "$DEPLOY_OUTPUT_VERIFIER" | grep "Deployed to" | awk '{print $3}') +echo "PDPVerifier deployed at: $PDP_VERIFIER_ADDRESS" +echo + +##################################### +# 3. Deploy Proxy contract # +##################################### +echo "Deploying Proxy contract (MyERC1967Proxy) ..." +DEPLOY_OUTPUT_PROXY=$(forge create --rpc-url "$FIL_CALIBNET_RPC_URL" --private-key "$FIL_CALIBNET_PRIVATE_KEY" --chain-id "$CHAIN_ID" --broadcast --nonce $NONCE src/ERC1967Proxy.sol:MyERC1967Proxy --constructor-args "$PDP_VERIFIER_ADDRESS" "$INIT_DATA") +NONCE=$(expr $NONCE + "1") + + +# Extract the deployed proxy address +PROXY_ADDRESS=$(echo "$DEPLOY_OUTPUT_PROXY" | grep "Deployed to" | awk '{print $3}') +echo "Proxy deployed at: $PROXY_ADDRESS" +echo + +##################################### +# 4. Check owner of proxy # +##################################### +echo "Querying the proxy's owner ..." 
+OWNER_ADDRESS=$( + cast call \ + --rpc-url "$FIL_CALIBNET_RPC_URL" \ + "$PROXY_ADDRESS" \ + "owner()(address)" +) +echo "Proxy owner: $OWNER_ADDRESS" + +# Add validation check +if [ "${OWNER_ADDRESS,,}" != "${DEPLOYER_ADDRESS,,}" ]; then + echo "failed to validate owner address" + echo "Expected owner to be: ${DEPLOYER_ADDRESS}" + echo "Got: ${OWNER_ADDRESS}" + exit 1 +fi +echo "✓ Owner address validated successfully" +echo + +##################################### +# 5. Check implementation address # +##################################### +# The storage slot for ERC1967 implementation: +IMPLEMENTATION_SLOT="0x360894A13BA1A3210667C828492DB98DCA3E2076CC3735A920A3CA505D382BBC" + +echo "Checking proxy's implementation address from storage slot $IMPLEMENTATION_SLOT ..." +sleep 35 +IMPLEMENTATION_ADDRESS=$(cast storage --rpc-url "$FIL_CALIBNET_RPC_URL" "$PROXY_ADDRESS" "$IMPLEMENTATION_SLOT") + +echo "Implementation address in Proxy: $IMPLEMENTATION_ADDRESS" +echo + + +##################################### +# Summary # +##################################### +echo "========== DEPLOYMENT SUMMARY ==========" +echo "PDPVerifier Address: $PDP_VERIFIER_ADDRESS" +echo "Proxy Address: $PROXY_ADDRESS" +echo "Proxy Owner (should match deployer): $OWNER_ADDRESS" +echo "PDPVerifier Implementation (via Proxy): $IMPLEMENTATION_ADDRESS" +echo "========================================" + + +##################################### +# 6. Upgrade proxy # +##################################### + +echo "Deploying a new PDPVerifier contract ..." +DEPLOY_OUTPUT_VERIFIER_2=$(forge create --nonce $NONCE --broadcast --rpc-url "$FIL_CALIBNET_RPC_URL" --private-key "$FIL_CALIBNET_PRIVATE_KEY" --chain-id "$CHAIN_ID" src/PDPVerifier.sol:PDPVerifier) +NONCE=$(expr $NONCE + "1") +PDP_VERIFIER_ADDRESS_2=$(echo "$DEPLOY_OUTPUT_VERIFIER_2" | grep "Deployed to" | awk '{print $3}') +echo "PDPVerifier deployed at: $PDP_VERIFIER_ADDRESS_2" +echo + +echo +echo "Upgrading proxy to new implementation..." 
+ +cast send --rpc-url "$FIL_CALIBNET_RPC_URL" --private-key "$FIL_CALIBNET_PRIVATE_KEY" --nonce $NONCE --chain-id "$CHAIN_ID" "$PROXY_ADDRESS" "upgradeToAndCall(address,bytes)" "$PDP_VERIFIER_ADDRESS_2" "0x" +NONCE=$(expr $NONCE + "1") + +echo "✓ Upgrade transaction submitted" + +# Verify the upgrade +echo "Verifying new implementation..." +sleep 35 +NEW_IMPLEMENTATION_ADDRESS=$(cast storage --rpc-url "$FIL_CALIBNET_RPC_URL" "$PROXY_ADDRESS" "$IMPLEMENTATION_SLOT") + +if [ "${NEW_IMPLEMENTATION_ADDRESS,,}" != "${PDP_VERIFIER_ADDRESS_2,,}" ]; then + echo "failed to upgrade implementation" + echo "Expected new implementation to be: ${PDP_VERIFIER_ADDRESS_2}" + echo "Got: ${NEW_IMPLEMENTATION_ADDRESS}" + exit 1 +fi + +echo "✓ Proxy upgraded successfully to ${PDP_VERIFIER_ADDRESS_2}" +echo + +##################################### +# 7. Transfer ownership # +##################################### +echo +echo "Transferring ownership to new owner..." + +cast send --rpc-url "$FIL_CALIBNET_RPC_URL" --private-key "$FIL_CALIBNET_PRIVATE_KEY" --nonce $NONCE --chain-id "$CHAIN_ID" "$PROXY_ADDRESS" "transferOwnership(address)" "$NEW_OWNER" +NONCE=$(expr $NONCE + "1") + +echo "✓ Ownership transfer transaction submitted" + +# Verify the ownership transfer +echo "Verifying new owner..." +NEW_OWNER_ADDRESS=$( + cast call \ + --rpc-url "$FIL_CALIBNET_RPC_URL" \ + "$PROXY_ADDRESS" \ + "owner()(address)" +) + +if [ "${NEW_OWNER_ADDRESS,,}" != "${NEW_OWNER,,}" ]; then + echo "failed to transfer ownership" + echo "Expected new owner to be: ${NEW_OWNER}" + echo "Got: ${NEW_OWNER_ADDRESS}" + exit 1 +fi + +echo "✓ Ownership transferred successfully to ${NEW_OWNER}" +echo diff --git a/packages/pdp/tools/find.sh b/packages/pdp/tools/find.sh new file mode 100755 index 00000000..9895499e --- /dev/null +++ b/packages/pdp/tools/find.sh @@ -0,0 +1,6 @@ +#! 
+# Usage: ./find.sh <contract-address> <data-set-id> <input-list>
+# Usage: ./remove.sh <contract-address> <data-set-id> <input-list>
+# testBurnFee deploys the PDP service contract to a filecoin devnet and tests the burnFee function
"$KEYSTORE" ]; then + echo "Error: KEYSTORE is not set" + exit 1 +fi + +if [ -z "$CONTRACT_ADDRESS" ]; then + echo "Error: CONTRACT_ADDRESS is not set" + exit 1 +fi + +if [ -z "$NEW_OWNER" ]; then + echo "Error: NEW_OWNER is not set" + exit 1 +fi + +##################################### +# Setup # +##################################### +echo "Using keystore for authentication..." +ADDR=$(cast wallet address --keystore "$KEYSTORE" --password "$PASSWORD") +NONCE="$(cast nonce --rpc-url "$RPC_URL" "$ADDR")" +echo "Deployer address: $ADDR" +echo + +##################################### +# Transfer ownership # +##################################### +echo "Transferring ownership to new owner..." +echo "Proxy address: $CONTRACT_ADDRESS" +echo "New owner: $NEW_OWNER" + +cast send --rpc-url "$RPC_URL" --keystore "$KEYSTORE" --password "$PASSWORD" --nonce $NONCE "$CONTRACT_ADDRESS" "transferOwnership(address)" "$NEW_OWNER" + +echo "✓ Ownership transfer transaction submitted" + +# Verify the ownership transfer +echo "Verifying new owner..." +NEW_OWNER_ADDRESS=$( + cast call \ + --rpc-url "$RPC_URL" \ + "$CONTRACT_ADDRESS" \ + "owner()(address)" +) + +if [ "${NEW_OWNER_ADDRESS,,}" != "${NEW_OWNER,,}" ]; then + echo "Failed to transfer ownership" + echo "Expected new owner to be: ${NEW_OWNER}" + echo "Got: ${NEW_OWNER_ADDRESS}" + exit 1 +fi + +echo "✓ Ownership transferred successfully to ${NEW_OWNER}" +echo \ No newline at end of file diff --git a/packages/pdp/tools/upgrade-contract.sh b/packages/pdp/tools/upgrade-contract.sh new file mode 100755 index 00000000..df1b33d5 --- /dev/null +++ b/packages/pdp/tools/upgrade-contract.sh @@ -0,0 +1,90 @@ +#! /bin/bash +# upgrade-contract upgrades proxy at $PROXY_ADDRESS to a new deployment of the implementation +# of the contract at $IMPLEMENTATION_PATH (i.e. 
src/PDPService.sol:PDPService / src/PDPRecordKeeper.sol:PDPRecordKeeper) +# Assumption: KEYSTORE, PASSWORD, RPC_URL env vars are set to an appropriate eth keystore path and password +# and to a valid RPC_URL for the target network. +# Assumption: forge, cast, jq are in the PATH +# +# Set DRY_RUN=false to actually deploy and broadcast transactions (default is dry-run for safety) +DRY_RUN=${DRY_RUN:-true} + +if [ "$DRY_RUN" = "true" ]; then + echo "🧪 Running in DRY-RUN mode - simulation only, no actual deployment" +else + echo "🚀 Running in DEPLOYMENT mode - will actually deploy and upgrade contracts" +fi + +echo "Upgrading contract" + +if [ -z "$RPC_URL" ]; then + echo "Error: RPC_URL is not set" + exit 1 +fi + +if [ -z "$CHAIN_ID" ]; then + CHAIN_ID=$(cast chain-id --rpc-url "$RPC_URL") + if [ -z "$CHAIN_ID" ]; then + echo "Error: Failed to detect chain ID from RPC" + exit 1 + fi +fi + +if [ -z "$KEYSTORE" ]; then + echo "Error: KEYSTORE is not set" + exit 1 +fi + +if [ -z "$PROXY_ADDRESS" ]; then + echo "Error: PROXY_ADDRESS is not set" + exit 1 +fi + +if [ -z "$UPGRADE_DATA" ]; then + echo "Error: UPGRADE_DATA is not set" + exit 1 +fi + +if [ -z "$IMPLEMENTATION_PATH" ]; then + echo "Error: IMPLEMENTATION_PATH is not set (i.e. src/PDPService.sol:PDPService)" + exit 1 +fi + +if [ "$DRY_RUN" = "true" ]; then + echo "🔍 Simulating deployment of new $IMPLEMENTATION_PATH implementation contract" + forge create --rpc-url "$RPC_URL" --keystore "$KEYSTORE" --password "$PASSWORD" --compiler-version 0.8.23 --chain-id "$CHAIN_ID" "$IMPLEMENTATION_PATH" + + if [ $? -eq 0 ]; then + echo "✅ Contract compilation and simulation successful!" + echo "🔍 Simulating proxy upgrade at $PROXY_ADDRESS" + echo " - Would call: upgradeToAndCall(address,bytes)" + echo " - With upgrade data: $UPGRADE_DATA" + echo "✅ Dry run completed successfully!" 
+ echo "" + echo "To perform actual deployment, run with: DRY_RUN=false ./tools/upgrade-contract.sh" + else + echo "❌ Contract compilation failed during simulation" + exit 1 + fi +else + echo "🚀 Deploying new $IMPLEMENTATION_PATH implementation contract" + # Parse the output of forge create to extract the contract address + IMPLEMENTATION_ADDRESS=$(forge create --rpc-url "$RPC_URL" --keystore "$KEYSTORE" --password "$PASSWORD" --broadcast --compiler-version 0.8.23 --chain-id "$CHAIN_ID" "$IMPLEMENTATION_PATH" | grep "Deployed to" | awk '{print $3}') + + if [ -z "$IMPLEMENTATION_ADDRESS" ]; then + echo "❌ Error: Failed to extract PDP verifier contract address" + exit 1 + fi + echo "✅ $IMPLEMENTATION_PATH implementation deployed at: $IMPLEMENTATION_ADDRESS" + + echo "🔄 Upgrading proxy at $PROXY_ADDRESS" + cast send --rpc-url "$RPC_URL" --keystore "$KEYSTORE" --password "$PASSWORD" --chain-id "$CHAIN_ID" "$PROXY_ADDRESS" "upgradeToAndCall(address,bytes)" "$IMPLEMENTATION_ADDRESS" "$UPGRADE_DATA" + + if [ $? -eq 0 ]; then + echo "✅ Contract upgrade completed successfully!" 
+ echo "📄 You can verify the upgrade by checking the VERSION:" + echo " cast call $PROXY_ADDRESS \"VERSION()\" --rpc-url $RPC_URL | cast --to-ascii" + else + echo "❌ Contract upgrade failed" + exit 1 + fi +fi diff --git a/packages/session-key-registry/package.json b/packages/session-key-registry/package.json new file mode 100644 index 00000000..e16a7118 --- /dev/null +++ b/packages/session-key-registry/package.json @@ -0,0 +1,30 @@ +{ + "name": "@filoz/session-key-registry", + "version": "1.0.0", + "description": "Filecoin Session Key Registry - Smart contracts for managing session keys", + "main": "src/index.js", + "files": [ + "src/**/*.sol", + "abi/**/*.json" + ], + "scripts": { + "build": "echo 'Use root forge build'", + "test": "echo 'Use root forge test'", + "clean": "echo 'Use root forge clean'", + "lint": "echo 'Use root forge fmt --check'", + "lint:fix": "echo 'Use root forge fmt'" + }, + "repository": { + "type": "git", + "url": "https://github.com/FilOzone/SessionKeyRegistry.git" + }, + "keywords": [ + "filecoin", + "session-keys", + "registry", + "solidity", + "smart-contracts" + ], + "author": "FilOzone", + "license": "Apache-2.0 OR MIT" +} diff --git a/packages/session-key-registry/src/SessionKeyRegistry.sol b/packages/session-key-registry/src/SessionKeyRegistry.sol new file mode 100644 index 00000000..801cae27 --- /dev/null +++ b/packages/session-key-registry/src/SessionKeyRegistry.sol @@ -0,0 +1,59 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT +pragma solidity ^0.8.30; + +contract SessionKeyRegistry { + mapping(address user => mapping(address signer => mapping(bytes32 permission => uint256))) public + authorizationExpiry; + + event AuthorizationsUpdated( + address indexed identity, address signer, uint256 expiry, bytes32[] permissions, string origin + ); + + function _setAuthorizations(address signer, uint256 expiry, bytes32[] calldata permissions, string calldata origin) + internal + { + mapping(bytes32 => uint256) storage permissionExpiry = 
authorizationExpiry[msg.sender][signer]; + for (uint256 i = 0; i < permissions.length; i++) { + permissionExpiry[permissions[i]] = expiry; + } + emit AuthorizationsUpdated(msg.sender, signer, expiry, permissions, origin); + } + + /** + * @notice Caller revokes from the signer the specified permissions + * @param signer the authorized account + * @param permissions the scope of authority to revoke from the signer + * @param origin indicates what app prompted this revocation + */ + function revoke(address signer, bytes32[] calldata permissions, string calldata origin) external { + _setAuthorizations(signer, 0, permissions, origin); + } + + /** + * @notice Caller authorizes the signer with permissions until expiry + * @param signer the account authorized + * @param expiry when the authorization ends + * @param permissions the scope of authority granted to the signer + * @param origin indicates what app prompted this authorization + */ + function login(address signer, uint256 expiry, bytes32[] calldata permissions, string calldata origin) external { + _setAuthorizations(signer, expiry, permissions, origin); + } + + /** + * @notice Caller funds and authorizes the signer with permissions until expiry + * @param signer the account authorized + * @param expiry when the authorization ends + * @param permissions the scope of authority granted to the signer + * @param origin indicates what app prompted this authorization + */ + function loginAndFund( + address payable signer, + uint256 expiry, + bytes32[] calldata permissions, + string calldata origin + ) external payable { + _setAuthorizations(signer, expiry, permissions, origin); + signer.transfer(msg.value); + } +} diff --git a/packages/session-key-registry/test/SessionKeyRegistry.t.sol b/packages/session-key-registry/test/SessionKeyRegistry.t.sol new file mode 100644 index 00000000..f867e6ab --- /dev/null +++ b/packages/session-key-registry/test/SessionKeyRegistry.t.sol @@ -0,0 +1,74 @@ +// SPDX-License-Identifier: 
Apache-2.0 OR MIT +pragma solidity ^0.8.30; + +import {Test} from "forge-std/Test.sol"; +import {SessionKeyRegistry} from "../src/SessionKeyRegistry.sol"; + +contract SessionKeyRegistryTest is Test { + SessionKeyRegistry registry = new SessionKeyRegistry(); + + address payable constant SIGNER_ONE = payable(0x1111111111111111111111111111111111111111); + address payable constant SIGNER_TWO = payable(0x2222222222222222222222222222222222222222); + bytes32 private constant PERMISSION1 = 0x1111111111111111111111111111111111111111111111111111111111111111; + bytes32 private constant PERMISSION2 = 0x2222222222222222222222222222222222222222222222222222222222222222; + bytes32 private constant PERMISSION3 = 0x3333333333333333333333333333333333333333333333333333333333333333; + string constant ORIGIN = "SessionKeyRegistryTest"; + + uint256 private constant DAY_SECONDS = 1 days; + + function test_loginAndFund() public { + bytes32[] memory permissions = new bytes32[](3); + permissions[0] = PERMISSION1; + permissions[1] = PERMISSION2; + permissions[2] = PERMISSION3; + + assertEq(SIGNER_ONE.balance, 0); + assertEq(registry.authorizationExpiry(address(this), SIGNER_ONE, PERMISSION1), 0); + assertEq(registry.authorizationExpiry(address(this), SIGNER_ONE, PERMISSION2), 0); + assertEq(registry.authorizationExpiry(address(this), SIGNER_ONE, PERMISSION3), 0); + + uint256 expiry = block.timestamp + DAY_SECONDS; + vm.expectEmit(true, false, false, true, address(registry)); + emit SessionKeyRegistry.AuthorizationsUpdated(address(this), SIGNER_ONE, expiry, permissions, ORIGIN); + registry.loginAndFund{value: 1 ether}(SIGNER_ONE, expiry, permissions, ORIGIN); + + assertEq(SIGNER_ONE.balance, 1 ether); + assertEq(registry.authorizationExpiry(address(this), SIGNER_ONE, PERMISSION1), expiry); + assertEq(registry.authorizationExpiry(address(this), SIGNER_ONE, PERMISSION2), expiry); + assertEq(registry.authorizationExpiry(address(this), SIGNER_ONE, PERMISSION3), expiry); + + vm.expectEmit(true, 
false, false, true, address(registry)); + emit SessionKeyRegistry.AuthorizationsUpdated(address(this), SIGNER_ONE, 0, permissions, ORIGIN); + registry.revoke(SIGNER_ONE, permissions, ORIGIN); + assertEq(registry.authorizationExpiry(address(this), SIGNER_ONE, PERMISSION1), 0); + assertEq(registry.authorizationExpiry(address(this), SIGNER_ONE, PERMISSION2), 0); + assertEq(registry.authorizationExpiry(address(this), SIGNER_ONE, PERMISSION3), 0); + } + + function test_login() public { + bytes32[] memory permissions = new bytes32[](2); + permissions[0] = PERMISSION3; + permissions[1] = PERMISSION1; + + assertEq(registry.authorizationExpiry(address(this), SIGNER_TWO, PERMISSION1), 0); + assertEq(registry.authorizationExpiry(address(this), SIGNER_TWO, PERMISSION2), 0); + assertEq(registry.authorizationExpiry(address(this), SIGNER_TWO, PERMISSION3), 0); + + uint256 expiry = block.timestamp + 4 * DAY_SECONDS; + + vm.expectEmit(true, false, false, true, address(registry)); + emit SessionKeyRegistry.AuthorizationsUpdated(address(this), SIGNER_TWO, expiry, permissions, ORIGIN); + registry.login(SIGNER_TWO, expiry, permissions, ORIGIN); + + assertEq(registry.authorizationExpiry(address(this), SIGNER_TWO, PERMISSION1), expiry); + assertEq(registry.authorizationExpiry(address(this), SIGNER_TWO, PERMISSION2), 0); + assertEq(registry.authorizationExpiry(address(this), SIGNER_TWO, PERMISSION3), expiry); + + vm.expectEmit(true, false, false, true, address(registry)); + emit SessionKeyRegistry.AuthorizationsUpdated(address(this), SIGNER_TWO, 0, permissions, ORIGIN); + registry.revoke(SIGNER_TWO, permissions, ORIGIN); + assertEq(registry.authorizationExpiry(address(this), SIGNER_TWO, PERMISSION1), 0); + assertEq(registry.authorizationExpiry(address(this), SIGNER_TWO, PERMISSION2), 0); + assertEq(registry.authorizationExpiry(address(this), SIGNER_TWO, PERMISSION3), 0); + } +} diff --git a/packages/warm-storage/package.json b/packages/warm-storage/package.json new file mode 100644 index 
00000000..e94044be --- /dev/null +++ b/packages/warm-storage/package.json @@ -0,0 +1,31 @@ +{ + "name": "@filoz/warm-storage", + "version": "1.0.0", + "description": "Filecoin Warm Storage Service - Comprehensive service contract combining PDP verification with integrated payment rails", + "main": "src/index.js", + "files": [ + "src/**/*.sol", + "abi/**/*.json" + ], + "scripts": { + "build": "echo 'Use root forge build'", + "test": "echo 'Use root forge test'", + "clean": "echo 'Use root forge clean'", + "lint": "echo 'Use root forge fmt --check'", + "lint:fix": "echo 'Use root forge fmt'" + }, + "repository": { + "type": "git", + "url": "https://github.com/FilOzone/filecoin-services.git" + }, + "keywords": [ + "filecoin", + "warm-storage", + "pdp", + "payments", + "solidity", + "smart-contracts" + ], + "author": "FilOzone", + "license": "Apache-2.0 OR MIT" +} diff --git a/service_contracts/src/Errors.sol b/packages/warm-storage/src/Errors.sol similarity index 100% rename from service_contracts/src/Errors.sol rename to packages/warm-storage/src/Errors.sol diff --git a/service_contracts/src/Extsload.sol b/packages/warm-storage/src/Extsload.sol similarity index 100% rename from service_contracts/src/Extsload.sol rename to packages/warm-storage/src/Extsload.sol diff --git a/service_contracts/src/FilecoinWarmStorageService.sol b/packages/warm-storage/src/FilecoinWarmStorageService.sol similarity index 100% rename from service_contracts/src/FilecoinWarmStorageService.sol rename to packages/warm-storage/src/FilecoinWarmStorageService.sol diff --git a/service_contracts/src/FilecoinWarmStorageServiceStateView.sol b/packages/warm-storage/src/FilecoinWarmStorageServiceStateView.sol similarity index 100% rename from service_contracts/src/FilecoinWarmStorageServiceStateView.sol rename to packages/warm-storage/src/FilecoinWarmStorageServiceStateView.sol diff --git a/service_contracts/src/ServiceProviderRegistry.sol b/packages/warm-storage/src/ServiceProviderRegistry.sol 
similarity index 100% rename from service_contracts/src/ServiceProviderRegistry.sol rename to packages/warm-storage/src/ServiceProviderRegistry.sol diff --git a/service_contracts/src/ServiceProviderRegistryStorage.sol b/packages/warm-storage/src/ServiceProviderRegistryStorage.sol similarity index 100% rename from service_contracts/src/ServiceProviderRegistryStorage.sol rename to packages/warm-storage/src/ServiceProviderRegistryStorage.sol diff --git a/service_contracts/src/lib/FilecoinWarmStorageServiceLayout.sol b/packages/warm-storage/src/lib/FilecoinWarmStorageServiceLayout.sol similarity index 100% rename from service_contracts/src/lib/FilecoinWarmStorageServiceLayout.sol rename to packages/warm-storage/src/lib/FilecoinWarmStorageServiceLayout.sol diff --git a/service_contracts/src/lib/FilecoinWarmStorageServiceStateInternalLibrary.sol b/packages/warm-storage/src/lib/FilecoinWarmStorageServiceStateInternalLibrary.sol similarity index 99% rename from service_contracts/src/lib/FilecoinWarmStorageServiceStateInternalLibrary.sol rename to packages/warm-storage/src/lib/FilecoinWarmStorageServiceStateInternalLibrary.sol index 4b6597b9..3b91f1f8 100644 --- a/service_contracts/src/lib/FilecoinWarmStorageServiceStateInternalLibrary.sol +++ b/packages/warm-storage/src/lib/FilecoinWarmStorageServiceStateInternalLibrary.sol @@ -3,7 +3,7 @@ pragma solidity ^0.8.20; // Code generated - DO NOT EDIT. // This file is a generated binding and any changes will be lost. 
-// Generated with make src/lib/FilecoinWarmStorageServiceStateInternalLibrary.sol +// Generated with make packages/warm-storage/src/lib/FilecoinWarmStorageServiceStateInternalLibrary.sol import {Errors} from "../Errors.sol"; import { diff --git a/service_contracts/src/lib/FilecoinWarmStorageServiceStateLibrary.sol b/packages/warm-storage/src/lib/FilecoinWarmStorageServiceStateLibrary.sol similarity index 100% rename from service_contracts/src/lib/FilecoinWarmStorageServiceStateLibrary.sol rename to packages/warm-storage/src/lib/FilecoinWarmStorageServiceStateLibrary.sol diff --git a/service_contracts/src/lib/SignatureVerificationLib.sol b/packages/warm-storage/src/lib/SignatureVerificationLib.sol similarity index 100% rename from service_contracts/src/lib/SignatureVerificationLib.sol rename to packages/warm-storage/src/lib/SignatureVerificationLib.sol diff --git a/service_contracts/test/Extsload.t.sol b/packages/warm-storage/test/Extsload.t.sol similarity index 100% rename from service_contracts/test/Extsload.t.sol rename to packages/warm-storage/test/Extsload.t.sol diff --git a/service_contracts/test/FilecoinWarmStorageService.t.sol b/packages/warm-storage/test/FilecoinWarmStorageService.t.sol similarity index 100% rename from service_contracts/test/FilecoinWarmStorageService.t.sol rename to packages/warm-storage/test/FilecoinWarmStorageService.t.sol diff --git a/service_contracts/test/FilecoinWarmStorageServiceOwner.t.sol b/packages/warm-storage/test/FilecoinWarmStorageServiceOwner.t.sol similarity index 100% rename from service_contracts/test/FilecoinWarmStorageServiceOwner.t.sol rename to packages/warm-storage/test/FilecoinWarmStorageServiceOwner.t.sol diff --git a/service_contracts/test/ProviderValidation.t.sol b/packages/warm-storage/test/ProviderValidation.t.sol similarity index 100% rename from service_contracts/test/ProviderValidation.t.sol rename to packages/warm-storage/test/ProviderValidation.t.sol diff --git 
a/service_contracts/test/ServiceProviderRegistry.t.sol b/packages/warm-storage/test/ServiceProviderRegistry.t.sol similarity index 100% rename from service_contracts/test/ServiceProviderRegistry.t.sol rename to packages/warm-storage/test/ServiceProviderRegistry.t.sol diff --git a/service_contracts/test/ServiceProviderRegistryFull.t.sol b/packages/warm-storage/test/ServiceProviderRegistryFull.t.sol similarity index 100% rename from service_contracts/test/ServiceProviderRegistryFull.t.sol rename to packages/warm-storage/test/ServiceProviderRegistryFull.t.sol diff --git a/service_contracts/test/ServiceProviderRegistryPagination.t.sol b/packages/warm-storage/test/ServiceProviderRegistryPagination.t.sol similarity index 100% rename from service_contracts/test/ServiceProviderRegistryPagination.t.sol rename to packages/warm-storage/test/ServiceProviderRegistryPagination.t.sol diff --git a/service_contracts/test/SignatureFixtureTest.t.sol b/packages/warm-storage/test/SignatureFixtureTest.t.sol similarity index 99% rename from service_contracts/test/SignatureFixtureTest.t.sol rename to packages/warm-storage/test/SignatureFixtureTest.t.sol index 29f8bf02..1655e1cd 100644 --- a/service_contracts/test/SignatureFixtureTest.t.sol +++ b/packages/warm-storage/test/SignatureFixtureTest.t.sol @@ -340,7 +340,7 @@ contract MetadataSignatureFixturesTest is Test { * @dev Test external signatures against contract verification */ function testExternalSignatures() public view { - string memory json = vm.readFile("./test/external_signatures.json"); + string memory json = vm.readFile("./packages/warm-storage/test/external_signatures.json"); address signer = vm.parseJsonAddress(json, ".signer"); console.log("Testing external signatures for signer:", signer); diff --git a/service_contracts/test/external_signatures.json b/packages/warm-storage/test/external_signatures.json similarity index 100% rename from service_contracts/test/external_signatures.json rename to 
packages/warm-storage/test/external_signatures.json diff --git a/service_contracts/test/mocks/SharedMocks.sol b/packages/warm-storage/test/mocks/SharedMocks.sol similarity index 100% rename from service_contracts/test/mocks/SharedMocks.sol rename to packages/warm-storage/test/mocks/SharedMocks.sol diff --git a/service_contracts/tools/README.md b/packages/warm-storage/tools/README.md similarity index 100% rename from service_contracts/tools/README.md rename to packages/warm-storage/tools/README.md diff --git a/service_contracts/tools/announce-planned-upgrade.sh b/packages/warm-storage/tools/announce-planned-upgrade.sh similarity index 100% rename from service_contracts/tools/announce-planned-upgrade.sh rename to packages/warm-storage/tools/announce-planned-upgrade.sh diff --git a/service_contracts/tools/check-contract-size.sh b/packages/warm-storage/tools/check-contract-size.sh similarity index 100% rename from service_contracts/tools/check-contract-size.sh rename to packages/warm-storage/tools/check-contract-size.sh diff --git a/service_contracts/tools/compare_contract_sizes.sh b/packages/warm-storage/tools/compare_contract_sizes.sh similarity index 100% rename from service_contracts/tools/compare_contract_sizes.sh rename to packages/warm-storage/tools/compare_contract_sizes.sh diff --git a/service_contracts/tools/create_data_set_with_payments.sh b/packages/warm-storage/tools/create_data_set_with_payments.sh similarity index 100% rename from service_contracts/tools/create_data_set_with_payments.sh rename to packages/warm-storage/tools/create_data_set_with_payments.sh diff --git a/service_contracts/tools/deploy-all-warm-storage.sh b/packages/warm-storage/tools/deploy-all-warm-storage.sh similarity index 100% rename from service_contracts/tools/deploy-all-warm-storage.sh rename to packages/warm-storage/tools/deploy-all-warm-storage.sh diff --git a/service_contracts/tools/deploy-registry-calibnet.sh b/packages/warm-storage/tools/deploy-registry-calibnet.sh similarity 
index 100% rename from service_contracts/tools/deploy-registry-calibnet.sh rename to packages/warm-storage/tools/deploy-registry-calibnet.sh diff --git a/service_contracts/tools/deploy-session-key-registry.sh b/packages/warm-storage/tools/deploy-session-key-registry.sh similarity index 100% rename from service_contracts/tools/deploy-session-key-registry.sh rename to packages/warm-storage/tools/deploy-session-key-registry.sh diff --git a/service_contracts/tools/deploy-warm-storage-calibnet.sh b/packages/warm-storage/tools/deploy-warm-storage-calibnet.sh similarity index 100% rename from service_contracts/tools/deploy-warm-storage-calibnet.sh rename to packages/warm-storage/tools/deploy-warm-storage-calibnet.sh diff --git a/service_contracts/tools/deploy-warm-storage-implementation-only.sh b/packages/warm-storage/tools/deploy-warm-storage-implementation-only.sh similarity index 100% rename from service_contracts/tools/deploy-warm-storage-implementation-only.sh rename to packages/warm-storage/tools/deploy-warm-storage-implementation-only.sh diff --git a/service_contracts/tools/deploy-warm-storage-view.sh b/packages/warm-storage/tools/deploy-warm-storage-view.sh similarity index 100% rename from service_contracts/tools/deploy-warm-storage-view.sh rename to packages/warm-storage/tools/deploy-warm-storage-view.sh diff --git a/service_contracts/tools/generate_storage_layout.sh b/packages/warm-storage/tools/generate_storage_layout.sh similarity index 100% rename from service_contracts/tools/generate_storage_layout.sh rename to packages/warm-storage/tools/generate_storage_layout.sh diff --git a/service_contracts/tools/generate_view_contract.sh b/packages/warm-storage/tools/generate_view_contract.sh similarity index 100% rename from service_contracts/tools/generate_view_contract.sh rename to packages/warm-storage/tools/generate_view_contract.sh diff --git a/service_contracts/tools/set-warm-storage-view.sh b/packages/warm-storage/tools/set-warm-storage-view.sh similarity 
index 100% rename from service_contracts/tools/set-warm-storage-view.sh rename to packages/warm-storage/tools/set-warm-storage-view.sh diff --git a/service_contracts/tools/upgrade.sh b/packages/warm-storage/tools/upgrade.sh similarity index 100% rename from service_contracts/tools/upgrade.sh rename to packages/warm-storage/tools/upgrade.sh diff --git a/service_contracts/tools/verify-contracts.sh b/packages/warm-storage/tools/verify-contracts.sh similarity index 100% rename from service_contracts/tools/verify-contracts.sh rename to packages/warm-storage/tools/verify-contracts.sh diff --git a/release-please-config.json b/release-please-config.json new file mode 100644 index 00000000..446cae58 --- /dev/null +++ b/release-please-config.json @@ -0,0 +1,74 @@ +{ + "release-type": "node", + "packages": { + "packages/pay": { + "release-type": "node" + }, + "packages/pdp": { + "release-type": "node" + }, + "packages/session-key-registry": { + "release-type": "node" + } + }, + "bump-minor-pre-major": true, + "bump-patch-for-minor-pre-major": true, + "draft": false, + "prerelease": false, + "include-component-in-tag": true, + "include-v-in-tag": true, + "changelog-sections": [ + { + "type": "feat", + "section": "Features" + }, + { + "type": "fix", + "section": "Bug Fixes" + }, + { + "type": "perf", + "section": "Performance Improvements" + }, + { + "type": "revert", + "section": "Reverts" + }, + { + "type": "docs", + "section": "Documentation", + "hidden": true + }, + { + "type": "style", + "section": "Styles", + "hidden": true + }, + { + "type": "chore", + "section": "Miscellaneous Chores", + "hidden": true + }, + { + "type": "refactor", + "section": "Code Refactoring", + "hidden": true + }, + { + "type": "test", + "section": "Tests", + "hidden": true + }, + { + "type": "build", + "section": "Build System", + "hidden": true + }, + { + "type": "ci", + "section": "Continuous Integration", + "hidden": true + } + ] +} + diff --git a/service_contracts/.gitignore 
b/service_contracts/.gitignore deleted file mode 100644 index c9a8661d..00000000 --- a/service_contracts/.gitignore +++ /dev/null @@ -1,15 +0,0 @@ -# Compiler files -cache/ -out/ - -# Ignores development broadcast logs -broadcast/ - -# Node modules -node_modules/ - -# Lock files -package-lock.json - -# Ignore IDEs -.idea diff --git a/service_contracts/CONTRIBUTING.md b/service_contracts/CONTRIBUTING.md deleted file mode 100644 index 6630f7c2..00000000 --- a/service_contracts/CONTRIBUTING.md +++ /dev/null @@ -1,72 +0,0 @@ -# Contributing - -## Setup -After [installing forge](https://getfoundry.sh/introduction/installation/) and [jq](https://jqlang.github.io/jq/), -```sh -git clone git@github.com:FilOzone/filecoin-services.git -cd filecoin-services/service_contracts -make install # Install dependencies -make build # Build contracts -make gen # Generate code files -``` - -### Setup git hooks -To add a hook to run `forge fmt --check` on `git commit`: -```sh -cd $(git rev-parse --show-toplevel) -echo 'forge fmt --root $(git rev-parse --show-toplevel)/service_contracts --check || exit 1' > .git/hooks/pre-commit -chmod +x .git/hooks/pre-commit -``` - -## Building - -```sh -make build -# or with via-ir optimization: -forge build --via-ir -``` - -## Testing -```sh -make test -# or for more verbose output: -forge test -vvv -``` - -## Formatting -```sh -make fmt # Format code -make fmt-check # Check formatting -``` - -## Developing `view` methods for `extsload` - -Use `extsload` and `extsloadStruct` to make data `public`. - -New `view` methods should be added to `FilecoinWarmStorageServiceStateLibrary`. 
- -### Regenerating Code Files - -When you make changes to the storage layout or library, regenerate the necessary files: - -```sh -# Regenerate all files (recommended) -make gen - -# Or regenerate individual files: -make src/lib/FilecoinWarmStorageServiceLayout.sol # Storage layout -make src/lib/FilecoinWarmStorageServiceStateInternalLibrary.sol # Internal library -make src/FilecoinWarmStorageServiceStateView.sol # View contract -``` - -**Important Notes:** -- `FilecoinWarmStorageServiceStateInternalLibrary` and `FilecoinWarmStorageServiceStateView` are auto-generated from `FilecoinWarmStorageServiceStateLibrary` -- `FilecoinWarmStorageServiceLayout` is auto-generated from the storage layout of `FilecoinWarmStorageService` -- Always run `make gen` after modifying storage variables or the state library -- Use `make force-gen` if the generated files become corrupted - -### Deploy a new `FilecoinWarmStorageServiceStateView` -```sh -make src/FilecoinWarmStorageServiceStateView.sol -tools/generate_view_contract.sh -``` diff --git a/service_contracts/Makefile b/service_contracts/Makefile deleted file mode 100644 index 0b04f7ab..00000000 --- a/service_contracts/Makefile +++ /dev/null @@ -1,195 +0,0 @@ -# Makefile for Service Contracts - -# Variables -RPC_URL ?= -KEYSTORE ?= -PASSWORD ?= -CHALLENGE_FINALITY ?= - -# Default target -.PHONY: default -default: build test - -# All target including installation -.PHONY: all -all: install build test - -# Install dependencies -.PHONY: install -install: - forge install - -# Generated files -LAYOUT=src/lib/FilecoinWarmStorageServiceLayout.sol -INTERNAL_LIB=src/lib/FilecoinWarmStorageServiceStateInternalLibrary.sol -VIEW_CONTRACT=src/FilecoinWarmStorageServiceStateView.sol -LIBRARY_JSON=out/FilecoinWarmStorageServiceStateLibrary.sol/FilecoinWarmStorageServiceStateLibrary.json - -# Build target -.PHONY: build -build: - forge build --via-ir - -# Storage layout generation -$(LAYOUT): tools/generate_storage_layout.sh 
src/FilecoinWarmStorageService.sol - $^ | forge fmt -r - > $@ - -# JSON compilation for library (depends on the source library) -$(LIBRARY_JSON): src/lib/FilecoinWarmStorageServiceStateLibrary.sol - forge build --via-ir $^ - -# View contract generation (depends on JSON) -$(VIEW_CONTRACT): tools/generate_view_contract.sh $(LIBRARY_JSON) - $^ | forge fmt -r - > $@ - -# Internal library generation (simple sed transform) -%StateInternalLibrary.sol: %StateLibrary.sol - sed -e 's/public/internal/g' -e 's/StateLibrary/StateInternalLibrary/g' $< | awk 'NR == 4 { print "// Code generated - DO NOT EDIT.\n// This file is a generated binding and any changes will be lost.\n// Generated with make $@\n"} {print}' | forge fmt -r - > $@ - -# Main code generation target with proper dependencies -.PHONY: gen -gen: check-tools $(LAYOUT) $(INTERNAL_LIB) $(VIEW_CONTRACT) - @echo "Code generation complete" - -# Force regeneration - useful when things are broken -.PHONY: force-gen -force-gen: clean-gen gen - @echo "Force regeneration complete" - -# Clean generated files only -.PHONY: clean-gen -clean-gen: - @echo "Removing generated files..." - @rm -f $(LAYOUT) $(INTERNAL_LIB) $(VIEW_CONTRACT) - @rm -rf out/FilecoinWarmStorageServiceStateLibrary.sol - @echo "Generated files removed" - -# Check required tools -.PHONY: check-tools -check-tools: - @which jq >/dev/null 2>&1 || (echo "Error: jq is required but not installed" && exit 1) - @JQ_VERSION=$$(jq --version 2>/dev/null | sed 's/jq-//'); \ - MAJOR=$$(echo $$JQ_VERSION | cut -d. -f1); \ - MINOR=$$(echo $$JQ_VERSION | cut -d. -f2); \ - if [ "$$MAJOR" -lt 1 ] || ([ "$$MAJOR" -eq 1 ] && [ "$$MINOR" -lt 7 ]); then \ - echo "Warning: jq version $$JQ_VERSION detected. 
Version 1.7+ recommended for full functionality"; \ - fi - @which forge >/dev/null 2>&1 || (echo "Error: forge is required but not installed" && exit 1) - -# Test target -.PHONY: test -test: - forge test --via-ir -vv - -# Clean build artifacts -.PHONY: clean -clean: - forge clean - rm -rf out cache - -# Clean everything including generated files and ABIs -.PHONY: clean-all -clean-all: clean clean-gen clean-abi - @echo "All artifacts cleaned" - -# Format code -.PHONY: fmt -fmt: - forge fmt - -# Check formatting -.PHONY: fmt-check -fmt-check: - forge fmt --check - -# Coverage -# Note: Using --ir-minimum due to "stack too deep" errors in Payments.sol -# This may result in less accurate source mappings but is necessary for coverage to run -.PHONY: coverage -coverage: - @echo "Running coverage with --ir-minimum (required due to stack depth issues)..." - forge coverage --ir-minimum --report summary - -# Coverage with LCOV report (for CI) -.PHONY: coverage-lcov -coverage-lcov: - @echo "Generating LCOV coverage report with --ir-minimum..." - forge coverage --ir-minimum --report lcov - -.PHONY: contract-size-check -contract-size-check: - @echo "Checking contract sizes..." - bash tools/check-contract-size.sh src/ - -# ABI Management - -# Core contracts we publish ABIs for -ABI_CONTRACTS := \ - FilecoinWarmStorageService \ - FilecoinWarmStorageServiceStateLibrary \ - FilecoinWarmStorageServiceStateView \ - Payments \ - PDPVerifier \ - ServiceProviderRegistry \ - SessionKeyRegistry - -# Generate ABI file targets -ABI_FILES := $(addprefix abi/,$(addsuffix .abi.json,$(ABI_CONTRACTS))) - -# Define a template for ABI extraction; we use a template approach instead of -# defining a global pattern rule because patterns for files that don't exist -# at parse-time will be rejected by make, so we'll be explicit instead. -define ABI_RULE -abi/$(1).abi.json: out/$(1).sol/$(1).json | abi - @echo "Extracting ABI for $(1)..." 
- @jq '.abi' $$< > $$@ - -# Mark JSON as coming from build (unless it's the library which has its own rule) -ifneq ($(1),FilecoinWarmStorageServiceStateLibrary) -out/$(1).sol/$(1).json: | build -endif -endef - -# Generate rules for each contract using the template above -$(foreach contract,$(ABI_CONTRACTS),$(eval $(call ABI_RULE,$(contract)))) - -# Directory for ABIs -abi: - @mkdir -p abi - -# Update ABIs -.PHONY: update-abi -update-abi: $(ABI_FILES) - -# Clean just the ABIs -.PHONY: clean-abi -clean-abi: - @rm -rf abi - -# Help target -.PHONY: help -help: - @echo "Available targets:" - @echo " install - Install dependencies (forge and npm)" - @echo " build - Build contracts" - @echo " test - Run tests" - @echo " clean - Clean build artifacts" - @echo " fmt - Format code" - @echo " fmt-check - Check code formatting" - @echo " coverage - Generate test coverage summary (uses --ir-minimum)" - @echo " coverage-lcov - Generate LCOV coverage report for CI" - @echo "" - @echo "Code generation targets:" - @echo " gen - Generate code (layout, internal lib, view contract)" - @echo " force-gen - Clean and regenerate all files (use when broken)" - @echo " clean-gen - Remove all generated files" - @echo "" - @echo " help - Show this help message" - @echo " contract-size-check - Check contract sizes against EIP-170 and EIP-3860 limits" - @echo "" - @echo "ABI management targets:" - @echo " update-abi - Update checked-in ABIs in abi/ directory (incremental)" - @echo " clean-abi - Remove all ABI files" - @echo "" - @echo "Full cleanup:" - @echo " clean-all - Remove all artifacts (build, generated, ABIs)" diff --git a/service_contracts/README.md b/service_contracts/README.md deleted file mode 100644 index 759ca4fb..00000000 --- a/service_contracts/README.md +++ /dev/null @@ -1,139 +0,0 @@ -# Service Contracts - -This directory contains the smart contracts for different Filecoin services using [Filecoin payment service](https://github.com/FilOzone/filecoin-services-payments). 
- -## Structure - -- `src/` - Contract source files - - `FilecoinWarmStorageService.sol` - A service contract with [PDP](https://github.com/FilOzone/pdp) (Proof of Data Possession) and payment integration - - `FilecoinWarmStorageServiceStateView.sol` - View contract for reading `FilecoinWarmStorageService` with `eth_call`. - - `src/lib` - Library source files - - `FilecoinWarmStorageServiceLayout.sol` - Constants conveying the storage layout of `FilecoinWarmStorageService` - - `FilecoinWarmStorageServiceStateInternalLibrary.sol` - `internal` library for embedding logic to read `FilecoinWarmStorageService` - - `FilecoinWarmStorageServiceStateLibrary.sol` - `public` library for using `delegatecall` to read `FilecoinWarmStorageService` - - `SignatureVerificationLib.sol` - external library with EIP-712 metadata hashing and signature verification -- `test/` - Test files - - `FilecoinWarmStorageService.t.sol` - Tests for the service contract -- `tools/` - Deployment and utility scripts - - `create_data_set_with_payments.sh` - Script to create data sets with payments - - `deploy-warm-storage-calibnet.sh` - Deployment script for Warm Storage service on Calibnet - - `deploy-all-warm-storage-calibnet.sh` - Deployment script for all Warm Storage contracts on Calibnet - - Note: deployment scripts now deploy and link `SignatureVerificationLib` when deploying `FilecoinWarmStorageService`. - The scripts will deploy `src/lib/SignatureVerificationLib.sol` (or simulate it in dry-run) and pass the library address - to `forge create` via the `--libraries` flag so the service implementation is correctly linked. 
-- `lib/` - Dependencies (git submodules) - - `forge-std` - Foundry standard library - - `openzeppelin-contracts` - OpenZeppelin contracts - - `openzeppelin-contracts-upgradeable` - OpenZeppelin upgradeable contracts - - `fws-payments` - Filecoin Services payments contract - - `pdp` - PDP verifier contract (from main branch) - - -### Extsload -The allow for many view methods within the 24 KiB contract size constraint, viewing is done with `extsload` and `extsloadStruct`. -There are three recommended ways to access `view` methods. - -#### View Contract -To call the view methods off-chain, for example with `eth_call`, use the `FilecoinWarmStorageServiceStateView`: -```sh -forge build -jq .abi out/FilecoinWarmStorageServiceStateView.sol/FilecoinWarmStorageServiceStateView.json -``` - -For example to call `paymentsContractAddress()` on `$WARM_STORAGE_VIEW_ADDRESS`: -```json -{ - "id": 1, - "method": "eth_call", - "params": [ - { - "to": $WARM_STORAGE_VIEW_ADDRESS, - "data": "0xbc471469" - }, - "latest" - ] -} -``` - -`FilecoinWarmStorageServiceStateView` is best for off-chain queries but the following `library` approaches are better for smart contracts. - -#### Internal Library -To embed the view methods you use into your smart contract, use the `FilecoinWarmStorageServiceStateInternalLibrary`: -```solidity - using FilecoinWarmStorageServiceStateInternalLibrary for FilecoinWarmStorageService; -``` - -Compared to other approaches this will use the least gas. - -#### Public Library -For your smart contract to call the view methods with a `delegatecall` into a shared library, use the `FilecoinWarmStorageServiceStateLibrary`: -```solidity - using FilecoinWarmStorageServiceStateLibrary for FilecoinWarmStorageService; -``` - -Compared to other approaches this will have the least codesize. 
- - -## Contributing -See [CONTRIBUTING.md](./CONTRIBUTING.md) - -### Building - -```bash -make build -# or simply: -make -``` - -### Testing - -```bash -make test -``` - -### Code Generation - -The project includes several auto-generated files. To regenerate them: - -```bash -# Generate all files (layout, internal library, view contract) -make gen - -# Force regeneration if files are corrupted -make force-gen - -# Clean all generated files -make clean-gen -``` - -### ABI Management - -The project maintains checked-in ABI files in the `abi/` directory for use by scripts and external tools: - -```bash -# Update checked-in ABIs after contract changes -make update-abi -``` - -This extracts the ABIs from the compiled contracts and saves them as JSON files: -- `abi/FilecoinWarmStorageService.abi.json` - Main service contract ABI -- `abi/FilecoinWarmStorageServiceStateView.abi.json` - View contract ABI - -These ABIs are used by the code generation scripts in the `gen` target and should be updated whenever contract interfaces change. - -Note: `SignatureVerificationLib.sol` is an external library (public functions); if you rely on its ABI for external tooling or verification, -you may also extract the library ABI via `make update-abi` after compilation. The primary consumer is the service implementation which -is linked at deploy time by the scripts in `tools/`. 
- -### Dependencies - -The project depends on: -- PDP contracts from https://github.com/FilOzone/pdp.git (main branch) -- Filecoin Services Payments from https://github.com/FilOzone/filecoin-services-payments -- OpenZeppelin contracts (both standard and upgradeable) -- Forge standard library - -All dependencies are managed as git submodules and initialized with: -```bash -git submodule update --init --recursive -``` diff --git a/service_contracts/abi/FilecoinWarmStorageService.abi.json b/service_contracts/abi/FilecoinWarmStorageService.abi.json deleted file mode 100644 index b270af77..00000000 --- a/service_contracts/abi/FilecoinWarmStorageService.abi.json +++ /dev/null @@ -1,2247 +0,0 @@ -[ - { - "type": "constructor", - "inputs": [ - { - "name": "_pdpVerifierAddress", - "type": "address", - "internalType": "address" - }, - { - "name": "_paymentsContractAddress", - "type": "address", - "internalType": "address" - }, - { - "name": "_usdfc", - "type": "address", - "internalType": "contract IERC20Metadata" - }, - { - "name": "_filBeamBeneficiaryAddress", - "type": "address", - "internalType": "address" - }, - { - "name": "_serviceProviderRegistry", - "type": "address", - "internalType": "contract ServiceProviderRegistry" - }, - { - "name": "_sessionKeyRegistry", - "type": "address", - "internalType": "contract SessionKeyRegistry" - } - ], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "UPGRADE_INTERFACE_VERSION", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "string", - "internalType": "string" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "VERSION", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "string", - "internalType": "string" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "addApprovedProvider", - "inputs": [ - { - "name": "providerId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - 
}, - { - "type": "function", - "name": "announcePlannedUpgrade", - "inputs": [ - { - "name": "plannedUpgrade", - "type": "tuple", - "internalType": "struct FilecoinWarmStorageService.PlannedUpgrade", - "components": [ - { - "name": "nextImplementation", - "type": "address", - "internalType": "address" - }, - { - "name": "afterEpoch", - "type": "uint96", - "internalType": "uint96" - } - ] - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "calculateRatesPerEpoch", - "inputs": [ - { - "name": "totalBytes", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "storageRate", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "cacheMissRate", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "cdnRate", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "configureProvingPeriod", - "inputs": [ - { - "name": "_maxProvingPeriod", - "type": "uint64", - "internalType": "uint64" - }, - { - "name": "_challengeWindowSize", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "dataSetCreated", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "serviceProvider", - "type": "address", - "internalType": "address" - }, - { - "name": "extraData", - "type": "bytes", - "internalType": "bytes" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "dataSetDeleted", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "", - "type": "bytes", - "internalType": "bytes" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": 
"eip712Domain", - "inputs": [], - "outputs": [ - { - "name": "fields", - "type": "bytes1", - "internalType": "bytes1" - }, - { - "name": "name", - "type": "string", - "internalType": "string" - }, - { - "name": "version", - "type": "string", - "internalType": "string" - }, - { - "name": "chainId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "verifyingContract", - "type": "address", - "internalType": "address" - }, - { - "name": "salt", - "type": "bytes32", - "internalType": "bytes32" - }, - { - "name": "extensions", - "type": "uint256[]", - "internalType": "uint256[]" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "extsload", - "inputs": [ - { - "name": "slot", - "type": "bytes32", - "internalType": "bytes32" - } - ], - "outputs": [ - { - "name": "", - "type": "bytes32", - "internalType": "bytes32" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "extsloadStruct", - "inputs": [ - { - "name": "slot", - "type": "bytes32", - "internalType": "bytes32" - }, - { - "name": "size", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "", - "type": "bytes32[]", - "internalType": "bytes32[]" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "filBeamBeneficiaryAddress", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "address", - "internalType": "address" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getEffectiveRates", - "inputs": [], - "outputs": [ - { - "name": "serviceFee", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "spPayment", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getProvingPeriodForEpoch", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "epoch", - "type": "uint256", - "internalType": "uint256" - } - ], - 
"outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getServicePrice", - "inputs": [], - "outputs": [ - { - "name": "pricing", - "type": "tuple", - "internalType": "struct FilecoinWarmStorageService.ServicePricing", - "components": [ - { - "name": "pricePerTiBPerMonthNoCDN", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "pricePerTiBPerMonthWithCDN", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "tokenAddress", - "type": "address", - "internalType": "contract IERC20" - }, - { - "name": "epochsPerMonth", - "type": "uint256", - "internalType": "uint256" - } - ] - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "initialize", - "inputs": [ - { - "name": "_maxProvingPeriod", - "type": "uint64", - "internalType": "uint64" - }, - { - "name": "_challengeWindowSize", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "_filBeamControllerAddress", - "type": "address", - "internalType": "address" - }, - { - "name": "_name", - "type": "string", - "internalType": "string" - }, - { - "name": "_description", - "type": "string", - "internalType": "string" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "isEpochProven", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "epoch", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "", - "type": "bool", - "internalType": "bool" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "migrate", - "inputs": [ - { - "name": "_viewContract", - "type": "address", - "internalType": "address" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "nextProvingPeriod", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - 
"internalType": "uint256" - }, - { - "name": "challengeEpoch", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "leafCount", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "", - "type": "bytes", - "internalType": "bytes" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "owner", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "address", - "internalType": "address" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "paymentsContractAddress", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "address", - "internalType": "address" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "pdpVerifierAddress", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "address", - "internalType": "address" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "piecesAdded", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "firstAdded", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "pieceData", - "type": "tuple[]", - "internalType": "struct Cids.Cid[]", - "components": [ - { - "name": "data", - "type": "bytes", - "internalType": "bytes" - } - ] - }, - { - "name": "extraData", - "type": "bytes", - "internalType": "bytes" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "piecesScheduledRemove", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "pieceIds", - "type": "uint256[]", - "internalType": "uint256[]" - }, - { - "name": "extraData", - "type": "bytes", - "internalType": "bytes" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "possessionProven", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - }, - { - 
"name": "", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "challengeCount", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "proxiableUUID", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "bytes32", - "internalType": "bytes32" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "railTerminated", - "inputs": [ - { - "name": "railId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "terminator", - "type": "address", - "internalType": "address" - }, - { - "name": "endEpoch", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "removeApprovedProvider", - "inputs": [ - { - "name": "providerId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "index", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "renounceOwnership", - "inputs": [], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "serviceProviderRegistry", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "address", - "internalType": "contract ServiceProviderRegistry" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "sessionKeyRegistry", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "address", - "internalType": "contract SessionKeyRegistry" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "setViewContract", - "inputs": [ - { - "name": "_viewContract", - "type": "address", - "internalType": "address" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "settleFilBeamPaymentRails", - 
"inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "cdnAmount", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "cacheMissAmount", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "storageProviderChanged", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "oldServiceProvider", - "type": "address", - "internalType": "address" - }, - { - "name": "newServiceProvider", - "type": "address", - "internalType": "address" - }, - { - "name": "", - "type": "bytes", - "internalType": "bytes" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "terminateCDNService", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "terminateService", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "topUpCDNPaymentRails", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "cdnAmountToAdd", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "cacheMissAmountToAdd", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "transferFilBeamController", - "inputs": [ - { - "name": "newController", - "type": "address", - "internalType": "address" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "transferOwnership", - "inputs": [ - { - "name": "newOwner", - "type": "address", - "internalType": "address" - } - ], - "outputs": [], 
- "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "updateServiceCommission", - "inputs": [ - { - "name": "newCommissionBps", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "upgradeToAndCall", - "inputs": [ - { - "name": "newImplementation", - "type": "address", - "internalType": "address" - }, - { - "name": "data", - "type": "bytes", - "internalType": "bytes" - } - ], - "outputs": [], - "stateMutability": "payable" - }, - { - "type": "function", - "name": "usdfcTokenAddress", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "address", - "internalType": "contract IERC20Metadata" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "validatePayment", - "inputs": [ - { - "name": "railId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "proposedAmount", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "fromEpoch", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "toEpoch", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "result", - "type": "tuple", - "internalType": "struct IValidator.ValidationResult", - "components": [ - { - "name": "modifiedAmount", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "settleUpto", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "note", - "type": "string", - "internalType": "string" - } - ] - } - ], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "viewContractAddress", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "address", - "internalType": "address" - } - ], - "stateMutability": "view" - }, - { - "type": "event", - "name": "CDNPaymentRailsToppedUp", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "indexed": true, 
- "internalType": "uint256" - }, - { - "name": "cdnAmountAdded", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - }, - { - "name": "totalCdnLockup", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - }, - { - "name": "cacheMissAmountAdded", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - }, - { - "name": "totalCacheMissLockup", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "CDNPaymentTerminated", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "indexed": true, - "internalType": "uint256" - }, - { - "name": "endEpoch", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - }, - { - "name": "cacheMissRailId", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - }, - { - "name": "cdnRailId", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "CDNServiceTerminated", - "inputs": [ - { - "name": "caller", - "type": "address", - "indexed": true, - "internalType": "address" - }, - { - "name": "dataSetId", - "type": "uint256", - "indexed": true, - "internalType": "uint256" - }, - { - "name": "cacheMissRailId", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - }, - { - "name": "cdnRailId", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "ContractUpgraded", - "inputs": [ - { - "name": "version", - "type": "string", - "indexed": false, - "internalType": "string" - }, - { - "name": "implementation", - "type": "address", - "indexed": false, - "internalType": "address" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "DataSetCreated", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "indexed": true, - "internalType": "uint256" - }, - { - "name": 
"providerId", - "type": "uint256", - "indexed": true, - "internalType": "uint256" - }, - { - "name": "pdpRailId", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - }, - { - "name": "cacheMissRailId", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - }, - { - "name": "cdnRailId", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - }, - { - "name": "payer", - "type": "address", - "indexed": false, - "internalType": "address" - }, - { - "name": "serviceProvider", - "type": "address", - "indexed": false, - "internalType": "address" - }, - { - "name": "payee", - "type": "address", - "indexed": false, - "internalType": "address" - }, - { - "name": "metadataKeys", - "type": "string[]", - "indexed": false, - "internalType": "string[]" - }, - { - "name": "metadataValues", - "type": "string[]", - "indexed": false, - "internalType": "string[]" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "DataSetServiceProviderChanged", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "indexed": true, - "internalType": "uint256" - }, - { - "name": "oldServiceProvider", - "type": "address", - "indexed": true, - "internalType": "address" - }, - { - "name": "newServiceProvider", - "type": "address", - "indexed": true, - "internalType": "address" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "EIP712DomainChanged", - "inputs": [], - "anonymous": false - }, - { - "type": "event", - "name": "FaultRecord", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "indexed": true, - "internalType": "uint256" - }, - { - "name": "periodsFaulted", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - }, - { - "name": "deadline", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "FilBeamControllerChanged", - "inputs": [ - { - "name": "oldController", - "type": "address", - 
"indexed": false, - "internalType": "address" - }, - { - "name": "newController", - "type": "address", - "indexed": false, - "internalType": "address" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "FilecoinServiceDeployed", - "inputs": [ - { - "name": "name", - "type": "string", - "indexed": false, - "internalType": "string" - }, - { - "name": "description", - "type": "string", - "indexed": false, - "internalType": "string" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "Initialized", - "inputs": [ - { - "name": "version", - "type": "uint64", - "indexed": false, - "internalType": "uint64" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "OwnershipTransferred", - "inputs": [ - { - "name": "previousOwner", - "type": "address", - "indexed": true, - "internalType": "address" - }, - { - "name": "newOwner", - "type": "address", - "indexed": true, - "internalType": "address" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "PDPPaymentTerminated", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "indexed": true, - "internalType": "uint256" - }, - { - "name": "endEpoch", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - }, - { - "name": "pdpRailId", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "PaymentArbitrated", - "inputs": [ - { - "name": "railId", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - }, - { - "name": "dataSetId", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - }, - { - "name": "originalAmount", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - }, - { - "name": "modifiedAmount", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - }, - { - "name": "faultedEpochs", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - } - ], - "anonymous": false - }, - { - 
"type": "event", - "name": "PieceAdded", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "indexed": true, - "internalType": "uint256" - }, - { - "name": "pieceId", - "type": "uint256", - "indexed": true, - "internalType": "uint256" - }, - { - "name": "pieceCid", - "type": "tuple", - "indexed": false, - "internalType": "struct Cids.Cid", - "components": [ - { - "name": "data", - "type": "bytes", - "internalType": "bytes" - } - ] - }, - { - "name": "keys", - "type": "string[]", - "indexed": false, - "internalType": "string[]" - }, - { - "name": "values", - "type": "string[]", - "indexed": false, - "internalType": "string[]" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "ProviderApproved", - "inputs": [ - { - "name": "providerId", - "type": "uint256", - "indexed": true, - "internalType": "uint256" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "ProviderUnapproved", - "inputs": [ - { - "name": "providerId", - "type": "uint256", - "indexed": true, - "internalType": "uint256" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "RailRateUpdated", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "indexed": true, - "internalType": "uint256" - }, - { - "name": "railId", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - }, - { - "name": "newRate", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "ServiceTerminated", - "inputs": [ - { - "name": "caller", - "type": "address", - "indexed": true, - "internalType": "address" - }, - { - "name": "dataSetId", - "type": "uint256", - "indexed": true, - "internalType": "uint256" - }, - { - "name": "pdpRailId", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - }, - { - "name": "cacheMissRailId", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - }, - { - "name": "cdnRailId", - "type": "uint256", - 
"indexed": false, - "internalType": "uint256" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "UpgradeAnnounced", - "inputs": [ - { - "name": "plannedUpgrade", - "type": "tuple", - "indexed": false, - "internalType": "struct FilecoinWarmStorageService.PlannedUpgrade", - "components": [ - { - "name": "nextImplementation", - "type": "address", - "internalType": "address" - }, - { - "name": "afterEpoch", - "type": "uint96", - "internalType": "uint96" - } - ] - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "Upgraded", - "inputs": [ - { - "name": "implementation", - "type": "address", - "indexed": true, - "internalType": "address" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "ViewContractSet", - "inputs": [ - { - "name": "viewContract", - "type": "address", - "indexed": true, - "internalType": "address" - } - ], - "anonymous": false - }, - { - "type": "error", - "name": "AddressAlreadySet", - "inputs": [ - { - "name": "field", - "type": "uint8", - "internalType": "enum Errors.AddressField" - } - ] - }, - { - "type": "error", - "name": "AddressEmptyCode", - "inputs": [ - { - "name": "target", - "type": "address", - "internalType": "address" - } - ] - }, - { - "type": "error", - "name": "CDNPaymentAlreadyTerminated", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "CacheMissPaymentAlreadyTerminated", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "CallerNotPayer", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "expectedPayer", - "type": "address", - "internalType": "address" - }, - { - "name": "caller", - "type": "address", - "internalType": "address" - } - ] - }, - { - "type": "error", - "name": "CallerNotPayerOrPayee", - "inputs": [ - { - "name": "dataSetId", - "type": 
"uint256", - "internalType": "uint256" - }, - { - "name": "expectedPayer", - "type": "address", - "internalType": "address" - }, - { - "name": "expectedPayee", - "type": "address", - "internalType": "address" - }, - { - "name": "caller", - "type": "address", - "internalType": "address" - } - ] - }, - { - "type": "error", - "name": "CallerNotPayments", - "inputs": [ - { - "name": "expected", - "type": "address", - "internalType": "address" - }, - { - "name": "actual", - "type": "address", - "internalType": "address" - } - ] - }, - { - "type": "error", - "name": "ChallengeWindowTooEarly", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "windowStart", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "nowBlock", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "ClientDataSetAlreadyRegistered", - "inputs": [ - { - "name": "clientDataSetId", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "CommissionExceedsMaximum", - "inputs": [ - { - "name": "commissionType", - "type": "uint8", - "internalType": "enum Errors.CommissionType" - }, - { - "name": "max", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "actual", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "DataSetNotFoundForRail", - "inputs": [ - { - "name": "railId", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "DataSetNotRegistered", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "DataSetPaymentAlreadyTerminated", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "DataSetPaymentBeyondEndEpoch", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - 
"internalType": "uint256" - }, - { - "name": "pdpEndEpoch", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "currentBlock", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "DivisionByZero", - "inputs": [] - }, - { - "type": "error", - "name": "DuplicateMetadataKey", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "key", - "type": "string", - "internalType": "string" - } - ] - }, - { - "type": "error", - "name": "ERC1967InvalidImplementation", - "inputs": [ - { - "name": "implementation", - "type": "address", - "internalType": "address" - } - ] - }, - { - "type": "error", - "name": "ERC1967NonPayable", - "inputs": [] - }, - { - "type": "error", - "name": "ExtraDataRequired", - "inputs": [] - }, - { - "type": "error", - "name": "FailedCall", - "inputs": [] - }, - { - "type": "error", - "name": "FilBeamServiceNotConfigured", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "InvalidChallengeCount", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "minExpected", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "actual", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "InvalidChallengeEpoch", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "minAllowed", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "maxAllowed", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "actual", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "InvalidChallengeWindowSize", - "inputs": [ - { - "name": "maxProvingPeriod", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "challengeWindowSize", - 
"type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "InvalidDataSetId", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "InvalidEpochRange", - "inputs": [ - { - "name": "fromEpoch", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "toEpoch", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "InvalidInitialization", - "inputs": [] - }, - { - "type": "error", - "name": "InvalidServiceDescriptionLength", - "inputs": [ - { - "name": "length", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "InvalidServiceNameLength", - "inputs": [ - { - "name": "length", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "InvalidTopUpAmount", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "MaxProvingPeriodZero", - "inputs": [] - }, - { - "type": "error", - "name": "MetadataArrayCountMismatch", - "inputs": [ - { - "name": "metadataArrayCount", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "pieceCount", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "MetadataKeyAndValueLengthMismatch", - "inputs": [ - { - "name": "keysLength", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "valuesLength", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "MetadataKeyExceedsMaxLength", - "inputs": [ - { - "name": "index", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "maxAllowed", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "length", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "MetadataValueExceedsMaxLength", - 
"inputs": [ - { - "name": "index", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "maxAllowed", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "length", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "NextProvingPeriodAlreadyCalled", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "periodDeadline", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "nowBlock", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "NoPDPPaymentRail", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "NotInitializing", - "inputs": [] - }, - { - "type": "error", - "name": "OldServiceProviderMismatch", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "expected", - "type": "address", - "internalType": "address" - }, - { - "name": "actual", - "type": "address", - "internalType": "address" - } - ] - }, - { - "type": "error", - "name": "OnlyFilBeamControllerAllowed", - "inputs": [ - { - "name": "expected", - "type": "address", - "internalType": "address" - }, - { - "name": "actual", - "type": "address", - "internalType": "address" - } - ] - }, - { - "type": "error", - "name": "OnlyPDPVerifierAllowed", - "inputs": [ - { - "name": "expected", - "type": "address", - "internalType": "address" - }, - { - "name": "actual", - "type": "address", - "internalType": "address" - } - ] - }, - { - "type": "error", - "name": "OwnableInvalidOwner", - "inputs": [ - { - "name": "owner", - "type": "address", - "internalType": "address" - } - ] - }, - { - "type": "error", - "name": "OwnableUnauthorizedAccount", - "inputs": [ - { - "name": "account", - "type": "address", - "internalType": "address" - } - ] - }, - { - "type": "error", - "name": 
"PaymentRailsNotFinalized", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "pdpEndEpoch", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "ProofAlreadySubmitted", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "ProviderAlreadyApproved", - "inputs": [ - { - "name": "providerId", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "ProviderNotApproved", - "inputs": [ - { - "name": "provider", - "type": "address", - "internalType": "address" - }, - { - "name": "providerId", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "ProviderNotInApprovedList", - "inputs": [ - { - "name": "providerId", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "ProviderNotRegistered", - "inputs": [ - { - "name": "provider", - "type": "address", - "internalType": "address" - } - ] - }, - { - "type": "error", - "name": "ProvingNotStarted", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "ProvingPeriodPassed", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "deadline", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "nowBlock", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "RailNotAssociated", - "inputs": [ - { - "name": "railId", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "ServiceContractMustTerminateRail", - "inputs": [] - }, - { - "type": "error", - "name": "TooManyMetadataKeys", - "inputs": [ - { - "name": "maxAllowed", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": 
"keysLength", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "UUPSUnauthorizedCallContext", - "inputs": [] - }, - { - "type": "error", - "name": "UUPSUnsupportedProxiableUUID", - "inputs": [ - { - "name": "slot", - "type": "bytes32", - "internalType": "bytes32" - } - ] - }, - { - "type": "error", - "name": "ZeroAddress", - "inputs": [ - { - "name": "field", - "type": "uint8", - "internalType": "enum Errors.AddressField" - } - ] - } -] diff --git a/service_contracts/abi/FilecoinWarmStorageServiceStateLibrary.abi.json b/service_contracts/abi/FilecoinWarmStorageServiceStateLibrary.abi.json deleted file mode 100644 index 4b43f2b7..00000000 --- a/service_contracts/abi/FilecoinWarmStorageServiceStateLibrary.abi.json +++ /dev/null @@ -1,775 +0,0 @@ -[ - { - "type": "function", - "name": "challengeWindow", - "inputs": [ - { - "name": "service", - "type": "FilecoinWarmStorageService", - "internalType": "contract FilecoinWarmStorageService" - } - ], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "clientDataSetIds", - "inputs": [ - { - "name": "service", - "type": "FilecoinWarmStorageService", - "internalType": "contract FilecoinWarmStorageService" - }, - { - "name": "payer", - "type": "address", - "internalType": "address" - }, - { - "name": "clientDataSetId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "clientDataSets", - "inputs": [ - { - "name": "service", - "type": "FilecoinWarmStorageService", - "internalType": "contract FilecoinWarmStorageService" - }, - { - "name": "payer", - "type": "address", - "internalType": "address" - } - ], - "outputs": [ - { - "name": "dataSetIds", - "type": "uint256[]", - "internalType": "uint256[]" - } - ], - 
"stateMutability": "view" - }, - { - "type": "function", - "name": "filBeamControllerAddress", - "inputs": [ - { - "name": "service", - "type": "FilecoinWarmStorageService", - "internalType": "contract FilecoinWarmStorageService" - } - ], - "outputs": [ - { - "name": "", - "type": "address", - "internalType": "address" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getAllDataSetMetadata", - "inputs": [ - { - "name": "service", - "type": "FilecoinWarmStorageService", - "internalType": "contract FilecoinWarmStorageService" - }, - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "keys", - "type": "string[]", - "internalType": "string[]" - }, - { - "name": "values", - "type": "string[]", - "internalType": "string[]" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getAllPieceMetadata", - "inputs": [ - { - "name": "service", - "type": "FilecoinWarmStorageService", - "internalType": "contract FilecoinWarmStorageService" - }, - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "pieceId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "keys", - "type": "string[]", - "internalType": "string[]" - }, - { - "name": "values", - "type": "string[]", - "internalType": "string[]" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getApprovedProviders", - "inputs": [ - { - "name": "service", - "type": "FilecoinWarmStorageService", - "internalType": "contract FilecoinWarmStorageService" - }, - { - "name": "offset", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "limit", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "providerIds", - "type": "uint256[]", - "internalType": "uint256[]" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getApprovedProvidersLength", - 
"inputs": [ - { - "name": "service", - "type": "FilecoinWarmStorageService", - "internalType": "contract FilecoinWarmStorageService" - } - ], - "outputs": [ - { - "name": "count", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getChallengesPerProof", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "uint64", - "internalType": "uint64" - } - ], - "stateMutability": "pure" - }, - { - "type": "function", - "name": "getClientDataSets", - "inputs": [ - { - "name": "service", - "type": "FilecoinWarmStorageService", - "internalType": "contract FilecoinWarmStorageService" - }, - { - "name": "client", - "type": "address", - "internalType": "address" - } - ], - "outputs": [ - { - "name": "infos", - "type": "tuple[]", - "internalType": "struct FilecoinWarmStorageService.DataSetInfoView[]", - "components": [ - { - "name": "pdpRailId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "cacheMissRailId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "cdnRailId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "payer", - "type": "address", - "internalType": "address" - }, - { - "name": "payee", - "type": "address", - "internalType": "address" - }, - { - "name": "serviceProvider", - "type": "address", - "internalType": "address" - }, - { - "name": "commissionBps", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "clientDataSetId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "pdpEndEpoch", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "providerId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - } - ] - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getDataSet", - "inputs": [ - { - "name": "service", - "type": "FilecoinWarmStorageService", - "internalType": 
"contract FilecoinWarmStorageService" - }, - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "info", - "type": "tuple", - "internalType": "struct FilecoinWarmStorageService.DataSetInfoView", - "components": [ - { - "name": "pdpRailId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "cacheMissRailId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "cdnRailId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "payer", - "type": "address", - "internalType": "address" - }, - { - "name": "payee", - "type": "address", - "internalType": "address" - }, - { - "name": "serviceProvider", - "type": "address", - "internalType": "address" - }, - { - "name": "commissionBps", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "clientDataSetId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "pdpEndEpoch", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "providerId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - } - ] - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getDataSetMetadata", - "inputs": [ - { - "name": "service", - "type": "FilecoinWarmStorageService", - "internalType": "contract FilecoinWarmStorageService" - }, - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "key", - "type": "string", - "internalType": "string" - } - ], - "outputs": [ - { - "name": "exists", - "type": "bool", - "internalType": "bool" - }, - { - "name": "value", - "type": "string", - "internalType": "string" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getDataSetSizeInBytes", - "inputs": [ - { - "name": "leafCount", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "", - "type": "uint256", - 
"internalType": "uint256" - } - ], - "stateMutability": "pure" - }, - { - "type": "function", - "name": "getDataSetStatus", - "inputs": [ - { - "name": "service", - "type": "FilecoinWarmStorageService", - "internalType": "contract FilecoinWarmStorageService" - }, - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "status", - "type": "FilecoinWarmStorageService.DataSetStatus", - "internalType": "enum FilecoinWarmStorageService.DataSetStatus" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getMaxProvingPeriod", - "inputs": [ - { - "name": "service", - "type": "FilecoinWarmStorageService", - "internalType": "contract FilecoinWarmStorageService" - } - ], - "outputs": [ - { - "name": "", - "type": "uint64", - "internalType": "uint64" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getPDPConfig", - "inputs": [ - { - "name": "service", - "type": "FilecoinWarmStorageService", - "internalType": "contract FilecoinWarmStorageService" - } - ], - "outputs": [ - { - "name": "maxProvingPeriod", - "type": "uint64", - "internalType": "uint64" - }, - { - "name": "challengeWindowSize", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "challengesPerProof", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "initChallengeWindowStart", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getPieceMetadata", - "inputs": [ - { - "name": "service", - "type": "FilecoinWarmStorageService", - "internalType": "contract FilecoinWarmStorageService" - }, - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "pieceId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "key", - "type": "string", - "internalType": "string" - } - ], - "outputs": [ - { - "name": "exists", - "type": "bool", - "internalType": 
"bool" - }, - { - "name": "value", - "type": "string", - "internalType": "string" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "isProviderApproved", - "inputs": [ - { - "name": "service", - "type": "FilecoinWarmStorageService", - "internalType": "contract FilecoinWarmStorageService" - }, - { - "name": "providerId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "", - "type": "bool", - "internalType": "bool" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "nextPDPChallengeWindowStart", - "inputs": [ - { - "name": "service", - "type": "FilecoinWarmStorageService", - "internalType": "contract FilecoinWarmStorageService" - }, - { - "name": "setId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "nextUpgrade", - "inputs": [ - { - "name": "service", - "type": "FilecoinWarmStorageService", - "internalType": "contract FilecoinWarmStorageService" - } - ], - "outputs": [ - { - "name": "nextImplementation", - "type": "address", - "internalType": "address" - }, - { - "name": "afterEpoch", - "type": "uint96", - "internalType": "uint96" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "provenPeriods", - "inputs": [ - { - "name": "service", - "type": "FilecoinWarmStorageService", - "internalType": "contract FilecoinWarmStorageService" - }, - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "periodId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "", - "type": "bool", - "internalType": "bool" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "provenThisPeriod", - "inputs": [ - { - "name": "service", - "type": "FilecoinWarmStorageService", - "internalType": "contract 
FilecoinWarmStorageService" - }, - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "", - "type": "bool", - "internalType": "bool" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "provingActivationEpoch", - "inputs": [ - { - "name": "service", - "type": "FilecoinWarmStorageService", - "internalType": "contract FilecoinWarmStorageService" - }, - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "provingDeadline", - "inputs": [ - { - "name": "service", - "type": "FilecoinWarmStorageService", - "internalType": "contract FilecoinWarmStorageService" - }, - { - "name": "setId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "railToDataSet", - "inputs": [ - { - "name": "service", - "type": "FilecoinWarmStorageService", - "internalType": "contract FilecoinWarmStorageService" - }, - { - "name": "railId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "serviceCommissionBps", - "inputs": [ - { - "name": "service", - "type": "FilecoinWarmStorageService", - "internalType": "contract FilecoinWarmStorageService" - } - ], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "error", - "name": "ProvingPeriodNotInitialized", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - } - ] - } -] diff --git 
a/service_contracts/abi/FilecoinWarmStorageServiceStateView.abi.json b/service_contracts/abi/FilecoinWarmStorageServiceStateView.abi.json deleted file mode 100644 index 0e202aea..00000000 --- a/service_contracts/abi/FilecoinWarmStorageServiceStateView.abi.json +++ /dev/null @@ -1,672 +0,0 @@ -[ - { - "type": "constructor", - "inputs": [ - { - "name": "_service", - "type": "address", - "internalType": "contract FilecoinWarmStorageService" - } - ], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "challengeWindow", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "clientDataSetIds", - "inputs": [ - { - "name": "payer", - "type": "address", - "internalType": "address" - }, - { - "name": "clientDataSetId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "clientDataSets", - "inputs": [ - { - "name": "payer", - "type": "address", - "internalType": "address" - } - ], - "outputs": [ - { - "name": "dataSetIds", - "type": "uint256[]", - "internalType": "uint256[]" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "filBeamControllerAddress", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "address", - "internalType": "address" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getAllDataSetMetadata", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "keys", - "type": "string[]", - "internalType": "string[]" - }, - { - "name": "values", - "type": "string[]", - "internalType": "string[]" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getAllPieceMetadata", - "inputs": [ - { - "name": 
"dataSetId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "pieceId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "keys", - "type": "string[]", - "internalType": "string[]" - }, - { - "name": "values", - "type": "string[]", - "internalType": "string[]" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getApprovedProviders", - "inputs": [ - { - "name": "offset", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "limit", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "providerIds", - "type": "uint256[]", - "internalType": "uint256[]" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getApprovedProvidersLength", - "inputs": [], - "outputs": [ - { - "name": "count", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getChallengesPerProof", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "uint64", - "internalType": "uint64" - } - ], - "stateMutability": "pure" - }, - { - "type": "function", - "name": "getClientDataSets", - "inputs": [ - { - "name": "client", - "type": "address", - "internalType": "address" - } - ], - "outputs": [ - { - "name": "infos", - "type": "tuple[]", - "internalType": "struct FilecoinWarmStorageService.DataSetInfoView[]", - "components": [ - { - "name": "pdpRailId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "cacheMissRailId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "cdnRailId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "payer", - "type": "address", - "internalType": "address" - }, - { - "name": "payee", - "type": "address", - "internalType": "address" - }, - { - "name": "serviceProvider", - "type": "address", - "internalType": "address" - }, - { - "name": "commissionBps", - "type": "uint256", - 
"internalType": "uint256" - }, - { - "name": "clientDataSetId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "pdpEndEpoch", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "providerId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - } - ] - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getDataSet", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "info", - "type": "tuple", - "internalType": "struct FilecoinWarmStorageService.DataSetInfoView", - "components": [ - { - "name": "pdpRailId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "cacheMissRailId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "cdnRailId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "payer", - "type": "address", - "internalType": "address" - }, - { - "name": "payee", - "type": "address", - "internalType": "address" - }, - { - "name": "serviceProvider", - "type": "address", - "internalType": "address" - }, - { - "name": "commissionBps", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "clientDataSetId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "pdpEndEpoch", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "providerId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - } - ] - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getDataSetMetadata", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "key", - "type": "string", - "internalType": "string" - } - ], - "outputs": [ - { - "name": "exists", - "type": "bool", - "internalType": "bool" - }, - { - "name": "value", - 
"type": "string", - "internalType": "string" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getDataSetSizeInBytes", - "inputs": [ - { - "name": "leafCount", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "pure" - }, - { - "type": "function", - "name": "getDataSetStatus", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "status", - "type": "uint8", - "internalType": "enum FilecoinWarmStorageService.DataSetStatus" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getMaxProvingPeriod", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "uint64", - "internalType": "uint64" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getPDPConfig", - "inputs": [], - "outputs": [ - { - "name": "maxProvingPeriod", - "type": "uint64", - "internalType": "uint64" - }, - { - "name": "challengeWindowSize", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "challengesPerProof", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "initChallengeWindowStart", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getPieceMetadata", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "pieceId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "key", - "type": "string", - "internalType": "string" - } - ], - "outputs": [ - { - "name": "exists", - "type": "bool", - "internalType": "bool" - }, - { - "name": "value", - "type": "string", - "internalType": "string" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "isProviderApproved", - "inputs": [ - { - "name": "providerId", - "type": "uint256", 
- "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "", - "type": "bool", - "internalType": "bool" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "nextPDPChallengeWindowStart", - "inputs": [ - { - "name": "setId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "nextUpgrade", - "inputs": [], - "outputs": [ - { - "name": "nextImplementation", - "type": "address", - "internalType": "address" - }, - { - "name": "afterEpoch", - "type": "uint96", - "internalType": "uint96" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "provenPeriods", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "periodId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "", - "type": "bool", - "internalType": "bool" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "provenThisPeriod", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "", - "type": "bool", - "internalType": "bool" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "provingActivationEpoch", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "provingDeadline", - "inputs": [ - { - "name": "setId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "railToDataSet", - "inputs": [ - { - "name": 
"railId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "service", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "address", - "internalType": "contract FilecoinWarmStorageService" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "serviceCommissionBps", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "error", - "name": "ProvingPeriodNotInitialized", - "inputs": [ - { - "name": "dataSetId", - "type": "uint256", - "internalType": "uint256" - } - ] - } -] diff --git a/service_contracts/abi/PDPVerifier.abi.json b/service_contracts/abi/PDPVerifier.abi.json deleted file mode 100644 index dea36e78..00000000 --- a/service_contracts/abi/PDPVerifier.abi.json +++ /dev/null @@ -1,1284 +0,0 @@ -[ - { - "type": "constructor", - "inputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "EXTRA_DATA_MAX_SIZE", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "MAX_ENQUEUED_REMOVALS", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "MAX_PIECE_SIZE_LOG2", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "NO_CHALLENGE_SCHEDULED", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "NO_PROVEN_EPOCH", - "inputs": [], - "outputs": [ - { - "name": 
"", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "UPGRADE_INTERFACE_VERSION", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "string", - "internalType": "string" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "VERSION", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "string", - "internalType": "string" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "addPieces", - "inputs": [ - { - "name": "setId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "listenerAddr", - "type": "address", - "internalType": "address" - }, - { - "name": "pieceData", - "type": "tuple[]", - "internalType": "struct Cids.Cid[]", - "components": [ - { - "name": "data", - "type": "bytes", - "internalType": "bytes" - } - ] - }, - { - "name": "extraData", - "type": "bytes", - "internalType": "bytes" - } - ], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "payable" - }, - { - "type": "function", - "name": "calculateProofFee", - "inputs": [ - { - "name": "setId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "calculateProofFeeForSize", - "inputs": [ - { - "name": "rawSize", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "claimDataSetStorageProvider", - "inputs": [ - { - "name": "setId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "extraData", - "type": "bytes", - "internalType": "bytes" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": 
"createDataSet", - "inputs": [ - { - "name": "listenerAddr", - "type": "address", - "internalType": "address" - }, - { - "name": "extraData", - "type": "bytes", - "internalType": "bytes" - } - ], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "payable" - }, - { - "type": "function", - "name": "dataSetLive", - "inputs": [ - { - "name": "setId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "", - "type": "bool", - "internalType": "bool" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "deleteDataSet", - "inputs": [ - { - "name": "setId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "extraData", - "type": "bytes", - "internalType": "bytes" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "feeEffectiveTime", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "uint64", - "internalType": "uint64" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "feePerTiB", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "uint96", - "internalType": "uint96" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "findPieceIds", - "inputs": [ - { - "name": "setId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "leafIndexs", - "type": "uint256[]", - "internalType": "uint256[]" - } - ], - "outputs": [ - { - "name": "", - "type": "tuple[]", - "internalType": "struct IPDPTypes.PieceIdAndOffset[]", - "components": [ - { - "name": "pieceId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "offset", - "type": "uint256", - "internalType": "uint256" - } - ] - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getActivePieceCount", - "inputs": [ - { - "name": "setId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": 
"activeCount", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getActivePieces", - "inputs": [ - { - "name": "setId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "offset", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "limit", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "pieces", - "type": "tuple[]", - "internalType": "struct Cids.Cid[]", - "components": [ - { - "name": "data", - "type": "bytes", - "internalType": "bytes" - } - ] - }, - { - "name": "pieceIds", - "type": "uint256[]", - "internalType": "uint256[]" - }, - { - "name": "rawSizes", - "type": "uint256[]", - "internalType": "uint256[]" - }, - { - "name": "hasMore", - "type": "bool", - "internalType": "bool" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getChallengeFinality", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getChallengeRange", - "inputs": [ - { - "name": "setId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getDataSetLastProvenEpoch", - "inputs": [ - { - "name": "setId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getDataSetLeafCount", - "inputs": [ - { - "name": "setId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getDataSetListener", - "inputs": [ - { 
- "name": "setId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "", - "type": "address", - "internalType": "address" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getDataSetStorageProvider", - "inputs": [ - { - "name": "setId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "", - "type": "address", - "internalType": "address" - }, - { - "name": "", - "type": "address", - "internalType": "address" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getNextChallengeEpoch", - "inputs": [ - { - "name": "setId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getNextDataSetId", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "uint64", - "internalType": "uint64" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getNextPieceId", - "inputs": [ - { - "name": "setId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getPieceCid", - "inputs": [ - { - "name": "setId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "pieceId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "", - "type": "tuple", - "internalType": "struct Cids.Cid", - "components": [ - { - "name": "data", - "type": "bytes", - "internalType": "bytes" - } - ] - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getPieceLeafCount", - "inputs": [ - { - "name": "setId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "pieceId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ 
- { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getRandomness", - "inputs": [ - { - "name": "epoch", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getScheduledRemovals", - "inputs": [ - { - "name": "setId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "", - "type": "uint256[]", - "internalType": "uint256[]" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "initialize", - "inputs": [ - { - "name": "_challengeFinality", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "migrate", - "inputs": [], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "nextProvingPeriod", - "inputs": [ - { - "name": "setId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "challengeEpoch", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "extraData", - "type": "bytes", - "internalType": "bytes" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "owner", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "address", - "internalType": "address" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "pieceChallengable", - "inputs": [ - { - "name": "setId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "pieceId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "", - "type": "bool", - "internalType": "bool" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "pieceLive", - "inputs": [ - { - "name": "setId", - "type": 
"uint256", - "internalType": "uint256" - }, - { - "name": "pieceId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "", - "type": "bool", - "internalType": "bool" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "proposeDataSetStorageProvider", - "inputs": [ - { - "name": "setId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "newStorageProvider", - "type": "address", - "internalType": "address" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "proposedFeePerTiB", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "uint96", - "internalType": "uint96" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "provePossession", - "inputs": [ - { - "name": "setId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "proofs", - "type": "tuple[]", - "internalType": "struct IPDPTypes.Proof[]", - "components": [ - { - "name": "leaf", - "type": "bytes32", - "internalType": "bytes32" - }, - { - "name": "proof", - "type": "bytes32[]", - "internalType": "bytes32[]" - } - ] - } - ], - "outputs": [], - "stateMutability": "payable" - }, - { - "type": "function", - "name": "proxiableUUID", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "bytes32", - "internalType": "bytes32" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "renounceOwnership", - "inputs": [], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "schedulePieceDeletions", - "inputs": [ - { - "name": "setId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "pieceIds", - "type": "uint256[]", - "internalType": "uint256[]" - }, - { - "name": "extraData", - "type": "bytes", - "internalType": "bytes" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "transferOwnership", - "inputs": [ - { - 
"name": "newOwner", - "type": "address", - "internalType": "address" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "updateProofFee", - "inputs": [ - { - "name": "newFeePerTiB", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "upgradeToAndCall", - "inputs": [ - { - "name": "newImplementation", - "type": "address", - "internalType": "address" - }, - { - "name": "data", - "type": "bytes", - "internalType": "bytes" - } - ], - "outputs": [], - "stateMutability": "payable" - }, - { - "type": "event", - "name": "ContractUpgraded", - "inputs": [ - { - "name": "version", - "type": "string", - "indexed": false, - "internalType": "string" - }, - { - "name": "implementation", - "type": "address", - "indexed": false, - "internalType": "address" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "DataSetCreated", - "inputs": [ - { - "name": "setId", - "type": "uint256", - "indexed": true, - "internalType": "uint256" - }, - { - "name": "storageProvider", - "type": "address", - "indexed": true, - "internalType": "address" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "DataSetDeleted", - "inputs": [ - { - "name": "setId", - "type": "uint256", - "indexed": true, - "internalType": "uint256" - }, - { - "name": "deletedLeafCount", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "DataSetEmpty", - "inputs": [ - { - "name": "setId", - "type": "uint256", - "indexed": true, - "internalType": "uint256" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "FeeUpdateProposed", - "inputs": [ - { - "name": "currentFee", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - }, - { - "name": "newFee", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - }, - { - 
"name": "effectiveTime", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "Initialized", - "inputs": [ - { - "name": "version", - "type": "uint64", - "indexed": false, - "internalType": "uint64" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "NextProvingPeriod", - "inputs": [ - { - "name": "setId", - "type": "uint256", - "indexed": true, - "internalType": "uint256" - }, - { - "name": "challengeEpoch", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - }, - { - "name": "leafCount", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "OwnershipTransferred", - "inputs": [ - { - "name": "previousOwner", - "type": "address", - "indexed": true, - "internalType": "address" - }, - { - "name": "newOwner", - "type": "address", - "indexed": true, - "internalType": "address" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "PiecesAdded", - "inputs": [ - { - "name": "setId", - "type": "uint256", - "indexed": true, - "internalType": "uint256" - }, - { - "name": "pieceIds", - "type": "uint256[]", - "indexed": false, - "internalType": "uint256[]" - }, - { - "name": "pieceCids", - "type": "tuple[]", - "indexed": false, - "internalType": "struct Cids.Cid[]", - "components": [ - { - "name": "data", - "type": "bytes", - "internalType": "bytes" - } - ] - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "PiecesRemoved", - "inputs": [ - { - "name": "setId", - "type": "uint256", - "indexed": true, - "internalType": "uint256" - }, - { - "name": "pieceIds", - "type": "uint256[]", - "indexed": false, - "internalType": "uint256[]" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "PossessionProven", - "inputs": [ - { - "name": "setId", - "type": "uint256", - "indexed": true, - "internalType": "uint256" - }, - { - "name": 
"challenges", - "type": "tuple[]", - "indexed": false, - "internalType": "struct IPDPTypes.PieceIdAndOffset[]", - "components": [ - { - "name": "pieceId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "offset", - "type": "uint256", - "internalType": "uint256" - } - ] - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "ProofFeePaid", - "inputs": [ - { - "name": "setId", - "type": "uint256", - "indexed": true, - "internalType": "uint256" - }, - { - "name": "fee", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "StorageProviderChanged", - "inputs": [ - { - "name": "setId", - "type": "uint256", - "indexed": true, - "internalType": "uint256" - }, - { - "name": "oldStorageProvider", - "type": "address", - "indexed": true, - "internalType": "address" - }, - { - "name": "newStorageProvider", - "type": "address", - "indexed": true, - "internalType": "address" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "Upgraded", - "inputs": [ - { - "name": "implementation", - "type": "address", - "indexed": true, - "internalType": "address" - } - ], - "anonymous": false - }, - { - "type": "error", - "name": "AddressEmptyCode", - "inputs": [ - { - "name": "target", - "type": "address", - "internalType": "address" - } - ] - }, - { - "type": "error", - "name": "ERC1967InvalidImplementation", - "inputs": [ - { - "name": "implementation", - "type": "address", - "internalType": "address" - } - ] - }, - { - "type": "error", - "name": "ERC1967NonPayable", - "inputs": [] - }, - { - "type": "error", - "name": "FailedCall", - "inputs": [] - }, - { - "type": "error", - "name": "IndexedError", - "inputs": [ - { - "name": "idx", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "msg", - "type": "string", - "internalType": "string" - } - ] - }, - { - "type": "error", - "name": "InvalidInitialization", - "inputs": [] - }, - { - "type": 
"error", - "name": "NotInitializing", - "inputs": [] - }, - { - "type": "error", - "name": "OwnableInvalidOwner", - "inputs": [ - { - "name": "owner", - "type": "address", - "internalType": "address" - } - ] - }, - { - "type": "error", - "name": "OwnableUnauthorizedAccount", - "inputs": [ - { - "name": "account", - "type": "address", - "internalType": "address" - } - ] - }, - { - "type": "error", - "name": "UUPSUnauthorizedCallContext", - "inputs": [] - }, - { - "type": "error", - "name": "UUPSUnsupportedProxiableUUID", - "inputs": [ - { - "name": "slot", - "type": "bytes32", - "internalType": "bytes32" - } - ] - } -] diff --git a/service_contracts/abi/Payments.abi.json b/service_contracts/abi/Payments.abi.json deleted file mode 100644 index 0872557a..00000000 --- a/service_contracts/abi/Payments.abi.json +++ /dev/null @@ -1,2424 +0,0 @@ -[ - { - "type": "constructor", - "inputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "COMMISSION_MAX_BPS", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "NETWORK_FEE_DENOMINATOR", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "NETWORK_FEE_NUMERATOR", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "accounts", - "inputs": [ - { - "name": "token", - "type": "address", - "internalType": "contract IERC20" - }, - { - "name": "owner", - "type": "address", - "internalType": "address" - } - ], - "outputs": [ - { - "name": "funds", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "lockupCurrent", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "lockupRate", - "type": "uint256", - 
"internalType": "uint256" - }, - { - "name": "lockupLastSettledAt", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "auctionInfo", - "inputs": [ - { - "name": "token", - "type": "address", - "internalType": "contract IERC20" - } - ], - "outputs": [ - { - "name": "startPrice", - "type": "uint88", - "internalType": "uint88" - }, - { - "name": "startTime", - "type": "uint168", - "internalType": "uint168" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "burnForFees", - "inputs": [ - { - "name": "token", - "type": "address", - "internalType": "contract IERC20" - }, - { - "name": "recipient", - "type": "address", - "internalType": "address" - }, - { - "name": "requested", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [], - "stateMutability": "payable" - }, - { - "type": "function", - "name": "createRail", - "inputs": [ - { - "name": "token", - "type": "address", - "internalType": "contract IERC20" - }, - { - "name": "from", - "type": "address", - "internalType": "address" - }, - { - "name": "to", - "type": "address", - "internalType": "address" - }, - { - "name": "validator", - "type": "address", - "internalType": "address" - }, - { - "name": "commissionRateBps", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "serviceFeeRecipient", - "type": "address", - "internalType": "address" - } - ], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "deposit", - "inputs": [ - { - "name": "token", - "type": "address", - "internalType": "contract IERC20" - }, - { - "name": "to", - "type": "address", - "internalType": "address" - }, - { - "name": "amount", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [], - "stateMutability": "payable" - }, - { - "type": "function", - "name": 
"depositWithAuthorization", - "inputs": [ - { - "name": "token", - "type": "address", - "internalType": "contract IERC3009" - }, - { - "name": "to", - "type": "address", - "internalType": "address" - }, - { - "name": "amount", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "validAfter", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "validBefore", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "nonce", - "type": "bytes32", - "internalType": "bytes32" - }, - { - "name": "v", - "type": "uint8", - "internalType": "uint8" - }, - { - "name": "r", - "type": "bytes32", - "internalType": "bytes32" - }, - { - "name": "s", - "type": "bytes32", - "internalType": "bytes32" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "depositWithAuthorizationAndApproveOperator", - "inputs": [ - { - "name": "token", - "type": "address", - "internalType": "contract IERC3009" - }, - { - "name": "to", - "type": "address", - "internalType": "address" - }, - { - "name": "amount", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "validAfter", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "validBefore", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "nonce", - "type": "bytes32", - "internalType": "bytes32" - }, - { - "name": "v", - "type": "uint8", - "internalType": "uint8" - }, - { - "name": "r", - "type": "bytes32", - "internalType": "bytes32" - }, - { - "name": "s", - "type": "bytes32", - "internalType": "bytes32" - }, - { - "name": "operator", - "type": "address", - "internalType": "address" - }, - { - "name": "rateAllowance", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "lockupAllowance", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "maxLockupPeriod", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - 
"type": "function", - "name": "depositWithAuthorizationAndIncreaseOperatorApproval", - "inputs": [ - { - "name": "token", - "type": "address", - "internalType": "contract IERC3009" - }, - { - "name": "to", - "type": "address", - "internalType": "address" - }, - { - "name": "amount", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "validAfter", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "validBefore", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "nonce", - "type": "bytes32", - "internalType": "bytes32" - }, - { - "name": "v", - "type": "uint8", - "internalType": "uint8" - }, - { - "name": "r", - "type": "bytes32", - "internalType": "bytes32" - }, - { - "name": "s", - "type": "bytes32", - "internalType": "bytes32" - }, - { - "name": "operator", - "type": "address", - "internalType": "address" - }, - { - "name": "rateAllowanceIncrease", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "lockupAllowanceIncrease", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "depositWithPermit", - "inputs": [ - { - "name": "token", - "type": "address", - "internalType": "contract IERC20" - }, - { - "name": "to", - "type": "address", - "internalType": "address" - }, - { - "name": "amount", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "deadline", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "v", - "type": "uint8", - "internalType": "uint8" - }, - { - "name": "r", - "type": "bytes32", - "internalType": "bytes32" - }, - { - "name": "s", - "type": "bytes32", - "internalType": "bytes32" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "depositWithPermitAndApproveOperator", - "inputs": [ - { - "name": "token", - "type": "address", - "internalType": "contract IERC20" - }, - { - "name": "to", - "type": 
"address", - "internalType": "address" - }, - { - "name": "amount", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "deadline", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "v", - "type": "uint8", - "internalType": "uint8" - }, - { - "name": "r", - "type": "bytes32", - "internalType": "bytes32" - }, - { - "name": "s", - "type": "bytes32", - "internalType": "bytes32" - }, - { - "name": "operator", - "type": "address", - "internalType": "address" - }, - { - "name": "rateAllowance", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "lockupAllowance", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "maxLockupPeriod", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "depositWithPermitAndIncreaseOperatorApproval", - "inputs": [ - { - "name": "token", - "type": "address", - "internalType": "contract IERC20" - }, - { - "name": "to", - "type": "address", - "internalType": "address" - }, - { - "name": "amount", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "deadline", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "v", - "type": "uint8", - "internalType": "uint8" - }, - { - "name": "r", - "type": "bytes32", - "internalType": "bytes32" - }, - { - "name": "s", - "type": "bytes32", - "internalType": "bytes32" - }, - { - "name": "operator", - "type": "address", - "internalType": "address" - }, - { - "name": "rateAllowanceIncrease", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "lockupAllowanceIncrease", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "getAccountInfoIfSettled", - "inputs": [ - { - "name": "token", - "type": "address", - "internalType": "contract IERC20" - }, - { - "name": "owner", - "type": "address", - "internalType": 
"address" - } - ], - "outputs": [ - { - "name": "fundedUntilEpoch", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "currentFunds", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "availableFunds", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "currentLockupRate", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getRail", - "inputs": [ - { - "name": "railId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "", - "type": "tuple", - "internalType": "struct Payments.RailView", - "components": [ - { - "name": "token", - "type": "address", - "internalType": "contract IERC20" - }, - { - "name": "from", - "type": "address", - "internalType": "address" - }, - { - "name": "to", - "type": "address", - "internalType": "address" - }, - { - "name": "operator", - "type": "address", - "internalType": "address" - }, - { - "name": "validator", - "type": "address", - "internalType": "address" - }, - { - "name": "paymentRate", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "lockupPeriod", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "lockupFixed", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "settledUpTo", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "endEpoch", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "commissionRateBps", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "serviceFeeRecipient", - "type": "address", - "internalType": "address" - } - ] - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getRailsForPayeeAndToken", - "inputs": [ - { - "name": "payee", - "type": "address", - "internalType": "address" - }, - { - "name": "token", - "type": "address", - "internalType": "contract IERC20" - }, - { - "name": "offset", - "type": "uint256", - 
"internalType": "uint256" - }, - { - "name": "limit", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "results", - "type": "tuple[]", - "internalType": "struct Payments.RailInfo[]", - "components": [ - { - "name": "railId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "isTerminated", - "type": "bool", - "internalType": "bool" - }, - { - "name": "endEpoch", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "name": "nextOffset", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "total", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getRailsForPayerAndToken", - "inputs": [ - { - "name": "payer", - "type": "address", - "internalType": "address" - }, - { - "name": "token", - "type": "address", - "internalType": "contract IERC20" - }, - { - "name": "offset", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "limit", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "results", - "type": "tuple[]", - "internalType": "struct Payments.RailInfo[]", - "components": [ - { - "name": "railId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "isTerminated", - "type": "bool", - "internalType": "bool" - }, - { - "name": "endEpoch", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "name": "nextOffset", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "total", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getRateChangeQueueSize", - "inputs": [ - { - "name": "railId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "increaseOperatorApproval", - 
"inputs": [ - { - "name": "token", - "type": "address", - "internalType": "contract IERC20" - }, - { - "name": "operator", - "type": "address", - "internalType": "address" - }, - { - "name": "rateAllowanceIncrease", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "lockupAllowanceIncrease", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "modifyRailLockup", - "inputs": [ - { - "name": "railId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "period", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "lockupFixed", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "modifyRailPayment", - "inputs": [ - { - "name": "railId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "newRate", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "oneTimePayment", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "operatorApprovals", - "inputs": [ - { - "name": "token", - "type": "address", - "internalType": "contract IERC20" - }, - { - "name": "client", - "type": "address", - "internalType": "address" - }, - { - "name": "operator", - "type": "address", - "internalType": "address" - } - ], - "outputs": [ - { - "name": "isApproved", - "type": "bool", - "internalType": "bool" - }, - { - "name": "rateAllowance", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "lockupAllowance", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "rateUsage", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "lockupUsage", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "maxLockupPeriod", - "type": "uint256", - "internalType": "uint256" - } 
- ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "setOperatorApproval", - "inputs": [ - { - "name": "token", - "type": "address", - "internalType": "contract IERC20" - }, - { - "name": "operator", - "type": "address", - "internalType": "address" - }, - { - "name": "approved", - "type": "bool", - "internalType": "bool" - }, - { - "name": "rateAllowance", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "lockupAllowance", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "maxLockupPeriod", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "settleRail", - "inputs": [ - { - "name": "railId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "untilEpoch", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "totalSettledAmount", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "totalNetPayeeAmount", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "totalOperatorCommission", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "totalNetworkFee", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "finalSettledEpoch", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "note", - "type": "string", - "internalType": "string" - } - ], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "settleTerminatedRailWithoutValidation", - "inputs": [ - { - "name": "railId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "totalSettledAmount", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "totalNetPayeeAmount", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "totalOperatorCommission", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "totalNetworkFee", - "type": "uint256", 
- "internalType": "uint256" - }, - { - "name": "finalSettledEpoch", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "note", - "type": "string", - "internalType": "string" - } - ], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "terminateRail", - "inputs": [ - { - "name": "railId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "withdraw", - "inputs": [ - { - "name": "token", - "type": "address", - "internalType": "contract IERC20" - }, - { - "name": "amount", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "withdrawTo", - "inputs": [ - { - "name": "token", - "type": "address", - "internalType": "contract IERC20" - }, - { - "name": "to", - "type": "address", - "internalType": "address" - }, - { - "name": "amount", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "event", - "name": "AccountLockupSettled", - "inputs": [ - { - "name": "token", - "type": "address", - "indexed": true, - "internalType": "contract IERC20" - }, - { - "name": "owner", - "type": "address", - "indexed": true, - "internalType": "address" - }, - { - "name": "lockupCurrent", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - }, - { - "name": "lockupRate", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - }, - { - "name": "lockupLastSettledAt", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "DepositRecorded", - "inputs": [ - { - "name": "token", - "type": "address", - "indexed": true, - "internalType": "contract IERC20" - }, - { - "name": "from", - "type": "address", - "indexed": true, - "internalType": "address" - }, - { - "name": "to", - 
"type": "address", - "indexed": true, - "internalType": "address" - }, - { - "name": "amount", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "OperatorApprovalUpdated", - "inputs": [ - { - "name": "token", - "type": "address", - "indexed": true, - "internalType": "contract IERC20" - }, - { - "name": "client", - "type": "address", - "indexed": true, - "internalType": "address" - }, - { - "name": "operator", - "type": "address", - "indexed": true, - "internalType": "address" - }, - { - "name": "approved", - "type": "bool", - "indexed": false, - "internalType": "bool" - }, - { - "name": "rateAllowance", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - }, - { - "name": "lockupAllowance", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - }, - { - "name": "maxLockupPeriod", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "RailCreated", - "inputs": [ - { - "name": "railId", - "type": "uint256", - "indexed": true, - "internalType": "uint256" - }, - { - "name": "payer", - "type": "address", - "indexed": true, - "internalType": "address" - }, - { - "name": "payee", - "type": "address", - "indexed": true, - "internalType": "address" - }, - { - "name": "token", - "type": "address", - "indexed": false, - "internalType": "contract IERC20" - }, - { - "name": "operator", - "type": "address", - "indexed": false, - "internalType": "address" - }, - { - "name": "validator", - "type": "address", - "indexed": false, - "internalType": "address" - }, - { - "name": "serviceFeeRecipient", - "type": "address", - "indexed": false, - "internalType": "address" - }, - { - "name": "commissionRateBps", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "RailFinalized", - "inputs": [ - { - 
"name": "railId", - "type": "uint256", - "indexed": true, - "internalType": "uint256" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "RailLockupModified", - "inputs": [ - { - "name": "railId", - "type": "uint256", - "indexed": true, - "internalType": "uint256" - }, - { - "name": "oldLockupPeriod", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - }, - { - "name": "newLockupPeriod", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - }, - { - "name": "oldLockupFixed", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - }, - { - "name": "newLockupFixed", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "RailOneTimePaymentProcessed", - "inputs": [ - { - "name": "railId", - "type": "uint256", - "indexed": true, - "internalType": "uint256" - }, - { - "name": "netPayeeAmount", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - }, - { - "name": "operatorCommission", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - }, - { - "name": "networkFee", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "RailRateModified", - "inputs": [ - { - "name": "railId", - "type": "uint256", - "indexed": true, - "internalType": "uint256" - }, - { - "name": "oldRate", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - }, - { - "name": "newRate", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "RailSettled", - "inputs": [ - { - "name": "railId", - "type": "uint256", - "indexed": true, - "internalType": "uint256" - }, - { - "name": "totalSettledAmount", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - }, - { - "name": "totalNetPayeeAmount", - "type": "uint256", - "indexed": 
false, - "internalType": "uint256" - }, - { - "name": "operatorCommission", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - }, - { - "name": "networkFee", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - }, - { - "name": "settledUpTo", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "RailTerminated", - "inputs": [ - { - "name": "railId", - "type": "uint256", - "indexed": true, - "internalType": "uint256" - }, - { - "name": "by", - "type": "address", - "indexed": true, - "internalType": "address" - }, - { - "name": "endEpoch", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "WithdrawRecorded", - "inputs": [ - { - "name": "token", - "type": "address", - "indexed": true, - "internalType": "contract IERC20" - }, - { - "name": "from", - "type": "address", - "indexed": true, - "internalType": "address" - }, - { - "name": "to", - "type": "address", - "indexed": true, - "internalType": "address" - }, - { - "name": "amount", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - } - ], - "anonymous": false - }, - { - "type": "error", - "name": "CannotModifyTerminatedRailBeyondEndEpoch", - "inputs": [ - { - "name": "railId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "maxSettlementEpoch", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "blockNumber", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "CannotSettleFutureEpochs", - "inputs": [ - { - "name": "railId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "maxAllowedEpoch", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "attemptedEpoch", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": 
"CannotSettleTerminatedRailBeforeMaxEpoch", - "inputs": [ - { - "name": "railId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "requiredBlock", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "currentBlock", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "CommissionRateTooHigh", - "inputs": [ - { - "name": "maxAllowed", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "actual", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "CurrentLockupLessThanOldLockup", - "inputs": [ - { - "name": "token", - "type": "address", - "internalType": "contract IERC20" - }, - { - "name": "from", - "type": "address", - "internalType": "address" - }, - { - "name": "oldLockup", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "currentLockup", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "InsufficientCurrentLockup", - "inputs": [ - { - "name": "token", - "type": "address", - "internalType": "contract IERC20" - }, - { - "name": "from", - "type": "address", - "internalType": "address" - }, - { - "name": "currentLockup", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "lockupReduction", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "InsufficientFundsForOneTimePayment", - "inputs": [ - { - "name": "token", - "type": "address", - "internalType": "contract IERC20" - }, - { - "name": "from", - "type": "address", - "internalType": "address" - }, - { - "name": "required", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "actual", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "InsufficientFundsForSettlement", - "inputs": [ - { - "name": "token", - "type": "address", - "internalType": "contract IERC20" - }, - { - "name": "from", - "type": 
"address", - "internalType": "address" - }, - { - "name": "available", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "required", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "InsufficientLockupForSettlement", - "inputs": [ - { - "name": "token", - "type": "address", - "internalType": "contract IERC20" - }, - { - "name": "from", - "type": "address", - "internalType": "address" - }, - { - "name": "available", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "required", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "InsufficientNativeTokenForBurn", - "inputs": [ - { - "name": "required", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "sent", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "InsufficientUnlockedFunds", - "inputs": [ - { - "name": "available", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "requested", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "InvalidRateChangeQueueState", - "inputs": [ - { - "name": "nextRateChangeUntilEpoch", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "processedEpoch", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "InvalidTerminatedRailModification", - "inputs": [ - { - "name": "actualPeriod", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "actualLockupFixed", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "attemptedPeriod", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "attemptedLockupFixed", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "LockupExceedsFundsInvariant", - "inputs": [ - { - "name": "token", - "type": "address", - "internalType": "contract IERC20" - }, - { - 
"name": "account", - "type": "address", - "internalType": "address" - }, - { - "name": "lockupCurrent", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "fundsCurrent", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "LockupFixedIncreaseNotAllowedDueToInsufficientFunds", - "inputs": [ - { - "name": "token", - "type": "address", - "internalType": "contract IERC20" - }, - { - "name": "from", - "type": "address", - "internalType": "address" - }, - { - "name": "actualLockupFixed", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "attemptedLockupFixed", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "LockupInconsistencyDuringRailFinalization", - "inputs": [ - { - "name": "railId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "token", - "type": "address", - "internalType": "contract IERC20" - }, - { - "name": "from", - "type": "address", - "internalType": "address" - }, - { - "name": "expectedLockup", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "actualLockup", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "LockupNotSettledRateChangeNotAllowed", - "inputs": [ - { - "name": "railId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "from", - "type": "address", - "internalType": "address" - }, - { - "name": "isSettled", - "type": "bool", - "internalType": "bool" - }, - { - "name": "currentRate", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "attemptedRate", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "LockupPeriodChangeNotAllowedDueToInsufficientFunds", - "inputs": [ - { - "name": "token", - "type": "address", - "internalType": "contract IERC20" - }, - { - "name": "from", - "type": "address", - "internalType": "address" - }, - { - "name": 
"actualLockupPeriod", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "attemptedLockupPeriod", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "LockupPeriodExceedsOperatorMaximum", - "inputs": [ - { - "name": "token", - "type": "address", - "internalType": "contract IERC20" - }, - { - "name": "operator", - "type": "address", - "internalType": "address" - }, - { - "name": "maxAllowedPeriod", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "requestedPeriod", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "LockupRateInconsistent", - "inputs": [ - { - "name": "railId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "from", - "type": "address", - "internalType": "address" - }, - { - "name": "paymentRate", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "lockupRate", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "LockupRateLessThanOldRate", - "inputs": [ - { - "name": "railId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "from", - "type": "address", - "internalType": "address" - }, - { - "name": "lockupRate", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "oldRate", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "MissingServiceFeeRecipient", - "inputs": [] - }, - { - "type": "error", - "name": "MustSendExactNativeAmount", - "inputs": [ - { - "name": "required", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "sent", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "NativeTokenNotAccepted", - "inputs": [ - { - "name": "sent", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "NativeTokenNotSupported", - "inputs": [] - }, - { - "type": "error", - 
"name": "NativeTransferFailed", - "inputs": [ - { - "name": "to", - "type": "address", - "internalType": "address" - }, - { - "name": "amount", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "NoProgressInSettlement", - "inputs": [ - { - "name": "railId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "expectedSettledUpTo", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "actualSettledUpTo", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "NotAuthorizedToTerminateRail", - "inputs": [ - { - "name": "railId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "allowedClient", - "type": "address", - "internalType": "address" - }, - { - "name": "allowedOperator", - "type": "address", - "internalType": "address" - }, - { - "name": "caller", - "type": "address", - "internalType": "address" - } - ] - }, - { - "type": "error", - "name": "OneTimePaymentExceedsLockup", - "inputs": [ - { - "name": "railId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "available", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "required", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "OnlyRailClientAllowed", - "inputs": [ - { - "name": "expected", - "type": "address", - "internalType": "address" - }, - { - "name": "caller", - "type": "address", - "internalType": "address" - } - ] - }, - { - "type": "error", - "name": "OnlyRailOperatorAllowed", - "inputs": [ - { - "name": "expected", - "type": "address", - "internalType": "address" - }, - { - "name": "caller", - "type": "address", - "internalType": "address" - } - ] - }, - { - "type": "error", - "name": "OperatorLockupAllowanceExceeded", - "inputs": [ - { - "name": "allowed", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "attemptedUsage", - "type": "uint256", - "internalType": 
"uint256" - } - ] - }, - { - "type": "error", - "name": "OperatorNotApproved", - "inputs": [ - { - "name": "from", - "type": "address", - "internalType": "address" - }, - { - "name": "operator", - "type": "address", - "internalType": "address" - } - ] - }, - { - "type": "error", - "name": "OperatorRateAllowanceExceeded", - "inputs": [ - { - "name": "allowed", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "attemptedUsage", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "PRBMath_MulDiv_Overflow", - "inputs": [ - { - "name": "x", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "y", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "denominator", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "PRBMath_UD60x18_Exp2_InputTooBig", - "inputs": [ - { - "name": "x", - "type": "uint256", - "internalType": "UD60x18" - } - ] - }, - { - "type": "error", - "name": "RailAlreadyTerminated", - "inputs": [ - { - "name": "railId", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "RailInactiveOrSettled", - "inputs": [ - { - "name": "railId", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "RailNotTerminated", - "inputs": [ - { - "name": "railId", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "RateChangeNotAllowedOnTerminatedRail", - "inputs": [ - { - "name": "railId", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "RateChangeQueueNotEmpty", - "inputs": [ - { - "name": "nextUntilEpoch", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "ReentrancyGuardReentrantCall", - "inputs": [] - }, - { - "type": "error", - "name": "SafeERC20FailedOperation", - "inputs": [ - { - "name": "token", - "type": "address", 
- "internalType": "address" - } - ] - }, - { - "type": "error", - "name": "SignerMustBeMsgSender", - "inputs": [ - { - "name": "expected", - "type": "address", - "internalType": "address" - }, - { - "name": "actual", - "type": "address", - "internalType": "address" - } - ] - }, - { - "type": "error", - "name": "ValidatorModifiedAmountExceedsMaximum", - "inputs": [ - { - "name": "railId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "maxAllowed", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "attempted", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "ValidatorSettledBeforeSegmentStart", - "inputs": [ - { - "name": "railId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "allowedStart", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "attemptedStart", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "ValidatorSettledBeyondSegmentEnd", - "inputs": [ - { - "name": "railId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "allowedEnd", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "attemptedEnd", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "WithdrawAmountExceedsAccumulatedFees", - "inputs": [ - { - "name": "token", - "type": "address", - "internalType": "contract IERC20" - }, - { - "name": "available", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "requested", - "type": "uint256", - "internalType": "uint256" - } - ] - }, - { - "type": "error", - "name": "ZeroAddressNotAllowed", - "inputs": [ - { - "name": "varName", - "type": "string", - "internalType": "string" - } - ] - } -] diff --git a/service_contracts/abi/ServiceProviderRegistry.abi.json b/service_contracts/abi/ServiceProviderRegistry.abi.json deleted file mode 100644 index f3ffa682..00000000 --- 
a/service_contracts/abi/ServiceProviderRegistry.abi.json +++ /dev/null @@ -1,1774 +0,0 @@ -[ - { - "type": "constructor", - "inputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "BURN_ACTOR", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "address", - "internalType": "address" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "MAX_CAPABILITIES", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "MAX_CAPABILITY_KEY_LENGTH", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "MAX_CAPABILITY_VALUE_LENGTH", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "REGISTRATION_FEE", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "UPGRADE_INTERFACE_VERSION", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "string", - "internalType": "string" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "VERSION", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "string", - "internalType": "string" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "activeProductTypeProviderCount", - "inputs": [ - { - "name": "productType", - "type": "uint8", - "internalType": "enum ServiceProviderRegistryStorage.ProductType" - } - ], - "outputs": [ - { - "name": "count", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "activeProviderCount", - "inputs": [], - "outputs": [ - { - 
"name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "addProduct", - "inputs": [ - { - "name": "productType", - "type": "uint8", - "internalType": "enum ServiceProviderRegistryStorage.ProductType" - }, - { - "name": "productData", - "type": "bytes", - "internalType": "bytes" - }, - { - "name": "capabilityKeys", - "type": "string[]", - "internalType": "string[]" - }, - { - "name": "capabilityValues", - "type": "string[]", - "internalType": "string[]" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "addressToProviderId", - "inputs": [ - { - "name": "providerAddress", - "type": "address", - "internalType": "address" - } - ], - "outputs": [ - { - "name": "providerId", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "decodePDPOffering", - "inputs": [ - { - "name": "data", - "type": "bytes", - "internalType": "bytes" - } - ], - "outputs": [ - { - "name": "", - "type": "tuple", - "internalType": "struct ServiceProviderRegistryStorage.PDPOffering", - "components": [ - { - "name": "serviceURL", - "type": "string", - "internalType": "string" - }, - { - "name": "minPieceSizeInBytes", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "maxPieceSizeInBytes", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "ipniPiece", - "type": "bool", - "internalType": "bool" - }, - { - "name": "ipniIpfs", - "type": "bool", - "internalType": "bool" - }, - { - "name": "storagePricePerTibPerMonth", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "minProvingPeriodInEpochs", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "location", - "type": "string", - "internalType": "string" - }, - { - "name": "paymentTokenAddress", - "type": "address", - "internalType": "contract IERC20" - } - ] - } - ], - 
"stateMutability": "pure" - }, - { - "type": "function", - "name": "eip712Domain", - "inputs": [], - "outputs": [ - { - "name": "fields", - "type": "bytes1", - "internalType": "bytes1" - }, - { - "name": "name", - "type": "string", - "internalType": "string" - }, - { - "name": "version", - "type": "string", - "internalType": "string" - }, - { - "name": "chainId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "verifyingContract", - "type": "address", - "internalType": "address" - }, - { - "name": "salt", - "type": "bytes32", - "internalType": "bytes32" - }, - { - "name": "extensions", - "type": "uint256[]", - "internalType": "uint256[]" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "encodePDPOffering", - "inputs": [ - { - "name": "pdpOffering", - "type": "tuple", - "internalType": "struct ServiceProviderRegistryStorage.PDPOffering", - "components": [ - { - "name": "serviceURL", - "type": "string", - "internalType": "string" - }, - { - "name": "minPieceSizeInBytes", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "maxPieceSizeInBytes", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "ipniPiece", - "type": "bool", - "internalType": "bool" - }, - { - "name": "ipniIpfs", - "type": "bool", - "internalType": "bool" - }, - { - "name": "storagePricePerTibPerMonth", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "minProvingPeriodInEpochs", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "location", - "type": "string", - "internalType": "string" - }, - { - "name": "paymentTokenAddress", - "type": "address", - "internalType": "contract IERC20" - } - ] - } - ], - "outputs": [ - { - "name": "", - "type": "bytes", - "internalType": "bytes" - } - ], - "stateMutability": "pure" - }, - { - "type": "function", - "name": "getActiveProvidersByProductType", - "inputs": [ - { - "name": "productType", - "type": "uint8", - "internalType": "enum 
ServiceProviderRegistryStorage.ProductType" - }, - { - "name": "offset", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "limit", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "result", - "type": "tuple", - "internalType": "struct ServiceProviderRegistryStorage.PaginatedProviders", - "components": [ - { - "name": "providers", - "type": "tuple[]", - "internalType": "struct ServiceProviderRegistryStorage.ProviderWithProduct[]", - "components": [ - { - "name": "providerId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "providerInfo", - "type": "tuple", - "internalType": "struct ServiceProviderRegistryStorage.ServiceProviderInfo", - "components": [ - { - "name": "serviceProvider", - "type": "address", - "internalType": "address" - }, - { - "name": "payee", - "type": "address", - "internalType": "address" - }, - { - "name": "name", - "type": "string", - "internalType": "string" - }, - { - "name": "description", - "type": "string", - "internalType": "string" - }, - { - "name": "isActive", - "type": "bool", - "internalType": "bool" - } - ] - }, - { - "name": "product", - "type": "tuple", - "internalType": "struct ServiceProviderRegistryStorage.ServiceProduct", - "components": [ - { - "name": "productType", - "type": "uint8", - "internalType": "enum ServiceProviderRegistryStorage.ProductType" - }, - { - "name": "productData", - "type": "bytes", - "internalType": "bytes" - }, - { - "name": "capabilityKeys", - "type": "string[]", - "internalType": "string[]" - }, - { - "name": "isActive", - "type": "bool", - "internalType": "bool" - } - ] - } - ] - }, - { - "name": "hasMore", - "type": "bool", - "internalType": "bool" - } - ] - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getAllActiveProviders", - "inputs": [ - { - "name": "offset", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "limit", - "type": "uint256", - "internalType": "uint256" - } 
- ], - "outputs": [ - { - "name": "providerIds", - "type": "uint256[]", - "internalType": "uint256[]" - }, - { - "name": "hasMore", - "type": "bool", - "internalType": "bool" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getNextProviderId", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getPDPService", - "inputs": [ - { - "name": "providerId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "pdpOffering", - "type": "tuple", - "internalType": "struct ServiceProviderRegistryStorage.PDPOffering", - "components": [ - { - "name": "serviceURL", - "type": "string", - "internalType": "string" - }, - { - "name": "minPieceSizeInBytes", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "maxPieceSizeInBytes", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "ipniPiece", - "type": "bool", - "internalType": "bool" - }, - { - "name": "ipniIpfs", - "type": "bool", - "internalType": "bool" - }, - { - "name": "storagePricePerTibPerMonth", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "minProvingPeriodInEpochs", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "location", - "type": "string", - "internalType": "string" - }, - { - "name": "paymentTokenAddress", - "type": "address", - "internalType": "contract IERC20" - } - ] - }, - { - "name": "capabilityKeys", - "type": "string[]", - "internalType": "string[]" - }, - { - "name": "isActive", - "type": "bool", - "internalType": "bool" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getProduct", - "inputs": [ - { - "name": "providerId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "productType", - "type": "uint8", - "internalType": "enum ServiceProviderRegistryStorage.ProductType" - } - ], - "outputs": [ - { - 
"name": "productData", - "type": "bytes", - "internalType": "bytes" - }, - { - "name": "capabilityKeys", - "type": "string[]", - "internalType": "string[]" - }, - { - "name": "isActive", - "type": "bool", - "internalType": "bool" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getProductCapabilities", - "inputs": [ - { - "name": "providerId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "productType", - "type": "uint8", - "internalType": "enum ServiceProviderRegistryStorage.ProductType" - }, - { - "name": "keys", - "type": "string[]", - "internalType": "string[]" - } - ], - "outputs": [ - { - "name": "exists", - "type": "bool[]", - "internalType": "bool[]" - }, - { - "name": "values", - "type": "string[]", - "internalType": "string[]" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getProductCapability", - "inputs": [ - { - "name": "providerId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "productType", - "type": "uint8", - "internalType": "enum ServiceProviderRegistryStorage.ProductType" - }, - { - "name": "key", - "type": "string", - "internalType": "string" - } - ], - "outputs": [ - { - "name": "exists", - "type": "bool", - "internalType": "bool" - }, - { - "name": "value", - "type": "string", - "internalType": "string" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getProvider", - "inputs": [ - { - "name": "providerId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "info", - "type": "tuple", - "internalType": "struct ServiceProviderRegistry.ServiceProviderInfoView", - "components": [ - { - "name": "providerId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "info", - "type": "tuple", - "internalType": "struct ServiceProviderRegistryStorage.ServiceProviderInfo", - "components": [ - { - "name": "serviceProvider", - "type": "address", - "internalType": "address" - 
}, - { - "name": "payee", - "type": "address", - "internalType": "address" - }, - { - "name": "name", - "type": "string", - "internalType": "string" - }, - { - "name": "description", - "type": "string", - "internalType": "string" - }, - { - "name": "isActive", - "type": "bool", - "internalType": "bool" - } - ] - } - ] - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getProviderByAddress", - "inputs": [ - { - "name": "providerAddress", - "type": "address", - "internalType": "address" - } - ], - "outputs": [ - { - "name": "info", - "type": "tuple", - "internalType": "struct ServiceProviderRegistry.ServiceProviderInfoView", - "components": [ - { - "name": "providerId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "info", - "type": "tuple", - "internalType": "struct ServiceProviderRegistryStorage.ServiceProviderInfo", - "components": [ - { - "name": "serviceProvider", - "type": "address", - "internalType": "address" - }, - { - "name": "payee", - "type": "address", - "internalType": "address" - }, - { - "name": "name", - "type": "string", - "internalType": "string" - }, - { - "name": "description", - "type": "string", - "internalType": "string" - }, - { - "name": "isActive", - "type": "bool", - "internalType": "bool" - } - ] - } - ] - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getProviderCount", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getProviderIdByAddress", - "inputs": [ - { - "name": "providerAddress", - "type": "address", - "internalType": "address" - } - ], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getProviderPayee", - "inputs": [ - { - "name": "providerId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": 
[ - { - "name": "payee", - "type": "address", - "internalType": "address" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getProvidersByIds", - "inputs": [ - { - "name": "providerIds", - "type": "uint256[]", - "internalType": "uint256[]" - } - ], - "outputs": [ - { - "name": "providerInfos", - "type": "tuple[]", - "internalType": "struct ServiceProviderRegistry.ServiceProviderInfoView[]", - "components": [ - { - "name": "providerId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "info", - "type": "tuple", - "internalType": "struct ServiceProviderRegistryStorage.ServiceProviderInfo", - "components": [ - { - "name": "serviceProvider", - "type": "address", - "internalType": "address" - }, - { - "name": "payee", - "type": "address", - "internalType": "address" - }, - { - "name": "name", - "type": "string", - "internalType": "string" - }, - { - "name": "description", - "type": "string", - "internalType": "string" - }, - { - "name": "isActive", - "type": "bool", - "internalType": "bool" - } - ] - } - ] - }, - { - "name": "validIds", - "type": "bool[]", - "internalType": "bool[]" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "getProvidersByProductType", - "inputs": [ - { - "name": "productType", - "type": "uint8", - "internalType": "enum ServiceProviderRegistryStorage.ProductType" - }, - { - "name": "offset", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "limit", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "result", - "type": "tuple", - "internalType": "struct ServiceProviderRegistryStorage.PaginatedProviders", - "components": [ - { - "name": "providers", - "type": "tuple[]", - "internalType": "struct ServiceProviderRegistryStorage.ProviderWithProduct[]", - "components": [ - { - "name": "providerId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "providerInfo", - "type": "tuple", - "internalType": 
"struct ServiceProviderRegistryStorage.ServiceProviderInfo", - "components": [ - { - "name": "serviceProvider", - "type": "address", - "internalType": "address" - }, - { - "name": "payee", - "type": "address", - "internalType": "address" - }, - { - "name": "name", - "type": "string", - "internalType": "string" - }, - { - "name": "description", - "type": "string", - "internalType": "string" - }, - { - "name": "isActive", - "type": "bool", - "internalType": "bool" - } - ] - }, - { - "name": "product", - "type": "tuple", - "internalType": "struct ServiceProviderRegistryStorage.ServiceProduct", - "components": [ - { - "name": "productType", - "type": "uint8", - "internalType": "enum ServiceProviderRegistryStorage.ProductType" - }, - { - "name": "productData", - "type": "bytes", - "internalType": "bytes" - }, - { - "name": "capabilityKeys", - "type": "string[]", - "internalType": "string[]" - }, - { - "name": "isActive", - "type": "bool", - "internalType": "bool" - } - ] - } - ] - }, - { - "name": "hasMore", - "type": "bool", - "internalType": "bool" - } - ] - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "initialize", - "inputs": [], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "isProviderActive", - "inputs": [ - { - "name": "providerId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "", - "type": "bool", - "internalType": "bool" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "isRegisteredProvider", - "inputs": [ - { - "name": "provider", - "type": "address", - "internalType": "address" - } - ], - "outputs": [ - { - "name": "", - "type": "bool", - "internalType": "bool" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "migrate", - "inputs": [ - { - "name": "newVersion", - "type": "string", - "internalType": "string" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": 
"function", - "name": "owner", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "address", - "internalType": "address" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "productCapabilities", - "inputs": [ - { - "name": "providerId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "productType", - "type": "uint8", - "internalType": "enum ServiceProviderRegistryStorage.ProductType" - }, - { - "name": "key", - "type": "string", - "internalType": "string" - } - ], - "outputs": [ - { - "name": "value", - "type": "string", - "internalType": "string" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "productTypeProviderCount", - "inputs": [ - { - "name": "productType", - "type": "uint8", - "internalType": "enum ServiceProviderRegistryStorage.ProductType" - } - ], - "outputs": [ - { - "name": "count", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "providerHasProduct", - "inputs": [ - { - "name": "providerId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "productType", - "type": "uint8", - "internalType": "enum ServiceProviderRegistryStorage.ProductType" - } - ], - "outputs": [ - { - "name": "", - "type": "bool", - "internalType": "bool" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "providerProducts", - "inputs": [ - { - "name": "providerId", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "productType", - "type": "uint8", - "internalType": "enum ServiceProviderRegistryStorage.ProductType" - } - ], - "outputs": [ - { - "name": "productType", - "type": "uint8", - "internalType": "enum ServiceProviderRegistryStorage.ProductType" - }, - { - "name": "productData", - "type": "bytes", - "internalType": "bytes" - }, - { - "name": "isActive", - "type": "bool", - "internalType": "bool" - } - ], - "stateMutability": "view" - }, - { - "type": 
"function", - "name": "providers", - "inputs": [ - { - "name": "providerId", - "type": "uint256", - "internalType": "uint256" - } - ], - "outputs": [ - { - "name": "serviceProvider", - "type": "address", - "internalType": "address" - }, - { - "name": "payee", - "type": "address", - "internalType": "address" - }, - { - "name": "name", - "type": "string", - "internalType": "string" - }, - { - "name": "description", - "type": "string", - "internalType": "string" - }, - { - "name": "isActive", - "type": "bool", - "internalType": "bool" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "proxiableUUID", - "inputs": [], - "outputs": [ - { - "name": "", - "type": "bytes32", - "internalType": "bytes32" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "registerProvider", - "inputs": [ - { - "name": "payee", - "type": "address", - "internalType": "address" - }, - { - "name": "name", - "type": "string", - "internalType": "string" - }, - { - "name": "description", - "type": "string", - "internalType": "string" - }, - { - "name": "productType", - "type": "uint8", - "internalType": "enum ServiceProviderRegistryStorage.ProductType" - }, - { - "name": "productData", - "type": "bytes", - "internalType": "bytes" - }, - { - "name": "capabilityKeys", - "type": "string[]", - "internalType": "string[]" - }, - { - "name": "capabilityValues", - "type": "string[]", - "internalType": "string[]" - } - ], - "outputs": [ - { - "name": "providerId", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "payable" - }, - { - "type": "function", - "name": "removeProduct", - "inputs": [ - { - "name": "productType", - "type": "uint8", - "internalType": "enum ServiceProviderRegistryStorage.ProductType" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "removeProvider", - "inputs": [], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - 
"name": "renounceOwnership", - "inputs": [], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "transferOwnership", - "inputs": [ - { - "name": "newOwner", - "type": "address", - "internalType": "address" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "updatePDPServiceWithCapabilities", - "inputs": [ - { - "name": "pdpOffering", - "type": "tuple", - "internalType": "struct ServiceProviderRegistryStorage.PDPOffering", - "components": [ - { - "name": "serviceURL", - "type": "string", - "internalType": "string" - }, - { - "name": "minPieceSizeInBytes", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "maxPieceSizeInBytes", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "ipniPiece", - "type": "bool", - "internalType": "bool" - }, - { - "name": "ipniIpfs", - "type": "bool", - "internalType": "bool" - }, - { - "name": "storagePricePerTibPerMonth", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "minProvingPeriodInEpochs", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "location", - "type": "string", - "internalType": "string" - }, - { - "name": "paymentTokenAddress", - "type": "address", - "internalType": "contract IERC20" - } - ] - }, - { - "name": "capabilityKeys", - "type": "string[]", - "internalType": "string[]" - }, - { - "name": "capabilityValues", - "type": "string[]", - "internalType": "string[]" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "updateProduct", - "inputs": [ - { - "name": "productType", - "type": "uint8", - "internalType": "enum ServiceProviderRegistryStorage.ProductType" - }, - { - "name": "productData", - "type": "bytes", - "internalType": "bytes" - }, - { - "name": "capabilityKeys", - "type": "string[]", - "internalType": "string[]" - }, - { - "name": "capabilityValues", - "type": "string[]", - "internalType": 
"string[]" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "updateProviderInfo", - "inputs": [ - { - "name": "name", - "type": "string", - "internalType": "string" - }, - { - "name": "description", - "type": "string", - "internalType": "string" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "upgradeToAndCall", - "inputs": [ - { - "name": "newImplementation", - "type": "address", - "internalType": "address" - }, - { - "name": "data", - "type": "bytes", - "internalType": "bytes" - } - ], - "outputs": [], - "stateMutability": "payable" - }, - { - "type": "event", - "name": "ContractUpgraded", - "inputs": [ - { - "name": "version", - "type": "string", - "indexed": false, - "internalType": "string" - }, - { - "name": "implementation", - "type": "address", - "indexed": false, - "internalType": "address" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "EIP712DomainChanged", - "inputs": [], - "anonymous": false - }, - { - "type": "event", - "name": "Initialized", - "inputs": [ - { - "name": "version", - "type": "uint64", - "indexed": false, - "internalType": "uint64" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "OwnershipTransferred", - "inputs": [ - { - "name": "previousOwner", - "type": "address", - "indexed": true, - "internalType": "address" - }, - { - "name": "newOwner", - "type": "address", - "indexed": true, - "internalType": "address" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "ProductAdded", - "inputs": [ - { - "name": "providerId", - "type": "uint256", - "indexed": true, - "internalType": "uint256" - }, - { - "name": "productType", - "type": "uint8", - "indexed": true, - "internalType": "enum ServiceProviderRegistryStorage.ProductType" - }, - { - "name": "serviceUrl", - "type": "string", - "indexed": false, - "internalType": "string" - }, - { - "name": "serviceProvider", - "type": "address", 
- "indexed": false, - "internalType": "address" - }, - { - "name": "capabilityKeys", - "type": "string[]", - "indexed": false, - "internalType": "string[]" - }, - { - "name": "capabilityValues", - "type": "string[]", - "indexed": false, - "internalType": "string[]" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "ProductRemoved", - "inputs": [ - { - "name": "providerId", - "type": "uint256", - "indexed": true, - "internalType": "uint256" - }, - { - "name": "productType", - "type": "uint8", - "indexed": true, - "internalType": "enum ServiceProviderRegistryStorage.ProductType" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "ProductUpdated", - "inputs": [ - { - "name": "providerId", - "type": "uint256", - "indexed": true, - "internalType": "uint256" - }, - { - "name": "productType", - "type": "uint8", - "indexed": true, - "internalType": "enum ServiceProviderRegistryStorage.ProductType" - }, - { - "name": "serviceUrl", - "type": "string", - "indexed": false, - "internalType": "string" - }, - { - "name": "serviceProvider", - "type": "address", - "indexed": false, - "internalType": "address" - }, - { - "name": "capabilityKeys", - "type": "string[]", - "indexed": false, - "internalType": "string[]" - }, - { - "name": "capabilityValues", - "type": "string[]", - "indexed": false, - "internalType": "string[]" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "ProviderInfoUpdated", - "inputs": [ - { - "name": "providerId", - "type": "uint256", - "indexed": true, - "internalType": "uint256" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "ProviderRegistered", - "inputs": [ - { - "name": "providerId", - "type": "uint256", - "indexed": true, - "internalType": "uint256" - }, - { - "name": "serviceProvider", - "type": "address", - "indexed": true, - "internalType": "address" - }, - { - "name": "payee", - "type": "address", - "indexed": true, - "internalType": "address" - } - ], - "anonymous": false - 
}, - { - "type": "event", - "name": "ProviderRemoved", - "inputs": [ - { - "name": "providerId", - "type": "uint256", - "indexed": true, - "internalType": "uint256" - } - ], - "anonymous": false - }, - { - "type": "event", - "name": "Upgraded", - "inputs": [ - { - "name": "implementation", - "type": "address", - "indexed": true, - "internalType": "address" - } - ], - "anonymous": false - }, - { - "type": "error", - "name": "AddressEmptyCode", - "inputs": [ - { - "name": "target", - "type": "address", - "internalType": "address" - } - ] - }, - { - "type": "error", - "name": "ERC1967InvalidImplementation", - "inputs": [ - { - "name": "implementation", - "type": "address", - "internalType": "address" - } - ] - }, - { - "type": "error", - "name": "ERC1967NonPayable", - "inputs": [] - }, - { - "type": "error", - "name": "FailedCall", - "inputs": [] - }, - { - "type": "error", - "name": "InvalidInitialization", - "inputs": [] - }, - { - "type": "error", - "name": "NotInitializing", - "inputs": [] - }, - { - "type": "error", - "name": "OwnableInvalidOwner", - "inputs": [ - { - "name": "owner", - "type": "address", - "internalType": "address" - } - ] - }, - { - "type": "error", - "name": "OwnableUnauthorizedAccount", - "inputs": [ - { - "name": "account", - "type": "address", - "internalType": "address" - } - ] - }, - { - "type": "error", - "name": "UUPSUnauthorizedCallContext", - "inputs": [] - }, - { - "type": "error", - "name": "UUPSUnsupportedProxiableUUID", - "inputs": [ - { - "name": "slot", - "type": "bytes32", - "internalType": "bytes32" - } - ] - } -] diff --git a/service_contracts/abi/SessionKeyRegistry.abi.json b/service_contracts/abi/SessionKeyRegistry.abi.json deleted file mode 100644 index c542d611..00000000 --- a/service_contracts/abi/SessionKeyRegistry.abi.json +++ /dev/null @@ -1,147 +0,0 @@ -[ - { - "type": "function", - "name": "authorizationExpiry", - "inputs": [ - { - "name": "user", - "type": "address", - "internalType": "address" - }, - { - "name": 
"signer", - "type": "address", - "internalType": "address" - }, - { - "name": "permission", - "type": "bytes32", - "internalType": "bytes32" - } - ], - "outputs": [ - { - "name": "", - "type": "uint256", - "internalType": "uint256" - } - ], - "stateMutability": "view" - }, - { - "type": "function", - "name": "login", - "inputs": [ - { - "name": "signer", - "type": "address", - "internalType": "address" - }, - { - "name": "expiry", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "permissions", - "type": "bytes32[]", - "internalType": "bytes32[]" - }, - { - "name": "origin", - "type": "string", - "internalType": "string" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "function", - "name": "loginAndFund", - "inputs": [ - { - "name": "signer", - "type": "address", - "internalType": "address payable" - }, - { - "name": "expiry", - "type": "uint256", - "internalType": "uint256" - }, - { - "name": "permissions", - "type": "bytes32[]", - "internalType": "bytes32[]" - }, - { - "name": "origin", - "type": "string", - "internalType": "string" - } - ], - "outputs": [], - "stateMutability": "payable" - }, - { - "type": "function", - "name": "revoke", - "inputs": [ - { - "name": "signer", - "type": "address", - "internalType": "address" - }, - { - "name": "permissions", - "type": "bytes32[]", - "internalType": "bytes32[]" - }, - { - "name": "origin", - "type": "string", - "internalType": "string" - } - ], - "outputs": [], - "stateMutability": "nonpayable" - }, - { - "type": "event", - "name": "AuthorizationsUpdated", - "inputs": [ - { - "name": "identity", - "type": "address", - "indexed": true, - "internalType": "address" - }, - { - "name": "signer", - "type": "address", - "indexed": false, - "internalType": "address" - }, - { - "name": "expiry", - "type": "uint256", - "indexed": false, - "internalType": "uint256" - }, - { - "name": "permissions", - "type": "bytes32[]", - "indexed": false, - "internalType": "bytes32[]" - }, - 
{ - "name": "origin", - "type": "string", - "indexed": false, - "internalType": "string" - } - ], - "anonymous": false - } -] diff --git a/service_contracts/foundry.toml b/service_contracts/foundry.toml deleted file mode 100644 index ee83fb78..00000000 --- a/service_contracts/foundry.toml +++ /dev/null @@ -1,35 +0,0 @@ -[profile.default] -src = 'src' -test = 'test' -script = 'script' -out = 'out' -libs = ['lib'] -cache_path = 'cache' -solc = "0.8.30" -via_ir = true -optimizer = true -optimizer_runs = 200 - -# For dependencies -remappings = [ - '@openzeppelin/contracts/=lib/openzeppelin-contracts/contracts/', - '@openzeppelin/contracts-upgradeable/=lib/openzeppelin-contracts-upgradeable/contracts/', - 'forge-std/=lib/forge-std/src/', - '@fws-payments/=lib/fws-payments/src/', - '@pdp/=lib/pdp/src/', - '@session-key-registry/=lib/session-key-registry/src/', - '@pythnetwork/pyth-sdk-solidity/=lib/pdp/lib/pyth-sdk-solidity/', -] - -# Allow reading test data files -fs_permissions = [{ access = "read", path = "./test" }] - -[lint] -exclude_lints = [ - "asm-keccak256", - "incorrect-shift", - "mixed-case-function", - "mixed-case-variable", - "pascal-case-struct", - "screaming-snake-case-immutable", -] diff --git a/service_contracts/lib/forge-std b/service_contracts/lib/forge-std deleted file mode 160000 index f46d8301..00000000 --- a/service_contracts/lib/forge-std +++ /dev/null @@ -1 +0,0 @@ -Subproject commit f46d8301cf732f4f83846565aa475628265e51e0 diff --git a/service_contracts/lib/fws-payments b/service_contracts/lib/fws-payments deleted file mode 160000 index d3e5dd18..00000000 --- a/service_contracts/lib/fws-payments +++ /dev/null @@ -1 +0,0 @@ -Subproject commit d3e5dd18922ae42e6afd3297afe027f1d7ae8a45 diff --git a/service_contracts/lib/openzeppelin-contracts b/service_contracts/lib/openzeppelin-contracts deleted file mode 160000 index a6ae04ac..00000000 --- a/service_contracts/lib/openzeppelin-contracts +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 
a6ae04acf8e38ed49a70fdce5389df2752e3ecc4 diff --git a/service_contracts/lib/openzeppelin-contracts-upgradeable b/service_contracts/lib/openzeppelin-contracts-upgradeable deleted file mode 160000 index 3bfc52f2..00000000 --- a/service_contracts/lib/openzeppelin-contracts-upgradeable +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 3bfc52f2fbf5be95de632f346169dac6a669bd57 diff --git a/service_contracts/lib/pdp b/service_contracts/lib/pdp deleted file mode 160000 index 097d3cde..00000000 --- a/service_contracts/lib/pdp +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 097d3cde21085136cbbda7e0fef3026a52587691 diff --git a/service_contracts/lib/session-key-registry b/service_contracts/lib/session-key-registry deleted file mode 160000 index 74fc4e94..00000000 --- a/service_contracts/lib/session-key-registry +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 74fc4e94500859709a97b1c64981cfae52f9bdfe